diff --git a/.eslintignore b/.eslintignore deleted file mode 100644 index be4acda785..0000000000 --- a/.eslintignore +++ /dev/null @@ -1,11 +0,0 @@ -node_modules -dist -dist-dts -examples -**/*.js -**/*.mjs -**/*.cjs -**/playground -integration-tests/tests/prisma/*/client -integration-tests/tests/prisma/*/drizzle -drizzle-kit/* diff --git a/.eslintrc.yaml b/.eslintrc.yaml deleted file mode 100644 index 906d73ffac..0000000000 --- a/.eslintrc.yaml +++ /dev/null @@ -1,84 +0,0 @@ -root: true -extends: - - 'eslint:recommended' - - 'plugin:@typescript-eslint/recommended' - - 'plugin:unicorn/recommended' -parser: '@typescript-eslint/parser' -parserOptions: - project: './tsconfig.json' -plugins: - - import - - unused-imports - - no-instanceof - - drizzle-internal -overrides: - - files: - - '**/tests/**/*.ts' - - '**/type-tests/**/*.ts' - rules: - import/extensions: 'off' - no-instanceof: 'off' - - files: 'eslint-plugin-drizzle/**/*' - rules: - import/extensions: 'off' -rules: - '@typescript-eslint/consistent-type-imports': - - error - - disallowTypeAnnotations: false - fixStyle: separate-type-imports - '@typescript-eslint/no-import-type-side-effects': 'error' - import/no-cycle: error - import/no-self-import: error - import/no-empty-named-blocks: error - unused-imports/no-unused-imports: error - import/no-useless-path-segments: error - import/newline-after-import: error - import/no-duplicates: error - import/extensions: - - error - - always - - ignorePackages: true - '@typescript-eslint/no-explicit-any': 'off' - '@typescript-eslint/no-non-null-assertion': 'off' - '@typescript-eslint/no-namespace': 'off' - '@typescript-eslint/no-unused-vars': - - error - - argsIgnorePattern: '^_' - varsIgnorePattern: '^_' - '@typescript-eslint/ban-types': - - error - - extendDefaults: true - types: - '{}' : false - '@typescript-eslint/no-this-alias': 'off' - '@typescript-eslint/no-var-requires': 'off' - 'unicorn/prefer-node-protocol': 'off' - 'unicorn/prefer-top-level-await': 'off' - 'unicorn/prevent-abbreviations': 'off' - 'unicorn/prefer-switch': 'off' - 'unicorn/catch-error-name': 'off' - 'unicorn/no-null': 'off' - 'unicorn/numeric-separators-style': 'off' - 'unicorn/explicit-length-check': 'off' - 'unicorn/filename-case': 'off' - 'unicorn/prefer-module': 'off' - 'unicorn/no-array-reduce': 'off' - 'unicorn/no-nested-ternary': 'off' - 'unicorn/no-useless-undefined': - - error - - checkArguments: false - 'unicorn/no-this-assignment': 'off' - 'unicorn/empty-brace-spaces': 'off' - 'unicorn/no-thenable': 'off' - 'unicorn/consistent-function-scoping': 'off' - 'unicorn/prefer-type-error': 'off' - 'unicorn/relative-url-style': 'off' - 'eqeqeq': 'error' - 'no-instanceof/no-instanceof': 'error' - 'drizzle-internal/require-entity-kind': 'error' - 'unicorn/prefer-string-replace-all': 'off' - 'unicorn/no-process-exit': 'off' - '@typescript-eslint/ban-ts-comment': 'off' - '@typescript-eslint/no-empty-interface': 'off' - '@typescript-eslint/no-unsafe-declaration-merging': 'off' - 'no-inner-declarations': 'off' diff --git a/.github/workflows/release-feature-branch.yaml b/.github/workflows/release-feature-branch.yaml index 7940bd4748..7f391d10d9 100644 --- a/.github/workflows/release-feature-branch.yaml +++ b/.github/workflows/release-feature-branch.yaml @@ -21,413 +21,388 @@ on: required: true SQLITE_CLOUD_CONNECTION_STRING: required: true + SQLITE_MANY_CLOUD_CONNECTION_STRING: + required: true + +concurrency: + group: feature-${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: + prepare: + runs-on: ubuntu-24.04 + timeout-minutes: 25 + env: + TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} + TURBO_TEAM: 'team_KbvYUtAn1Tqytsj8HbNcYDqV' + steps: + - uses: actions/checkout@v5 + - uses: pnpm/action-setup@v4 + with: { run_install: false } + - uses: actions/setup-node@v6 + with: { node-version: '24', registry-url: 'https://registry.npmjs.org', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml } + - run: pnpm install --frozen-lockfile --prefer-offline + - name: Compute version suffix + id: meta + shell: bash + run: echo "suffix=$(git rev-parse --short HEAD)" >> "$GITHUB_OUTPUT" + - name: Build Prisma client + working-directory: drizzle-orm + run: pnpm prisma generate --schema src/prisma/schema.prisma + - name: Build all + run: pnpm build:artifact + - name: Upload build-dist + uses: actions/upload-artifact@v4 + with: + name: build-dist + path: | + **/dist + **/*.tsbuildinfo + - name: Pack + run: pnpm pack:artifact + - uses: actions/upload-artifact@v4 + with: + name: packages + path: | + drizzle-orm/package.tgz + drizzle-kit/package.tgz + drizzle-zod/package.tgz + drizzle-seed/package.tgz + drizzle-typebox/package.tgz + drizzle-valibot/package.tgz + drizzle-arktype/package.tgz + eslint-plugin-drizzle/package.tgz + + # Tiny marker so other jobs can wait without failing + - name: Upload build-ready marker + run: mkdir -p .gh && echo "ok" > .gh/build-ready + - uses: actions/upload-artifact@v4 + with: + name: build-ready + path: .gh/build-ready + - name: test:types & lint + run: pnpm test:types-lint + test: - # only run on all pushes or pull requests from forks + # NOTE: no 'needs: [prepare]' on purpose — start early, warm DBs, then wait for artifacts if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: ubuntu-24.04 + timeout-minutes: 45 strategy: + fail-fast: false matrix: - shard: - - gel - # - planetscale - - singlestore-core - - singlestore-proxy - - singlestore-prefixed - - singlestore-custom - - neon-http - - neon-serverless - - drizzle-orm - - drizzle-kit - - drizzle-zod - - drizzle-seed - - drizzle-typebox - - drizzle-valibot - - drizzle-arktype - - other - runs-on: ubuntu-22.04 - services: - postgres-postgis: - image: postgis/postgis:16-3.4 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: drizzle - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 54322:5432 - postgres-vector: - image: pgvector/pgvector:pg16 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: drizzle - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 54321:5432 - postgres: - image: postgres:14 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: drizzle - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 55433:5432 - mysql: - image: mysql:8 - env: - MYSQL_ROOT_PASSWORD: root - MYSQL_DATABASE: drizzle - options: >- - --health-cmd "mysqladmin ping" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 33306:3306 - singlestore: - image: ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67 - env: - ROOT_PASSWORD: singlestore - ports: - - 33307:3306 + include: + - shard: int:gel + dbs: [gel] + - shard: int:singlestore + dbs: [singlestore] + - shard: int:singlestore-core + dbs: [singlestore-many] + - shard: int:singlestore-proxy + dbs: [singlestore-many] + - shard: int:mysql + dbs: [mysql] + - shard: int:postgres + dbs: [postgres] + - shard: int:sqlite + dbs: [] + - shard: int:other + dbs: [mysql, mssql, cockroach, singlestore, postgres, postgres-postgis, postgres-vector] + - shard: int:planetscale + dbs: [] + - shard: int:cockroach + dbs: [cockroach] + # TODO: + # - shard: int:bun + # dbs: [postgres, mysql] + - shard: int:mssql + dbs: [mssql] + - shard: orm + dbs: [] + - shard: kit:other + dbs: [mysql] + - shard: kit:postgres + dbs: [postgres, postgres-postgis] + - shard: kit:cockroach + dbs: [cockroach] + - shard: kit:mssql + dbs: [mssql] + - shard: seed + dbs: [cockroach, mysql, mssql, postgres-postgis, singlestore] + - shard: validators + dbs: [] + + name: ${{ matrix.shard }} steps: - - uses: actions/checkout@v4 - - - uses: actions/setup-node@v4 - with: - node-version: '20.19' - registry-url: 'https://registry.npmjs.org' - - - uses: pnpm/action-setup@v3 - name: Install pnpm - id: pnpm-install - with: - version: latest - run_install: false + - uses: actions/checkout@v5 + - uses: pnpm/action-setup@v4 + with: { run_install: false } + - uses: actions/setup-node@v6 + with: { node-version: '24', registry-url: 'https://registry.npmjs.org', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml } + - run: pnpm install --frozen-lockfile --prefer-offline - - name: Get pnpm store directory - id: pnpm-cache + - name: Start DBs needed by shard (pre-warm) + if: ${{ matrix.dbs && join(matrix.dbs, ',') != '' }} shell: bash run: | - echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT - - - uses: actions/cache@v4 - name: Setup pnpm cache + set -euxo pipefail + + compose_files=() + for db in ${{ join(matrix.dbs, ' ') }}; do + case "$db" in + postgres) compose_files+=("-f" "compose/postgres.yml") ;; + postgres-postgis) compose_files+=("-f" "compose/postgres-postgis.yml") ;; + postgres-vector) compose_files+=("-f" "compose/postgres-vector.yml") ;; + mysql) compose_files+=("-f" "compose/mysql.yml") ;; + singlestore) compose_files+=("-f" "compose/singlestore.yml") ;; + singlestore-many) compose_files+=("-f" "compose/singlestore-many.yml") ;; + mssql) compose_files+=("-f" "compose/mssql.yml") ;; + cockroach) compose_files+=("-f" "compose/cockroach.yml") ;; + gel) compose_files+=("-f" "compose/gel.yml") ;; + *) echo "Unknown db '$db'"; exit 1 ;; + esac + done + docker compose "${compose_files[@]}" up -d + + chmod +x compose/wait.sh + compose/wait.sh ${{ join(matrix.dbs, ' ') }} + + - name: Wait for 'prepare' to finish (poll artifact) + env: + GH_TOKEN: ${{ github.token }} + shell: bash + run: | + set -euo pipefail + run_id="${{ github.run_id }}" + repo="${{ github.repository }}" + echo "Waiting for 'build-ready' artifact from prepare job in run $run_id..." + for i in $(seq 1 120); do + artifacts_json="$(curl -fsSL -H "Authorization: Bearer $GH_TOKEN" \ + -H "X-GitHub-Api-Version: 2022-11-28" \ + -H "Accept: application/vnd.github+json" \ + "https://api.github.com/repos/${repo}/actions/runs/${run_id}/artifacts")" + echo "$artifacts_json" | jq -e '.artifacts[] | select(.name=="build-ready")' >/dev/null 2>&1 && { echo "build-ready found"; break; } + echo "…still waiting ($i/120)" + sleep 5 + done + + - name: Download build-dist (compiled JS) + uses: actions/download-artifact@v4 with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install + name: build-dist + path: . - - name: Build Prisma client - working-directory: drizzle-orm - run: pnpm prisma generate --schema src/prisma/schema.prisma - - - name: Build - run: pnpm build + # Prisma client was generated in prepare -> build outputs already contain it + # No `pnpm build` here — we reuse dist to save time + - uses: oven-sh/setup-bun@v2 - name: Run tests env: PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:55433/drizzle PG_VECTOR_CONNECTION_STRING: postgres://postgres:postgres@localhost:54321/drizzle PG_POSTGIS_CONNECTION_STRING: postgres://postgres:postgres@localhost:54322/drizzle - MYSQL_CONNECTION_STRING: mysql://root:root@localhost:33306/drizzle + POSTGIS_URL: postgres://postgres:postgres@localhost:54322/drizzle + MYSQL_CONNECTION_STRING: mysql://root:mysql@localhost:3306/drizzle PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }} NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} - # NEON_HTTP_CONNECTION_STRING: postgres://postgres:postgres@db.localtest.me:5432/postgres - NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} - NEON_SERVERLESS_CONNECTION_STRING: postgres://postgres:postgres@localhost:5445/postgres TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }} XATA_API_KEY: ${{ secrets.XATA_API_KEY }} XATA_BRANCH: ${{ secrets.XATA_BRANCH }} LIBSQL_URL: file:local.db LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }} LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }} + GEL_CONNECTION_STRING: gel://admin:password@localhost:56565/main SINGLESTORE_CONNECTION_STRING: singlestore://root:singlestore@localhost:33307/ + SINGLESTORE_MANY_CONNECTION_STRING: singlestore://root:singlestore@localhost:3308/;singlestore://root:singlestore@localhost:3309/;singlestore://root:singlestore@localhost:3310/;singlestore://root:singlestore@localhost:3311/ + COCKROACH_CONNECTION_STRING: postgresql://root@127.0.0.1:26257/defaultdb?sslmode=disable + MSSQL_CONNECTION_STRING: mssql://SA:drizzle123PASSWORD!@localhost:1433?encrypt=true&trustServerCertificate=true + TEST_CONFIG_PATH_PREFIX: ./tests/cli/ SQLITE_CLOUD_CONNECTION_STRING: ${{ secrets.SQLITE_CLOUD_CONNECTION_STRING }} + SQLITE_MANY_CLOUD_CONNECTION_STRING: ${{ secrets.SQLITE_MANY_CLOUD_CONNECTION_STRING }} working-directory: integration-tests + shell: bash run: | + set -euxo pipefail if [[ ${{ github.event_name }} != "push" && "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]]; then export SKIP_EXTERNAL_DB_TESTS=1 fi - case ${{ matrix.shard }} in - - gel) - if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then - pnpm vitest run tests/gel + int:gel) + if [[ -z "${SKIP_EXTERNAL_DB_TESTS:-}" ]]; then + pnpm --stream vitest --reporter=verbose --silent=false run tests/gel fi ;; - - planetscale) - if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then - pnpm vitest run \ - tests/mysql/mysql-planetscale.test.ts \ - tests/relational/mysql.planetscale-v1.test.ts \ - tests/relational/mysql.planetscale.test.ts + int:singlestore) pnpm --stream vitest --reporter=verbose --silent=false run ./singlestore/singlestore-prefixed.test.ts ./singlestore/singlestore-custom.test.ts ;; + int:singlestore-core) pnpm --stream vitest --reporter=verbose --silent=false run ./singlestore/singlestore.test.ts ;; + int:singlestore-proxy) pnpm --stream vitest --reporter=verbose --silent=false run ./singlestore/singlestore-proxy.test.ts ;; + int:postgres) + if [[ -z "${SKIP_EXTERNAL_DB_TESTS:-}" ]]; then + pnpm --stream vitest --reporter=verbose --silent=false run tests/pg/ fi ;; - - singlestore-core) - pnpm vitest run tests/singlestore/singlestore.test.ts + int:mysql) + pnpm --stream test:mysql ;; - - singlestore-proxy) - pnpm vitest run tests/singlestore/singlestore-proxy.test.ts - ;; - - singlestore-prefixed) - pnpm vitest run tests/singlestore/singlestore-prefixed.test.ts - ;; - - singlestore-custom) - pnpm vitest run tests/singlestore/singlestore-custom.test.ts + int:tidb) + if [[ -z "${SKIP_EXTERNAL_DB_TESTS:-}" ]]; then + pnpm --stream vitest --reporter=verbose --silent=false tests/mysql/tidb + fi ;; - - neon-http) - if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then - pnpm vitest run tests/pg/neon-http.test.ts tests/pg/neon-http-batch.test.ts + int:planetscale) + if [[ -z "${SKIP_EXTERNAL_DB_TESTS:-}" ]]; then + pnpm --stream test:planetscale fi ;; - - neon-serverless) - docker compose -f docker-neon.yml up -d - pnpm vitest run --config=./vitest-ci.config.ts tests/pg/neon-serverless.test.ts - docker compose -f docker-neon.yml down + int:cockroach) pnpm --stream vitest --reporter=verbose --silent=false run tests/cockroach ;; + int:mssql) pnpm --stream vitest --reporter=verbose --silent=false run tests/mssql ;; + int:sqlite) pnpm --stream vitest --reporter=verbose --silent=false run tests/sqlite ;; + + kit:other) cd ../drizzle-kit && pnpm --stream run test:other ;; + kit:postgres) cd ../drizzle-kit && pnpm --stream run test:postgres ;; + kit:cockroach) cd ../drizzle-kit && pnpm --stream run test:cockroach ;; + kit:mssql) cd ../drizzle-kit && pnpm --stream run test:mssql ;; + validators) + (cd ../drizzle-zod && pnpm --stream test --reporter=verbose --silent=false) + (cd ../drizzle-valibot && pnpm --stream test --reporter=verbose --silent=false) + (cd ../drizzle-arktype && pnpm --stream test --reporter=verbose --silent=false) + (cd ../drizzle-typebox && pnpm --stream test --reporter=verbose --silent=false) ;; - - drizzle-orm|drizzle-kit|drizzle-zod|drizzle-seed|drizzle-typebox|drizzle-valibot|drizzle-arktype) - (cd .. && pnpm test --filter ${{ matrix.shard }}) + orm|seed) + (cd ../drizzle-${{ matrix.shard }} && pnpm --stream test --reporter=verbose --silent=false) ;; - - other) - pnpm vitest run \ - --exclude tests/gel \ - --exclude tests/mysql/mysql-planetscale.test.ts \ - --exclude tests/relational/mysql.planetscale-v1.test.ts \ - --exclude tests/relational/mysql.planetscale.test.ts \ - --exclude tests/singlestore/singlestore.test.ts \ - --exclude tests/singlestore/singlestore-proxy.test.ts \ - --exclude tests/singlestore/singlestore-prefixed.test.ts \ - --exclude tests/singlestore/singlestore-custom.test.ts \ - --exclude tests/pg/neon-http.test.ts \ - --exclude tests/pg/neon-http-batch.test.ts \ - --exclude tests/pg/neon-serverless.test.ts + + int:bun) bun test ./tests/bun/ ;; + + int:other) + pnpm --stream vitest --reporter=verbose --silent=false run tests \ + --exclude ./tests/gel/ \ + --exclude ./tests/mysql/ \ + --exclude ./tests/cockroach/ \ + --exclude ./tests/singlestore/ \ + --exclude ./tests/mssql/ \ + --exclude ./tests/pg/ \ + --exclude ./tests/sqlite/ \ + --exclude ./tests/bun/ ;; - + *) echo "Unknown shard: ${{matrix.shard}}"; exit 1 ;; esac attw: - # only run on all pushes or pull requests from forks + needs: [prepare] if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository - strategy: - matrix: - package: - - drizzle-orm - - drizzle-kit - - drizzle-zod - - drizzle-seed - - drizzle-typebox - - drizzle-valibot - - drizzle-arktype - - eslint-plugin-drizzle - runs-on: ubuntu-22.04 - permissions: - id-token: write - # force empty so npm can use OIDC - env: - NODE_AUTH_TOKEN: "" - NPM_TOKEN: "" + runs-on: ubuntu-24.04 + timeout-minutes: 20 steps: - uses: actions/checkout@v4 - - - uses: actions/setup-node@v4 - with: - node-version: '22' - registry-url: 'https://registry.npmjs.org' - - - uses: pnpm/action-setup@v3 - name: Install pnpm - id: pnpm-install + - uses: pnpm/action-setup@v4 + with: { run_install: false } + - uses: actions/setup-node@v6 + with: { node-version: '24', registry-url: 'https://registry.npmjs.org', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml } + - run: pnpm install --frozen-lockfile --prefer-offline + - uses: oven-sh/setup-bun@v2 + - name: Download package tarball + uses: actions/download-artifact@v4 with: - version: latest - run_install: false - - - name: Get pnpm store directory - id: pnpm-cache - shell: bash + name: packages + path: ./artifacts + - name: Run @arethetypeswrong/cli run: | - echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT - - - uses: actions/cache@v4 - name: Setup pnpm cache + bun --bun run ./attw-fork/src/run.ts ./artifacts/drizzle-kit/package.tgz + bun --bun run ./attw-fork/src/run.ts ./artifacts/drizzle-zod/package.tgz + bun --bun run ./attw-fork/src/run.ts ./artifacts/drizzle-seed/package.tgz + bun --bun run ./attw-fork/src/run.ts ./artifacts/drizzle-typebox/package.tgz + bun --bun run ./attw-fork/src/run.ts ./artifacts/drizzle-valibot/package.tgz + bun --bun run ./attw-fork/src/run.ts ./artifacts/drizzle-arktype/package.tgz + bun --bun run ./attw-fork/src/run.ts ./artifacts/eslint-plugin-drizzle/package.tgz + + attw-orm: + needs: [prepare] + if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository + runs-on: ubuntu-24.04 + timeout-minutes: 20 + strategy: + matrix: + package: [node10, node16-cjs, node16-esm, bundler] + steps: + - uses: actions/checkout@v4 + - uses: pnpm/action-setup@v4 + with: { run_install: false } + - uses: actions/setup-node@v6 + with: { node-version: '24', registry-url: 'https://registry.npmjs.org', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml } + - run: pnpm fetch && pnpm install --frozen-lockfile --prefer-offline + - uses: oven-sh/setup-bun@v2 + - name: Download drizzle-orm tarball + uses: actions/download-artifact@v4 with: - path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - - name: Install dependencies - run: pnpm install - - - name: Install Bun - uses: oven-sh/setup-bun@v2 - - # >= 11.5.1 for trusted publishing - - name: Update NPM - run: npm install -g npm@latest - - # nuke, so npm can use OIDC - - name: Remove temp npmrc - run: rm -f "$NPM_CONFIG_USERCONFIG" - - - name: Check preconditions - id: checks - shell: bash - working-directory: ${{ matrix.package }} - run: | - old_version="$(jq -r .version package.json)" - version="$old_version-$(git rev-parse --short HEAD)" - npm version $version - tag="${{ github.ref_name }}" - is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')" - - if [[ "$is_version_published" == "true" ]]; then - echo "\`${{ matrix.package }}$version\` already published, adding tag \`$tag\`" >> $GITHUB_STEP_SUMMARY - npm dist-tag add ${{ matrix.package }}@$version $tag - else - { - echo "version=$version" - echo "tag=$tag" - echo "has_new_release=true" - } >> $GITHUB_OUTPUT - fi - - - name: Build Prisma client - if: steps.checks.outputs.has_new_release == 'true' - working-directory: drizzle-orm - run: pnpm prisma generate --schema src/prisma/schema.prisma - - - name: Build - if: steps.checks.outputs.has_new_release == 'true' - run: pnpm build - - - name: Pack - if: steps.checks.outputs.has_new_release == 'true' - working-directory: ${{ matrix.package }} - run: npm run pack - + name: packages + path: ./artifacts - name: Run @arethetypeswrong/cli - if: steps.checks.outputs.has_new_release == 'true' - working-directory: ${{ matrix.package }} - run: bunx attw package.tgz + working-directory: drizzle-orm + run: bun --bun run ../attw-fork/src/run.ts ../artifacts/drizzle-orm/package.tgz ${{ matrix.package }} release: - # only run on all pushes or pull requests from forks + needs: [test, prepare, attw, attw-orm] if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository - needs: - - test - - attw - strategy: - matrix: - package: - - drizzle-orm - - drizzle-kit - - drizzle-zod - - drizzle-seed - - drizzle-typebox - - drizzle-valibot - - drizzle-arktype - - eslint-plugin-drizzle - runs-on: ubuntu-22.04 - permissions: - contents: read - id-token: write + runs-on: ubuntu-24.04 + timeout-minutes: 20 + permissions: { contents: read, id-token: write } + # force empty so npm can use OIDC env: NODE_AUTH_TOKEN: "" NPM_TOKEN: "" + strategy: + matrix: + package: [drizzle-orm, drizzle-kit, drizzle-zod, drizzle-seed, drizzle-typebox, drizzle-valibot, drizzle-arktype, eslint-plugin-drizzle] steps: - uses: actions/checkout@v5 - - uses: pnpm/action-setup@v4 - with: { run_install: false } + + # don't specify registry url, so there's no .npmrc config file - uses: actions/setup-node@v6 - with: { node-version: '24', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml } - - run: pnpm install --frozen-lockfile --prefer-offline + with: { node-version: '24' } + + # nuke, so npm can use OIDC + - name: Remove temp npmrc + run: rm -f "$NPM_CONFIG_USERCONFIG" # >= 11.5.1 for trusted publishing - name: Update NPM run: npm install -g npm@latest - - # nuke, so npm can use OIDC - - name: Remove temp npmrc - run: rm -f "$NPM_CONFIG_USERCONFIG" - - - name: Check preconditions + - name: Download package tarball + uses: actions/download-artifact@v4 + with: + name: packages + path: ./artifacts + - name: Check preconditions (from tarball) id: checks shell: bash - working-directory: ${{ matrix.package }} run: | - old_version="$(jq -r .version package.json)" - version="$old_version-$(git rev-parse --short HEAD)" - npm version $version + set -euxo pipefail + + _version="$(tar -xOf ./artifacts/${{ matrix.package }}/package.tgz package/package.json | jq -r .version)" tag="${{ github.ref_name }}" - is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')" + suffix=$(git rev-parse --short HEAD) + version="$_version-$suffix" + tmpdir="$(mktemp -d)" + tar -xzf ./artifacts/${{ matrix.package }}/package.tgz -C "$tmpdir" - if [[ "$is_version_published" == "true" ]]; then - echo "\`${{ matrix.package }}$version\` already published, adding tag \`$tag\`" >> $GITHUB_STEP_SUMMARY - else - { - echo "version=$version" - echo "tag=$tag" - echo "has_new_release=true" - } >> $GITHUB_OUTPUT - fi + jq --arg v "$version" '.version = $v' \ + "$tmpdir/package/package.json" > "$tmpdir/package/package.json.tmp" + mv "$tmpdir/package/package.json.tmp" "$tmpdir/package/package.json" - - name: Build Prisma client - working-directory: drizzle-orm - run: pnpm prisma generate --schema src/prisma/schema.prisma - - - name: Build - if: steps.checks.outputs.has_new_release == 'true' - run: pnpm build + tar -czf ./artifacts/${{ matrix.package }}/package.tgz -C "$tmpdir" package + rm -rf "$tmpdir" - - name: Pack + is_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')" + if [[ "$is_published" == "true" ]]; then + echo "\`${{ matrix.package }}@$version\` already published, tagging \`$tag\`" >> $GITHUB_STEP_SUMMARY + else + { echo "version=$version"; echo "tag=$tag"; echo "has_new_release=true"; } >> $GITHUB_OUTPUT + fi + - name: Publish (from tarball) if: steps.checks.outputs.has_new_release == 'true' - working-directory: ${{ matrix.package }} - shell: bash - run: npm run pack - - - name: Publish - if: github.event_name == 'push' && steps.checks.outputs.has_new_release == 'true' - working-directory: ${{ matrix.package }} shell: bash run: | - tag="${{ steps.checks.outputs.tag }}" - version="${{ steps.checks.outputs.version }}" - - echo "Publishing ${{ matrix.package }}@$tag using version $version" - npm run publish -- --tag $tag - - echo "npm: \`${{ matrix.package }}@$tag | ${{ matrix.package }}@$version\`" >> $GITHUB_STEP_SUMMARY - - # Post release message to Discord - # curl -X POST -H "Content-Type: application/json" -d "{\"embeds\": [{\"title\": \"New \`${{ matrix.package }}\` release! 🎉\", \"url\": \"https://www.npmjs.com/package/${{ matrix.package }}/v/$version\", \"color\": \"12907856\", \"fields\": [{\"name\": \"Version\", \"value\": \"\`$version\`\"}, {\"name\": \"Tag\", \"value\": \"\`$tag\`\"}]}]}" ${{ secrets.DISCORD_DEV_RELEASE_WEBHOOK_URL }} - + set -euxo pipefail + npm publish ./artifacts/${{ matrix.package }}/package.tgz --tag "${{ steps.checks.outputs.tag }}" + echo "npm: \`${{ matrix.package }}@${{ steps.checks.outputs.tag }} | ${{ steps.checks.outputs.version }}\`" >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/.github/workflows/release-latest.yaml b/.github/workflows/release-latest.yaml index 905520f147..ab5278bbe7 100644 --- a/.github/workflows/release-latest.yaml +++ b/.github/workflows/release-latest.yaml @@ -35,8 +35,12 @@ jobs: - singlestore-custom - neon-http - neon-serverless + - cockroach + - mssql - drizzle-orm - drizzle-kit + - drizzle-kit-cockroach + - drizzle-kit-mssql - drizzle-zod - drizzle-seed - drizzle-typebox @@ -102,6 +106,17 @@ jobs: ROOT_PASSWORD: singlestore ports: - 33307:3306 + mssql: + image: mcr.microsoft.com/azure-sql-edge + env: + ACCEPT_EULA: 1 + MSSQL_SA_PASSWORD: drizzle123PASSWORD! + ports: + - 1433:1433 + cockroachdb: + image: sukairo02/cockroachdb-launched:latest + ports: + - 26257:26257 steps: - uses: actions/checkout@v4 @@ -151,7 +166,7 @@ jobs: NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} # NEON_HTTP_CONNECTION_STRING: postgres://postgres:postgres@db.localtest.me:5432/postgres NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} - NEON_SERVERLESS_CONNECTION_STRING: postgres://postgres:postgres@localhost:5445/postgres + # NEON_CONNECTION_STRING: postgres://postgres:postgres@localhost:5445/postgres TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }} XATA_API_KEY: ${{ secrets.XATA_API_KEY }} XATA_BRANCH: ${{ secrets.XATA_BRANCH }} @@ -159,54 +174,83 @@ jobs: LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }} LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }} SINGLESTORE_CONNECTION_STRING: singlestore://root:singlestore@localhost:33307/ + COCKROACH_CONNECTION_STRING: postgresql://root@127.0.0.1:26257/defaultdb?sslmode=disable + MSSQL_CONNECTION_STRING: mssql://SA:drizzle123PASSWORD!@localhost:1433?encrypt=true&trustServerCertificate=true + TEST_CONFIG_PATH_PREFIX: ./tests/cli/ SQLITE_CLOUD_CONNECTION_STRING: ${{ secrets.SQLITE_CLOUD_CONNECTION_STRING }} working-directory: integration-tests run: | case ${{ matrix.shard }} in gel) - pnpm vitest run tests/gel + pnpm --stream vitest --reporter=verbose --silent=false run tests/gel ;; planetscale) - pnpm vitest run \ + pnpm --stream vitest --reporter=verbose --silent=false run \ tests/mysql/mysql-planetscale.test.ts \ tests/relational/mysql.planetscale-v1.test.ts \ tests/relational/mysql.planetscale.test.ts ;; singlestore-core) - pnpm vitest run tests/singlestore/singlestore.test.ts + pnpm --stream vitest --reporter=verbose --silent=false run tests/singlestore/singlestore.test.ts ;; singlestore-proxy) - pnpm vitest run tests/singlestore/singlestore-proxy.test.ts + pnpm --stream vitest --reporter=verbose --silent=false run tests/singlestore/singlestore-proxy.test.ts ;; singlestore-prefixed) - pnpm vitest run tests/singlestore/singlestore-prefixed.test.ts + pnpm --stream vitest --reporter=verbose --silent=false run tests/singlestore/singlestore-prefixed.test.ts ;; singlestore-custom) - pnpm vitest run tests/singlestore/singlestore-custom.test.ts + pnpm --stream vitest --reporter=verbose --silent=false run tests/singlestore/singlestore-custom.test.ts ;; neon-http) - pnpm vitest run tests/pg/neon-http.test.ts tests/pg/neon-http-batch.test.ts + pnpm --stream vitest --reporter=verbose --silent=false run tests/pg/neon-http.test.ts tests/pg/neon-http-batch.test.ts ;; neon-serverless) docker compose -f docker-neon.yml up -d - pnpm vitest run tests/pg/neon-serverless.test.ts + pnpm --stream vitest --reporter=verbose --silent=false run tests/pg/neon-serverless.test.ts docker compose -f docker-neon.yml down ;; - drizzle-orm|drizzle-kit|drizzle-zod|drizzle-seed|drizzle-typebox|drizzle-valibot|drizzle-arktype) + cockroach) + pnpm --stream vitest --reporter=verbose --silent=false run tests/cockroach + ;; + + mssql) + pnpm --stream vitest --reporter=verbose --silent=false run tests/mssql + ;; + + drizzle-kit) + cd ../drizzle-kit + pnpm test:types + pnpm --stream vitest --reporter=verbose --silent=false run\ + --exclude tests/cockroach \ + --exclude tests/mssql + ;; + + drizzle-kit-cockroach) + cd ../drizzle-kit + pnpm --stream vitest --reporter=verbose --silent=false run tests/cockroach + ;; + + drizzle-kit-mssql) + cd ../drizzle-kit + pnpm --stream vitest --reporter=verbose --silent=false run tests/mssql + ;; + + drizzle-orm|drizzle-zod|drizzle-seed|drizzle-typebox|drizzle-valibot|drizzle-arktype) (cd .. && pnpm test --filter ${{ matrix.shard }}) ;; other) - pnpm vitest run \ + pnpm --stream vitest --reporter=verbose --silent=false run \ --exclude tests/gel \ --exclude tests/mysql/mysql-planetscale.test.ts \ --exclude tests/relational/mysql.planetscale-v1.test.ts \ @@ -217,7 +261,9 @@ jobs: --exclude tests/singlestore/singlestore-custom.test.ts \ --exclude tests/pg/neon-http.test.ts \ --exclude tests/pg/neon-http-batch.test.ts \ - --exclude tests/pg/neon-serverless.test.ts + --exclude tests/pg/neon-serverless.test.ts \ + --exclude tests/cockroach \ + --exclude tests/mssql ;; esac @@ -226,7 +272,6 @@ jobs: strategy: matrix: package: - - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-seed @@ -235,12 +280,6 @@ jobs: - drizzle-arktype - eslint-plugin-drizzle runs-on: ubuntu-22.04 - permissions: - id-token: write - # force empty so npm can use OIDC - env: - NODE_AUTH_TOKEN: "" - NPM_TOKEN: "" steps: - uses: actions/checkout@v4 @@ -276,14 +315,6 @@ jobs: - name: Install Bun uses: oven-sh/setup-bun@v2 - # >= 11.5.1 for trusted publishing - - name: Update NPM - run: npm install -g npm@latest - - # nuke, so npm can use OIDC - - name: Remove temp npmrc - run: rm -f "$NPM_CONFIG_USERCONFIG" - - name: Check preconditions id: checks shell: bash @@ -318,7 +349,93 @@ jobs: - name: Run @arethetypeswrong/cli if: steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} - run: bunx attw package.tgz + run: bun --bun run ../attw-fork/src/run.ts package.tgz + + + attw-orm: + strategy: + matrix: + shard: + - node10 + - node16-cjs + - node16-esm + - bundler + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: '22' + registry-url: 'https://registry.npmjs.org' + + - uses: pnpm/action-setup@v3 + name: Install pnpm + id: pnpm-install + with: + version: latest + run_install: false + + - name: Get pnpm store directory + id: pnpm-cache + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT + + - uses: actions/cache@v4 + name: Setup pnpm cache + with: + path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} + key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm-store- + + - name: Install dependencies + run: pnpm install + + - name: Install Bun + uses: oven-sh/setup-bun@v2 + + - name: Check preconditions + id: checks + shell: bash + working-directory: drizzle-orm + run: | + old_version="$(jq -r .version package.json)" + version="$old_version-$(git rev-parse --short HEAD)" + npm version $version + tag="${{ github.ref_name }}" + is_version_published="$(npm view drizzle-orm versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')" + + if [[ "$is_version_published" == "true" ]]; then + echo "\`drizzle-orm$version\` already published, adding tag \`$tag\`" >> $GITHUB_STEP_SUMMARY + npm dist-tag add drizzle-orm@$version $tag + else + { + echo "version=$version" + echo "tag=$tag" + echo "has_new_release=true" + } >> $GITHUB_OUTPUT + fi + + - name: Build Prisma client + if: steps.checks.outputs.has_new_release == 'true' + working-directory: drizzle-orm + run: pnpm prisma generate --schema src/prisma/schema.prisma + + - name: Build + if: steps.checks.outputs.has_new_release == 'true' + run: pnpm build + + - name: Pack + if: steps.checks.outputs.has_new_release == 'true' + working-directory: drizzle-orm + run: npm run pack + + - name: Run @arethetypeswrong/cli + if: steps.checks.outputs.has_new_release == 'true' + working-directory: drizzle-orm + run: bun --bun run ../attw-fork/src/run.ts package.tgz ${{ matrix.shard }} release: permissions: @@ -327,6 +444,7 @@ jobs: needs: - test - attw + - attw-orm strategy: fail-fast: false matrix: @@ -340,77 +458,36 @@ jobs: - drizzle-arktype - eslint-plugin-drizzle runs-on: ubuntu-22.04 - # force empty so npm can use OIDC - env: - NODE_AUTH_TOKEN: "" - NPM_TOKEN: "" - services: - postgres-postgis: - image: postgis/postgis:16-3.4 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: drizzle - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 54322:5432 - postgres-vector: - image: pgvector/pgvector:pg16 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: drizzle - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 54321:5432 - postgres: - image: postgres:14 - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: drizzle - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 55433:5432 - mysql: - image: mysql:8 - env: - MYSQL_ROOT_PASSWORD: root - MYSQL_DATABASE: drizzle - options: >- - --health-cmd "mysqladmin ping" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 33306:3306 steps: - uses: actions/checkout@v5 - - uses: pnpm/action-setup@v4 - with: { run_install: false } + + # don't specify registry url, so there's no .npmrc config file - uses: actions/setup-node@v6 - with: { node-version: '24', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml } - - run: pnpm install --frozen-lockfile --prefer-offline - - # >= 11.5.1 for trusted publishing - - name: Update NPM - run: npm install -g npm@latest - - # nuke, so npm can use OIDC - - name: Remove temp npmrc - run: rm -f "$NPM_CONFIG_USERCONFIG" + with: { node-version: '24' } + + - uses: pnpm/action-setup@v3 + name: Install pnpm + id: pnpm-install + with: + version: latest + run_install: false + + - name: Get pnpm store directory + id: pnpm-cache + shell: bash + run: | + echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT + + - uses: actions/cache@v4 + name: Setup pnpm cache + with: + path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} + key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm-store- + + - name: Install dependencies + run: pnpm install - name: Check preconditions id: checks diff --git a/.github/workflows/router.yaml b/.github/workflows/router.yaml index fb94279be4..5dca824e38 100644 --- a/.github/workflows/router.yaml +++ b/.github/workflows/router.yaml @@ -16,10 +16,14 @@ jobs: - name: Route release id: route run: | + HEAD_REPO="${{ github.event.pull_request.head.repo.full_name }}" if [[ "$GITHUB_EVENT_NAME" == "workflow_dispatch" && "${GITHUB_REF##*/}" == "main" ]]; then echo "target=latest" >> $GITHUB_OUTPUT - else + # only run on all pushes or pull requests from forks + elif [[ "$GITHUB_EVENT_NAME" == "push" ]] || [[ "$HEAD_REPO" != "$GITHUB_REPOSITORY" ]]; then echo "target=feature" >> $GITHUB_OUTPUT + else + echo "target=skip" >> $GITHUB_OUTPUT fi run-feature: @@ -36,6 +40,7 @@ jobs: LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }} LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }} SQLITE_CLOUD_CONNECTION_STRING: ${{ secrets.SQLITE_CLOUD_CONNECTION_STRING }} + SQLITE_MANY_CLOUD_CONNECTION_STRING: ${{ secrets.SQLITE_MANY_CLOUD_CONNECTION_STRING }} run-latest: needs: switch diff --git a/.gitignore b/.gitignore index b07c6dcf5f..34186fc2ac 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,4 @@ node_modules -.vscode dist dist.new *.tsbuildinfo diff --git a/.husky/pre-commit b/.husky/pre-commit new file mode 100644 index 0000000000..ea8c532e7e --- /dev/null +++ b/.husky/pre-commit @@ -0,0 +1,6 @@ +if command -v pnpm >/dev/null 2>&1; then + pnpm lint-staged +else + echo "pnpm unavailable, trying via docker..." + sh ./compose/lint.sh +fi diff --git a/.npmrc b/.npmrc deleted file mode 100644 index e6335e9d83..0000000000 --- a/.npmrc +++ /dev/null @@ -1 +0,0 @@ -# prefer-workspace-packages = true diff --git a/.nvmrc b/.nvmrc deleted file mode 100644 index 8fdd954df9..0000000000 --- a/.nvmrc +++ /dev/null @@ -1 +0,0 @@ -22 \ No newline at end of file diff --git a/.oxlintrc.json b/.oxlintrc.json new file mode 100644 index 0000000000..2d9840be7e --- /dev/null +++ b/.oxlintrc.json @@ -0,0 +1,129 @@ +{ + "jsPlugins": ["./eslint/eslint-plugin-drizzle-internal/index.mjs"], + "lint-staged": { + "!**/eslint/eslint-plugin-drizzle-internal/**": "echo skip" + }, + "rules": { + "typescript/consistent-type-imports": [ + "error", + { + "prefer": "type-imports", + "disallowTypeAnnotations": true, + "fixStyle": "separate-type-imports" + } + ], + "typescript/no-import-type-side-effects": "error", + "import/consistent-type-specifier-style Style": "error", + "import/no-cycle": "error", + "import/no-self-import": "error", + "import/no-empty-named-blocks": "error", + "import/no-duplicates": "error", + "import/extensions": [ + "error", + "always", + { + "ignorePackages": true + } + ], + "typescript/no-explicit-any": "off", + "typescript/no-non-null-assertion": "off", + "typescript/no-namespace": "off", + "no-unused-vars": [ + "error", + { + "argsIgnorePattern": "^_", + "varsIgnorePattern": "^_" + } + ], + "typescript/no-this-alias": "off", + "typescript/no-var-requires": "off", + "unicorn/prefer-node-protocol": "off", + "unicorn/prefer-top-level-await": "off", + "unicorn/catch-error-name": "off", + "unicorn/no-null": "off", + "unicorn/numeric-separators-style": "off", + "unicorn/explicit-length-check": "off", + "unicorn/filename-case": "off", + "unicorn/no-array-reduce": "off", + "unicorn/no-nested-ternary": "off", + "unicorn/no-useless-undefined": [ + "error", + { + "checkArguments": false + } + ], + "unicorn/no-this-assignment": "off", + "unicorn/empty-brace-spaces": "off", + "unicorn/no-thenable": "off", + "unicorn/consistent-function-scoping": "off", + "unicorn/prefer-type-error": "off", + "eqeqeq": "error", + "unicorn/no-instanceof-builtins": "error", + "unicorn/prefer-string-replace-all": "off", + "unicorn/no-process-exit": "off", + "unicorn/no-empty-file": "off", + "typescript/ban-ts-comment": "off", + "typescript/no-empty-interface": "off", + "typescript/no-unsafe-declaration-merging": "off", + "no-inner-declarations": "off", + "drizzle-internal/require-entity-kind": "error" + }, + "overrides": [ + { + "files": ["drizzle-orm/**/*"], + "rules": { + "drizzle-internal/no-instanceof": "error" + } + }, + { + "files": [ + "**/tests/**/*.ts", + "**/type-tests/**/*.ts", + "**/typeperf-test/**/*.ts" + ], + "rules": { + "import/extensions": "off", + "no-useless-escape": "off", + "consistent-type-imports": "off", + "no-unused-vars": "off", + "no-unused-expressions": "off" + } + }, + { + "files": ["**/type-tests/**/*.ts", "**/integration-tests/tests/**/*.ts"], + "rules": { + "no-unused-expressions": "off" + } + }, + { + "files": ["eslint-plugin-drizzle/**/*"], + "rules": { + "import/extensions": "off" + } + }, + { + "files": ["**/*.d.ts"], + "rules": { + "typescript/consistent-type-imports": "off" + } + }, + { + "files": ["drizzle-kit/**/*"], + "rules": { + "drizzle-internal/require-entity-kind": "off" + } + } + ], + "ignorePatterns": [ + "node_modules", + "dist", + "dist-dts", + "examples", + "**/*.js", + "**/*.mjs", + "**/*.cjs", + "**/playground", + "integration-tests/tests/prisma/*/client", + "integration-tests/tests/prisma/*/drizzle" + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000..6557b0b37e --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,17 @@ +{ + "typescript.tsdk": "node_modules/typescript/lib", + "editor.defaultFormatter": "dprint.dprint", + "[javascript]": { + "editor.defaultFormatter": "dprint.dprint" + }, + "[typescript]": { + "editor.defaultFormatter": "dprint.dprint" + }, + "[json]": { + "editor.defaultFormatter": "dprint.dprint" + }, + "[markdown]": { + "editor.defaultFormatter": "dprint.dprint" + }, + "oxc.enable": true +} diff --git a/attw-fork/LICENSE b/attw-fork/LICENSE new file mode 100644 index 0000000000..fee6e3657d --- /dev/null +++ b/attw-fork/LICENSE @@ -0,0 +1,7 @@ +Copyright 2023 Andrew Branch + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/attw-fork/package.json b/attw-fork/package.json new file mode 100644 index 0000000000..6909b406ee --- /dev/null +++ b/attw-fork/package.json @@ -0,0 +1,53 @@ +{ + "name": "attw-fork", + "version": "0.18.2-drizzlefork", + "description": "Fork of arethetypeswrong.github.io that allows for resolving packages in only specified modes for performance reasons", + "author": "Andrew Branch", + "license": "MIT", + "repository": { + "type": "git", + "url": "https://github.com/arethetypeswrong/arethetypeswrong.github.io.git", + "directory": "packages/core" + }, + "files": [ + "LICENSE", + "src" + ], + "publishConfig": { + "access": "public" + }, + "scripts": { + "run": "bun --bun run src/run.ts", + "build": "echo skip...", + "build:artifact": "pnpm run build", + "pack": "echo skip...", + "pack:artifact": "pnpm run pack", + "test": "echo skip...", + "test:types": "echo skip..." + }, + "type": "module", + "dependencies": { + "@andrewbranch/untar.js": "^1.0.3", + "@loaderkit/resolve": "^1.0.2", + "chalk": "^4.1.2", + "cjs-module-lexer": "^1.2.3", + "cli-table3": "^0.6.3", + "fflate": "^0.8.2", + "lru-cache": "^11.0.1", + "marked": "9.1.2", + "marked-terminal": "7.1.0", + "semver": "^7.5.4", + "typescript": "5.9.2", + "validate-npm-package-name": "^5.0.0" + }, + "devDependencies": { + "@types/marked-terminal": "3.1.3", + "@types/node": "^24.5.0", + "@types/semver": "^7.5.0", + "@types/validate-npm-package-name": "^4.0.0", + "ts-expose-internals": "5.6.3" + }, + "engines": { + "node": ">=20" + } +} diff --git a/attw-fork/src/checkPackage.ts b/attw-fork/src/checkPackage.ts new file mode 100644 index 0000000000..87ab44b0b8 --- /dev/null +++ b/attw-fork/src/checkPackage.ts @@ -0,0 +1,140 @@ +import { init as initCjsLexer } from 'cjs-module-lexer'; +import type { Package } from './createPackage.ts'; +import checks from './internal/checks/index.ts'; +import type { AnyCheck, CheckDependenciesContext } from './internal/defineCheck.ts'; +import { getBuildTools, getEntrypointInfo, getModuleKinds } from './internal/getEntrypointInfo.ts'; +import { createCompilerHosts } from './internal/multiCompilerHost.ts'; +import type { + AnalysisTypes, + CheckResult, + EntrypointResolutionAnalysis, + Problem, + ProgramInfo, + ResolutionOption, +} from './types.ts'; +import { getResolutionOption, visitResolutions } from './utils.ts'; + +export interface CheckPackageOptions { + /** + * Exhaustive list of entrypoints to check. The package root is `"."`. + * Specifying this option disables automatic entrypoint discovery, + * and overrides the `includeEntrypoints` and `excludeEntrypoints` options. + */ + entrypoints?: string[]; + /** + * Entrypoints to check in addition to automatically discovered ones. + */ + includeEntrypoints?: string[]; + /** + * Entrypoints to exclude from checking. + */ + excludeEntrypoints?: (string | RegExp)[]; + + /** + * Whether to automatically consider all published files as entrypoints + * in the absence of any other detected or configured entrypoints. + */ + entrypointsLegacy?: boolean; + + /** + * Resolution modes that package's files will be loaded in. + * Unwanted mode must be set to `false` to exclude. + */ + modes?: { + bundler?: boolean; + node10?: boolean; + 'node16-cjs'?: boolean; + 'node16-esm'?: boolean; + }; +} + +export async function checkPackage(pkg: Package, options?: CheckPackageOptions): Promise { + const types: AnalysisTypes | false = pkg.typesPackage + ? { + kind: '@types', + ...pkg.typesPackage, + definitelyTypedUrl: JSON.parse(pkg.readFile(`/node_modules/${pkg.typesPackage.packageName}/package.json`)) + .homepage, + } + : pkg.containsTypes() + ? { kind: 'included' } + : false; + const { packageName, packageVersion } = pkg; + if (!types) { + return { packageName, packageVersion, types }; + } + + const hosts = createCompilerHosts(pkg); + const entrypointResolutions = getEntrypointInfo(packageName, pkg, hosts, options); + const programInfo: Record = { + node10: {}, + node16: { moduleKinds: getModuleKinds(entrypointResolutions, 'node16', hosts) }, + bundler: {}, + }; + + await initCjsLexer(); + const problems: Problem[] = []; + const problemIdsToIndices = new Map(); + visitResolutions(entrypointResolutions, (analysis, info) => { + for (const check of checks) { + const context = { + pkg, + hosts, + entrypoints: entrypointResolutions, + programInfo, + subpath: info.subpath, + resolutionKind: analysis.resolutionKind, + resolutionOption: getResolutionOption(analysis.resolutionKind), + fileName: undefined, + }; + if (check.enumerateFiles) { + for (const fileName of analysis.files ?? []) { + runCheck(check, { ...context, fileName }, analysis); + } + if (analysis.implementationResolution) { + runCheck(check, { ...context, fileName: analysis.implementationResolution.fileName }, analysis); + } + } else { + runCheck(check, context, analysis); + } + } + }); + + return { + packageName, + packageVersion, + types, + buildTools: getBuildTools(JSON.parse(pkg.readFile(`/node_modules/${packageName}/package.json`))), + entrypoints: entrypointResolutions, + programInfo, + problems, + }; + + function runCheck( + check: AnyCheck, + context: CheckDependenciesContext, + analysis: EntrypointResolutionAnalysis, + ) { + const dependencies = check.dependencies(context); + const id = check.name + + JSON.stringify(dependencies, (_, value) => { + if (typeof value === 'function') { + throw new Error('Encountered unexpected function in check dependencies'); + } + return value; + }); + let indices = problemIdsToIndices.get(id); + if (indices) { + (analysis.visibleProblems ??= []).push(...indices); + } else { + indices = []; + const checkProblems = check.execute(dependencies, context); + for (const problem of Array.isArray(checkProblems) ? checkProblems : checkProblems ? [checkProblems] : []) { + indices.push(problems.length); + problems.push(problem); + } + problemIdsToIndices.set(id, indices); + (analysis.visibleProblems ??= []).push(...indices); + } + } +} diff --git a/attw-fork/src/cli/asciiTable.ts b/attw-fork/src/cli/asciiTable.ts new file mode 100644 index 0000000000..e8d27a5a33 --- /dev/null +++ b/attw-fork/src/cli/asciiTable.ts @@ -0,0 +1,19 @@ +import chalk from 'chalk'; +import type { GenericTable, HorizontalTableRow } from 'cli-table3'; + +export function asciiTable(table: GenericTable) { + return table.options.head + .slice(1) + .map((entryPoint, i) => { + const keyValuePairs = table.reduce((acc, cur) => { + const key = cur[0]?.toString(); + const value = cur[i + 1]?.toString(); + return acc + `${key}: ${value}\n`; + }, ''); + return `${chalk.bold.blue(entryPoint)} + +${keyValuePairs} +***********************************`; + }) + .join('\n\n'); +} diff --git a/attw-fork/src/cli/getExitCode.ts b/attw-fork/src/cli/getExitCode.ts new file mode 100644 index 0000000000..39f9b73f65 --- /dev/null +++ b/attw-fork/src/cli/getExitCode.ts @@ -0,0 +1,20 @@ +import type { CheckResult } from '../types.ts'; +import { problemFlags } from './problemUtils.ts'; +import type { RenderOptions } from './renderOptions.ts'; + +export function getExitCode(analysis: CheckResult, opts?: RenderOptions): number { + if (!analysis.types) { + return 0; + } + const ignoreRules = opts?.ignoreRules ?? []; + const ignoreResolutions = opts?.ignoreResolutions ?? []; + return analysis.problems.some((problem) => { + const notRuleIgnored = !ignoreRules.includes(problemFlags[problem.kind]); + const notResolutionIgnored = 'resolutionKind' in problem + ? !ignoreResolutions.includes(problem.resolutionKind) + : true; + return notRuleIgnored && notResolutionIgnored; + }) + ? 1 + : 0; +} diff --git a/attw-fork/src/cli/problemUtils.ts b/attw-fork/src/cli/problemUtils.ts new file mode 100644 index 0000000000..e502c389c6 --- /dev/null +++ b/attw-fork/src/cli/problemUtils.ts @@ -0,0 +1,30 @@ +import type * as core from '../index.ts'; +import type { ProblemKind } from '../index.ts'; + +export const problemFlags = { + NoResolution: 'no-resolution', + UntypedResolution: 'untyped-resolution', + FalseCJS: 'false-cjs', + FalseESM: 'false-esm', + CJSResolvesToESM: 'cjs-resolves-to-esm', + FallbackCondition: 'fallback-condition', + CJSOnlyExportsDefault: 'cjs-only-exports-default', + NamedExports: 'named-exports', + FalseExportDefault: 'false-export-default', + MissingExportEquals: 'missing-export-equals', + UnexpectedModuleSyntax: 'unexpected-module-syntax', + InternalResolutionError: 'internal-resolution-error', +} as const satisfies Record; + +export const resolutionKinds: Record = { + node10: 'node10', + 'node16-cjs': 'node16 (from CJS)', + 'node16-esm': 'node16 (from ESM)', + bundler: 'bundler', +}; + +export const moduleKinds = { + 1: '(CJS)', + 99: '(ESM)', + '': '', +}; diff --git a/attw-fork/src/cli/renderOptions.ts b/attw-fork/src/cli/renderOptions.ts new file mode 100644 index 0000000000..27c45b7571 --- /dev/null +++ b/attw-fork/src/cli/renderOptions.ts @@ -0,0 +1,11 @@ +import type { problemFlags, resolutionKinds } from './problemUtils.ts'; + +export type Format = 'auto' | 'table' | 'table-flipped' | 'ascii' | 'json'; +export interface RenderOptions { + ignoreRules?: (typeof problemFlags)[keyof typeof problemFlags][]; + ignoreResolutions?: (keyof typeof resolutionKinds)[]; + format?: Format; + color?: boolean; + summary?: boolean; + emoji?: boolean; +} diff --git a/attw-fork/src/cli/typed.ts b/attw-fork/src/cli/typed.ts new file mode 100644 index 0000000000..c92dd10bde --- /dev/null +++ b/attw-fork/src/cli/typed.ts @@ -0,0 +1,187 @@ +import chalk from 'chalk'; +import Table, { type GenericTable, type HorizontalTableRow } from 'cli-table3'; +import { marked } from 'marked'; +import TerminalRenderer from 'marked-terminal'; +import type * as core from '../index.ts'; +import { + filterProblems, + problemAffectsEntrypoint, + problemAffectsResolutionKind, + problemKindInfo, +} from '../problems.ts'; +import { allResolutionKinds, getResolutionOption, groupProblemsByKind } from '../utils.ts'; +import { asciiTable } from './asciiTable.ts'; +import { moduleKinds, problemFlags, resolutionKinds } from './problemUtils.ts'; +import type { RenderOptions } from './renderOptions.ts'; + +export async function typed( + analysis: core.Analysis, + { emoji = true, summary = true, format = 'auto', ignoreRules = [], ignoreResolutions = [] }: RenderOptions, +): Promise { + let output = ''; + const problems = analysis.problems.filter( + (problem) => !ignoreRules || !ignoreRules.includes(problemFlags[problem.kind]), + ); + // sort resolutions with required (impacts result) first and ignored after + const requiredResolutions = allResolutionKinds.filter((kind) => !ignoreResolutions.includes(kind)); + const ignoredResolutions = allResolutionKinds.filter((kind) => ignoreResolutions.includes(kind)); + const resolutions = requiredResolutions.concat(ignoredResolutions); + const entrypoints = Object.keys(analysis.entrypoints); + marked.setOptions({ + renderer: new TerminalRenderer(), + }); + + out(`${analysis.packageName} v${analysis.packageVersion}`); + if (analysis.types.kind === '@types') { + out(`${analysis.types.packageName} v${analysis.types.packageVersion}`); + } + out(); + if (Object.keys(analysis.buildTools).length) { + out('Build tools:'); + out( + Object.entries(analysis.buildTools) + .map(([tool, version]) => { + return `- ${tool}@${version}`; + }) + .join('\n'), + ); + out(); + } + + if (ignoreRules && ignoreRules.length) { + out(chalk.gray(` (ignoring rules: ${ignoreRules.map((rule) => `'${rule}'`).join(', ')})\n`)); + } + if (ignoreResolutions && ignoreResolutions.length) { + out( + chalk.gray(` (ignoring resolutions: ${ignoreResolutions.map((resolution) => `'${resolution}'`).join(', ')})\n`), + ); + } + + if (summary) { + const defaultSummary = marked(!emoji ? ' No problems found' : ' No problems found 🌟'); + const grouped = groupProblemsByKind(problems); + const summaryTexts = Object.entries(grouped).map(([kind, kindProblems]) => { + const info = problemKindInfo[kind as core.ProblemKind]; + const affectsRequiredResolution = kindProblems.some((p) => + requiredResolutions.some((r) => problemAffectsResolutionKind(p, r, analysis)) + ); + const description = marked( + `${info.description}${info.details ? ` Use \`-f json\` to see ${info.details}.` : ''} ${info.docsUrl}`, + ); + return `${affectsRequiredResolution ? '' : '(ignored per resolution) '}${ + emoji ? `${info.emoji} ` : '' + }${description}`; + }); + + out(summaryTexts.join('') || defaultSummary); + } + + const entrypointNames = entrypoints.map( + (s) => `"${s === '.' ? analysis.packageName : `${analysis.packageName}/${s.substring(2)}`}"`, + ); + const entrypointHeaders = entrypoints.map((s, i) => { + const hasProblems = problems.some((p) => problemAffectsEntrypoint(p, s, analysis)); + const color = hasProblems ? 'redBright' : 'greenBright'; + return chalk.bold[color](entrypointNames[i]); + }); + + const getCellContents = memo((subpath: string, resolutionKind: core.ResolutionKind) => { + const ignoredPrefix = ignoreResolutions.includes(resolutionKind) ? '(ignored) ' : ''; + const problemsForCell = groupProblemsByKind( + filterProblems(problems, analysis, { entrypoint: subpath, resolutionKind }), + ); + const entrypoint = analysis.entrypoints[subpath]!.resolutions[resolutionKind]; + const resolution = entrypoint.resolution; + const kinds = Object.keys(problemsForCell) as core.ProblemKind[]; + if (kinds.length) { + return kinds + .map( + (kind) => + ignoredPrefix + (emoji ? `${problemKindInfo[kind].emoji} ` : '') + problemKindInfo[kind].shortDescription, + ) + .join('\n'); + } + + const jsonResult = !emoji ? 'OK (JSON)' : '🟢 (JSON)'; + const moduleResult = entrypoint.isWildcard + ? '(wildcard)' + : (!emoji ? 'OK ' : '🟢 ') + + moduleKinds[ + analysis.programInfo[getResolutionOption(resolutionKind)].moduleKinds?.[resolution?.fileName ?? ''] + ?.detectedKind || '' + ]; + return ignoredPrefix + (resolution?.isJson ? jsonResult : moduleResult); + }); + + const flippedTable = format === 'auto' || format === 'table-flipped' + ? new Table({ + head: [ + '', + ...resolutions.map((kind) => + chalk.reset(resolutionKinds[kind] + (ignoreResolutions.includes(kind) ? ' (ignored)' : '')) + ), + ], + }) + : undefined; + if (flippedTable) { + entrypoints.forEach((subpath, i) => { + flippedTable.push([ + entrypointHeaders[i], + ...resolutions.map((resolutionKind) => getCellContents(subpath, resolutionKind)), + ]); + }); + } + + const table = format === 'auto' || !flippedTable + ? (new Table({ + head: ['', ...entrypointHeaders], + }) as GenericTable) + : undefined; + if (table) { + resolutions.forEach((kind) => { + table.push([resolutionKinds[kind], ...entrypoints.map((entrypoint) => getCellContents(entrypoint, kind))]); + }); + } + + switch (format) { + case 'table': + out(table!.toString()); + break; + case 'table-flipped': + out(flippedTable!.toString()); + break; + case 'ascii': + out(asciiTable(table!)); + break; + case 'auto': + const terminalWidth = process.stdout.columns || 133; // This looks like GitHub Actions' width + if (table!.width <= terminalWidth) { + out(table!.toString()); + } else if (flippedTable!.width <= terminalWidth) { + out(flippedTable!.toString()); + } else { + out(asciiTable(table!)); + } + break; + } + + return output.trimEnd(); + + function out(s: string = '') { + output += s + '\n'; + } +} + +function memo(fn: (...args: Args) => Result): (...args: Args) => Result { + const cache = new Map(); + return (...args) => { + const key = '' + args; + if (cache.has(key)) { + return cache.get(key); + } + + const result = fn(...args); + cache.set(key, result); + return result; + }; +} diff --git a/attw-fork/src/cli/untyped.ts b/attw-fork/src/cli/untyped.ts new file mode 100644 index 0000000000..80f42c43bb --- /dev/null +++ b/attw-fork/src/cli/untyped.ts @@ -0,0 +1,5 @@ +import type { UntypedResult } from '../types.ts'; + +export function untyped(analysis: UntypedResult) { + return 'This package does not contain types.\nDetails: ' + JSON.stringify(analysis, null, 2); +} diff --git a/attw-fork/src/cli/write.ts b/attw-fork/src/cli/write.ts new file mode 100644 index 0000000000..32066cc890 --- /dev/null +++ b/attw-fork/src/cli/write.ts @@ -0,0 +1,27 @@ +import { Readable, type Writable } from 'node:stream'; + +// JSON output is often longer than 64 kb, so we need to use streams to write it to stdout +// in order to avoid truncation when piping to other commands. +export async function write(data: string, out: Writable): Promise { + return new Promise((resolve, reject) => { + const stream = new Readable({ + read() { + this.push(data); + this.push('\n'); + this.push(null); + }, + }); + + stream.on('data', (chunk) => { + out.write(chunk); + }); + + stream.on('end', () => { + resolve(); + }); + + out.on('error', (err) => { + reject(err); + }); + }); +} diff --git a/attw-fork/src/createPackage.ts b/attw-fork/src/createPackage.ts new file mode 100644 index 0000000000..a50ce7baab --- /dev/null +++ b/attw-fork/src/createPackage.ts @@ -0,0 +1,310 @@ +/* eslint-disable unicorn/no-array-callback-reference */ +/* eslint-disable drizzle-internal/require-entity-kind */ +import { untar } from '@andrewbranch/untar.js'; +import { Gunzip } from 'fflate'; +import { major, maxSatisfying, minor, valid, validRange } from 'semver'; +import ts from 'typescript'; +import { type ParsedPackageSpec, parsePackageSpec } from './utils.ts'; + +export class Package { + #files: Record = {}; + readonly packageName: string; + readonly packageVersion: string; + readonly resolvedUrl?: string; + readonly typesPackage?: { + packageName: string; + packageVersion: string; + resolvedUrl?: string; + }; + + constructor( + files: Record, + packageName: string, + packageVersion: string, + resolvedUrl?: string, + typesPackage?: Package['typesPackage'], + ) { + this.#files = files; + this.packageName = packageName; + this.packageVersion = packageVersion; + this.resolvedUrl = resolvedUrl; + this.typesPackage = typesPackage; + } + + tryReadFile(path: string): string | undefined { + const file = this.#files[path]; + if (file === undefined) { + return undefined; + } + if (typeof file === 'string') { + return file; + } + const content = new TextDecoder().decode(file); + this.#files[path] = content; + return content; + } + + readFile(path: string): string { + const content = this.tryReadFile(path); + if (content === undefined) { + throw new Error(`File not found: ${path}`); + } + return content; + } + + fileExists(path: string): boolean { + return path in this.#files; + } + + directoryExists(path: string): boolean { + path = ts.ensureTrailingDirectorySeparator(path); + for (const file in this.#files) { + if (file.startsWith(path)) { + return true; + } + } + return false; + } + + containsTypes(directory = '/'): boolean { + return this.listFiles(directory).some(ts.hasTSFileExtension); + } + + listFiles(directory = '/'): string[] { + directory = ts.ensureTrailingDirectorySeparator(directory); + return directory === '/' + ? Object.keys(this.#files) + : Object.keys(this.#files).filter((f) => f.startsWith(directory)); + } + + mergedWithTypes(typesPackage: Package): Package { + const files = { ...this.#files, ...typesPackage.#files }; + return new Package(files, this.packageName, this.packageVersion, this.resolvedUrl, { + packageName: typesPackage.packageName, + packageVersion: typesPackage.packageVersion, + resolvedUrl: typesPackage.resolvedUrl, + }); + } +} + +export interface CreatePackageFromNpmOptions { + /** + * Controls inclusion of a corresponding `@types` package. Ignored if the implementation + * package contains TypeScript files. The value is the version or SemVer range of the + * `@types` package to include, `true` to infer the version from the implementation + * package version, or `false` to prevent inclusion of a `@types` package. + * @default true + */ + definitelyTyped?: string | boolean; + before?: Date; + allowDeprecated?: boolean; +} + +export async function createPackageFromNpm( + packageSpec: string, + { definitelyTyped = true, ...options }: CreatePackageFromNpmOptions = {}, +): Promise { + const parsed = parsePackageSpec(packageSpec); + if (parsed.status === 'error') { + throw new Error(parsed.error); + } + const packageName = parsed.data.name; + const typesPackageName = ts.getTypesPackageName(packageName); + const { tarballUrl, packageVersion } = parsed.data.versionKind === 'none' && typeof definitelyTyped === 'string' + ? await resolveImplementationPackageForTypesPackage(typesPackageName, definitelyTyped, options) + : await getNpmTarballUrl([parsed.data], options); + const pkg = await createPackageFromTarballUrl(tarballUrl); + if (!definitelyTyped || pkg.containsTypes()) { + return pkg; + } + + const typesPackageData = await (definitelyTyped === true + ? resolveTypesPackageForPackage(packageName, packageVersion, options) + : getNpmTarballUrl( + [ + { + name: typesPackageName, + versionKind: valid(definitelyTyped) ? 'exact' : validRange(definitelyTyped) ? 'range' : 'tag', + version: definitelyTyped, + }, + ], + options, + )); + + if (typesPackageData) { + return pkg.mergedWithTypes(await createPackageFromTarballUrl(typesPackageData.tarballUrl)); + } + return pkg; +} + +export async function resolveImplementationPackageForTypesPackage( + typesPackageName: string, + typesPackageVersion: string, + options?: Omit, +): Promise { + if (!typesPackageName.startsWith('@types/')) { + throw new Error(`'resolveImplementationPackageForTypesPackage' expects an @types package name and version`); + } + const packageName = ts.unmangleScopedPackageName(typesPackageName.slice('@types/'.length)); + const version = valid(typesPackageVersion); + if (version) { + return getNpmTarballUrl( + [ + parsePackageSpec(`${packageName}@${major(version)}.${minor(version)}`).data!, + parsePackageSpec(`${packageName}@${major(version)}`).data!, + parsePackageSpec(`${packageName}@latest`).data!, + ], + options, + ); + } + + const range = validRange(typesPackageVersion); + if (range) { + return getNpmTarballUrl( + [ + { name: packageName, versionKind: 'range', version: range }, + { name: packageName, versionKind: 'tag', version: 'latest' }, + ], + options, + ); + } + + throw new Error(`'resolveImplementationPackageForTypesPackage' expects a valid SemVer version or range`); +} + +export async function resolveTypesPackageForPackage( + packageName: string, + packageVersion: string, + options?: Omit, +): Promise { + const typesPackageName = ts.getTypesPackageName(packageName); + try { + return await getNpmTarballUrl( + [ + { + name: typesPackageName, + versionKind: 'range', + version: `${major(packageVersion)}.${minor(packageVersion)}`, + }, + { + name: typesPackageName, + versionKind: 'range', + version: `${major(packageVersion)}`, + }, + { + name: typesPackageName, + versionKind: 'tag', + version: 'latest', + }, + ], + options, + ); + } catch {} + + return undefined; +} + +export interface ResolvedPackageId { + packageName: string; + packageVersion: string; + tarballUrl: string; +} + +async function getNpmTarballUrl( + packageSpecs: readonly ParsedPackageSpec[], + { before, allowDeprecated }: Omit = {}, +): Promise { + const fetchPackument = packageSpecs.some( + (spec) => spec.versionKind === 'range' || (spec.versionKind === 'tag' && spec.version !== 'latest'), + ); + const packumentUrl = `https://registry.npmjs.org/${packageSpecs[0]!.name}`; + const includeTimes = before !== undefined && packageSpecs.some((spec) => spec.versionKind !== 'exact'); + const Accept = includeTimes ? 'application/json' : 'application/vnd.npm.install-v1+json'; + const packument = fetchPackument + ? await fetch(packumentUrl, { headers: { Accept } }).then((r) => r.json()) + : undefined; + + for (const packageSpec of packageSpecs) { + const manifestUrl = `https://registry.npmjs.org/${packageSpec.name}/${packageSpec.version || 'latest'}`; + const doc = packument || (await fetch(manifestUrl).then((r) => r.json())); + if (typeof doc !== 'object' || (doc.error && doc.error !== 'Not found')) { + throw new Error(`Unexpected response from ${manifestUrl}: ${JSON.stringify(doc)}`); + } + const isManifest = !!doc.version; + let tarballUrl, packageVersion; + if (packageSpec.versionKind === 'range') { + packageVersion = doc.versions + && maxSatisfying( + Object.keys(doc.versions).filter( + (v) => + (allowDeprecated || !doc.versions[v].deprecated) + && (!before || !doc.time || new Date(doc.time[v]) <= before), + ), + packageSpec.version, + ); + if (!packageVersion) { + continue; + } + tarballUrl = doc.versions[packageVersion].dist.tarball; + } else if (packageSpec.versionKind === 'tag' && packageSpec.version !== 'latest') { + packageVersion = doc['dist-tags'][packageSpec.version]; + if (!packageVersion) { + continue; + } + if (before && doc.time && new Date(doc.time[packageVersion]) > before) { + continue; + } + tarballUrl = doc.versions[packageVersion].dist.tarball; + } else if (isManifest) { + packageVersion = doc.version; + tarballUrl = doc.dist?.tarball; + } else { + packageVersion = doc['dist-tags']?.latest; + tarballUrl = doc.versions?.[packageVersion].dist.tarball; + } + + if (packageVersion && tarballUrl) { + return { packageName: packageSpec.name, packageVersion, tarballUrl }; + } + } + throw new Npm404Error(packageSpecs); +} + +export class Npm404Error extends Error { + kind = 'Npm404Error'; + constructor(public packageSpecs: readonly ParsedPackageSpec[]) { + super(`Failed to find a matching version for ${packageSpecs[0]!.name}`); + } +} + +export async function createPackageFromTarballUrl(tarballUrl: string): Promise { + const tarball = await fetchTarball(tarballUrl); + const { files, packageName, packageVersion } = extractTarball(tarball); + return new Package(files, packageName, packageVersion, tarballUrl); +} + +async function fetchTarball(tarballUrl: string) { + return new Uint8Array((await fetch(tarballUrl).then((r) => r.arrayBuffer())) satisfies ArrayBuffer); +} + +export function createPackageFromTarballData(tarball: Uint8Array): Package { + const { files, packageName, packageVersion } = extractTarball(tarball); + return new Package(files, packageName, packageVersion); +} + +function extractTarball(tarball: Uint8Array) { + // Use streaming API to work around https://github.com/101arrowz/fflate/issues/207 + let unzipped: Uint8Array; + new Gunzip((chunk) => (unzipped = chunk)).push(tarball, /*final*/ true); + const data = untar(unzipped!.buffer as ArrayBuffer); + const prefix = data[0]!.filename.slice(0, Math.max(0, data[0]!.filename.indexOf('/') + 1)); + const packageJsonText = data.find((f) => f.filename === `${prefix}package.json`)?.fileData; + const packageJson = JSON.parse(new TextDecoder().decode(packageJsonText)); + const packageName = packageJson.name; + const packageVersion = packageJson.version; + const files = data.reduce((acc: Record, file) => { + acc[ts.combinePaths('/node_modules/' + packageName, file.filename.slice(prefix.length))] = file.fileData; + return acc; + }, {}); + return { files, packageName, packageVersion }; +} diff --git a/attw-fork/src/index.ts b/attw-fork/src/index.ts new file mode 100644 index 0000000000..b0a56e30d8 --- /dev/null +++ b/attw-fork/src/index.ts @@ -0,0 +1,3 @@ +export * from './checkPackage.ts'; +export * from './createPackage.ts'; +export type * from './types.ts'; diff --git a/attw-fork/src/internal/checks/cjsOnlyExportsDefault.ts b/attw-fork/src/internal/checks/cjsOnlyExportsDefault.ts new file mode 100644 index 0000000000..8c80f85844 --- /dev/null +++ b/attw-fork/src/internal/checks/cjsOnlyExportsDefault.ts @@ -0,0 +1,54 @@ +import ts from 'typescript'; +import { defineCheck } from '../defineCheck.ts'; + +export default defineCheck({ + name: 'CJSOnlyExportsDefault', + dependencies: ({ entrypoints, subpath, resolutionKind }) => { + const entrypoint = entrypoints[subpath]!.resolutions[resolutionKind]; + const implementationFileName = entrypoint.implementationResolution?.fileName; + return [implementationFileName, resolutionKind]; + }, + execute: ([implementationFileName, resolutionKind], context) => { + if (!implementationFileName) { + return; + } + if (resolutionKind === 'node10' || resolutionKind === 'node16-cjs') { + // Here, we have a CJS file (most likely transpiled ESM) resolving to a + // CJS transpiled ESM file. This is fine when considered in isolation. + // The pattern of having `module.exports.default = ...` is a problem + // primarily because ESM-detected files in Node (and the same files in + // Webpack/esbuild) will treat `module.exports` as the default export, + // which is both unexpected and different from Babel-style interop seen + // in transpiled default imports and most bundler scenarios. But if Node, + // Webpack, and esbuild never see this file, then it's fine. So, while + // the problematic pattern is a feature of the file alone, the bad outcome + // comes from a combination of the file and the module system that imports + // it. For dual packages that point Node imports and bundlers to a true + // ESM default export, while pointing requires to this CJS "default export," + // we don't want to report a problem. + // + // TODO: It would be nice to report this information *somehow*, as neutral + // metadata attached to the file (c.f. `Analysis["programInfo"]`). + return; + } + const host = context.hosts.findHostForFiles([implementationFileName]) ?? context.hosts.bundler; + const sourceFile = host.getSourceFile(implementationFileName)!; + if ( + !sourceFile.externalModuleIndicator + && sourceFile.commonJsModuleIndicator + && sourceFile.symbol?.exports?.has(ts.InternalSymbolName.Default) + && sourceFile.symbol.exports.has(ts.escapeLeadingUnderscores('__esModule')) + && !sourceFile.symbol.exports.has(ts.InternalSymbolName.ExportEquals) + ) { + const decl = sourceFile.symbol.exports.get(ts.InternalSymbolName.Default)!.declarations![0]; + return { + kind: 'CJSOnlyExportsDefault', + fileName: implementationFileName, + pos: decl!.getStart(sourceFile), + end: decl!.end, + }; + } + + return; + }, +}); diff --git a/attw-fork/src/internal/checks/entrypointResolutions.ts b/attw-fork/src/internal/checks/entrypointResolutions.ts new file mode 100644 index 0000000000..c4d3dff8fb --- /dev/null +++ b/attw-fork/src/internal/checks/entrypointResolutions.ts @@ -0,0 +1,57 @@ +import ts from 'typescript'; +import type { Problem } from '../../types.ts'; +import { resolvedThroughFallback } from '../../utils.ts'; +import { defineCheck } from '../defineCheck.ts'; + +export default defineCheck({ + name: 'EntrypointResolutions', + dependencies: ({ subpath, resolutionKind }) => [subpath, resolutionKind], + execute: ([subpath, resolutionKind], context) => { + const problems: Problem[] = []; + const entrypoint = context.entrypoints[subpath]!.resolutions[resolutionKind]; + if (entrypoint.isWildcard) { + return; + } + + if (!entrypoint.resolution) { + problems.push({ + kind: 'NoResolution', + entrypoint: subpath, + resolutionKind, + }); + } else if (!entrypoint.resolution.isTypeScript && !entrypoint.resolution.isJson) { + problems.push({ + kind: 'UntypedResolution', + entrypoint: subpath, + resolutionKind, + }); + } + + if ( + resolutionKind === 'node16-cjs' + && ((!entrypoint.implementationResolution + && entrypoint.resolution + && context.programInfo['node16'].moduleKinds![entrypoint.resolution.fileName]?.detectedKind + === ts.ModuleKind.ESNext) + || (entrypoint.implementationResolution + && context.programInfo['node16'].moduleKinds![entrypoint.implementationResolution.fileName]?.detectedKind + === ts.ModuleKind.ESNext)) + ) { + problems.push({ + kind: 'CJSResolvesToESM', + entrypoint: subpath, + resolutionKind, + }); + } + + if (entrypoint.resolution && resolvedThroughFallback(entrypoint.resolution.trace)) { + problems.push({ + kind: 'FallbackCondition', + entrypoint: subpath, + resolutionKind, + }); + } + + return problems; + }, +}); diff --git a/attw-fork/src/internal/checks/exportDefaultDisagreement.ts b/attw-fork/src/internal/checks/exportDefaultDisagreement.ts new file mode 100644 index 0000000000..a3ff7ddec1 --- /dev/null +++ b/attw-fork/src/internal/checks/exportDefaultDisagreement.ts @@ -0,0 +1,348 @@ +import ts from 'typescript'; +import { getResolutionOption } from '../../utils.ts'; +import { defineCheck } from '../defineCheck.ts'; +import { type Export, getProbableExports } from '../getProbableExports.ts'; + +const bindOptions: ts.CompilerOptions = { + target: ts.ScriptTarget.Latest, + allowJs: true, + checkJs: true, +}; + +export default defineCheck({ + name: 'ExportDefaultDisagreement', + dependencies: ({ entrypoints, subpath, resolutionKind, programInfo }) => { + const entrypoint = entrypoints[subpath]!.resolutions[resolutionKind]; + const typesFileName = entrypoint.resolution?.fileName; + const implementationFileName = entrypoint.implementationResolution?.fileName; + if ( + (typesFileName + && programInfo[getResolutionOption(resolutionKind)].moduleKinds?.[typesFileName]?.detectedKind + === ts.ModuleKind.ESNext) + || (implementationFileName + && programInfo[getResolutionOption(resolutionKind)].moduleKinds?.[implementationFileName]?.detectedKind + === ts.ModuleKind.ESNext) + ) { + return []; + } + return [typesFileName, implementationFileName]; + }, + execute: ([typesFileName, implementationFileName], context) => { + // Technically, much of this implementation should go in `dependencies`, since + // different resolution modes can result in different program graphs, resulting + // in different types, which are queried heavily here. However, it would be much + // more expensive to run this type-heavy code in `dependencies`, where it would + // reevaluate for every entrypoint/resolution matrix cell, when chances are + // extremely high that a given pair of types/implementation files are intended + // to act the same under all resolution modes. + if (!typesFileName || !implementationFileName || !ts.hasTSFileExtension(typesFileName)) { + return; + } + const host = context.hosts.findHostForFiles([typesFileName])!; + const typesSourceFile = host.getSourceFile(typesFileName)!; + ts.bindSourceFile(typesSourceFile, bindOptions); + if (!typesSourceFile.symbol?.exports) { + return; + } + const implementationSourceFile = host.getSourceFile(implementationFileName)!; + ts.bindSourceFile(implementationSourceFile, bindOptions); + if (!implementationSourceFile.symbol?.exports || implementationSourceFile.externalModuleIndicator) { + return; + } + + // FalseExportDefault: types have a default, JS doesn't. + // For this check, we're going to require the types to have a top-level + // default export, which means we might miss something like: + // + // declare namespace foo { + // const _default: string; + // export { _default as default }; + // } + // export = foo; + // + // But that's not a mistake people really make. If we don't need to + // recognize that pattern, we can avoid creating a program and checker + // for this error. + const typesHaveSyntacticDefault = typesSourceFile.symbol.exports.has(ts.InternalSymbolName.Default); + if (typesHaveSyntacticDefault && !getImplHasDefault() && implIsAnalyzable()) { + return { + kind: 'FalseExportDefault', + typesFileName, + implementationFileName, + }; + } + + // MissingExportEquals: types and JS have a default, but JS also has a + // module.exports = not reflected in the types. + // There are a few variations of this problem. The most straightforward + // is when the types declare *only* a default export, and the JS declares + // a module.exports and a module.exports.default in different declarations: + // + // module.exports = SomeClass; + // module.exports.default = SomeClass; + // + // Then, there's the slight variation on this where the `default` property + // is separately declared on `SomeClass`. This requires the type checker. + // Finally, there's the case where the types declare a default export along + // with other named exports. That *could* accurately represent a + // `module.exports = { default, ... }` in JS, but only if the named exports + // are values, not types. It also *couldn't* accurately represent a + // `module.exports = SomeClass`, where the exported value is callable, + // constructable, or a primitive. + + if (!getImplHasDefault() || !implIsAnalyzable()) { + // The implementation not having a default doesn't necessarily mean the + // following checks are irrelevant, but this rule is designed primarily + // to catch cases where type definition authors correctly notice that + // their implementation has a `module.exports.default`, but don't realize + // that the same object is exposed as `module.exports`. We bail early + // here primarily because these checks are expensive. + return; + } + + if ( + !typesSourceFile.symbol.exports.has(ts.InternalSymbolName.ExportEquals) + && implementationSourceFile.symbol.exports.has(ts.InternalSymbolName.ExportEquals) + && getTypesDefaultSymbol() + && ((getImplExportEqualsIsExportDefault() + && getTypesChecker().typeHasCallOrConstructSignatures(getTypesTypeOfDefault())) + || getImplChecker().typeHasCallOrConstructSignatures(getImplTypeOfModuleExports())) + ) { + return { + kind: 'MissingExportEquals', + typesFileName, + implementationFileName, + }; + } + + // TODO: does not account for export * + const typesHaveNonDefaultValueExport = [...typesSourceFile.symbol.exports.values()].some((s) => { + if (s.escapedName === 'default') { + return false; + } + if (s.flags & ts.SymbolFlags.Value) { + return true; + } + while (s.flags & ts.SymbolFlags.Alias) { + s = getTypesChecker().getAliasedSymbol(s); + if (s.flags & ts.SymbolFlags.Value) { + return true; + } + } + + return; + }); + + if ( + !typesHaveNonDefaultValueExport + && typeIsObjecty(getTypesTypeOfDefault(), getTypesChecker()) + && ([...implementationSourceFile.symbol.exports.keys()].some((name) => + isNotDefaultOrEsModule(ts.unescapeLeadingUnderscores(name)) + ) + || getImplProbableExports().some(({ name }) => isNotDefaultOrEsModule(name))) + && getTypesDefaultSymbol() + ) { + // Here, the types have a lone default export of a non-callable object, + // and the implementation has multiple named exports along with `default`. + // This is the biggest heuristic leap for this rule, but the assumption is + // that the default export in the types was intended to represent the object + // shape of `module.exports`, not `module.exports.default`. This may result + // in false positives, but those false positives can be silenced by adding + // exports in the types for other named exports in the JS. It's detecting + // a definite problem; it's just not always accurate about the diagnosis. + return { + kind: 'MissingExportEquals', + typesFileName, + implementationFileName, + }; + } + + // eslint-disable-next-line no-var + var implProbableExports: unknown, + implChecker: unknown, + implHasDefault: unknown, + implTypeOfModuleExports: unknown, + implExportEqualsIsExportDefault: unknown, + typesChecker: unknown, + typesDefaultSymbol: unknown, + typesTypeOfDefault: unknown; + function getImplProbableExports(): Export[] { + return ((implProbableExports as Export[]) ??= getProbableExports(implementationSourceFile)); + } + function getImplChecker(): ts.TypeChecker { + return ((implChecker as ts.TypeChecker) ??= host + .createAuxiliaryProgram([implementationFileName!]) + .getTypeChecker()); + } + function getImplHasDefault(): boolean { + return ((implHasDefault as boolean) ??= + implementationSourceFile?.symbol?.exports?.has(ts.InternalSymbolName.Default) + || getImplProbableExports()?.some((s) => s.name === 'default') + || (!!implementationSourceFile.symbol?.exports?.size + && getImplChecker() + .getExportsAndPropertiesOfModule(implementationSourceFile.symbol) + .some((s) => s.name === 'default'))); + } + function getTypesChecker(): ts.TypeChecker { + return ((typesChecker as ts.TypeChecker) ??= host.createAuxiliaryProgram([typesFileName!]).getTypeChecker()); + } + function getTypesDefaultSymbol(): ts.Symbol | undefined { + return ((typesDefaultSymbol as ts.Symbol | undefined) ??= + typesSourceFile.symbol.exports!.get(ts.InternalSymbolName.Default) + ?? getTypesChecker() + .getExportsAndPropertiesOfModule(typesSourceFile.symbol) + .find((s) => s.escapedName === 'default')); + } + function getTypesTypeOfDefault(): ts.Type { + const symbol = getTypesDefaultSymbol(); + return ((typesTypeOfDefault as ts.Type) ??= symbol + ? getTypesChecker().getTypeOfSymbol(symbol) + : getTypesChecker().getAnyType()); + } + function getImplTypeOfModuleExports(): ts.Type { + if (implTypeOfModuleExports) { + return implTypeOfModuleExports as ts.Type; + } + const type = getImplChecker().getTypeOfSymbol( + getImplChecker().resolveExternalModuleSymbol(implementationSourceFile.symbol), + ); + if (type.flags & ts.TypeFlags.Any && getImplExportEqualsIsExportDefault()) { + return (implTypeOfModuleExports = getImplChecker().getTypeOfSymbol( + implementationSourceFile.symbol.exports!.get(ts.InternalSymbolName.Default)!, + )); + } + return (implTypeOfModuleExports = type); + } + function getImplExportEqualsIsExportDefault(): boolean { + // TypeScript has a circularity error on `module.exports = exports.default`, so + // detect that pattern syntactically. + if (implExportEqualsIsExportDefault !== undefined) { + return implExportEqualsIsExportDefault as boolean; + } + const exportEquals = implementationSourceFile.symbol.exports!.get(ts.InternalSymbolName.ExportEquals); + if (!exportEquals) { + return (implExportEqualsIsExportDefault = false); + } + const exportDefault = implementationSourceFile.symbol.exports!.get(ts.InternalSymbolName.Default); + if (!exportDefault) { + return (implExportEqualsIsExportDefault = false); + } + for ( + const assignment of [ + exportEquals.valueDeclaration, + ts.findAncestor(exportDefault.declarations?.[0], ts.isBinaryExpression), + ] + ) { + let seenModuleExports = false, + seenExportsDefault = false; + if ( + assignment + && ts.isBinaryExpression(assignment) + && assignment.operatorToken.kind === ts.SyntaxKind.EqualsToken + ) { + const res = !!forEachAssignmentTarget(assignment, (target) => { + if (!seenExportsDefault && isExportsDefault(target)) { + seenExportsDefault = true; + } else if (!seenModuleExports && isModuleExports(target)) { + seenModuleExports = true; + } + + return seenExportsDefault && seenModuleExports; + }); + if (res) { + return (implExportEqualsIsExportDefault = true); + } + } + } + return (implExportEqualsIsExportDefault = false); + } + function implIsAnalyzable(): boolean { + if (implementationSourceFile.symbol.exports!.get(ts.InternalSymbolName.ExportEquals)!.declarations!.length > 1) { + // Multiple assignments in different function bodies is probably a bundle we can't analyze. + // Multiple assignments in the same function body might just be an environment-conditional + // module.exports inside an IIFE. + let commonContainer; + for ( + const decl of implementationSourceFile.symbol.exports!.get(ts.InternalSymbolName.ExportEquals)! + .declarations! + ) { + const container = ts.findAncestor(decl, (node) => ts.isFunctionBlock(node) || ts.isSourceFile(node)); + if (commonContainer === undefined) { + commonContainer = container; + } else if (commonContainer !== container) { + return false; + } + } + } + return !!(implementationSourceFile.symbol.exports!.size || getImplProbableExports()?.length); + } + return; + }, +}); + +function typeIsObjecty(type: ts.Type, checker: ts.TypeChecker) { + return ( + type.flags & ts.TypeFlags.Object + && !(type.flags & ts.TypeFlags.Primitive) + && !checker.typeHasCallOrConstructSignatures(type) + ); +} + +function isModuleExports(target: ts.Expression) { + return ( + (ts.isAccessExpression(target) + && ts.isIdentifier(target.expression) + && target.expression.text === 'module' + && getNameOfAccessExpression(target) === 'exports') + || (ts.isIdentifier(target) && target.text === 'exports') + ); +} + +function isExportsDefault(target: ts.Expression) { + return ( + (ts.isAccessExpression(target) + && ts.isIdentifier(target.expression) + && target.expression.text === 'exports' + && getNameOfAccessExpression(target) === 'default') + || (ts.isAccessExpression(target) + && ts.isAccessExpression(target.expression) + && ts.isIdentifier(target.expression.expression) + && target.expression.expression.text === 'module' + && getNameOfAccessExpression(target.expression) === 'exports' + && getNameOfAccessExpression(target) === 'default') + ); +} + +function isNotDefaultOrEsModule(name: string) { + return name !== 'default' && name !== '__esModule'; +} + +function forEachAssignmentTarget( + assignment: ts.BinaryExpression, + cb: (target: ts.Expression) => ReturnT | undefined, +): ReturnT | undefined { + // For `module.exports = exports = exports.default`, fires `cb` once for + // `exports.default`, once for `exports`, and once for `module.exports`. + const target = ts.skipParentheses(assignment.right); + if (ts.isBinaryExpression(target) && target.operatorToken.kind === ts.SyntaxKind.EqualsToken) { + const res = forEachAssignmentTarget(target, cb); + if (res) { + return res; + } + } else { + const res = cb(target); + if (res) { + return res; + } + } + return cb(ts.skipParentheses(assignment.left)); +} + +function getNameOfAccessExpression(accessExpression: ts.AccessExpression): string | undefined { + const node = ts.getNameOfAccessExpression(accessExpression); + if (ts.isIdentifier(node) || ts.isStringLiteralLike(node)) { + return node.text; + } + + return undefined; +} diff --git a/attw-fork/src/internal/checks/index.ts b/attw-fork/src/internal/checks/index.ts new file mode 100644 index 0000000000..4bdc72e1d3 --- /dev/null +++ b/attw-fork/src/internal/checks/index.ts @@ -0,0 +1,17 @@ +import cjsOnlyExportsDefault from './cjsOnlyExportsDefault.ts'; +import entrypointResolutions from './entrypointResolutions.ts'; +import exportDefaultDisagreement from './exportDefaultDisagreement.ts'; +import internalResolutionError from './internalResolutionError.ts'; +import moduleKindDisagreement from './moduleKindDisagreement.ts'; +import namedExports from './namedExports.ts'; +import unexpectedModuleSyntax from './unexpectedModuleSyntax.ts'; + +export default [ + entrypointResolutions, + moduleKindDisagreement, + exportDefaultDisagreement, + namedExports, + cjsOnlyExportsDefault, + unexpectedModuleSyntax, + internalResolutionError, +]; diff --git a/attw-fork/src/internal/checks/internalResolutionError.ts b/attw-fork/src/internal/checks/internalResolutionError.ts new file mode 100644 index 0000000000..3906c68856 --- /dev/null +++ b/attw-fork/src/internal/checks/internalResolutionError.ts @@ -0,0 +1,53 @@ +import ts from 'typescript'; +import type { InternalResolutionErrorProblem } from '../../types.ts'; +import { defineCheck } from '../defineCheck.ts'; + +export default defineCheck({ + name: 'InternalResolutionError', + enumerateFiles: true, + dependencies: ({ resolutionOption, fileName }) => [resolutionOption, fileName], + execute: ([resolutionOption, fileName], context) => { + if (!ts.hasTSFileExtension(fileName)) { + return; + } + const host = context.hosts[resolutionOption]; + const sourceFile = host.getSourceFile(fileName); + if (sourceFile?.imports) { + const problems: InternalResolutionErrorProblem[] = []; + for (const moduleSpecifier of sourceFile.imports) { + const reference = moduleSpecifier.text; + if ( + reference !== context.pkg.packageName + && !reference.startsWith(`${context.pkg.packageName}/`) + && reference[0] !== '#' + && !ts.pathIsRelative(reference) + ) { + // Probably a reference to something we'd have to npm install. + // These can definitely be errors, but I'm not installing a whole + // graph for now. + continue; + } + const resolutionMode = ts.getModeForUsageLocation(sourceFile, moduleSpecifier, host.getCompilerOptions()); + const resolution = host.getResolvedModule(sourceFile, moduleSpecifier.text, resolutionMode); + if (!resolution) { + throw new Error(`Expected resolution for '${moduleSpecifier.text}' in ${fileName}`); + } + + if (!resolution.resolvedModule) { + problems.push({ + kind: 'InternalResolutionError', + resolutionOption, + fileName, + moduleSpecifier: reference, + pos: moduleSpecifier.pos, + end: moduleSpecifier.end, + resolutionMode, + trace: host.getTrace(fileName, moduleSpecifier.text, resolutionMode)!, + }); + } + } + return problems; + } + return; + }, +}); diff --git a/attw-fork/src/internal/checks/moduleKindDisagreement.ts b/attw-fork/src/internal/checks/moduleKindDisagreement.ts new file mode 100644 index 0000000000..cba042738b --- /dev/null +++ b/attw-fork/src/internal/checks/moduleKindDisagreement.ts @@ -0,0 +1,45 @@ +import ts from 'typescript'; +import { defineCheck } from '../defineCheck.ts'; + +export default defineCheck({ + name: 'ModuleKindDisagreement', + dependencies: ({ entrypoints, subpath, resolutionKind, resolutionOption, programInfo }) => { + const entrypoint = entrypoints[subpath]!.resolutions[resolutionKind]; + const typesFileName = entrypoint.resolution?.fileName; + const implementationFileName = entrypoint.implementationResolution?.fileName; + return [ + typesFileName, + implementationFileName, + typesFileName ? programInfo[resolutionOption]?.moduleKinds?.[typesFileName] : undefined, + implementationFileName ? programInfo[resolutionOption]?.moduleKinds?.[implementationFileName] : undefined, + ]; + }, + execute: ([typesFileName, implementationFileName, typesModuleKind, implementationModuleKind]) => { + if (typesFileName && implementationFileName && typesModuleKind && implementationModuleKind) { + if ( + typesModuleKind.detectedKind === ts.ModuleKind.ESNext + && implementationModuleKind.detectedKind === ts.ModuleKind.CommonJS + ) { + return { + kind: 'FalseESM', + typesFileName, + implementationFileName, + typesModuleKind, + implementationModuleKind, + }; + } else if ( + typesModuleKind.detectedKind === ts.ModuleKind.CommonJS + && implementationModuleKind.detectedKind === ts.ModuleKind.ESNext + ) { + return { + kind: 'FalseCJS', + typesFileName, + implementationFileName, + typesModuleKind, + implementationModuleKind, + }; + } + } + return; + }, +}); diff --git a/attw-fork/src/internal/checks/namedExports.ts b/attw-fork/src/internal/checks/namedExports.ts new file mode 100644 index 0000000000..a2799de800 --- /dev/null +++ b/attw-fork/src/internal/checks/namedExports.ts @@ -0,0 +1,86 @@ +import ts from 'typescript'; +import { getResolutionOption } from '../../utils.ts'; +import { defineCheck } from '../defineCheck.ts'; +import { getEsmModuleNamespace } from '../esm/esmNamespace.ts'; + +export default defineCheck({ + name: 'NamedExports', + dependencies: ({ entrypoints, subpath, resolutionKind, programInfo }) => { + const entrypoint = entrypoints[subpath]!.resolutions[resolutionKind]; + const typesFileName = entrypoint.resolution?.isTypeScript && entrypoint.resolution.fileName; + const resolutionOption = getResolutionOption(resolutionKind); + const typesModuleKind = typesFileName ? programInfo[resolutionOption].moduleKinds?.[typesFileName] : undefined; + const implementationFileName = entrypoint.implementationResolution?.fileName; + const implementationModuleKind = implementationFileName + ? programInfo[resolutionOption].moduleKinds?.[implementationFileName] + : undefined; + return [implementationFileName, implementationModuleKind, typesFileName, typesModuleKind, resolutionKind]; + }, + execute: ( + [implementationFileName, implementationModuleKind, typesFileName, typesModuleKind, resolutionKind], + context, + ) => { + if ( + !implementationFileName + || !typesFileName + || resolutionKind !== 'node16-esm' + || typesModuleKind?.detectedKind !== ts.ModuleKind.CommonJS + || implementationModuleKind?.detectedKind !== ts.ModuleKind.CommonJS + ) { + return; + } + + // Get declared exported names from TypeScript + const host = context.hosts.findHostForFiles([typesFileName])!; + const typesSourceFile = host.getSourceFile(typesFileName)!; + if (typesSourceFile.scriptKind === ts.ScriptKind.JSON || !typesSourceFile.symbol) { + return; + } + + const typeChecker = host.createAuxiliaryProgram([typesFileName]).getTypeChecker(); + const moduleType = typeChecker.getTypeOfSymbol(typeChecker.resolveExternalModuleSymbol(typesSourceFile.symbol)); + if (typeChecker.isArrayLikeType(moduleType) || typeChecker.getPropertyOfType(moduleType, '0')) { + return; + } + const expectedNames = [ + ...new Set( + typeChecker + .getExportsAndPropertiesOfModule(typesSourceFile.symbol) + .filter((symbol) => { + return ( + // TS treats `prototype` and other static class members as exports. There's possibly + // a fix to be done in TS itself, since these show up as auto-imports. + symbol.name !== 'prototype' + // @ts-expect-error `getSymbolFlags` extra arguments are not declared on TypeChecker + && typeChecker.getSymbolFlags(symbol, /*excludeTypeOnlyMeanings*/ true) & ts.SymbolFlags.Value + ); + }) + .map((symbol) => symbol.name), + ), + ]; + + // Get actual exported names as seen by nodejs + let exports: readonly string[] | undefined; + try { + exports = getEsmModuleNamespace(context.pkg, implementationFileName); + } catch { + // If this fails then the result is indeterminate. This could happen in many cases, but + // a common one would be for packages which re-export from another another package. + return; + } + + const missing = expectedNames.filter((name) => !exports.includes(name)); + if (missing.length > 0) { + const lengthWithoutDefault = (names: readonly string[]) => names.length - (names.includes('default') ? 1 : 0); + return { + kind: 'NamedExports', + implementationFileName, + typesFileName, + isMissingAllNamed: lengthWithoutDefault(missing) === lengthWithoutDefault(expectedNames), + missing, + }; + } + + return; + }, +}); diff --git a/attw-fork/src/internal/checks/unexpectedModuleSyntax.ts b/attw-fork/src/internal/checks/unexpectedModuleSyntax.ts new file mode 100644 index 0000000000..361c10b25c --- /dev/null +++ b/attw-fork/src/internal/checks/unexpectedModuleSyntax.ts @@ -0,0 +1,35 @@ +import ts from 'typescript'; +import { defineCheck } from '../defineCheck.ts'; + +export default defineCheck({ + name: 'UnexpectedModuleSyntax', + enumerateFiles: true, + dependencies: ({ fileName, resolutionOption, programInfo }) => { + return [fileName, programInfo[resolutionOption].moduleKinds?.[fileName]]; + }, + execute: ([fileName, expectedModuleKind], context) => { + if (!expectedModuleKind || !ts.hasJSFileExtension(fileName)) { + return; + } + const host = context.hosts.findHostForFiles([fileName]) ?? context.hosts.bundler; + const sourceFile = host.getSourceFile(fileName)!; + const syntaxImpliedModuleKind = sourceFile.externalModuleIndicator + ? ts.ModuleKind.ESNext + : sourceFile.commonJsModuleIndicator + ? ts.ModuleKind.CommonJS + : undefined; + if (syntaxImpliedModuleKind !== undefined && expectedModuleKind.detectedKind !== syntaxImpliedModuleKind) { + // Value cannot be `true` because we set `moduleDetection: "legacy"` + const syntax = (sourceFile.externalModuleIndicator ?? sourceFile.commonJsModuleIndicator) as ts.Node; + return { + kind: 'UnexpectedModuleSyntax', + fileName, + moduleKind: expectedModuleKind, + syntax: syntaxImpliedModuleKind, + pos: syntax.getStart(sourceFile), + end: syntax.end, + }; + } + return; + }, +}); diff --git a/attw-fork/src/internal/defineCheck.ts b/attw-fork/src/internal/defineCheck.ts new file mode 100644 index 0000000000..6b36f9df79 --- /dev/null +++ b/attw-fork/src/internal/defineCheck.ts @@ -0,0 +1,52 @@ +import type { Package } from '../createPackage.ts'; +import type { Analysis, Problem, ResolutionKind, ResolutionOption } from '../types.ts'; +import type { CompilerHosts } from './multiCompilerHost.ts'; + +export interface CheckDependenciesContext extends CheckExecutionContext { + subpath: string; + resolutionKind: ResolutionKind; + resolutionOption: ResolutionOption; + fileName: EnumerateFiles extends true ? string : undefined; +} + +export interface CheckExecutionContext { + pkg: Package; + hosts: CompilerHosts; + entrypoints: Analysis['entrypoints']; + programInfo: Analysis['programInfo']; +} + +// Interface types are not assignable to Serializable due to missing index signature. +// This breaks them down into an equivalently structured object type, which have +// implicit index signatures for assignability purposes. +type Structure = T extends (...args: never) => any ? T : { [K in keyof T]: Structure }; + +export type EnsureSerializable = [T] extends [Serializable] ? T + : [T] extends [object] ? Structure extends Serializable ? T + : never + : never; + +export type Serializable = + | string + | number + | null + | undefined + | boolean + | { [key: string]: Serializable } + | readonly Serializable[]; + +export interface AnyCheck { + name: string; + enumerateFiles?: boolean; + dependencies: (context: CheckDependenciesContext) => EnsureSerializable; + execute: (dependencies: any, context: CheckExecutionContext) => Problem[] | Problem | undefined; +} + +export function defineCheck(options: { + name: string; + enumerateFiles?: EnumerateFiles; + dependencies: (context: CheckDependenciesContext) => EnsureSerializable; + execute: (dependencies: Dependencies, context: CheckExecutionContext) => Problem[] | Problem | undefined; +}) { + return options; +} diff --git a/attw-fork/src/internal/esm/cjsBindings.ts b/attw-fork/src/internal/esm/cjsBindings.ts new file mode 100644 index 0000000000..b83d99afb1 --- /dev/null +++ b/attw-fork/src/internal/esm/cjsBindings.ts @@ -0,0 +1,6 @@ +import type { Exports } from 'cjs-module-lexer'; +import { parse as cjsParse } from 'cjs-module-lexer'; + +export function getCjsModuleBindings(sourceText: string): Exports { + return cjsParse(sourceText); +} diff --git a/attw-fork/src/internal/esm/cjsNamespace.ts b/attw-fork/src/internal/esm/cjsNamespace.ts new file mode 100644 index 0000000000..d285e9e5d9 --- /dev/null +++ b/attw-fork/src/internal/esm/cjsNamespace.ts @@ -0,0 +1,29 @@ +import type { Package } from '../../createPackage.ts'; +import { getCjsModuleBindings } from './cjsBindings.ts'; +import { cjsResolve } from './resolve.ts'; + +export function getCjsModuleNamespace(fs: Package, file: URL, seen = new Set()): Set { + seen.add(file.pathname); + const exports = new Set(); + const bindings = getCjsModuleBindings(fs.readFile(file.pathname)); + for (const name of bindings.exports) exports.add(name); + + // CJS always exports `default` + if (!exports.has('default')) { + exports.add('default'); + } + + // Additionally, resolve facade reexports + + for (const source of bindings.reexports.reverse()) { + try { + const { format, url } = cjsResolve(fs, source, file); + if (format === 'commonjs' && !seen.has(url.pathname)) { + const reexported = getCjsModuleNamespace(fs, url, seen); + for (const name of reexported) exports.add(name); + } + } catch {} + } + + return exports; +} diff --git a/attw-fork/src/internal/esm/esmBindings.ts b/attw-fork/src/internal/esm/esmBindings.ts new file mode 100644 index 0000000000..40fc80690b --- /dev/null +++ b/attw-fork/src/internal/esm/esmBindings.ts @@ -0,0 +1,114 @@ +import type { Exports } from 'cjs-module-lexer'; +import ts from 'typescript'; + +// Note: There is a pretty solid module `es-module-lexer` which performs a similar lexing operation +// as `cjs-module-lexer`, but has some limitations in what it can express. This implementation +// should be more complete. + +function* extractDestructedNames(node: ts.BindingName): Iterable { + switch (node.kind) { + case ts.SyntaxKind.ArrayBindingPattern: { + for (const element of node.elements) { + if (element.kind === ts.SyntaxKind.BindingElement) { + yield* extractDestructedNames(element.name); + } + } + break; + } + + case ts.SyntaxKind.Identifier: { + yield node.text; + break; + } + + case ts.SyntaxKind.ObjectBindingPattern: { + for (const element of node.elements) { + yield* extractDestructedNames(element.name); + } + break; + } + + default: { + node satisfies never; + } + } +} + +export function getEsmModuleBindings(sourceText: string): Exports { + const options: ts.CreateSourceFileOptions = { + languageVersion: ts.ScriptTarget.ESNext, + impliedNodeFormat: ts.ModuleKind.ESNext, + }; + const sourceFile = ts.createSourceFile('module.cjs', sourceText, options, false, ts.ScriptKind.JS); + + const exports: string[] = []; + const reexports: string[] = []; + for (const statement of sourceFile.statements) { + switch (statement.kind) { + case ts.SyntaxKind.ExportDeclaration: { + const declaration = statement as ts.ExportDeclaration; + const { exportClause, isTypeOnly, moduleSpecifier } = declaration; + if (!isTypeOnly) { + if (exportClause) { + if (exportClause.kind === ts.SyntaxKind.NamedExports) { + // `export { foo }`; + // `export { foo } from 'specifier'`; + for (const element of exportClause.elements) { + if (!element.isTypeOnly) { + exports.push(element.name.text); + } + } + } else { + // `export * as namespace from 'specifier'` + exports.push(exportClause.name.text); + } + } else if (moduleSpecifier && ts.isStringLiteral(moduleSpecifier)) { + // `export * from 'specifier'` + reexports.push(moduleSpecifier.text); + } + } + break; + } + + case ts.SyntaxKind.ExportAssignment: { + const assignment = statement as ts.ExportAssignment; + if (!assignment.isExportEquals) { + // `export default ...` + exports.push('default'); + } + break; + } + + case ts.SyntaxKind.ClassDeclaration: + case ts.SyntaxKind.FunctionDeclaration: { + const declaration = statement as ts.ClassDeclaration | ts.FunctionDeclaration; + if (ts.hasSyntacticModifier(declaration, ts.ModifierFlags.Export)) { + if (ts.hasSyntacticModifier(declaration, ts.ModifierFlags.Default)) { + // `export default class {}` + // `export default function () {}` + exports.push('default'); + } else if (declaration.name) { + // `export class Foo {}` + // `export function foo() {}` + exports.push(declaration.name.text); + } + } + break; + } + + case ts.SyntaxKind.VariableStatement: { + const declaration = statement as ts.VariableStatement; + if (ts.hasSyntacticModifier(declaration, ts.ModifierFlags.Export)) { + // `export const foo = null;` + // `export const { foo, bar } = null;` + for (const declarator of declaration.declarationList.declarations) { + exports.push(...extractDestructedNames(declarator.name)); + } + } + break; + } + } + } + + return { exports, reexports }; +} diff --git a/attw-fork/src/internal/esm/esmNamespace.ts b/attw-fork/src/internal/esm/esmNamespace.ts new file mode 100644 index 0000000000..5a22fb5f61 --- /dev/null +++ b/attw-fork/src/internal/esm/esmNamespace.ts @@ -0,0 +1,39 @@ +import type { Package } from '../../createPackage.ts'; +import { getCjsModuleNamespace } from './cjsNamespace.ts'; +import { getEsmModuleBindings } from './esmBindings.ts'; +import { esmResolve } from './resolve.ts'; + +// Note: this doesn't handle ambiguous indirect exports which probably isn't worth the +// implementation complexity. + +export function getEsmModuleNamespace( + fs: Package, + specifier: string, + parentURL = new URL('file:///'), + seen = new Set(), +): string[] { + // Resolve specifier + const { format, url } = esmResolve(fs, specifier, parentURL); + + // Don't recurse for circular indirect exports + if (seen.has(url.pathname)) { + return []; + } + seen.add(url.pathname); + + if (format === 'commonjs') { + return [...getCjsModuleNamespace(fs, url)]; + } + + // Parse module bindings + const bindings = (format ?? 'module') === 'module' + ? getEsmModuleBindings(fs.readFile(url.pathname)) + // Maybe JSON, WASM, etc + : { exports: ['default'], reexports: [] }; + + // Concat indirect exports + const indirect = bindings.reexports + .flatMap((specifier) => getEsmModuleNamespace(fs, specifier, url, seen)) + .filter((name) => name !== 'default'); + return [...new Set([...bindings.exports, ...indirect])]; +} diff --git a/attw-fork/src/internal/esm/resolve.ts b/attw-fork/src/internal/esm/resolve.ts new file mode 100644 index 0000000000..59a1e91af9 --- /dev/null +++ b/attw-fork/src/internal/esm/resolve.ts @@ -0,0 +1,21 @@ +import * as cjs from '@loaderkit/resolve/cjs'; +import * as esm from '@loaderkit/resolve/esm'; +import type { FileSystemSync } from '@loaderkit/resolve/fs'; +import type { Package } from '../../createPackage.ts'; + +function makeFileSystemAdapter(fs: Package): FileSystemSync { + return { + directoryExists: (url) => fs.directoryExists(url.pathname), + fileExists: (url) => fs.fileExists(url.pathname), + readFileJSON: (url) => JSON.parse(fs.readFile(url.pathname)), + readLink: (): undefined => {}, + }; +} + +export function cjsResolve(fs: Package, specifier: string, parentURL: URL) { + return cjs.resolveSync(makeFileSystemAdapter(fs), specifier, parentURL); +} + +export function esmResolve(fs: Package, specifier: string, parentURL: URL) { + return esm.resolveSync(makeFileSystemAdapter(fs), specifier, parentURL); +} diff --git a/attw-fork/src/internal/getEntrypointInfo.ts b/attw-fork/src/internal/getEntrypointInfo.ts new file mode 100644 index 0000000000..522e8f30d9 --- /dev/null +++ b/attw-fork/src/internal/getEntrypointInfo.ts @@ -0,0 +1,246 @@ +import ts from 'typescript'; +import type { CheckPackageOptions } from '../checkPackage.ts'; +import type { Package } from '../createPackage.ts'; +import type { + BuildTool, + EntrypointInfo, + EntrypointResolutionAnalysis, + ModuleKind, + Resolution, + ResolutionKind, + ResolutionOption, +} from '../types.ts'; +import { allBuildTools, getResolutionKinds } from '../utils.ts'; +import type { CompilerHosts, CompilerHostWrapper } from './multiCompilerHost.ts'; + +const extensions = new Set(['.jsx', '.tsx', '.js', '.ts', '.mjs', '.cjs', '.mts', '.cjs']); + +function getEntrypoints(fs: Package, exportsObject: unknown, options: CheckPackageOptions | undefined): string[] { + if (options?.entrypoints) { + return options.entrypoints.map((e) => formatEntrypointString(e, fs.packageName)); + } + if (exportsObject === undefined && fs) { + const rootDir = `/node_modules/${fs.packageName}`; + const proxies = getProxyDirectories(rootDir, fs); + if (proxies.length === 0) { + if (options?.entrypointsLegacy) { + return fs + .listFiles() + .filter((f) => !ts.isDeclarationFileName(f) && extensions.has(f.slice(f.lastIndexOf('.')))) + .map((f) => '.' + f.slice(rootDir.length)); + } + return ['.']; + } + return proxies; + } + const detectedSubpaths = getSubpaths(exportsObject); + if (detectedSubpaths.length === 0) { + detectedSubpaths.push('.'); + } + const included = unique([ + ...detectedSubpaths, + ...(options?.includeEntrypoints?.map((e) => formatEntrypointString(e, fs.packageName)) ?? []), + ]); + if (!options?.excludeEntrypoints) { + return included; + } + return included.filter((entrypoint) => { + return !options.excludeEntrypoints!.some((exclusion) => { + if (typeof exclusion === 'string') { + return formatEntrypointString(exclusion, fs.packageName) === entrypoint; + } + return exclusion.test(entrypoint); + }); + }); +} + +function formatEntrypointString(path: string, packageName: string) { + return ( + path === '.' || path.startsWith('./') + ? path + : path === packageName + ? '.' + : path.startsWith(`${packageName}/`) + ? `.${path.slice(packageName.length)}` + : `./${path}` + ).trim(); +} + +function getSubpaths(exportsObject: any): string[] { + if (!exportsObject || typeof exportsObject !== 'object' || Array.isArray(exportsObject)) { + return []; + } + const keys = Object.keys(exportsObject); + if (keys[0]!.startsWith('.')) { + return keys; + } + return keys.flatMap((key) => getSubpaths(exportsObject[key])); +} + +function getProxyDirectories(rootDir: string, fs: Package) { + const vendorDirectories = new Set(); + const proxyDirectories: string[] = []; + const files = fs.listFiles().sort((a, b) => a.length - b.length); + for (const file of files) { + if (file.startsWith(rootDir) && file.endsWith('/package.json')) { + try { + const packageJson = JSON.parse(fs.readFile(file)); + if (packageJson.name && !packageJson.name.startsWith(fs.packageName)) { + // Name unrelated to the root package, this is a vendored package + const vendorDir = file.slice(0, file.lastIndexOf('/')); + vendorDirectories.add(vendorDir); + } else if ('main' in packageJson && !isInsideVendorDirectory(file)) { + // No name or name starting with root package name, this is intended to be an entrypoint + const proxyDir = '.' + file.slice(rootDir.length, file.lastIndexOf('/')); + proxyDirectories.push(proxyDir); + } + } catch {} + } + } + + return proxyDirectories.sort((a, b) => { + return ts.comparePathsCaseInsensitive(a, b); + }); + + function isInsideVendorDirectory(file: string) { + return !!ts.forEachAncestorDirectory(file, (dir) => { + if (vendorDirectories.has(dir)) { + return true; + } + + return; + }); + } +} + +export function getEntrypointInfo( + packageName: string, + fs: Package, + hosts: CompilerHosts, + options: CheckPackageOptions | undefined, +): Record { + const packageJson = JSON.parse(fs.readFile(`/node_modules/${packageName}/package.json`)); + let entrypoints = getEntrypoints(fs, packageJson.exports, options); + if (fs.typesPackage) { + const typesPackageJson = JSON.parse(fs.readFile(`/node_modules/${fs.typesPackage.packageName}/package.json`)); + const typesEntrypoints = getEntrypoints(fs, typesPackageJson.exports, options); + entrypoints = unique([...entrypoints, ...typesEntrypoints]); + } + const result: Record = {}; + for (const entrypoint of entrypoints) { + const resolutions: Record = { + node10: options?.modes?.['node10'] === false + ? { name: entrypoint, resolutionKind: 'node10' } + : getEntrypointResolution(packageName, hosts.node10, 'node10', entrypoint), + 'node16-cjs': options?.modes?.['node16-cjs'] === false + ? { name: entrypoint, resolutionKind: 'node16-cjs' } + : getEntrypointResolution(packageName, hosts.node16, 'node16-cjs', entrypoint), + 'node16-esm': options?.modes?.['node16-esm'] === false + ? { name: entrypoint, resolutionKind: 'node16-esm' } + : getEntrypointResolution(packageName, hosts.node16, 'node16-esm', entrypoint), + bundler: options?.modes?.['bundler'] === false + ? { name: entrypoint, resolutionKind: 'bundler' } + : getEntrypointResolution(packageName, hosts.bundler, 'bundler', entrypoint), + }; + result[entrypoint] = { + subpath: entrypoint, + resolutions, + hasTypes: Object.values(resolutions).some((r) => r.resolution?.isTypeScript), + isWildcard: !!resolutions.bundler.isWildcard, + }; + } + return result; +} +function getEntrypointResolution( + packageName: string, + host: CompilerHostWrapper, + resolutionKind: ResolutionKind, + entrypoint: string, +): EntrypointResolutionAnalysis { + if (entrypoint.includes('*')) { + return { name: entrypoint, resolutionKind, isWildcard: true }; + } + const moduleSpecifier = packageName + entrypoint.slice(1); // remove leading . before slash + const importingFileName = resolutionKind === 'node16-esm' ? '/index.mts' : '/index.ts'; + const resolutionMode = resolutionKind === 'node16-esm' + ? ts.ModuleKind.ESNext + : resolutionKind === 'node16-cjs' + ? ts.ModuleKind.CommonJS + : undefined; + const resolution = tryResolve(); + const implementationResolution = tryResolve(/*noDtsResolution*/ true); + const files = resolution + ? host + .createPrimaryProgram(resolution.fileName) + .getSourceFiles() + .map((f) => f.fileName) + : undefined; + + return { + name: entrypoint, + resolutionKind, + resolution, + implementationResolution, + files, + }; + + function tryResolve(noDtsResolution?: boolean): Resolution | undefined { + const { resolution, trace } = host.resolveModuleName( + moduleSpecifier, + importingFileName, + resolutionMode, + noDtsResolution, + ); + const fileName = resolution.resolvedModule?.resolvedFileName; + if (!fileName) { + return undefined; + } + + return { + fileName, + isJson: resolution.resolvedModule.extension === ts.Extension.Json, + isTypeScript: ts.hasTSFileExtension(resolution.resolvedModule.resolvedFileName), + trace, + }; + } +} +function unique(array: readonly T[]): T[] { + return array.filter((value, index) => array.indexOf(value) === index); +} +export function getBuildTools(packageJson: any): Partial> { + if (!packageJson.devDependencies) { + return {}; + } + const result: Partial> = {}; + for (const buildTool of allBuildTools) { + if (buildTool in packageJson.devDependencies) { + result[buildTool] = packageJson.devDependencies[buildTool]; + } + } + return result; +} +export function getModuleKinds( + entrypoints: Record, + resolutionOption: ResolutionOption, + hosts: CompilerHosts, +): Record { + const host = hosts[resolutionOption]; + const result: Record = {}; + for (const resolutionKind of getResolutionKinds(resolutionOption)) { + for (const entrypoint of Object.values(entrypoints)) { + const resolution = entrypoint.resolutions[resolutionKind]; + for (const fileName of resolution.files ?? []) { + if (!result[fileName]) { + result[fileName] = host.getModuleKindForFile(fileName)!; + } + } + if (resolution.implementationResolution) { + const fileName = resolution.implementationResolution.fileName; + if (!result[fileName]) { + result[fileName] = host.getModuleKindForFile(fileName)!; + } + } + } + } + return result; +} diff --git a/attw-fork/src/internal/getProbableExports.ts b/attw-fork/src/internal/getProbableExports.ts new file mode 100644 index 0000000000..71c5464d84 --- /dev/null +++ b/attw-fork/src/internal/getProbableExports.ts @@ -0,0 +1,96 @@ +import ts from 'typescript'; + +const minifiedVariableAssignmentPattern = /\S;(?:var|let|const) \w=\S/; + +export interface Export { + name: string; + node: ts.Node; +} + +export function getProbableExports(sourceFile: ts.SourceFile): Export[] { + return getEsbuildBabelSwcExports(sourceFile) ?? []; +} + +function getEsbuildBabelSwcExports(sourceFile: ts.SourceFile): Export[] | undefined { + let possibleIndex = sourceFile.text.indexOf('\n__export('); + if (possibleIndex === -1) { + possibleIndex = sourceFile.text.indexOf('\n_export('); + } + if (possibleIndex === -1 && !isProbablyMinified(sourceFile.text)) { + return undefined; + } + + for (const statement of sourceFile.statements) { + if (possibleIndex !== -1 && statement.end < possibleIndex) { + continue; + } + if (possibleIndex !== -1 && statement.pos > possibleIndex) { + break; + } + if ( + ts.isExpressionStatement(statement) + && ts.isCallExpression(statement.expression) + && ts.isIdentifier(statement.expression.expression) + && statement.expression.arguments.length === 2 + && ts.isIdentifier(statement.expression.arguments[0]!) + && ts.isObjectLiteralExpression(statement.expression.arguments[1]!) + ) { + const callTarget = statement.expression.expression; + const isExport = ts.unescapeLeadingUnderscores(callTarget.escapedText) === '__export' + || callTarget.escapedText === '_export' + || isEsbuildExportFunction(sourceFile.locals?.get(callTarget.escapedText)?.valueDeclaration); + if (isExport) { + return statement.expression.arguments[1].properties.flatMap((prop): Export[] => { + if ( + ts.isPropertyAssignment(prop) + && (ts.isIdentifier(prop.name) || ts.isStringOrNumericLiteralLike(prop.name)) + ) { + return [{ name: prop.name.text, node: prop }]; + } + if (ts.isShorthandPropertyAssignment(prop)) { + return [{ name: prop.name.text, node: prop }]; + } + return []; + }); + } + } + } + + return undefined; +} + +function isEsbuildExportFunction(decl: ts.Declaration | undefined) { + /* + esbuild: + var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); + }; + + esbuild min: + b=(o,r)=>{for(var e in r)n(o,e,{get:r[e],enumerable:!0})} + + swc? + function _export(target, all) { + for(var name in all)Object.defineProperty(target, name, { + enumerable: true, + get: all[name] + }); + } + */ + if (!decl) { + return false; + } + return ( + ts.isVariableDeclaration(decl) + && decl.initializer + && ts.isFunctionExpressionOrArrowFunction(decl.initializer) + && ts.isBlock(decl.initializer.body) + && decl.initializer.body.statements.length === 1 + && ts.isForInStatement(decl.initializer.body.statements[0]!) + ); +} + +function isProbablyMinified(text: string): boolean { + return minifiedVariableAssignmentPattern.test(text); +} diff --git a/attw-fork/src/internal/minimalLibDts.ts b/attw-fork/src/internal/minimalLibDts.ts new file mode 100644 index 0000000000..68081ed182 --- /dev/null +++ b/attw-fork/src/internal/minimalLibDts.ts @@ -0,0 +1,78 @@ +// The contents of this string are derived from typescript/lib/lib.es5.d.ts. +// These types are all that are needed for the NamedExports check to work. + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. All rights reserved. +Licensed under the Apache License, Version 2.0 (the "License"); you may not use +this file except in compliance with the License. You may obtain a copy of the +License at http://www.apache.org/licenses/LICENSE-2.0 + +THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED +WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, +MERCHANTABLITY OR NON-INFRINGEMENT. + +See the Apache Version 2.0 License for specific language governing permissions +and limitations under the License. +***************************************************************************** */ +export default ` +interface ReadonlyArray { + readonly length: number; + toString(): string; + toLocaleString(): string; + concat(...items: ConcatArray[]): T[]; + concat(...items: (T | ConcatArray)[]): T[]; + join(separator?: string): string; + slice(start?: number, end?: number): T[]; + indexOf(searchElement: T, fromIndex?: number): number; + lastIndexOf(searchElement: T, fromIndex?: number): number; + every(predicate: (value: T, index: number, array: readonly T[]) => value is S, thisArg?: any): this is readonly S[]; + every(predicate: (value: T, index: number, array: readonly T[]) => unknown, thisArg?: any): boolean; + some(predicate: (value: T, index: number, array: readonly T[]) => unknown, thisArg?: any): boolean; + forEach(callbackfn: (value: T, index: number, array: readonly T[]) => void, thisArg?: any): void; + map(callbackfn: (value: T, index: number, array: readonly T[]) => U, thisArg?: any): U[]; + filter(predicate: (value: T, index: number, array: readonly T[]) => value is S, thisArg?: any): S[]; + filter(predicate: (value: T, index: number, array: readonly T[]) => unknown, thisArg?: any): T[]; + reduce(callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: readonly T[]) => T): T; + reduce(callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: readonly T[]) => T, initialValue: T): T; + reduce(callbackfn: (previousValue: U, currentValue: T, currentIndex: number, array: readonly T[]) => U, initialValue: U): U; + reduceRight(callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: readonly T[]) => T): T; + reduceRight(callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: readonly T[]) => T, initialValue: T): T; + reduceRight(callbackfn: (previousValue: U, currentValue: T, currentIndex: number, array: readonly T[]) => U, initialValue: U): U; + readonly [n: number]: T; +} + +interface Array { + length: number; + toString(): string; + toLocaleString(): string; + pop(): T | undefined; + push(...items: T[]): number; + concat(...items: ConcatArray[]): T[]; + concat(...items: (T | ConcatArray)[]): T[]; + join(separator?: string): string; + reverse(): T[]; + shift(): T | undefined; + slice(start?: number, end?: number): T[]; + sort(compareFn?: (a: T, b: T) => number): this; + splice(start: number, deleteCount?: number): T[]; + splice(start: number, deleteCount: number, ...items: T[]): T[]; + unshift(...items: T[]): number; + indexOf(searchElement: T, fromIndex?: number): number; + lastIndexOf(searchElement: T, fromIndex?: number): number; + every(predicate: (value: T, index: number, array: T[]) => value is S, thisArg?: any): this is S[]; + every(predicate: (value: T, index: number, array: T[]) => unknown, thisArg?: any): boolean; + some(predicate: (value: T, index: number, array: T[]) => unknown, thisArg?: any): boolean; + forEach(callbackfn: (value: T, index: number, array: T[]) => void, thisArg?: any): void; + map(callbackfn: (value: T, index: number, array: T[]) => U, thisArg?: any): U[]; + filter(predicate: (value: T, index: number, array: T[]) => value is S, thisArg?: any): S[]; + filter(predicate: (value: T, index: number, array: T[]) => unknown, thisArg?: any): T[]; + reduce(callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: T[]) => T): T; + reduce(callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: T[]) => T, initialValue: T): T; + reduce(callbackfn: (previousValue: U, currentValue: T, currentIndex: number, array: T[]) => U, initialValue: U): U; + reduceRight(callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: T[]) => T): T; + reduceRight(callbackfn: (previousValue: T, currentValue: T, currentIndex: number, array: T[]) => T, initialValue: T): T; + reduceRight(callbackfn: (previousValue: U, currentValue: T, currentIndex: number, array: T[]) => U, initialValue: U): U; + [n: number]: T; +} +`; diff --git a/attw-fork/src/internal/multiCompilerHost.ts b/attw-fork/src/internal/multiCompilerHost.ts new file mode 100644 index 0000000000..c8bc71e089 --- /dev/null +++ b/attw-fork/src/internal/multiCompilerHost.ts @@ -0,0 +1,322 @@ +/* eslint-disable drizzle-internal/require-entity-kind */ +import { LRUCache } from 'lru-cache'; +import ts from 'typescript'; +import type { Package } from '../createPackage.ts'; +import type { ModuleKind } from '../types.ts'; +import minimalLibDts from './minimalLibDts.ts'; + +export interface ResolveModuleNameResult { + resolution: ts.ResolvedModuleWithFailedLookupLocations; + trace: string[]; +} + +export interface CompilerHosts { + node10: CompilerHostWrapper; + node16: CompilerHostWrapper; + bundler: CompilerHostWrapper; + findHostForFiles(files: string[]): CompilerHostWrapper | undefined; +} + +export function createCompilerHosts(fs: Package): CompilerHosts { + const node10 = new CompilerHostWrapper(fs, ts.ModuleResolutionKind.Node10, ts.ModuleKind.CommonJS); + const node16 = new CompilerHostWrapper(fs, ts.ModuleResolutionKind.Node16, ts.ModuleKind.Node16); + const bundler = new CompilerHostWrapper(fs, ts.ModuleResolutionKind.Bundler, ts.ModuleKind.ESNext); + + return { + node10, + node16, + bundler, + findHostForFiles(files: string[]) { + for (const host of [node10, node16, bundler]) { + if (files.every((f) => host.getSourceFileFromCache(f) !== undefined)) { + return host; + } + } + + return; + }, + }; +} + +const getCanonicalFileName = ts.createGetCanonicalFileName(false); +const toPath = (fileName: string) => ts.toPath(fileName, '/', getCanonicalFileName); + +export class CompilerHostWrapper { + private programCache = new LRUCache({ max: 2 }); + private compilerHost: ts.CompilerHost; + private compilerOptions: ts.CompilerOptions; + private normalModuleResolutionCache: ts.ModuleResolutionCache; + private noDtsResolutionModuleResolutionCache: ts.ModuleResolutionCache; + + private moduleResolutionCache: Record< + /*FromFileName*/ string, + Record + > = {}; + private traceCollector: TraceCollector = new TraceCollector(); + private sourceFileCache: Map = new Map(); + private resolvedModules: Exclude = new Map(); + private languageVersion = ts.ScriptTarget.Latest; + + constructor(fs: Package, moduleResolution: ts.ModuleResolutionKind, moduleKind: ts.ModuleKind) { + this.compilerOptions = { + moduleResolution, + module: moduleKind, + // So `sourceFile.externalModuleIndicator` is set to a node + moduleDetection: ts.ModuleDetectionKind.Legacy, + target: ts.ScriptTarget.Latest, + resolveJsonModule: true, + traceResolution: true, + }; + this.normalModuleResolutionCache = ts.createModuleResolutionCache('/', getCanonicalFileName, this.compilerOptions); + this.noDtsResolutionModuleResolutionCache = ts.createModuleResolutionCache( + '/', + getCanonicalFileName, + this.compilerOptions, + ); + this.compilerHost = this.createCompilerHost(fs, this.sourceFileCache); + } + + getCompilerOptions() { + return this.compilerOptions; + } + + getSourceFile(fileName: string): ts.SourceFile | undefined { + return this.compilerHost.getSourceFile(fileName, this.languageVersion); + } + + getSourceFileFromCache(fileName: string): ts.SourceFile | undefined { + return this.sourceFileCache.get(toPath(fileName)); + } + + getModuleKindForFile(fileName: string): ModuleKind | undefined { + const kind = this.getImpliedNodeFormatForFile(fileName); + if (kind) { + const extension = ts.getAnyExtensionFromPath(fileName); + const isExtension = extension === ts.Extension.Cjs + || extension === ts.Extension.Cts + || extension === ts.Extension.Dcts + || extension === ts.Extension.Mjs + || extension === ts.Extension.Mts + || extension === ts.Extension.Dmts; + const reasonPackageJsonInfo = isExtension ? undefined : this.getPackageScopeForPath(fileName); + const reasonFileName = isExtension + ? fileName + : reasonPackageJsonInfo + ? reasonPackageJsonInfo.packageDirectory + '/package.json' + : fileName; + const reasonPackageJsonType = reasonPackageJsonInfo?.contents?.packageJsonContent.type; + return { + detectedKind: kind, + detectedReason: isExtension ? 'extension' : reasonPackageJsonType ? 'type' : 'no:type', + reasonFileName, + }; + } + + return undefined; + } + + resolveModuleName( + moduleName: string, + containingFile: string, + resolutionMode?: ts.ModuleKind.ESNext | ts.ModuleKind.CommonJS, + noDtsResolution?: boolean, + allowJs?: boolean, + ): ResolveModuleNameResult { + const moduleKey = this.getModuleKey(moduleName, resolutionMode, noDtsResolution, allowJs); + if (this.moduleResolutionCache[containingFile]?.[moduleKey]) { + const { resolution, trace } = this.moduleResolutionCache[containingFile][moduleKey]; + return { + resolution, + trace, + }; + } + this.traceCollector.clear(); + const resolution = ts.resolveModuleName( + moduleName, + containingFile, + noDtsResolution ? { ...this.compilerOptions, noDtsResolution, allowJs } : this.compilerOptions, + this.compilerHost, + noDtsResolution ? this.noDtsResolutionModuleResolutionCache : this.normalModuleResolutionCache, + /*redirectedReference*/ undefined, + resolutionMode, + ); + const trace = this.traceCollector.read(); + if (!this.moduleResolutionCache[containingFile]?.[moduleKey]) { + (this.moduleResolutionCache[containingFile] ??= {})[moduleKey] = { resolution, trace }; + } + return { + resolution, + trace, + }; + } + + getTrace( + fromFileName: string, + moduleSpecifier: string, + resolutionMode: ts.ModuleKind.ESNext | ts.ModuleKind.CommonJS | undefined, + ): string[] | undefined { + return this.moduleResolutionCache[fromFileName]?.[ + this.getModuleKey(moduleSpecifier, resolutionMode, /*noDtsResolution*/ undefined, /*allowJs*/ undefined) + ]?.trace; + } + + private getModuleKey( + moduleSpecifier: string, + resolutionMode: ts.ModuleKind.ESNext | ts.ModuleKind.CommonJS | undefined, + noDtsResolution: boolean | undefined, + allowJs: boolean | undefined, + ) { + return `${resolutionMode ?? 1}:${+!!noDtsResolution}:${+!!allowJs}:${moduleSpecifier}`; + } + + private getProgram(rootNames: readonly string[], options: ts.CompilerOptions) { + const key = programKey(rootNames, options); + let program = this.programCache.get(key); + if (!program) { + this.programCache.set(key, program = ts.createProgram({ rootNames, options, host: this.compilerHost })); + } + return program; + } + + createPrimaryProgram(rootName: string) { + const program = this.getProgram([rootName], this.compilerOptions); + + if (program.resolvedModules) { + for (const [path, cache] of program.resolvedModules.entries()) { + let ownCache = this.resolvedModules.get(path); + if (!ownCache) { + this.resolvedModules.set(path, ownCache = ts.createModeAwareCache()); + } + // eslint-disable-next-line unicorn/no-array-for-each + cache.forEach((resolution, key, mode) => { + ownCache!.set(key, mode, resolution); + }); + } + } + + return program; + } + + createAuxiliaryProgram(rootNames: string[], extraOptions?: ts.CompilerOptions): ts.Program { + if ( + extraOptions + && ts.changesAffectModuleResolution( + // allowJs and noDtsResolution are part of the cache key, but any other resolution-affecting options + // are assumed to be constant for the host. + { + ...this.compilerOptions, + allowJs: extraOptions.allowJs, + checkJs: extraOptions.checkJs, + noDtsResolution: extraOptions.noDtsResolution, + }, + { ...this.compilerOptions, ...extraOptions }, + ) + ) { + throw new Error('Cannot override resolution-affecting options for host due to potential cache pollution'); + } + const options = extraOptions ? { ...this.compilerOptions, ...extraOptions } : this.compilerOptions; + return this.getProgram(rootNames, options); + } + + getResolvedModule(sourceFile: ts.SourceFile, moduleName: string, resolutionMode: ts.ResolutionMode) { + return this.resolvedModules.get(sourceFile.path)?.get(moduleName, resolutionMode); + } + + private createCompilerHost(fs: Package, sourceFileCache: Map): ts.CompilerHost { + return { + fileExists: fs.fileExists.bind(fs), + readFile: fs.readFile.bind(fs), + directoryExists: fs.directoryExists.bind(fs), + getSourceFile: (fileName) => { + const path = toPath(fileName); + const cached = sourceFileCache.get(path); + if (cached) { + return cached; + } + const content = fileName === '/node_modules/typescript/lib/lib.d.ts' ? minimalLibDts : fs.tryReadFile(fileName); + if (content === undefined) { + return; + } + + const sourceFile = ts.createSourceFile( + fileName, + content, + { + languageVersion: this.languageVersion, + impliedNodeFormat: this.getImpliedNodeFormatForFile(fileName), + }, + /*setParentNodes*/ true, + ); + sourceFileCache.set(path, sourceFile); + return sourceFile; + }, + getDefaultLibFileName: () => '/node_modules/typescript/lib/lib.d.ts', + getCurrentDirectory: () => '/', + writeFile: () => { + throw new Error('Not implemented'); + }, + getCanonicalFileName, + useCaseSensitiveFileNames: () => false, + getNewLine: () => '\n', + trace: this.traceCollector.trace, + resolveModuleNameLiterals: ( + moduleLiterals, + containingFile, + _redirectedReference, + options, + containingSourceFile, + ) => { + return moduleLiterals.map( + (literal) => + this.resolveModuleName( + literal.text, + containingFile, + ts.getModeForUsageLocation(containingSourceFile, literal, this.compilerOptions), + options.noDtsResolution, + ).resolution, + ); + }, + }; + } + + private getImpliedNodeFormatForFile(fileName: string): ts.ModuleKind.ESNext | ts.ModuleKind.CommonJS | undefined { + return ts.getImpliedNodeFormatForFile( + toPath(fileName), + this.normalModuleResolutionCache.getPackageJsonInfoCache(), + this.compilerHost, + this.compilerOptions, + ); + } + + private getPackageScopeForPath(fileName: string): ts.PackageJsonInfo | undefined { + return ts.getPackageScopeForPath( + fileName, + ts.getTemporaryModuleResolutionState( + // TODO: consider always using the node16 cache because package.json should be a hit + this.normalModuleResolutionCache.getPackageJsonInfoCache(), + this.compilerHost, + this.compilerOptions, + ), + ); + } +} + +class TraceCollector { + private traces: string[] = []; + + trace = (message: string) => { + this.traces.push(message); + }; + read() { + const result = [...this.traces]; + this.clear(); + return result; + } + clear() { + this.traces.length = 0; + } +} + +function programKey(rootNames: readonly string[], options: ts.CompilerOptions) { + return JSON.stringify([rootNames, Object.entries(options).sort(([k1], [k2]) => k1.localeCompare(k2))]); +} diff --git a/attw-fork/src/problems.ts b/attw-fork/src/problems.ts new file mode 100644 index 0000000000..ecd8e77c1f --- /dev/null +++ b/attw-fork/src/problems.ts @@ -0,0 +1,201 @@ +import type { Analysis, Problem, ProblemKind, ResolutionKind, ResolutionOption } from './types.ts'; +import { getResolutionKinds } from './utils.ts'; + +export interface ProblemKindInfo { + title: string; + emoji: string; + shortDescription: string; + description: string; + details?: string; + docsUrl: string; +} + +export const problemKindInfo: Record = { + NoResolution: { + emoji: '💀', + title: 'Resolution failed', + shortDescription: 'Resolution failed', + description: 'Import failed to resolve to type declarations or JavaScript files.', + docsUrl: 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/NoResolution.md', + }, + UntypedResolution: { + emoji: '❌', + title: 'Could not find types', + shortDescription: 'No types', + description: 'Import resolved to JavaScript files, but no type declarations were found.', + docsUrl: + 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/UntypedResolution.md', + }, + FalseCJS: { + emoji: '🎭', + title: 'Types are CJS, but implementation is ESM', + shortDescription: 'Masquerading as CJS', + description: 'Import resolved to a CommonJS type declaration file, but an ESM JavaScript file.', + docsUrl: 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/FalseCJS.md', + }, + FalseESM: { + emoji: '👺', + title: 'Types are ESM, but implementation is CJS', + shortDescription: 'Masquerading as ESM', + description: 'Import resolved to an ESM type declaration file, but a CommonJS JavaScript file.', + docsUrl: 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/FalseESM.md', + }, + NamedExports: { + emoji: '🕵️', + title: 'Named exports cannot be detected by Node.ts', + shortDescription: 'Named exports', + description: + 'TypeScript allows ESM named imports of the properties of this CommonJS module, but they will crash at runtime because they don’t exist or can’t be statically detected by Node.js in the JavaScript file.', + details: 'the list of exports TypeScript can see but Node.js cannot', + docsUrl: 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/NamedExports.md', + }, + CJSResolvesToESM: { + emoji: '⚠️', + title: 'Entrypoint is ESM-only', + shortDescription: 'ESM (dynamic import only)', + description: + 'A `require` call resolved to an ESM JavaScript file, which is an error in Node and some bundlers. CommonJS consumers will need to use a dynamic import.', + docsUrl: + 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/CJSResolvesToESM.md', + }, + FallbackCondition: { + emoji: '🐛', + title: 'Resolved through fallback condition', + shortDescription: 'Used fallback condition', + description: + 'Import resolved to types through a conditional package.json export, but only after failing to resolve through an earlier condition. This behavior is a [TypeScript bug](https://github.com/microsoft/TypeScript/issues/50762). It may misrepresent the runtime behavior of this import and should not be relied upon.', + docsUrl: + 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/FallbackCondition.md', + }, + CJSOnlyExportsDefault: { + emoji: '🤨', + title: 'CJS module uses default export', + shortDescription: 'CJS default export', + description: + 'CommonJS module simulates a default export with `exports.default` and `exports.__esModule`, but does not also set `module.exports` for compatibility with Node. Node, and [some bundlers under certain conditions](https://andrewbranch.github.io/interop-test/#synthesizing-default-exports-for-cjs-modules), do not respect the `__esModule` marker, so accessing the intended default export will require a `.default` property access on the default import.', + docsUrl: + 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/CJSOnlyExportsDefault.md', + }, + FalseExportDefault: { + emoji: '❗️', + title: 'Types incorrectly use default export', + shortDescription: 'Incorrect default export', + description: + 'The resolved types use `export default` where the JavaScript file appears to use `module.exports =`. This will cause TypeScript under the `node16` module mode to think an extra `.default` property access is required, but that will likely fail at runtime. These types should use `export =` instead of `export default`.', + docsUrl: + 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/FalseExportDefault.md', + }, + MissingExportEquals: { + emoji: '❓', + title: 'Types are missing an `export =`', + shortDescription: 'Missing `export =`', + description: + 'The JavaScript appears to set both `module.exports` and `module.exports.default` for improved compatibility, but the types only reflect the latter (by using `export default`). This will cause TypeScript under the `node16` module mode to think an extra `.default` property access is required, which will work at runtime but is not necessary. These types should `export =` an object with a `default` property instead of using `export default`.', + docsUrl: + 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/MissingExportEquals.md', + }, + UnexpectedModuleSyntax: { + emoji: '🚭', + title: 'Syntax is incompatible with detected module kind', + shortDescription: 'Unexpected module syntax', + description: + 'Syntax detected in the module is incompatible with the module kind according to the package.json or file extension. This is an error in Node and may cause problems in some bundlers.', + docsUrl: + 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/UnexpectedModuleSyntax.md', + }, + InternalResolutionError: { + emoji: '🥴', + title: 'Internal resolution error', + shortDescription: 'Internal resolution error', + description: + 'Import found in a type declaration file failed to resolve. Either this indicates that runtime resolution errors will occur, or (more likely) the types misrepresent the contents of the JavaScript files.', + details: 'the imports that failed to resolve', + docsUrl: + 'https://github.com/arethetypeswrong/arethetypeswrong.github.io/blob/main/docs/problems/InternalResolutionError.md', + }, +}; + +export const allProblemKinds = Object.keys(problemKindInfo) as ProblemKind[]; + +export interface ProblemFilter { + kind?: readonly ProblemKind[]; + entrypoint?: string; + resolutionKind?: ResolutionKind; + resolutionOption?: ResolutionOption; +} + +export function filterProblems(analysis: Analysis, filter: ProblemFilter): Problem[]; +export function filterProblems(problems: readonly Problem[], analysis: Analysis, filter: ProblemFilter): Problem[]; +export function filterProblems( + ...args: + | [analysis: Analysis, filter: ProblemFilter] + | [problems: readonly Problem[], analysis: Analysis, filter: ProblemFilter] +) { + const [problems, analysis, filter] = args.length === 2 ? [args[0].problems, ...args] : args; + return problems.filter((p) => { + if (filter.kind && !filter.kind.includes(p.kind)) { + return false; + } + if (filter.entrypoint && filter.resolutionKind) { + return problemAffectsEntrypointResolution(p, filter.entrypoint, filter.resolutionKind, analysis); + } + if (filter.entrypoint && filter.resolutionOption) { + return getResolutionKinds(filter.resolutionOption).every((resolutionKind) => + problemAffectsEntrypointResolution(p, filter.entrypoint!, resolutionKind, analysis) + ); + } + if (filter.entrypoint) { + return problemAffectsEntrypoint(p, filter.entrypoint, analysis); + } + if (filter.resolutionKind) { + return problemAffectsResolutionKind(p, filter.resolutionKind, analysis); + } + return true; + }); +} + +export function problemAffectsResolutionKind( + problem: Problem, + resolutionKind: ResolutionKind, + analysis: Analysis, +): boolean { + const index = getProblemIndex(analysis, problem); + for (const entrypoint of Object.values(analysis.entrypoints)) { + if (entrypoint.resolutions[resolutionKind].visibleProblems?.includes(index)) { + return true; + } + } + return false; +} + +export function problemAffectsEntrypoint(problem: Problem, entrypoint: string, analysis: Analysis): boolean { + const index = getProblemIndex(analysis, problem); + for (const resolution of Object.values(analysis.entrypoints[entrypoint]!.resolutions)) { + if (resolution.visibleProblems?.includes(index)) { + return true; + } + } + return false; +} + +export function problemAffectsEntrypointResolution( + problem: Problem, + entrypoint: string, + resolutionKind: ResolutionKind, + analysis: Analysis, +): boolean { + const index = getProblemIndex(analysis, problem); + return analysis.entrypoints[entrypoint]!.resolutions[resolutionKind].visibleProblems?.includes(index) ?? false; +} + +function getProblemIndex(analysis: Analysis, problem: Problem) { + let index = analysis.problems.indexOf(problem); + if (index === -1) { + const serialized = JSON.stringify(problem); + index = analysis.problems.findIndex((p) => JSON.stringify(p) === serialized); + if (index === -1) { + throw new Error(`Could not find problem in analysis`); + } + } + return index; +} diff --git a/attw-fork/src/run.ts b/attw-fork/src/run.ts new file mode 100755 index 0000000000..598a94a9a2 --- /dev/null +++ b/attw-fork/src/run.ts @@ -0,0 +1,91 @@ +import { readFile } from 'fs/promises'; +import { checkPackage } from './checkPackage.ts'; +import { getExitCode } from './cli/getExitCode.ts'; +import { typed } from './cli/typed.ts'; +import { untyped } from './cli/untyped.ts'; +import { write } from './cli/write.ts'; +import { createPackageFromTarballData } from './createPackage.ts'; +import type { ResolutionKind, UntypedResult } from './types.ts'; + +try { + const path = process.argv[2]; + const mode = process.argv[3]; + const modes: Record | undefined = mode + ? mode === 'node10' + ? { + node10: true, + 'node16-cjs': false, + 'node16-esm': false, + bundler: false, + } + : mode === 'node16-esm' + ? { + node10: false, + 'node16-cjs': false, + 'node16-esm': true, + bundler: false, + } + : mode === 'node16-cjs' + ? { + node10: false, + 'node16-cjs': true, + 'node16-esm': false, + bundler: false, + } + : mode === 'bundler' + ? { + node10: false, + 'node16-cjs': false, + 'node16-esm': false, + bundler: true, + } + : undefined + : undefined; + + const ignoreResolutions = modes + ? Object.entries(modes) + .filter(([, v]) => v === false) + .map(([k]) => k as ResolutionKind) + : undefined; + + if (path === undefined) throw new Error('Missing target path'); + if (modes === undefined && mode !== undefined) { + throw new Error(`Invalid mode: '${mode}'. Allowed modes: 'bundler' | 'node10' | 'node16-cjs' | 'node16-esm'.`); + } + + const file = await readFile(path); + const data = new Uint8Array(file); + const pkg = createPackageFromTarballData(data); + + const analysis = await checkPackage(pkg, { + modes, + }); + + console.log('Mode:', mode); + console.log('Ignore:', ignoreResolutions); + + const out = process.stdout; + await write('', out); + if (analysis.types) { + await write( + await typed(analysis, { + ignoreResolutions, + }), + out, + ); + process.exitCode = getExitCode(analysis, { + ignoreResolutions, + }); + } else { + await write(untyped(analysis as UntypedResult), out); + } +} catch (error) { + console.error(error); + if (error && typeof error === 'object' && 'message' in error) { + console.error(`Error while checking package:\n${error.message}`); + } else { + console.error(`Unknown error while checking package:\n${error}`); + } + + process.exit(3); +} diff --git a/attw-fork/src/types.ts b/attw-fork/src/types.ts new file mode 100644 index 0000000000..79da5e7aa5 --- /dev/null +++ b/attw-fork/src/types.ts @@ -0,0 +1,188 @@ +import type ts from 'typescript'; + +export type ResolutionKind = 'node10' | 'node16-cjs' | 'node16-esm' | 'bundler'; +export type ResolutionOption = 'node10' | 'node16' | 'bundler'; +export interface EntrypointInfo { + subpath: string; + resolutions: Record; + hasTypes: boolean; + isWildcard: boolean; +} + +export interface IncludedTypes { + kind: 'included'; +} +export interface TypesPackage { + kind: '@types'; + packageName: string; + packageVersion: string; + definitelyTypedUrl?: string; +} +export type AnalysisTypes = IncludedTypes | TypesPackage; + +export type BuildTool = + | '@arethetypeswrong/cli' + | 'typescript' + | 'rollup' + | '@rollup/plugin-typescript' + | '@rollup/plugin-typescript2' + | 'webpack' + | 'esbuild' + | 'parcel-bundler' + | '@preconstruct/cli' + | 'vite' + | 'snowpack' + | 'microbundle' + | '@microsoft/api-extractor' + | 'tshy' + | '@rspack/cli' + | 'tsup' + | 'tsdown'; + +export interface Analysis { + packageName: string; + packageVersion: string; + buildTools: Partial>; + types: AnalysisTypes; + entrypoints: Record; + programInfo: Record; + problems: Problem[]; +} + +export interface UntypedResult { + packageName: string; + packageVersion: string; + types: false; +} + +export type CheckResult = Analysis | UntypedResult; + +export interface EntrypointResolutionAnalysis { + name: string; + resolutionKind: ResolutionKind; + isWildcard?: boolean; + resolution?: Resolution; + implementationResolution?: Resolution; + files?: string[]; + /** Indices into `analysis.problems` */ + visibleProblems?: number[]; +} + +export interface Resolution { + fileName: string; + isTypeScript: boolean; + isJson: boolean; + trace: string[]; +} + +export interface ProgramInfo { + moduleKinds?: Record; +} + +export type ModuleKindReason = 'extension' | 'type' | 'no:type'; +export interface ModuleKind { + detectedKind: ts.ModuleKind.ESNext | ts.ModuleKind.CommonJS; + detectedReason: ModuleKindReason; + reasonFileName: string; +} + +export interface EntrypointResolutionProblem { + entrypoint: string; + resolutionKind: ResolutionKind; +} + +export interface FilePairProblem { + typesFileName: string; + implementationFileName: string; +} + +export interface ModuleKindPairProblem { + typesModuleKind: ModuleKind; + implementationModuleKind: ModuleKind; +} + +export interface FileTextRangeProblem { + fileName: string; + pos: number; + end: number; +} + +export interface NoResolutionProblem extends EntrypointResolutionProblem { + kind: 'NoResolution'; +} + +export interface UntypedResolutionProblem extends EntrypointResolutionProblem { + kind: 'UntypedResolution'; +} + +export interface FalseESMProblem extends FilePairProblem, ModuleKindPairProblem { + kind: 'FalseESM'; +} + +export interface FalseCJSProblem extends FilePairProblem, ModuleKindPairProblem { + kind: 'FalseCJS'; +} + +export interface CJSResolvesToESMProblem extends EntrypointResolutionProblem { + kind: 'CJSResolvesToESM'; +} + +export interface NamedExportsProblem extends FilePairProblem { + kind: 'NamedExports'; + isMissingAllNamed: boolean; + missing: string[]; +} + +export interface FallbackConditionProblem extends EntrypointResolutionProblem { + kind: 'FallbackCondition'; +} + +export interface FalseExportDefaultProblem extends FilePairProblem { + kind: 'FalseExportDefault'; +} + +export interface MissingExportEqualsProblem extends FilePairProblem { + kind: 'MissingExportEquals'; +} + +export interface InternalResolutionErrorProblem extends FileTextRangeProblem { + kind: 'InternalResolutionError'; + resolutionOption: ResolutionOption; + moduleSpecifier: string; + resolutionMode: ts.ResolutionMode; + trace: string[]; +} + +export interface UnexpectedModuleSyntaxProblem extends FileTextRangeProblem { + kind: 'UnexpectedModuleSyntax'; + syntax: ts.ModuleKind.ESNext | ts.ModuleKind.CommonJS; + moduleKind: ModuleKind; +} + +export interface CJSOnlyExportsDefaultProblem extends FileTextRangeProblem { + kind: 'CJSOnlyExportsDefault'; +} + +export type Problem = + | NoResolutionProblem + | UntypedResolutionProblem + | FalseESMProblem + | FalseCJSProblem + | CJSResolvesToESMProblem + | NamedExportsProblem + | FallbackConditionProblem + | FalseExportDefaultProblem + | MissingExportEqualsProblem + | InternalResolutionErrorProblem + | UnexpectedModuleSyntaxProblem + | CJSOnlyExportsDefaultProblem; + +export type ProblemKind = Problem['kind']; + +export type Failable = { status: 'error'; error: string; data?: never } | { status: 'success'; data: T }; + +export interface ParsedPackageSpec { + name: string; + versionKind: 'none' | 'exact' | 'range' | 'tag'; + version: string; +} diff --git a/attw-fork/src/utils.ts b/attw-fork/src/utils.ts new file mode 100644 index 0000000000..40a2db0945 --- /dev/null +++ b/attw-fork/src/utils.ts @@ -0,0 +1,182 @@ +import { valid, validRange } from 'semver'; +import validatePackgeName from 'validate-npm-package-name'; +import type { + BuildTool, + EntrypointInfo, + EntrypointResolutionAnalysis, + Failable, + ParsedPackageSpec, + Problem, + ProblemKind, + ResolutionKind, + ResolutionOption, +} from './types.ts'; + +export const allResolutionOptions: ResolutionOption[] = ['node10', 'node16', 'bundler']; +export const allResolutionKinds: ResolutionKind[] = ['node10', 'node16-cjs', 'node16-esm', 'bundler']; + +export function getResolutionOption(resolutionKind: ResolutionKind): ResolutionOption { + switch (resolutionKind) { + case 'node10': { + return 'node10'; + } + case 'node16-cjs': + case 'node16-esm': { + return 'node16'; + } + case 'bundler': { + return 'bundler'; + } + } +} + +export function getResolutionKinds(resolutionOption: ResolutionOption): ResolutionKind[] { + switch (resolutionOption) { + case 'node10': { + return ['node10']; + } + case 'node16': { + return ['node16-cjs', 'node16-esm']; + } + case 'bundler': { + return ['bundler']; + } + } +} + +export function isDefined(value: T | undefined): value is T { + return value !== undefined; +} + +export function resolvedThroughFallback(traces: string[]) { + let i = 0; + while (i < traces.length) { + i = traces.indexOf('Entering conditional exports.', i); + if (i === -1) { + return false; + } + if (conditionalExportsResolvedThroughFallback()) { + return true; + } + } + + function conditionalExportsResolvedThroughFallback(): boolean { + i++; + let seenFailure = false; + for (; i < traces.length; i++) { + if (traces[i]!.startsWith("Failed to resolve under condition '")) { + seenFailure = true; + } else if (seenFailure && traces[i]!.startsWith("Resolved under condition '")) { + return true; + } else if (traces[i] === 'Entering conditional exports.') { + if (conditionalExportsResolvedThroughFallback()) { + return true; + } + } else if (traces[i] === 'Exiting conditional exports.') { + return false; + } + } + return false; + } + + return; +} + +export function visitResolutions( + entrypoints: Record, + visitor: (analysis: EntrypointResolutionAnalysis, info: EntrypointInfo) => unknown, +) { + for (const entrypoint of Object.values(entrypoints)) { + for (const resolution of Object.values(entrypoint.resolutions)) { + if (visitor(resolution, entrypoint)) { + return; + } + } + } +} + +export function groupProblemsByKind( + problems: (Problem & { kind: K })[], +): Partial> { + const result: Partial> = {}; + for (const problem of problems) { + (result[problem.kind] ??= []).push(problem); + } + return result; +} + +export function parsePackageSpec(input: string): Failable { + let name; + let version; + let i = 0; + if (input.startsWith('@')) { + i = input.indexOf('/'); + if (i === -1 || i === 1) { + return { + status: 'error', + error: 'Invalid package name', + }; + } + i++; + } + i = input.indexOf('@', i); + if (i === -1) { + name = input; + } else { + name = input.slice(0, i); + version = input.slice(i + 1); + } + + if (validatePackgeName(name).errors) { + return { + status: 'error', + error: 'Invalid package name', + }; + } + if (!version) { + return { + status: 'success', + data: { versionKind: 'none', name, version: '' }, + }; + } + if (valid(version)) { + return { + status: 'success', + data: { versionKind: 'exact', name, version }, + }; + } + if (validRange(version)) { + return { + status: 'success', + data: { versionKind: 'range', name, version }, + }; + } + return { + status: 'success', + data: { versionKind: 'tag', name, version }, + }; +} + +export const allBuildTools = Object.keys( + { + '@arethetypeswrong/cli': true, + typescript: true, + rollup: true, + '@rollup/plugin-typescript': true, + '@rollup/plugin-typescript2': true, + webpack: true, + esbuild: true, + 'parcel-bundler': true, + '@preconstruct/cli': true, + vite: true, + snowpack: true, + microbundle: true, + '@microsoft/api-extractor': true, + tshy: true, + '@rspack/cli': true, + tsup: true, + tsdown: true, + } satisfies Record, +) as BuildTool[]; + +export { type ParsedPackageSpec } from './types.ts'; diff --git a/attw-fork/src/versions.ts b/attw-fork/src/versions.ts new file mode 100644 index 0000000000..274273963a --- /dev/null +++ b/attw-fork/src/versions.ts @@ -0,0 +1,13 @@ +import ts from 'typescript'; + +// @ts-ignore +// This file is only accessible from Node, but the rest of the package +// needs to run in the browser, so we don't have @types/node installed. +import { createRequire } from 'module'; + +const packageJson = createRequire(import.meta.url)('../package.json'); + +export const versions = { + core: packageJson.version, + typescript: ts.version, +}; diff --git a/attw-fork/tsconfig.json b/attw-fork/tsconfig.json new file mode 100644 index 0000000000..02a2e75906 --- /dev/null +++ b/attw-fork/tsconfig.json @@ -0,0 +1,16 @@ +{ + "extends": "../tsconfig.json", + "compilerOptions": { + "baseUrl": ".", + "allowImportingTsExtensions": true, + "paths": { + "~/*": ["src/*"] + }, + "lib": ["WebWorker", "ESNext"], + "declaration": true, + "outDir": "dist", + "noEmit": true, + "types": ["ts-expose-internals", "node"] + }, + "include": ["src"] +} diff --git a/changelogs/drizzle-seed/0.4.0.md b/changelogs/drizzle-seed/0.4.0.md new file mode 100644 index 0000000000..4ee0e88c5e --- /dev/null +++ b/changelogs/drizzle-seed/0.4.0.md @@ -0,0 +1,295 @@ +## Features + +- ### Added support for MS SQL database + +```ts +import { int, mssqlTable, text } from "drizzle-orm/mssql-core"; +import { drizzle } from "drizzle-orm/node-mssql"; +import { seed } from "drizzle-seed"; + +const users = mssqlTable("users", { + id: integer().primaryKey(), + name: text().notNull(), +}); + +async function main() { + const db = drizzle(process.env.DATABASE_URL!); + await seed(db, { users }); +} + +main(); +``` + +- ### Added support for CockroachDB database + +```ts +import { cockroachTable, int4, text } from "drizzle-orm/cockroach-core"; +import { drizzle } from "drizzle-orm/cockroach"; +import { seed } from "drizzle-seed"; + +const users = cockroachTable("users", { + id: int4().primaryKey(), + name: text().notNull(), +}); + +async function main() { + const db = drizzle(process.env.DATABASE_URL!); + await seed(db, { users }); +} + +main(); +``` + +- ### Added support for SingleStore database + +```ts +import { int, singlestoreTable, text } from "drizzle-orm/singlestore-core"; +import { drizzle } from "drizzle-orm/singlestore"; +import { seed } from "drizzle-seed"; + +const users = singlestoreTable("users", { + id: int().primaryKey(), + name: text().notNull(), +}); + +async function main() { + const db = drizzle(process.env.DATABASE_URL!); + await seed(db, { users }); +} + +main(); +``` + +- ### Added new generator `bitString` for CockroachDB and PostgreSQL `bit` type. + +#### Generates bit strings based on specified parameters: + +- param `isUnique` - property that controls if generated values gonna be unique or not; + +- param `arraySize` - number of elements in each one-dimensional array (If specified, arrays will be generated); + +- param `dimensions` - desired length of each bit string (e.g., `dimensions = 3` produces values like `'010'`). + + Defaults to the value of the database column bit-length + +```ts +await seed(db, { bitStringTable: schema.bitStringTable }).refine((funcs) => ({ + bitStringTable: { + count, + columns: { + bit: funcs.bitString({ + dimensions: 12, + isUnique: true, + }), + }, + }, +})); +``` + +- ### Added new generator `inet` for CockroachDB and PostgreSQL `inet` type. + +#### Generates ip addresses based on specified parameters: + +- param `isUnique` - property that controls if generated values gonna be unique or not; + +- param `arraySize` - number of elements in each one-dimensional array (If specified, arrays will be generated); + +- param `ipAddress` - type of IP address to generate — either "ipv4" or "ipv6"; + + Defaults to `'ipv4'` + +- param `includeCidr` - determines whether generated IPs include a CIDR suffix. + + Defaults to `true` + +```ts +await seed(db, { inetTable: schema.inetTable }).refine((funcs) => ({ + inetTable: { + count, + columns: { + inet: funcs.inet({ + ipAddress: "ipv4", + includeCidr: true, + isUnique: true, + }), + }, + }, +})); +``` + +- ### Added new generator `geometry` for CockroachDB and PostgreSQL `geometry` type. + +#### Generates PostGIS geometry objects based on the given parameters: + +- param `isUnique` - property that controls if generated values gonna be unique or not; + +- param `arraySize` - number of elements in each one-dimensional array (If specified, arrays will be generated); + +- param `type` - geometry type to generate; currently only `'point'` is supported; + + Defaults to `'point'` + +- param `srid` - Spatial Reference System Identifier: determines what type of point will be generated - either `4326` or `3857`; + + Defaults to `4326` + +- param `decimalPlaces` - number of decimal places for points when `srid` is `4326` (e.g., `decimalPlaces = 3` produces values like `'point(30.723 46.482)'`). + + Defaults to `6` + +```ts +await seed(db, { geometryTable: schema.geometryTable }).refine((funcs) => ({ + geometryTable: { + count, + columns: { + geometryPointTuple: funcs.geometry({ + type: "point", + srid: 4326, + decimalPlaces: 5, + isUnique: true, + }), + }, + }, +})); +``` + +- ### Added new generator `vector` for CockroachDB, PostgreSQL and SingleStore `vector` type. + +#### Generates vectors based on the provided parameters: + +- param `isUnique` - property that controls if generated values gonna be unique or not; + +- param `arraySize` - number of elements in each one-dimensional array (If specified, arrays will be generated); + +- param `decimalPlaces` - number of decimal places for each vector element (e.g., `decimalPlaces = 3` produces values like `1.123`); + + Defaults to `2` + +- param `dimensions` - number of elements in each generated vector (e.g., `dimensions = 3` produces values like `[1,2,3]`); + + Defaults to the value of the database column's dimensions + +- param `minValue` - minimum allowed value for each vector element; + + Defaults to `-1000` + +- param `maxValue` - maximum allowed value for each vector element. + + Defaults to `1000` + +```ts +await seed(db, { vectorTable: schema.vectorTable }).refine((funcs) => ({ + vectorTable: { + count, + columns: { + vector: funcs.vector({ + decimalPlaces: 5, + dimensions: 12, + minValue: -100, + maxValue: 100, + isUnique: true, + }), + }, + }, +})); +``` + +- ### Added support for composite unique constraints + + #### Example: + + Table with a composite unique constraint: + + ```ts + import { integer, pgTable, text, unique } from "drizzle-orm/pg-core"; + + const composite = pgTable( + "composite_example", + { + id: integer("id").notNull(), + name: text("name").notNull(), + }, + (t) => [unique("custom_name").on(t.id, t.name)] + ); + ``` + + Seeding script: + + ```ts + await seed(db, { composite: composite }, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ["a", "b", "c", "d"] }), + }, + }, + })); + ``` + + #### Limitations: + + - Seeding is not supported when two composite unique constraints share a column: + ```ts + const composite = pgTable( + "composite_example", + { + id: integer("id").notNull(), + name: text("name").notNull(), + slug: text("slug").notNull(), + }, + (t) => [ + unique("custom_name").on(t.id, t.name), + unique("custom_name1").on(t.name, t.slug), + ] + ); + ``` + This is allowed, however, if one of the constraints is a single-column unique constraint: + ```ts + unique("custom_name1").on(t.name); + ``` + - You can’t use a generator that doesn’t expose an `isUnique` option in its config, unless it’s one of the always-unique generators: `intPrimaryKey`, `email`, `phoneNumber`, or `uuid`. + +## Bug Fixes + +- fixed type error in `seed` and `reset` functions when using a drizzle db instance that was created with a schema in `DrizzleConfig`. + + https://github.com/drizzle-team/drizzle-orm/issues/4435 + +## Breaking changes + +### Hash generating function was changed and upgraded to v3 + +```ts +await seed(db, { table }).refine((f) => ({ + table: { + columns: { + // all generators will output different values compared to the previous version, even with the same seed number. + column1: f.interval({ isUnique: true }), + } + } +})) +``` + +**Reason for upgrade** + +The previous version of the hash generating function generated different hashes depending on whether Bun or Node.js was used, and hashes also varied across versions of Node.js. + +The new hash generating function will generate the same hash regardless of the version of Node.js or Bun, resulting in deterministic data generation across all versions. + +**Usage** +```ts +await seed(db, schema); +// or explicit +await seed(db, schema, { version: '3' }); +``` + +**Switch to the old version** + +The previous version of hash generating function is v1. +```ts +await seed(db, schema, { version: '1' }); +``` +To use the v2 generators while maintaining the v1 hash generating function: +```ts +await seed(db, schema, { version: '2' }); +``` diff --git a/clean.ts b/clean.ts new file mode 100644 index 0000000000..8a05413fd7 --- /dev/null +++ b/clean.ts @@ -0,0 +1,33 @@ +// import { existsSync, lstatSync, readdirSync, rmSync } from 'node:fs'; +// import { join } from 'node:path'; + +// const printTree = (path: string, indentation: number) => { +// for (const it of readdirSync(path)) { +// if (it === 'node_modules') continue; +// if (it === '.git') continue; +// if (it === '.github') continue; +// if (it === '.turbo') continue; +// if (it === 'dist') continue; + +// const full = join(path, it); +// const stat = existsSync(full) ? lstatSync(full) : undefined; +// if (!stat) continue; + +// if (stat.isDirectory()) { +// printTree(full, indentation + 1); +// } else { +// if ( +// full.endsWith('.js') +// && existsSync(full.replace('.js', '.js.map')) +// && existsSync(full.replace('.js', '.ts')) +// ) { +// console.log(full); +// rmSync(full); +// rmSync(full.replace('.js', '.js.map')); +// } +// } +// } +// }; + +// I've accidentally ran tsc which generated .d.ts files for all ts files in repo +// printTree("."); diff --git a/compose/cockroach.yml b/compose/cockroach.yml new file mode 100644 index 0000000000..527ceae919 --- /dev/null +++ b/compose/cockroach.yml @@ -0,0 +1,12 @@ +services: + cockroach: + image: cockroachdb/cockroach:latest + command: start-single-node --insecure --store=type=mem,size=2GiB + ports: + - "26257:26257" + - "8080:8080" + healthcheck: + test: ["CMD", "bash", "-lc", "nc -z 127.0.0.1 26257"] + interval: 2s + timeout: 3s + retries: 60 diff --git a/compose/dockers.sh b/compose/dockers.sh new file mode 100644 index 0000000000..01650ecf92 --- /dev/null +++ b/compose/dockers.sh @@ -0,0 +1,32 @@ +docker run -d -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=postgres -p 5432:5432 postgres:17-alpine +docker run -d -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=drizzle -e TZ=UTC -p 54322:5432 postgis/postgis:16-3.4 +docker run -d -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=drizzle -p 54321:5432 pgvector/pgvector:pg16 +docker run -it -d -p 3306:3306 -e MYSQL_ROOT_PASSWORD=mysql -e MYSQL_DATABASE=drizzle mysql:8 +docker run -it -d -p 26257:26257 cockroachdb/cockroach:v25.2.0 start-single-node --insecure --store=type=mem,size=1GiB + +docker run -it -d -p 1433:1433 \ + -e 'ACCEPT_EULA=1' \ + -e 'MSSQL_SA_PASSWORD=drizzle123PASSWORD!' \ + mcr.microsoft.com/azure-sql-edge + + docker run -d --name gel -p 56565:5656 \ + -e GEL_CLIENT_SECURITY=insecure_dev_mode \ + -e GEL_SERVER_SECURITY=insecure_dev_mode \ + -e GEL_CLIENT geldata/gel:latest + +docker run -d --name singlestore -p 33307:3306 \ + -e ROOT_PASSWORD=singlestore \ + -e TZ=UTC \ + --health-cmd="bash -lc 'nc -z 127.0.0.1 3306'" \ + --health-interval=2s \ + --health-timeout=3s \ + --health-retries=60 \ + ghcr.io/singlestore-labs/singlestoredb-dev:latest + +# macos +docker run -d --name singlestoredb-dev \ + -e ROOT_PASSWORD="password" \ + --platform linux/amd64 \ + -p 3306:3306 -p 8080:8080 -p 9000:9000 \ + ghcr.io/singlestore-labs/singlestoredb-dev:latest +# if the command above doesn't work for you on mac m1, try using version 0.2.57 of docker image. \ No newline at end of file diff --git a/compose/gel.yml b/compose/gel.yml new file mode 100644 index 0000000000..8511c545a2 --- /dev/null +++ b/compose/gel.yml @@ -0,0 +1,10 @@ +services: + gel: + image: geldata/gel:latest + environment: + GEL_CLIENT_SECURITY: insecure_dev_mode + GEL_SERVER_SECURITY: insecure_dev_mode + GEL_CLIENT_TLS_SECURITY: no_host_verification + GEL_SERVER_PASSWORD: password + ports: + - "56565:5656" diff --git a/compose/lint.sh b/compose/lint.sh new file mode 100644 index 0000000000..32b6dd0e44 --- /dev/null +++ b/compose/lint.sh @@ -0,0 +1,58 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Always run from repo root +PROJECT_ROOT="$(git rev-parse --show-toplevel)" +cd "$PROJECT_ROOT" + +# Name of the pnpm store volume used as cache between runs +PNPM_STORE_VOLUME="pnpm-store" +PNPM_COREPACK_CACHE_VOLUME="pnpm-corepack-cache" +APK_CACHE_VOLUME="apk-cache" + +docker run --rm \ + -e CI=1 \ + -v "$PROJECT_ROOT":/src \ + -v "${PNPM_STORE_VOLUME}":/pnpm/store \ + -v "${PNPM_COREPACK_CACHE_VOLUME}":/root/.cache \ + -v "${APK_CACHE_VOLUME}":/var/cache/apk \ + node:24-alpine \ + sh -lc ' + set -euo pipefail + + apk add --no-cache git >/dev/null + + APP_DIR="$(mktemp -d)" + + tar \ + --exclude="node_modules" \ + --exclude="*/node_modules" \ + --exclude=".turbo" \ + --exclude=".git" \ + --exclude=".pnpm-store" \ + --exclude="dist" \ + --exclude="*/dist" \ + --exclude="coverage" \ + -C /src \ + -cf - . \ + | tar -C "$APP_DIR" -xf - + + rm -rf "$APP_DIR/.git" + ln -s /src/.git "$APP_DIR/.git" + + cd "$APP_DIR" + + export PNPM_HOME=/pnpm + export PNPM_STORE_DIR=/pnpm/store + export PATH="$PNPM_HOME:$PATH" + + corepack enable pnpm + + pnpm install \ + --frozen-lockfile \ + --prefer-offline \ + --ignore-scripts \ + --filter . + + pnpm lint-staged + ' diff --git a/compose/mssql.yml b/compose/mssql.yml new file mode 100644 index 0000000000..45e49ade41 --- /dev/null +++ b/compose/mssql.yml @@ -0,0 +1,14 @@ +services: + mssql: + image: mcr.microsoft.com/azure-sql-edge:latest + environment: + ACCEPT_EULA: "1" + MSSQL_SA_PASSWORD: "drizzle123PASSWORD!" + TZ: UTC + ports: + - "1433:1433" + healthcheck: + test: ["CMD", "bash", "-lc", "nc -z 127.0.0.1 1433"] + interval: 2s + timeout: 3s + retries: 60 diff --git a/compose/mysql.yml b/compose/mysql.yml new file mode 100644 index 0000000000..8ba9c8d49c --- /dev/null +++ b/compose/mysql.yml @@ -0,0 +1,15 @@ +services: + mysql: + image: mysql:8 + environment: + MYSQL_ROOT_PASSWORD: mysql + MYSQL_DATABASE: drizzle + TZ: UTC + ports: + - "3306:3306" + healthcheck: + test: ["CMD-SHELL", "mysqladmin ping -h 127.0.0.1 -proot --silent"] + interval: 2s + timeout: 3s + retries: 40 + diff --git a/compose/postgres-postgis.yml b/compose/postgres-postgis.yml new file mode 100644 index 0000000000..c1278309e5 --- /dev/null +++ b/compose/postgres-postgis.yml @@ -0,0 +1,15 @@ +services: + postgres-postgis: + image: postgis/postgis:16-3.4 + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: drizzle + TZ: UTC + ports: + - "54322:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres -d drizzle"] + interval: 2s + timeout: 3s + retries: 30 diff --git a/compose/postgres-vector.yml b/compose/postgres-vector.yml new file mode 100644 index 0000000000..57d05cefe6 --- /dev/null +++ b/compose/postgres-vector.yml @@ -0,0 +1,15 @@ +services: + postgres-vector: + image: pgvector/pgvector:pg16 + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: drizzle + TZ: UTC + ports: + - "54321:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres -d drizzle"] + interval: 2s + timeout: 3s + retries: 30 \ No newline at end of file diff --git a/compose/postgres.yml b/compose/postgres.yml new file mode 100644 index 0000000000..927453795c --- /dev/null +++ b/compose/postgres.yml @@ -0,0 +1,15 @@ +services: + postgres: + image: postgres:17-alpine # change to 18-alpine when ready + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: drizzle + TZ: UTC + ports: + - "55433:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres -d drizzle"] + interval: 2s + timeout: 3s + retries: 30 diff --git a/compose/singlestore-many.yml b/compose/singlestore-many.yml new file mode 100644 index 0000000000..6e022acfc9 --- /dev/null +++ b/compose/singlestore-many.yml @@ -0,0 +1,52 @@ +services: + singlestore0: + image: ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67 + environment: + ROOT_PASSWORD: singlestore + TZ: UTC + ports: + - "3308:3306" + healthcheck: + test: ["CMD", "bash", "-lc", "nc -z 127.0.0.1 3306"] + interval: 2s + timeout: 3s + retries: 60 + + singlestore1: + image: ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67 + environment: + ROOT_PASSWORD: singlestore + TZ: UTC + ports: + - "3309:3306" + healthcheck: + test: ["CMD", "bash", "-lc", "nc -z 127.0.0.1 3306"] + interval: 2s + timeout: 3s + retries: 60 + + singlestore2: + image: ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67 + environment: + ROOT_PASSWORD: singlestore + TZ: UTC + ports: + - "3310:3306" + healthcheck: + test: ["CMD", "bash", "-lc", "nc -z 127.0.0.1 3306"] + interval: 2s + timeout: 3s + retries: 60 + + singlestore3: + image: ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67 + environment: + ROOT_PASSWORD: singlestore + TZ: UTC + ports: + - "3311:3306" + healthcheck: + test: ["CMD", "bash", "-lc", "nc -z 127.0.0.1 3306"] + interval: 2s + timeout: 3s + retries: 60 diff --git a/compose/singlestore.yml b/compose/singlestore.yml new file mode 100644 index 0000000000..ecd4a3c815 --- /dev/null +++ b/compose/singlestore.yml @@ -0,0 +1,13 @@ +services: + singlestore: + image: ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67 + environment: + ROOT_PASSWORD: singlestore + TZ: UTC + ports: + - "33307:3306" + healthcheck: + test: ["CMD", "bash", "-lc", "nc -z 127.0.0.1 3306"] + interval: 2s + timeout: 3s + retries: 60 diff --git a/compose/wait.sh b/compose/wait.sh new file mode 100644 index 0000000000..31349c5815 --- /dev/null +++ b/compose/wait.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +set -euo pipefail + +wait_tcp() { + host="$1"; port="$2"; name="$3"; tries="${4:-120}" + for i in $(seq 1 "$tries"); do + if nc -z "$host" "$port" >/dev/null 2>&1; then + echo "$name is up on $host:$port" + return 0 + fi + sleep 1 + done + echo "Timed out waiting for $name on $host:$port" >&2 + return 1 +} + +for db in "$@"; do + case "$db" in + postgres) wait_tcp 127.0.0.1 55433 "postgres" ;; + postgres-postgis) wait_tcp 127.0.0.1 54322 "postgres" ;; + mysql) wait_tcp 127.0.0.1 3306 "mysql" ;; + singlestore) wait_tcp 127.0.0.1 33307 "singlestore" ;; + singlestore-many) + # loop through 5 ports (33307–33311) + for i in $(seq 3308 3311); do + wait_tcp 127.0.0.1 "$i" "singlestore-$((i-3308))" + done + ;; + mssql) wait_tcp 127.0.0.1 1433 "mssql" ;; + cockroach) wait_tcp 127.0.0.1 26257 "cockroach" ;; + neon) wait_tcp 127.0.0.1 5446 "neon-serverless" ;; + *) echo "Unknown db '$db'";; + esac +done diff --git a/dprint.json b/dprint.json index 17cf15ca33..45652bc0f2 100644 --- a/dprint.json +++ b/dprint.json @@ -27,8 +27,8 @@ "integration-tests/tests/prisma/*/drizzle" ], "plugins": [ - "https://plugins.dprint.dev/typescript-0.91.1.wasm", - "https://plugins.dprint.dev/json-0.19.3.wasm", - "https://plugins.dprint.dev/markdown-0.17.1.wasm" + "https://unpkg.com/@dprint/typescript@0.91.1/plugin.wasm", + "https://unpkg.com/@dprint/json@0.19.3/plugin.wasm", + "https://unpkg.com/@dprint/markdown@0.17.1/plugin.wasm" ] } diff --git a/drizzle-arktype/package.json b/drizzle-arktype/package.json index 196aec1d7e..69c39b3f29 100644 --- a/drizzle-arktype/package.json +++ b/drizzle-arktype/package.json @@ -1,13 +1,15 @@ { "name": "drizzle-arktype", - "version": "0.1.3", + "version": "1.0.0-beta.2", "description": "Generate arktype schemas from Drizzle ORM schemas", "type": "module", "scripts": { "build": "tsx scripts/build.ts", + "build:artifact": "pnpm run build", "b": "pnpm build", "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", + "pack:artifact": "pnpm run pack", "publish": "npm publish package.tgz", "test": "vitest run", "bench:types": "tsx ./benchmarks/types.ts" @@ -57,7 +59,7 @@ "license": "Apache-2.0", "peerDependencies": { "arktype": ">=2.0.0", - "drizzle-orm": ">=1.0.0-beta.1" + "drizzle-orm": ">=1.0.0-beta.2" }, "devDependencies": { "@ark/attest": "^0.45.8", @@ -70,8 +72,6 @@ "rimraf": "^5.0.0", "rollup": "^3.29.5", "tsx": "^4.19.3", - "vite-tsconfig-paths": "^4.3.2", - "vitest": "^3.1.3", "zx": "^7.2.2" } } diff --git a/drizzle-arktype/src/column.ts b/drizzle-arktype/src/column.ts index 45d5163d46..0a276db65e 100644 --- a/drizzle-arktype/src/column.ts +++ b/drizzle-arktype/src/column.ts @@ -14,7 +14,7 @@ import { CONSTANTS } from './constants.ts'; export const literalSchema = type.string.or(type.number).or(type.boolean).or(type.null); export const jsonSchema = literalSchema.or(type.unknown.as().array()).or(type.object.as>()); -export const bufferSchema = type.unknown.narrow((value) => value instanceof Buffer).as().describe( // eslint-disable-line no-instanceof/no-instanceof +export const bufferSchema = type.unknown.narrow((value) => value instanceof Buffer).as().describe( 'a Buffer instance', ); @@ -151,6 +151,11 @@ function numberColumnToSchema(column: Column, constraint: ColumnDataNumberConstr integer = true; break; } + case 'unsigned': { + min = 0; + max = Number.MAX_SAFE_INTEGER; + break; + } default: { min = Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; @@ -178,6 +183,11 @@ function arrayColumnToSchema( const length = column.length; return length ? type.number.array().exactlyLength(length) : type.number.array(); } + case 'int64vector': { + const length = column.length; + // TODO - INT64 number range + return length ? type.bigint.array().exactlyLength(length) : type.bigint.array(); + } case 'basecolumn': { const length = column.length; const schema = (<{ baseColumn?: Column }> column).baseColumn @@ -234,6 +244,46 @@ export const unsignedBigintNarrow = (v: bigint, ctx: { mustBe: (expected: string export const bigintNarrow = (v: bigint, ctx: { mustBe: (expected: string) => false }) => v < CONSTANTS.INT64_MIN ? ctx.mustBe('greater than') : v > CONSTANTS.INT64_MAX ? ctx.mustBe('less than') : true; +/** @internal */ +export const bigintStringModeSchema = type.string.narrow((v, ctx) => { + if (typeof v !== 'string') { + return ctx.mustBe('a string'); + } + if (!(/^-?\d+$/.test(v))) { + return ctx.mustBe('a string representing a number'); + } + + const bigint = BigInt(v); + if (bigint < CONSTANTS.INT64_MIN) { + return ctx.mustBe('greater than'); + } + if (bigint > CONSTANTS.INT64_MAX) { + return ctx.mustBe('less than'); + } + + return true; +}); + +/** @internal */ +export const unsignedBigintStringModeSchema = type.string.narrow((v, ctx) => { + if (typeof v !== 'string') { + return ctx.mustBe('a string'); + } + if (!(/^\d+$/.test(v))) { + return ctx.mustBe('a string representing a number'); + } + + const bigint = BigInt(v); + if (bigint < 0) { + return ctx.mustBe('greater than'); + } + if (bigint > CONSTANTS.INT64_MAX) { + return ctx.mustBe('less than'); + } + + return true; +}); + function bigintColumnToSchema(column: Column, constraint?: ColumnDataBigIntConstraint | undefined): Type { switch (constraint) { case 'int64': { @@ -269,6 +319,12 @@ function stringColumnToSchema(column: Column, constraint: ColumnDataStringConstr } return type.enumerated(...enumValues); } + if (constraint === 'int64') { + return bigintStringModeSchema; + } + if (constraint === 'uint64') { + return unsignedBigintStringModeSchema; + } return length && isLengthExact ? type.string.exactlyLength(length) diff --git a/drizzle-arktype/src/schema.ts b/drizzle-arktype/src/schema.ts index 90f4dbd0cf..62c33f7938 100644 --- a/drizzle-arktype/src/schema.ts +++ b/drizzle-arktype/src/schema.ts @@ -79,7 +79,9 @@ export const createInsertSchema = (( ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { - never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', + never: (column) => + column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always' + || ('identity' in (column ?? {}) && typeof (column as any)?.identity !== 'undefined'), optional: (column) => !column.notNull || (column.notNull && column.hasDefault), nullable: (column) => !column.notNull, }) as any; @@ -91,7 +93,9 @@ export const createUpdateSchema = (( ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { - never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', + never: (column) => + column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always' + || ('identity' in (column ?? {}) && typeof (column as any)?.identity !== 'undefined'), optional: () => true, nullable: (column) => !column.notNull, }) as any; diff --git a/drizzle-arktype/src/schema.types.internal.ts b/drizzle-arktype/src/schema.types.internal.ts index fa77b61d33..aee69e0aa2 100644 --- a/drizzle-arktype/src/schema.types.internal.ts +++ b/drizzle-arktype/src/schema.types.internal.ts @@ -47,17 +47,20 @@ export type BuildSchema< > = type.instantiate< Simplify< { - readonly [K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? never : K]: - TColumns[K] extends infer TColumn extends Column - ? IsRefinementDefined extends true - ? HandleRefinement - : HandleColumn - : TColumns[K] extends infer TNested extends SelectedFieldsFlat | Table | View ? BuildSchema< - TType, - GetSelection, - TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined - > - : any; + readonly [ + K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? TType extends 'select' ? K + : never + : K + ]: TColumns[K] extends infer TColumn extends Column + ? IsRefinementDefined extends true + ? HandleRefinement + : HandleColumn + : TColumns[K] extends infer TNested extends SelectedFieldsFlat | Table | View ? BuildSchema< + TType, + GetSelection, + TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined + > + : any; } > >; diff --git a/drizzle-arktype/src/schema.types.ts b/drizzle-arktype/src/schema.types.ts index d5ac656a44..123a331286 100644 --- a/drizzle-arktype/src/schema.types.ts +++ b/drizzle-arktype/src/schema.types.ts @@ -1,5 +1,6 @@ import type { Type } from 'arktype'; import type { InferInsertModel, InferSelectModel, Table, View } from 'drizzle-orm'; +import type { CockroachEnum } from 'drizzle-orm/cockroach-core'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { BuildRefine, BuildSchema, NoUnknownKeys } from './schema.types.internal.ts'; @@ -22,7 +23,7 @@ export interface CreateSelectSchema { refine: NoUnknownKeys, ): BuildSchema<'select', TView['_']['selectedFields'], TRefine>; - >(enum_: TEnum): Type; + | CockroachEnum>(enum_: TEnum): Type; } export interface CreateInsertSchema { diff --git a/drizzle-arktype/tests/cockroach.test.ts b/drizzle-arktype/tests/cockroach.test.ts new file mode 100644 index 0000000000..7c4e524590 --- /dev/null +++ b/drizzle-arktype/tests/cockroach.test.ts @@ -0,0 +1,554 @@ +import { type Type, type } from 'arktype'; +import { type Equal, sql } from 'drizzle-orm'; +import { + cockroachEnum, + cockroachMaterializedView, + cockroachSchema, + cockroachTable, + cockroachView, + customType, + int4, + jsonb, + text, +} from 'drizzle-orm/cockroach-core'; +import type { TopLevelCondition } from 'json-rules-engine'; +import { test } from 'vitest'; +import { bigintNarrow, jsonSchema } from '~/column.ts'; +import { CONSTANTS } from '~/constants.ts'; +import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; +import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; + +const int4Schema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX); +const int4NullableSchema = int4Schema.or(type.null); +const int4OptionalSchema = int4Schema.optional(); +const int4NullableOptionalSchema = int4Schema.or(type.null).optional(); + +const textSchema = type.string; +const textOptionalSchema = textSchema.optional(); + +const anySchema = type('unknown.any'); + +const extendedSchema = int4Schema.atMost(1000); +const extendedNullableSchema = extendedSchema.or(type.null); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = type.string.pipe(Number); + +test('table - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + generated: int4().generatedAlwaysAsIdentity(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = type({ id: int4Schema, generated: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table in schema - select', (tc) => { + const schema = cockroachSchema('test'); + const table = schema.table('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = type({ id: int4Schema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - insert', (t) => { + const table = cockroachTable('test', { + id: int4().generatedAlwaysAsIdentity().primaryKey(), + name: text().notNull(), + age: int4(), + }); + + const result = createInsertSchema(table); + const expected = type({ name: textSchema, age: int4NullableOptionalSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table - update', (t) => { + const table = cockroachTable('test', { + id: int4().generatedAlwaysAsIdentity().primaryKey(), + name: text().notNull(), + age: int4(), + }); + + const result = createUpdateSchema(table); + const expected = type({ + name: textOptionalSchema, + age: int4NullableOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view qb - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); + + const result = createSelectSchema(view); + const expected = type({ id: int4Schema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view columns - select', (t) => { + const view = cockroachView('test', { + id: int4().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = type({ id: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('materialized view qb - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachMaterializedView('test').as((qb) => + qb.select({ id: table.id, age: sql``.as('age') }).from(table) + ); + + const result = createSelectSchema(view); + const expected = type({ id: int4Schema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('materialized view columns - select', (t) => { + const view = cockroachMaterializedView('test', { + id: int4().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = type({ id: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view with nested fields - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachView('test').as((qb) => + qb.select({ + id: table.id, + nested: { + name: table.name, + age: sql``.as('age'), + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view); + const expected = type({ + id: int4Schema, + nested: type({ name: textSchema, age: anySchema }), + table: type({ id: int4Schema, name: textSchema }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('enum - select', (t) => { + const enum_ = cockroachEnum('test', ['a', 'b', 'c']); + + const result = createSelectSchema(enum_); + const expected = type.enumerated('a', 'b', 'c'); + expectEnumValues(t, expected).from(result); + Expect>(); +}); + +test('nullability - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + }); + + const result = createSelectSchema(table); + const expected = type({ + c1: int4NullableSchema, + c2: int4Schema, + c3: int4NullableSchema, + c4: int4Schema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - insert', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + c5: int4().generatedAlwaysAs(1), + c6: int4().generatedAlwaysAsIdentity(), + c7: int4().generatedByDefaultAsIdentity(), + }); + + const result = createInsertSchema(table); + const expected = type({ + c1: int4NullableOptionalSchema, + c2: int4Schema, + c3: int4NullableOptionalSchema, + c4: int4OptionalSchema, + c7: int4OptionalSchema, + }); + expectSchemaShape(t, expected).from(result); +}); + +test('nullability - update', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + c5: int4().generatedAlwaysAs(1), + c6: int4().generatedAlwaysAsIdentity(), + c7: int4().generatedByDefaultAsIdentity(), + }); + + const result = createUpdateSchema(table); + const expected = type({ + c1: int4NullableOptionalSchema, + c2: int4OptionalSchema, + c3: int4NullableOptionalSchema, + c4: int4OptionalSchema, + c7: int4OptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + }); + + const result = createSelectSchema(table, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + }); + const expected = type({ + c1: int4NullableSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select with custom data type', (t) => { + const customText = customType({ dataType: () => 'text' }); + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: customText(), + }); + + const customTextSchema = type.string.atLeastLength(1).atMostLength(100); + const result = createSelectSchema(table, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + c4: customTextSchema, + }); + const expected = type({ + c1: int4NullableSchema, + c2: extendedSchema, + c3: customSchema, + c4: customTextSchema, + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - insert', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: int4().generatedAlwaysAs(1), + }); + + const result = createInsertSchema(table, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + }); + const expected = type({ + c1: int4NullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - update', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: int4().generatedAlwaysAs(1), + }); + + const result = createUpdateSchema(table, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + }); + const expected = type({ + c1: int4NullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine view - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4(), + c3: int4(), + c4: int4(), + c5: int4(), + c6: int4(), + }); + const view = cockroachView('test').as((qb) => + qb.select({ + c1: table.c1, + c2: table.c2, + c3: table.c3, + nested: { + c4: table.c4, + c5: table.c5, + c6: table.c6, + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + nested: { + c5: (schema) => schema.atMost(1000), + c6: type.string.pipe(Number), + }, + table: { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + }, + }); + const expected = type({ + c1: int4NullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + nested: type({ + c4: int4NullableSchema, + c5: extendedNullableSchema, + c6: customSchema, + }), + table: type({ + c1: int4NullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: int4NullableSchema, + c5: int4NullableSchema, + c6: int4NullableSchema, + }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('all data types', (t) => { + const table = cockroachTable('test', ({ + bigint, + bit, + bool, + char, + date, + decimal, + float, + doublePrecision, + geometry, + inet, + int2, + int4, + int8, + interval, + jsonb, + numeric, + real, + smallint, + string, + text, + time, + timestamp, + uuid, + varchar, + vector, + }) => ({ + bigint1: bigint({ mode: 'number' }).notNull(), + bigint2: bigint({ mode: 'bigint' }).notNull(), + bit: bit({ length: 5 }).notNull(), + boolean: bool().notNull(), + char1: char({ length: 10 }).notNull(), + char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + date1: date({ mode: 'date' }).notNull(), + date2: date({ mode: 'string' }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'bigint' }).notNull(), + decimal3: decimal({ mode: 'string' }).notNull(), + float: float().notNull(), + doublePrecision: doublePrecision().notNull(), + geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(), + geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(), + inet: inet().notNull(), + int2: int2().notNull(), + int4: int4().notNull(), + int8_1: int8({ mode: 'number' }).notNull(), + int8_2: int8({ mode: 'bigint' }).notNull(), + interval: interval().notNull(), + jsonb: jsonb().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), + real: real().notNull(), + smallint: smallint().notNull(), + string1: string().notNull(), + string2: string({ enum: ['a', 'b', 'c'] }).notNull(), + text1: text().notNull(), + text2: text({ enum: ['a', 'b', 'c'] }).notNull(), + time: time().notNull(), + timestamp1: timestamp({ mode: 'date' }).notNull(), + timestamp2: timestamp({ mode: 'string' }).notNull(), + uuid: uuid().notNull(), + varchar1: varchar({ length: 10 }).notNull(), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + vector: vector({ dimensions: 3 }).notNull(), + array: int4().array().notNull(), + })); + + const result = createSelectSchema(table); + const expected = type({ + bigint1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + bigint2: type.bigint.narrow(bigintNarrow), + bit: type(/^[01]{5}$/).describe('a string containing ones or zeros while being 5 characters long'), + boolean: type.boolean, + char1: type.string.atMostLength(10), + char2: type.enumerated('a', 'b', 'c'), + date1: type.Date, + date2: type.string, + decimal1: type.number.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + decimal2: type.bigint.narrow(bigintNarrow), + decimal3: type.string, + float: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), + doublePrecision: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), + geometry1: type([type.number, type.number]), + geometry2: type({ x: type.number, y: type.number }), + inet: type.string, + int2: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX), + int4: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX), + int8_1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + int8_2: type.bigint.narrow(bigintNarrow), + interval: type.string, + jsonb: jsonSchema, + numeric1: type.number.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + numeric2: type.bigint.narrow(bigintNarrow), + numeric3: type.string, + real: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), + smallint: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX), + string1: type.string, + string2: type.enumerated('a', 'b', 'c'), + text1: type.string, + text2: type.enumerated('a', 'b', 'c'), + time: type.string, + timestamp1: type.Date, + timestamp2: type.string, + uuid: type(/^[\da-f]{8}(?:-[\da-f]{4}){3}-[\da-f]{12}$/iu).describe('a RFC-4122-compliant UUID'), + varchar1: type.string.atMostLength(10), + varchar2: type.enumerated('a', 'b', 'c'), + vector: type.number.array().exactlyLength(3), + array: int4Schema.array(), + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +/* Infinitely recursive type */ { + const TopLevelCondition: Type = type('unknown.any') as any; + const table = cockroachTable('test', { + jsonb: jsonb().$type(), + }); + const result = createSelectSchema(table); + const expected = type({ + jsonb: TopLevelCondition.or(type.null), + }); + Expect, type.infer>>(); +} + +/* Disallow unknown keys in table refinement - select */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createSelectSchema(table, { unknown: type.string }); +} + +/* Disallow unknown keys in table refinement - insert */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createInsertSchema(table, { unknown: type.string }); +} + +/* Disallow unknown keys in table refinement - update */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createUpdateSchema(table, { unknown: type.string }); +} + +/* Disallow unknown keys in view qb - select */ { + const table = cockroachTable('test', { id: int4() }); + const view = cockroachView('test').as((qb) => qb.select().from(table)); + const mView = cockroachMaterializedView('test').as((qb) => qb.select().from(table)); + const nestedSelect = cockroachView('test').as((qb) => qb.select({ table }).from(table)); + // @ts-expect-error + createSelectSchema(view, { unknown: type.string }); + // @ts-expect-error + createSelectSchema(mView, { unknown: type.string }); + // @ts-expect-error + createSelectSchema(nestedSelect, { table: { unknown: type.string } }); +} + +/* Disallow unknown keys in view columns - select */ { + const view = cockroachView('test', { id: int4() }).as(sql``); + const mView = cockroachView('test', { id: int4() }).as(sql``); + // @ts-expect-error + createSelectSchema(view, { unknown: type.string }); + // @ts-expect-error + createSelectSchema(mView, { unknown: type.string }); +} diff --git a/drizzle-arktype/tests/mssql.test.ts b/drizzle-arktype/tests/mssql.test.ts new file mode 100644 index 0000000000..56b0907d49 --- /dev/null +++ b/drizzle-arktype/tests/mssql.test.ts @@ -0,0 +1,493 @@ +import { type } from 'arktype'; +import { type Equal, sql } from 'drizzle-orm'; +import { customType, int, mssqlSchema, mssqlTable, mssqlView, text } from 'drizzle-orm/mssql-core'; +import { test } from 'vitest'; +import { bigintNarrow, bigintStringModeSchema, bufferSchema } from '~/column.ts'; +import { CONSTANTS } from '~/constants.ts'; +import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src/index.ts'; +import { Expect, expectSchemaShape } from './utils.ts'; + +const integerSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX); +const integerNullableSchema = integerSchema.or(type.null); +const integerOptionalSchema = integerSchema.optional(); +const integerNullableOptionalSchema = integerSchema.or(type.null).optional(); + +const textSchema = type.string; +const textOptionalSchema = textSchema.optional(); + +const anySchema = type('unknown.any'); + +const extendedSchema = integerSchema.atMost(1000); +const extendedNullableSchema = extendedSchema.or(type.null); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = type.string.pipe(Number); + +test('table - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + generated: int().identity(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = type({ id: integerSchema, generated: integerSchema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table in schema - select', (tc) => { + const schema = mssqlSchema('test'); + const table = schema.table('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = type({ id: integerSchema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - insert', (t) => { + const table = mssqlTable('test', { + id: int().identity().primaryKey(), + name: text().notNull(), + age: int(), + }); + + const result = createInsertSchema(table); + const expected = type({ name: textSchema, age: integerNullableOptionalSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table - update', (t) => { + const table = mssqlTable('test', { + id: int().identity().primaryKey(), + name: text().notNull(), + age: int(), + }); + + const result = createUpdateSchema(table); + const expected = type({ + name: textOptionalSchema, + age: integerNullableOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view qb - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + const view = mssqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); + + const result = createSelectSchema(view); + const expected = type({ id: integerSchema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view columns - select', (t) => { + const view = mssqlView('test', { + id: int().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = type({ id: integerSchema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view with nested fields - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + const view = mssqlView('test').as((qb) => + qb.select({ + id: table.id, + nested: { + name: table.name, + age: sql``.as('age'), + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view); + const expected = type({ + id: integerSchema, + nested: type({ name: textSchema, age: anySchema }), + table: type({ id: integerSchema, name: textSchema }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + }); + + const result = createSelectSchema(table); + const expected = type({ + c1: integerNullableSchema, + c2: integerSchema, + c3: integerNullableSchema, + c4: integerSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - insert', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + c5: int().generatedAlwaysAs(1), + c6: int().identity(), + }); + + const result = createInsertSchema(table); + const expected = type({ + c1: integerNullableOptionalSchema, + c2: integerSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); +}); + +test('nullability - update', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + c5: int().generatedAlwaysAs(1), + c6: int().identity(), + }); + + const result = createUpdateSchema(table); + const expected = type({ + c1: integerNullableOptionalSchema, + c2: integerOptionalSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + }); + + const result = createSelectSchema(table, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + }); + const expected = type({ + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select with custom data type', (t) => { + const customText = customType({ dataType: () => 'text' }); + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: customText(), + }); + + const customTextSchema = type.string.atLeastLength(1).atMostLength(100); + const result = createSelectSchema(table, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + c4: customTextSchema, + }); + const expected = type({ + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, + c4: customTextSchema, + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - insert', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: int().generatedAlwaysAs(1), + }); + + const result = createInsertSchema(table, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + }); + const expected = type({ + c1: integerNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - update', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: int().generatedAlwaysAs(1), + }); + + const result = createUpdateSchema(table, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + }); + const expected = type({ + c1: integerNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine view - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int(), + c3: int(), + c4: int(), + c5: int(), + c6: int(), + }); + const view = mssqlView('test').as((qb) => + qb.select({ + c1: table.c1, + c2: table.c2, + c3: table.c3, + nested: { + c4: table.c4, + c5: table.c5, + c6: table.c6, + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view, { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + nested: { + c5: (schema) => schema.atMost(1000), + c6: type.string.pipe(Number), + }, + table: { + c2: (schema) => schema.atMost(1000), + c3: type.string.pipe(Number), + }, + }); + const expected = type({ + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + nested: type({ + c4: integerNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, + }), + table: type({ + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: integerNullableSchema, + c5: integerNullableSchema, + c6: integerNullableSchema, + }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('all data types', (t) => { + const table = mssqlTable('test', ({ + bigint, + binary, + bit, + char, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + int, + numeric, + real, + smallint, + text, + time, + tinyint, + varbinary, + varchar, + ntext, + nvarchar, + }) => ({ + bigint1: bigint({ mode: 'number' }).notNull(), + bigint2: bigint({ mode: 'bigint' }).notNull(), + bigint3: bigint({ mode: 'string' }).notNull(), + binary: binary({ length: 10 }).notNull(), + bit: bit().notNull(), + char1: char({ length: 10 }).notNull(), + char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + date1: date({ mode: 'date' }).notNull(), + date2: date({ mode: 'string' }).notNull(), + datetime1: datetime({ mode: 'date' }).notNull(), + datetime2: datetime({ mode: 'string' }).notNull(), + datetime2_1: datetime2({ mode: 'date' }).notNull(), + datetime2_2: datetime2({ mode: 'string' }).notNull(), + datetimeoffset1: datetimeoffset({ mode: 'date' }).notNull(), + datetimeoffset2: datetimeoffset({ mode: 'string' }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'bigint' }).notNull(), + decimal3: decimal({ mode: 'string' }).notNull(), + float: float().notNull(), + int: int().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), + real: real().notNull(), + smallint: smallint().notNull(), + text1: text().notNull(), + text2: text({ enum: ['a', 'b', 'c'] }).notNull(), + time1: time({ mode: 'date' }).notNull(), + time2: time({ mode: 'string' }).notNull(), + tinyint: tinyint().notNull(), + varbinary: varbinary({ length: 10 }).notNull(), + varchar1: varchar({ length: 10 }).notNull(), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + ntext1: ntext().notNull(), + ntext2: ntext({ enum: ['a', 'b', 'c'] }).notNull(), + nvarchar1: nvarchar({ length: 10 }).notNull(), + nvarchar2: nvarchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + })); + + const result = createSelectSchema(table); + const expected = type({ + bigint1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + bigint2: type.bigint.narrow(bigintNarrow), + bigint3: bigintStringModeSchema, + binary: bufferSchema, + bit: type.boolean, + char1: type.string.atMostLength(10), + char2: type.enumerated('a', 'b', 'c'), + date1: type.Date, + date2: type.string, + datetime1: type.Date, + datetime2: type.string, + datetime2_1: type.Date, + datetime2_2: type.string, + datetimeoffset1: type.Date, + datetimeoffset2: type.string, + decimal1: type.number.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + decimal2: type.bigint.narrow(bigintNarrow), + decimal3: type.string, + float: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), + int: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX), + numeric1: type.number.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + numeric2: type.bigint.narrow(bigintNarrow), + numeric3: type.string, + real: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), + smallint: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX), + text1: type.string, + text2: type.enumerated('a', 'b', 'c'), + time1: type.Date, + time2: type.string, + tinyint: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT8_UNSIGNED_MAX), + varbinary: bufferSchema, + varchar1: type.string.atMostLength(10), + varchar2: type.enumerated('a', 'b', 'c'), + ntext1: type.string, + ntext2: type.enumerated('a', 'b', 'c'), + nvarchar1: type.string.atMostLength(10), + nvarchar2: type.enumerated('a', 'b', 'c'), + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +// MSSQL doesn't support JSON data type +// /* Infinitely recursive type */ { +// const TopLevelCondition: Type = type('unknown.any') as any; +// const table = mssqlTable('test', { +// json: json().$type(), +// }); +// const result = createSelectSchema(table); +// const expected = type({ +// json: TopLevelCondition.or(type.null), +// }); +// Expect, type.infer>>(); +// } + +/* Disallow unknown keys in table refinement - select */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createSelectSchema(table, { unknown: type.string }); +} + +/* Disallow unknown keys in table refinement - insert */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createInsertSchema(table, { unknown: type.string }); +} + +/* Disallow unknown keys in table refinement - update */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createUpdateSchema(table, { unknown: type.string }); +} + +/* Disallow unknown keys in view qb - select */ { + const table = mssqlTable('test', { id: int() }); + const view = mssqlView('test').as((qb) => qb.select().from(table)); + const nestedSelect = mssqlView('test').as((qb) => qb.select({ table }).from(table)); + // @ts-expect-error + createSelectSchema(view, { unknown: type.string }); + // @ts-expect-error + createSelectSchema(nestedSelect, { table: { unknown: type.string } }); +} + +/* Disallow unknown keys in view columns - select */ { + const view = mssqlView('test', { id: int() }).as(sql``); + const mView = mssqlView('test', { id: int() }).as(sql``); + // @ts-expect-error + createSelectSchema(view, { unknown: type.string }); + // @ts-expect-error + createSelectSchema(mView, { unknown: type.string }); +} diff --git a/drizzle-arktype/tests/mysql.test.ts b/drizzle-arktype/tests/mysql.test.ts index b3bb6c7a5a..b0cb1b7db6 100644 --- a/drizzle-arktype/tests/mysql.test.ts +++ b/drizzle-arktype/tests/mysql.test.ts @@ -3,14 +3,35 @@ import { type Equal, sql } from 'drizzle-orm'; import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text } from 'drizzle-orm/mysql-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; -import { bigintNarrow, jsonSchema, unsignedBigintNarrow } from '~/column.ts'; +import { + bigintNarrow, + bigintStringModeSchema, + jsonSchema, + unsignedBigintNarrow, + unsignedBigintStringModeSchema, +} from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX); -const serialNumberModeSchema = type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER); +const intNullableSchema = intSchema.or(type.null); +const intOptionalSchema = intSchema.optional(); +const intNullableOptionalSchema = intSchema.or(type.null).optional(); + +const serialSchema = type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER); +const serialOptionalSchema = serialSchema.optional(); + const textSchema = type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX); +const textOptionalSchema = textSchema.optional(); + +const anySchema = type('unknown.any'); + +const extendedSchema = intSchema.atMost(1000); +const extendedNullableSchema = extendedSchema.or(type.null); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = type.string.pipe(Number); test('table - select', (t) => { const table = mysqlTable('test', { @@ -19,7 +40,7 @@ test('table - select', (t) => { }); const result = createSelectSchema(table); - const expected = type({ id: serialNumberModeSchema, name: textSchema }); + const expected = type({ id: serialSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -32,7 +53,7 @@ test('table in schema - select', (tc) => { }); const result = createSelectSchema(table); - const expected = type({ id: serialNumberModeSchema, name: textSchema }); + const expected = type({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -46,9 +67,9 @@ test('table - insert', (t) => { const result = createInsertSchema(table); const expected = type({ - id: serialNumberModeSchema.optional(), + id: serialOptionalSchema, name: textSchema, - age: intSchema.or(type.null).optional(), + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -63,9 +84,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = type({ - id: serialNumberModeSchema.optional(), - name: textSchema.optional(), - age: intSchema.or(type.null).optional(), + id: serialOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -79,7 +100,7 @@ test('view qb - select', (t) => { const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = type({ id: serialNumberModeSchema, age: type('unknown.any') }); + const expected = type({ id: serialSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -91,7 +112,7 @@ test('view columns - select', (t) => { }).as(sql``); const result = createSelectSchema(view); - const expected = type({ id: serialNumberModeSchema, name: textSchema }); + const expected = type({ id: serialSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -114,9 +135,9 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = type({ - id: serialNumberModeSchema, - nested: type({ name: textSchema, age: type('unknown.any') }), - table: type({ id: serialNumberModeSchema, name: textSchema }), + id: serialSchema, + nested: type({ name: textSchema, age: anySchema }), + table: type({ id: serialSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -132,9 +153,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = type({ - c1: intSchema.or(type.null), + c1: intNullableSchema, c2: intSchema, - c3: intSchema.or(type.null), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(t, expected).from(result); @@ -152,10 +173,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = type({ - c1: intSchema.or(type.null).optional(), + c1: intNullableOptionalSchema, c2: intSchema, - c3: intSchema.or(type.null).optional(), - c4: intSchema.optional(), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -172,10 +193,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.optional(), - c3: intSchema.or(type.null).optional(), - c4: intSchema.optional(), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -193,9 +214,9 @@ test('refine table - select', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -217,9 +238,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -240,9 +261,9 @@ test('refine table - insert', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -261,9 +282,9 @@ test('refine table - update', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.atMost(1000).optional(), - c3: type.string.pipe(Number), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -305,21 +326,21 @@ test('refine view - select', (t) => { }, }); const expected = type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000).or(type.null), - c3: type.string.pipe(Number), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: type({ - c4: intSchema.or(type.null), - c5: intSchema.atMost(1000).or(type.null), - c6: type.string.pipe(Number), + c4: intNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000).or(type.null), - c3: type.string.pipe(Number), - c4: intSchema.or(type.null), - c5: intSchema.or(type.null), - c6: intSchema.or(type.null), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: intNullableSchema, + c5: intNullableSchema, + c6: intNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -369,8 +390,12 @@ test('all data types', (t) => { date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), - decimal1: decimal().notNull(), - decimal2: decimal({ unsigned: true }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'number', unsigned: true }).notNull(), + decimal3: decimal({ mode: 'bigint' }).notNull(), + decimal4: decimal({ mode: 'bigint', unsigned: true }).notNull(), + decimal5: decimal({ mode: 'string' }).notNull(), + decimal6: decimal({ mode: 'string', unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), @@ -410,20 +435,24 @@ test('all data types', (t) => { bigint2: type.bigint.narrow(bigintNarrow), bigint3: type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER), bigint4: type.bigint.narrow(unsignedBigintNarrow), - bigint5: type.string, - bigint6: type.string, - binary: type(`/^[01]{10}$/`).describe(`a string containing ones or zeros while being 10 characters long`) as Type< - string - >, + bigint5: bigintStringModeSchema, + bigint6: unsignedBigintStringModeSchema, + binary: type(`/^[01]{0,10}$/`).describe( + `a string containing ones or zeros while being up to 10 characters long`, + ) as Type, boolean: type.boolean, - char1: type.string.exactlyLength(10), + char1: type.string.atMostLength(10), char2: type.enumerated('a', 'b', 'c'), date1: type.Date, date2: type.string, datetime1: type.Date, datetime2: type.string, - decimal1: type.string, - decimal2: type.string, + decimal1: type.number.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + decimal2: type.number.atLeast(0).atMost(Number.MAX_SAFE_INTEGER), + decimal3: type.bigint.narrow(bigintNarrow), + decimal4: type.bigint.narrow(unsignedBigintNarrow), + decimal5: type.string, + decimal6: type.string, double1: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), double2: type.number.atLeast(0).atMost(CONSTANTS.INT48_UNSIGNED_MAX), float1: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), diff --git a/drizzle-arktype/tests/pg.test.ts b/drizzle-arktype/tests/pg.test.ts index 603c67301c..8c740f6b32 100644 --- a/drizzle-arktype/tests/pg.test.ts +++ b/drizzle-arktype/tests/pg.test.ts @@ -15,13 +15,26 @@ import { } from 'drizzle-orm/pg-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; -import { bigintNarrow, jsonSchema } from '~/column.ts'; +import { bigintNarrow, bigintStringModeSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX); +const integerNullableSchema = integerSchema.or(type.null); +const integerOptionalSchema = integerSchema.optional(); +const integerNullableOptionalSchema = integerSchema.or(type.null).optional(); + const textSchema = type.string; +const textOptionalSchema = textSchema.optional(); + +const anySchema = type('unknown.any'); + +const extendedSchema = integerSchema.atMost(1000); +const extendedNullableSchema = extendedSchema.or(type.null); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = type.string.pipe(Number); test('table - select', (t) => { const table = pgTable('test', { @@ -56,7 +69,7 @@ test('table - insert', (t) => { }); const result = createInsertSchema(table); - const expected = type({ name: textSchema, age: integerSchema.or(type.null).optional() }); + const expected = type({ name: textSchema, age: integerNullableOptionalSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -70,8 +83,8 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = type({ - name: textSchema.optional(), - age: integerSchema.or(type.null).optional(), + name: textOptionalSchema, + age: integerNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -85,7 +98,7 @@ test('view qb - select', (t) => { const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = type({ id: integerSchema, age: type('unknown.any') }); + const expected = type({ id: integerSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -110,13 +123,13 @@ test('materialized view qb - select', (t) => { const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = type({ id: integerSchema, age: type('unknown.any') }); + const expected = type({ id: integerSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view columns - select', (t) => { - const view = pgView('test', { + const view = pgMaterializedView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); @@ -132,7 +145,7 @@ test('view with nested fields - select', (t) => { id: serial().primaryKey(), name: text().notNull(), }); - const view = pgMaterializedView('test').as((qb) => + const view = pgView('test').as((qb) => qb.select({ id: table.id, nested: { @@ -146,7 +159,7 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = type({ id: integerSchema, - nested: { name: textSchema, age: type('unknown.any') }, + nested: { name: textSchema, age: anySchema }, table: { id: integerSchema, name: textSchema }, }); expectSchemaShape(t, expected).from(result); @@ -172,9 +185,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = type({ - c1: integerSchema.or(type.null), + c1: integerNullableSchema, c2: integerSchema, - c3: integerSchema.or(type.null), + c3: integerNullableSchema, c4: integerSchema, }); expectSchemaShape(t, expected).from(result); @@ -194,11 +207,11 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = type({ - c1: integerSchema.or(type.null).optional(), + c1: integerNullableOptionalSchema, c2: integerSchema, - c3: integerSchema.or(type.null).optional(), - c4: integerSchema.optional(), - c7: integerSchema.optional(), + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + c7: integerOptionalSchema, }); expectSchemaShape(t, expected).from(result); }); @@ -216,11 +229,11 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = type({ - c1: integerSchema.or(type.null).optional(), - c2: integerSchema.optional(), - c3: integerSchema.or(type.null).optional(), - c4: integerSchema.optional(), - c7: integerSchema.optional(), + c1: integerNullableOptionalSchema, + c2: integerOptionalSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + c7: integerOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -238,9 +251,9 @@ test('refine table - select', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: integerSchema.or(type.null), - c2: integerSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -262,9 +275,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = type({ - c1: integerSchema.or(type.null), - c2: integerSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -285,9 +298,9 @@ test('refine table - insert', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: integerSchema.or(type.null).optional(), - c2: integerSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: integerNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -306,9 +319,9 @@ test('refine table - update', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: integerSchema.or(type.null).optional(), - c2: integerSchema.atMost(1000).optional(), - c3: type.string.pipe(Number), + c1: integerNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -350,21 +363,21 @@ test('refine view - select', (t) => { }, }); const expected = type({ - c1: integerSchema.or(type.null), - c2: integerSchema.atMost(1000).or(type.null), - c3: type.string.pipe(Number), + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: type({ - c4: integerSchema.or(type.null), - c5: integerSchema.atMost(1000).or(type.null), - c6: type.string.pipe(Number), + c4: integerNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: type({ - c1: integerSchema.or(type.null), - c2: integerSchema.atMost(1000).or(type.null), - c3: type.string.pipe(Number), - c4: integerSchema.or(type.null), - c5: integerSchema.or(type.null), - c6: integerSchema.or(type.null), + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: integerNullableSchema, + c5: integerNullableSchema, + c6: integerNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -430,7 +443,9 @@ test('all data types', (t) => { line2: line({ mode: 'tuple' }).notNull(), macaddr: macaddr().notNull(), macaddr8: macaddr8().notNull(), - numeric: numeric().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), point1: point({ mode: 'xy' }).notNull(), point2: point({ mode: 'tuple' }).notNull(), real: real().notNull(), @@ -456,14 +471,14 @@ test('all data types', (t) => { const expected = type({ bigint1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), bigint2: type.bigint.narrow(bigintNarrow), - bigint3: type.string, + bigint3: bigintStringModeSchema, bigserial1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), bigserial2: type.bigint.narrow(bigintNarrow), bit: type(/^[01]{5}$/).describe('a string containing ones or zeros while being 5 characters long'), boolean: type.boolean, date1: type.Date, date2: type.string, - char1: type.string.exactlyLength(10), + char1: type.string.atMostLength(10), char2: type.enumerated('a', 'b', 'c'), cidr: type.string, doublePrecision: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), @@ -479,7 +494,9 @@ test('all data types', (t) => { line2: type([type.number, type.number, type.number]), macaddr: type.string, macaddr8: type.string, - numeric: type.string, + numeric1: type.number.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + numeric2: type.bigint.narrow(bigintNarrow), + numeric3: type.string, point1: type({ x: type.number, y: type.number }), point2: type([type.number, type.number]), real: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), diff --git a/drizzle-arktype/tests/singlestore.test.ts b/drizzle-arktype/tests/singlestore.test.ts index 6bdacbb843..e8a0dcb570 100644 --- a/drizzle-arktype/tests/singlestore.test.ts +++ b/drizzle-arktype/tests/singlestore.test.ts @@ -3,14 +3,35 @@ import type { Equal } from 'drizzle-orm'; import { customType, int, json, serial, singlestoreSchema, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; -import { bigintNarrow, jsonSchema, unsignedBigintNarrow } from '~/column.ts'; +import { + bigintNarrow, + bigintStringModeSchema, + jsonSchema, + unsignedBigintNarrow, + unsignedBigintStringModeSchema, +} from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX); -const serialNumberModeSchema = type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER); +const intNullableSchema = intSchema.or(type.null); +const intOptionalSchema = intSchema.optional(); +const intNullableOptionalSchema = intSchema.or(type.null).optional(); + +const serialSchema = type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER); +const serialOptionalSchema = serialSchema.optional(); + const textSchema = type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX); +const textOptionalSchema = textSchema.optional(); + +// const anySchema = type('unknown.any'); + +const extendedSchema = intSchema.atMost(1000); +// const extendedNullableSchema = extendedSchema.or(type.null); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = type.string.pipe(Number); test('table - select', (t) => { const table = singlestoreTable('test', { @@ -19,7 +40,7 @@ test('table - select', (t) => { }); const result = createSelectSchema(table); - const expected = type({ id: serialNumberModeSchema, name: textSchema }); + const expected = type({ id: serialSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -32,7 +53,7 @@ test('table in schema - select', (tc) => { }); const result = createSelectSchema(table); - const expected = type({ id: serialNumberModeSchema, name: textSchema }); + const expected = type({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -46,9 +67,9 @@ test('table - insert', (t) => { const result = createInsertSchema(table); const expected = type({ - id: serialNumberModeSchema.optional(), + id: serialOptionalSchema, name: textSchema, - age: intSchema.or(type.null).optional(), + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -63,9 +84,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = type({ - id: serialNumberModeSchema.optional(), - name: textSchema.optional(), - age: intSchema.or(type.null).optional(), + id: serialOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -81,7 +102,7 @@ test('table - update', (t) => { // const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); // const result = createSelectSchema(view); -// const expected = v.object({ id: serialNumberModeSchema, age: v.any() }); +// const expected = v.object({ id: serialSchema, age: anySchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); @@ -93,7 +114,7 @@ test('table - update', (t) => { // }).as(sql``); // const result = createSelectSchema(view); -// const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); +// const expected = v.object({ id: serialSchema, name: textSchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); @@ -116,9 +137,9 @@ test('table - update', (t) => { // const result = createSelectSchema(view); // const expected = v.object({ -// id: serialNumberModeSchema, -// nested: v.object({ name: textSchema, age: v.any() }), -// table: v.object({ id: serialNumberModeSchema, name: textSchema }), +// id: serialSchema, +// nested: v.object({ name: textSchema, age: anySchema }), +// table: v.object({ id: serialSchema, name: textSchema }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); @@ -134,9 +155,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = type({ - c1: intSchema.or(type.null), + c1: intNullableSchema, c2: intSchema, - c3: intSchema.or(type.null), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(t, expected).from(result); @@ -154,10 +175,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = type({ - c1: intSchema.or(type.null).optional(), + c1: intNullableOptionalSchema, c2: intSchema, - c3: intSchema.or(type.null).optional(), - c4: intSchema.optional(), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -174,10 +195,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.optional(), - c3: intSchema.or(type.null).optional(), - c4: intSchema.optional(), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -195,9 +216,9 @@ test('refine table - select', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -219,9 +240,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -242,9 +263,9 @@ test('refine table - insert', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -263,9 +284,9 @@ test('refine table - update', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.atMost(1000).optional(), - c3: type.string.pipe(Number), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -307,21 +328,21 @@ test('refine table - update', (t) => { // }, // }); // const expected = v.object({ -// c1: v.nullable(intSchema), -// c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), -// c3: v.pipe(type.string, v.transform(Number)), +// c1: intNullableSchema, +// c2: extendedNullableSchema, +// c3: customSchema, // nested: v.object({ -// c4: v.nullable(intSchema), -// c5: v.nullable(v.pipe(intSchema, v.maxValue(1000))), -// c6: v.pipe(type.string, v.transform(Number)), +// c4: intNullableSchema,, +// c5: extendedNullableSchema, +// c6: customSchema, // }), // table: v.object({ // c1: v.nullable(intSchema), -// c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), -// c3: v.pipe(type.string, v.transform(Number)), -// c4: v.nullable(intSchema), -// c5: v.nullable(intSchema), -// c6: v.nullable(intSchema), +// c2: extendedNullableSchema, +// c3: customSchema, +// c4: intNullableSchema,, +// c5: intNullableSchema,, +// c6: intNullableSchema,, // }), // }); // expectSchemaShape(t, expected).from(result); @@ -356,6 +377,7 @@ test('all data types', (t) => { longtext, mediumtext, tinytext, + vector, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), @@ -371,8 +393,12 @@ test('all data types', (t) => { date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), - decimal1: decimal().notNull(), - decimal2: decimal({ unsigned: true }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'number', unsigned: true }).notNull(), + decimal3: decimal({ mode: 'bigint' }).notNull(), + decimal4: decimal({ mode: 'bigint', unsigned: true }).notNull(), + decimal5: decimal({ mode: 'string' }).notNull(), + decimal6: decimal({ mode: 'string', unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), @@ -404,6 +430,14 @@ test('all data types', (t) => { mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), + vector: vector({ + dimensions: 3, + elementType: 'F32', + }).notNull(), + vector2: vector({ + dimensions: 2, + elementType: 'I64', + }).notNull(), })); const result = createSelectSchema(table); @@ -412,20 +446,24 @@ test('all data types', (t) => { bigint2: type.bigint.narrow(bigintNarrow), bigint3: type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER), bigint4: type.bigint.narrow(unsignedBigintNarrow), - bigint5: type.string, - bigint6: type.string, - binary: type(`/^[01]{10}$/`).describe(`a string containing ones or zeros while being 10 characters long`) as Type< - string - >, + bigint5: bigintStringModeSchema, + bigint6: unsignedBigintStringModeSchema, + binary: type(`/^[01]{0,10}$/`).describe( + `a string containing ones or zeros while being up to 10 characters long`, + ) as Type, boolean: type.boolean, - char1: type.string.exactlyLength(10), + char1: type.string.atMostLength(10), char2: type.enumerated('a', 'b', 'c'), date1: type.Date, date2: type.string, datetime1: type.Date, datetime2: type.string, - decimal1: type.string, - decimal2: type.string, + decimal1: type.number.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + decimal2: type.number.atLeast(0).atMost(Number.MAX_SAFE_INTEGER), + decimal3: type.bigint.narrow(bigintNarrow), + decimal4: type.bigint.narrow(unsignedBigintNarrow), + decimal5: type.string, + decimal6: type.string, double1: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), double2: type.number.atLeast(0).atMost(CONSTANTS.INT48_UNSIGNED_MAX), float1: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), @@ -459,6 +497,8 @@ test('all data types', (t) => { mediumtext2: type.enumerated('a', 'b', 'c'), tinytext1: type.string.atMostLength(CONSTANTS.INT8_UNSIGNED_MAX), tinytext2: type.enumerated('a', 'b', 'c'), + vector: type.number.array().exactlyLength(3), + vector2: type.bigint.array().exactlyLength(2), }); expectSchemaShape(t, expected).from(result); Expect>(); diff --git a/drizzle-arktype/tests/sqlite.test.ts b/drizzle-arktype/tests/sqlite.test.ts index e7a01cb434..a08c29cec5 100644 --- a/drizzle-arktype/tests/sqlite.test.ts +++ b/drizzle-arktype/tests/sqlite.test.ts @@ -1,4 +1,4 @@ -import { Type, type } from 'arktype'; +import { type Type, type } from 'arktype'; import { type Equal, sql } from 'drizzle-orm'; import { blob, customType, int, sqliteTable, sqliteView, text } from 'drizzle-orm/sqlite-core'; import type { TopLevelCondition } from 'json-rules-engine'; @@ -9,7 +9,20 @@ import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../s import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER); +const intNullableSchema = intSchema.or(type.null); +const intOptionalSchema = intSchema.optional(); +const intNullableOptionalSchema = intSchema.or(type.null).optional(); + const textSchema = type.string; +const textOptionalSchema = textSchema.optional(); + +const anySchema = type('unknown.any'); + +const extendedSchema = intSchema.atMost(1000); +const extendedNullableSchema = extendedSchema.or(type.null); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = type.string.pipe(Number); test('table - select', (t) => { const table = sqliteTable('test', { @@ -31,7 +44,7 @@ test('table - insert', (t) => { }); const result = createInsertSchema(table); - const expected = type({ id: intSchema.optional(), name: textSchema, age: intSchema.or(type.null).optional() }); + const expected = type({ id: intSchema.optional(), name: textSchema, age: intNullableOptionalSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -45,9 +58,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = type({ - id: intSchema.optional(), - name: textSchema.optional(), - age: intSchema.or(type.null).optional(), + id: intOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -61,7 +74,7 @@ test('view qb - select', (t) => { const view = sqliteView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = type({ id: intSchema, age: type('unknown.any') }); + const expected = type({ id: intSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -97,7 +110,7 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = type({ id: intSchema, - nested: type({ name: textSchema, age: type('unknown.any') }), + nested: type({ name: textSchema, age: anySchema }), table: type({ id: intSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); @@ -114,9 +127,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = type({ - c1: intSchema.or(type.null), + c1: intNullableSchema, c2: intSchema, - c3: intSchema.or(type.null), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(t, expected).from(result); @@ -134,10 +147,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = type({ - c1: intSchema.or(type.null).optional(), + c1: intNullableOptionalSchema, c2: intSchema, - c3: intSchema.or(type.null).optional(), - c4: intSchema.optional(), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -154,10 +167,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.optional(), - c3: intSchema.or(type.null).optional(), - c4: intSchema.optional(), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -175,9 +188,9 @@ test('refine table - select', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -199,9 +212,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -222,9 +235,9 @@ test('refine table - insert', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.atMost(1000), - c3: type.string.pipe(Number), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -243,9 +256,9 @@ test('refine table - update', (t) => { c3: type.string.pipe(Number), }); const expected = type({ - c1: intSchema.or(type.null).optional(), - c2: intSchema.atMost(1000).optional(), - c3: type.string.pipe(Number), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -287,21 +300,21 @@ test('refine view - select', (t) => { }, }); const expected = type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000).or(type.null), - c3: type.string.pipe(Number), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: type({ - c4: intSchema.or(type.null), - c5: intSchema.atMost(1000).or(type.null), - c6: type.string.pipe(Number), + c4: intNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: type({ - c1: intSchema.or(type.null), - c2: intSchema.atMost(1000).or(type.null), - c3: type.string.pipe(Number), - c4: intSchema.or(type.null), - c5: intSchema.or(type.null), - c6: intSchema.or(type.null), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: intNullableSchema, + c5: intNullableSchema, + c6: intNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -323,7 +336,9 @@ test('all data types', (t) => { integer2: integer({ mode: 'boolean' }).notNull(), integer3: integer({ mode: 'timestamp' }).notNull(), integer4: integer({ mode: 'timestamp_ms' }).notNull(), - numeric: numeric().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), real: real().notNull(), text1: text({ mode: 'text' }).notNull(), text2: text({ mode: 'text', length: 10 }).notNull(), @@ -340,7 +355,9 @@ test('all data types', (t) => { integer2: type.boolean, integer3: type.Date, integer4: type.Date, - numeric: type.string, + numeric1: type.number.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), + numeric2: type.bigint.narrow(bigintNarrow), + numeric3: type.string, real: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), text1: type.string, text2: type.string.atMostLength(10), diff --git a/drizzle-arktype/tests/utils.ts b/drizzle-arktype/tests/utils.ts index 9c51655337..e3e1366c17 100644 --- a/drizzle-arktype/tests/utils.ts +++ b/drizzle-arktype/tests/utils.ts @@ -1,7 +1,7 @@ -import { Type } from 'arktype'; -import { expect, type TaskContext } from 'vitest'; +import type { Type } from 'arktype'; +import { expect, type TestContext } from 'vitest'; -export function expectSchemaShape>(t: TaskContext, expected: T) { +export function expectSchemaShape>(t: TestContext, expected: T) { return { from(actual: T) { expect(actual.json).toStrictEqual(expected.json); diff --git a/drizzle-arktype/tsconfig.json b/drizzle-arktype/tsconfig.json index a8d7c00116..bfa98b9e29 100644 --- a/drizzle-arktype/tsconfig.json +++ b/drizzle-arktype/tsconfig.json @@ -5,6 +5,7 @@ "baseUrl": ".", "declaration": true, "noEmit": true, + "allowImportingTsExtensions": true, "paths": { "~/*": ["src/*"] } diff --git a/drizzle-arktype/vitest.config.ts b/drizzle-arktype/vitest.config.ts index 1f0eb7ad9a..d767b570bd 100644 --- a/drizzle-arktype/vitest.config.ts +++ b/drizzle-arktype/vitest.config.ts @@ -1,4 +1,5 @@ import tsconfigPaths from 'vite-tsconfig-paths'; +// oxlint-disable-next-line extensions import { defineConfig } from 'vitest/config'; export default defineConfig({ diff --git a/drizzle-kit/.gitignore b/drizzle-kit/.gitignore index 4916f095a2..059f93b7fb 100644 --- a/drizzle-kit/.gitignore +++ b/drizzle-kit/.gitignore @@ -3,22 +3,26 @@ !src !imports-checker + !tests +tests/**/tmp/ !vitest.config.ts !README.md !CONTRIBUTING.md -!schema.ts !.eslint !.gitignore !package.json !tsconfig.json +!tsconfig.typetest.json !tsconfig.cli-types.json !tsconfig.build.json !pnpm-lock.yaml !.github !build.ts !build.dev.ts +!build.ext.ts +!build.cli.ts tests/test.ts diff --git a/drizzle-kit/README.md b/drizzle-kit/README.md index bd69a4d3d8..c98ddcd014 100644 --- a/drizzle-kit/README.md +++ b/drizzle-kit/README.md @@ -7,6 +7,7 @@ Drizzle Kit is a CLI migrator tool for Drizzle ORM. It is probably the one and o Check the full documentation on [the website](https://orm.drizzle.team/kit-docs/overview). + ### How it works Drizzle Kit traverses a schema module and generates a snapshot to compare with the previous version, if there is one. diff --git a/drizzle-kit/build.cli.ts b/drizzle-kit/build.cli.ts new file mode 100644 index 0000000000..0e34d05836 --- /dev/null +++ b/drizzle-kit/build.cli.ts @@ -0,0 +1,42 @@ +/// +import * as esbuild from 'esbuild'; +import pkg from './package.json'; + +const driversPackages = [ + // postgres drivers + 'pg', + 'postgres', + '@vercel/postgres', + '@neondatabase/serverless', + '@electric-sql/pglite', + // mysql drivers + 'mysql2', + '@planetscale/database', + // sqlite drivers + '@libsql/client', + 'better-sqlite3', + 'bun:sqlite', + '@sqlitecloud/drivers', + '@tursodatabase/database', + 'bun', +]; + +esbuild.buildSync({ + entryPoints: ['./src/cli/index.ts'], + bundle: true, + outfile: 'dist/bin.cjs', + format: 'cjs', + target: 'node16', + platform: 'node', + define: { + 'process.env.DRIZZLE_KIT_VERSION': `"${pkg.version}"`, + }, + external: [ + 'esbuild', + 'drizzle-orm', + ...driversPackages, + ], + banner: { + js: `#!/usr/bin/env node`, + }, +}); diff --git a/drizzle-kit/build.dev.ts b/drizzle-kit/build.dev.ts index 58879d9c17..3997dda5ba 100644 --- a/drizzle-kit/build.dev.ts +++ b/drizzle-kit/build.dev.ts @@ -7,27 +7,19 @@ const driversPackages = [ 'postgres', '@vercel/postgres', '@neondatabase/serverless', + '@electric-sql/pglite', // mysql drivers 'mysql2', '@planetscale/database', // sqlite drivers '@libsql/client', 'better-sqlite3', + 'bun:sqlite', + '@sqlitecloud/drivers', + '@tursodatabase/database', + 'bun', ]; -esbuild.buildSync({ - entryPoints: ['./src/utils.ts'], - bundle: true, - outfile: 'dist/utils.js', - format: 'cjs', - target: 'node16', - platform: 'node', - external: ['drizzle-orm', 'esbuild', ...driversPackages], - banner: { - js: `#!/usr/bin/env -S node --loader @esbuild-kit/esm-loader --no-warnings`, - }, -}); - esbuild.buildSync({ entryPoints: ['./src/cli/index.ts'], bundle: true, diff --git a/drizzle-kit/build.ext.ts b/drizzle-kit/build.ext.ts new file mode 100644 index 0000000000..3a0a6a5688 --- /dev/null +++ b/drizzle-kit/build.ext.ts @@ -0,0 +1,65 @@ +import * as tsup from 'tsup'; +// import { readFileSync, writeFileSync } from 'node:fs'; + +const main = async () => { + // await tsup.build({ + // entryPoints: ['./src/utils/studio.ts'], + // outDir: './dist', + // external: [], + // splitting: false, + // dts: true, + // platform: 'browser', + // format: ['esm'], + // }); + + await tsup.build({ + entryPoints: ['./src/ext/studio-sqlite.ts'], + outDir: './dist', + external: [], + splitting: false, + dts: true, + platform: 'browser', + format: ['esm'], + }); + + await tsup.build({ + entryPoints: ['./src/ext/studio-postgres.ts'], + outDir: './dist', + external: [], + splitting: false, + dts: true, + platform: 'browser', + format: ['esm'], + // noExternal: ['@js-temporal/polyfill'], + }); + + await tsup.build({ + entryPoints: ['./src/ext/studio-mysql.ts'], + outDir: './dist', + external: [], + splitting: false, + dts: true, + platform: 'browser', + format: ['esm'], + }); + + // await tsup.build({ + // entryPoints: ['./src/utils/mover-postgres.ts', './src/utils/mover-mysql.ts'], + // outDir: './dist', + // external: [], + // splitting: false, + // dts: true, + // platform: 'browser', + // format: ['esm'], + // }); +}; + +main().then(() => { + process.exit(0); +}).catch((e) => { + console.error(e); + process.exit(1); +}); + +// const apiCjs = readFileSync('./dist/api.js', 'utf8').replace(/await import\(/g, 'require('); +// writeFileSync('./dist/api.js', apiCjs); diff --git a/drizzle-kit/build.ts b/drizzle-kit/build.ts index c07d628f27..07f054fccb 100644 --- a/drizzle-kit/build.ts +++ b/drizzle-kit/build.ts @@ -23,46 +23,6 @@ const driversPackages = [ 'bun', ]; -esbuild.buildSync({ - entryPoints: ['./src/utils.ts'], - bundle: true, - outfile: 'dist/utils.js', - format: 'cjs', - target: 'node16', - platform: 'node', - external: [ - 'commander', - 'json-diff', - 'glob', - 'esbuild', - 'drizzle-orm', - ...driversPackages, - ], - banner: { - js: `#!/usr/bin/env node`, - }, -}); - -esbuild.buildSync({ - entryPoints: ['./src/utils.ts'], - bundle: true, - outfile: 'dist/utils.mjs', - format: 'esm', - target: 'node16', - platform: 'node', - external: [ - 'commander', - 'json-diff', - 'glob', - 'esbuild', - 'drizzle-orm', - ...driversPackages, - ], - banner: { - js: `#!/usr/bin/env node`, - }, -}); - esbuild.buildSync({ entryPoints: ['./src/cli/index.ts'], bundle: true, @@ -106,7 +66,7 @@ const main = async () => { }); await tsup.build({ - entryPoints: ['./src/api.ts'], + entryPoints: ['./src/ext/api-postgres.ts', './src/ext/api-mysql.ts', './src/ext/api-sqlite.ts'], outDir: './dist', external: [ 'esbuild', @@ -126,7 +86,7 @@ const main = async () => { js: "import { createRequire } from 'module'; const require = createRequire(import.meta.url);", }; } - return undefined; + return; }, outExtension: (ctx) => { if (ctx.format === 'cjs') { @@ -142,8 +102,124 @@ const main = async () => { }, }); - const apiCjs = readFileSync('./dist/api.js', 'utf8').replace(/await import\(/g, 'require('); - writeFileSync('./dist/api.js', apiCjs); + writeFileSync( + './dist/api-postgres.js', + readFileSync('./dist/api-postgres.js', 'utf8').replace(/await import\(/g, 'require('), + ); + + // await tsup.build({ + // entryPoints: [], + // outDir: './dist', + // external: ['bun:sqlite'], + // splitting: false, + // dts: true, + // format: ['cjs', 'esm'], + // banner: (ctx) => { + // /** + // * fix dynamic require in ESM ("glob" -> "fs.realpath" requires 'fs' module) + // * @link https://github.com/drizzle-team/drizzle-orm/issues/2853 + // */ + // if (ctx.format === 'esm') { + // return { + // js: "import { createRequire } from 'module'; const require = createRequire(import.meta.url);", + // }; + // } + // return; + // }, + // outExtension: (ctx) => { + // if (ctx.format === 'cjs') { + // return { + // dts: '.d.ts', + // js: '.js', + // }; + // } + // return { + // dts: '.d.mts', + // js: '.mjs', + // }; + // }, + // }); + + writeFileSync( + './dist/api-mysql.js', + readFileSync('./dist/api-mysql.js', 'utf8').replace(/await import\(/g, 'require('), + ); + + // await tsup.build({ + // entryPoints: [], + // outDir: './dist', + // external: ['bun:sqlite'], + // splitting: false, + // dts: true, + // format: ['cjs', 'esm'], + // banner: (ctx) => { + // /** + // * fix dynamic require in ESM ("glob" -> "fs.realpath" requires 'fs' module) + // * @link https://github.com/drizzle-team/drizzle-orm/issues/2853 + // */ + // if (ctx.format === 'esm') { + // return { + // js: "import { createRequire } from 'module'; const require = createRequire(import.meta.url);", + // }; + // } + // return; + // }, + // outExtension: (ctx) => { + // if (ctx.format === 'cjs') { + // return { + // dts: '.d.ts', + // js: '.js', + // }; + // } + // return { + // dts: '.d.mts', + // js: '.mjs', + // }; + // }, + // }); + + writeFileSync( + './dist/api-sqlite.js', + readFileSync('./dist/api-sqlite.js', 'utf8').replace(/await import\(/g, 'require('), + ); + + // await tsup.build({ + // entryPoints: ['./src/ext/api-singlestore.ts'], + // outDir: './dist', + // external: ['bun:sqlite'], + // splitting: false, + // dts: true, + // format: ['cjs', 'esm'], + // banner: (ctx) => { + // /** + // * fix dynamic require in ESM ("glob" -> "fs.realpath" requires 'fs' module) + // * @link https://github.com/drizzle-team/drizzle-orm/issues/2853 + // */ + // if (ctx.format === 'esm') { + // return { + // js: "import { createRequire } from 'module'; const require = createRequire(import.meta.url);", + // }; + // } + // return; + // }, + // outExtension: (ctx) => { + // if (ctx.format === 'cjs') { + // return { + // dts: '.d.ts', + // js: '.js', + // }; + // } + // return { + // dts: '.d.mts', + // js: '.mjs', + // }; + // }, + // }); + + // writeFileSync( + // './dist/api-singlestore.js', + // readFileSync('./dist/api-singlestore.js', 'utf8').replace(/await import\(/g, 'require('), + // ); }; main().catch((e) => { diff --git a/drizzle-kit/imports-checker/analyze.ts b/drizzle-kit/imports-checker/analyze.ts index b31686e166..8db4e4f276 100644 --- a/drizzle-kit/imports-checker/analyze.ts +++ b/drizzle-kit/imports-checker/analyze.ts @@ -45,9 +45,10 @@ function init(collection: CollectionItem[]) { ImportExpr_From(kImport, importInner, kFrom, importSource) { const ruleName = importInner.children[0]!.ctorName; - const importType = ruleName === 'ImportInner_Type' || ruleName === 'ImportInner_Types' - ? 'types' - : 'data'; + const importType = + ruleName === 'ImportInner_Type' || ruleName === 'ImportInner_Types' || ruleName === 'ImportInner_AllTypes' + ? 'types' + : 'data'; collection.push({ source: importSource.children[1]!.sourceString!, diff --git a/drizzle-kit/imports-checker/checker.ts b/drizzle-kit/imports-checker/checker.ts index d8fc4b2195..06ea81475a 100644 --- a/drizzle-kit/imports-checker/checker.ts +++ b/drizzle-kit/imports-checker/checker.ts @@ -49,7 +49,7 @@ class ImportAnalyzer { private isDirectory = (path: string) => { try { return fs.lstatSync(path).isDirectory(); - } catch (e) { + } catch { return false; } }; @@ -57,7 +57,7 @@ class ImportAnalyzer { private isFile = (path: string) => { try { return fs.lstatSync(path).isFile(); - } catch (e) { + } catch { return false; } }; @@ -154,8 +154,6 @@ class ImportAnalyzer { type: type, }); } - } catch (e) { - throw e; } finally { this.visited.add(target); } diff --git a/drizzle-kit/imports-checker/grammar/grammar.ohm b/drizzle-kit/imports-checker/grammar/grammar.ohm index de1459942c..64ffeb9a2f 100644 --- a/drizzle-kit/imports-checker/grammar/grammar.ohm +++ b/drizzle-kit/imports-checker/grammar/grammar.ohm @@ -16,6 +16,7 @@ JSImports { ImportInner = | ("type" "{" NonemptyListOf ","? "}") -- Type | ("{" NonemptyListOf ","? "}") -- Types + | ("type " "*") -- AllTypes | ("{" NonemptyListOf ","? "}") -- Extended | (identifier ("," "type"? "{" NonemptyListOf ","? "}")?) -- Mixed | ("*" ("as" identifier)?) -- All @@ -29,7 +30,7 @@ JSImports { Import = identifier ("as" identifier)? TypeImport = "type" Import ("as" identifier)? - identifier = letter alnum* + identifier = (letter | "_" | "$" | "~" ) (alnum | "_" | "$" | "~")* quote = "\"" | "'" | "`" notQuote = ~quote any importSource = diff --git a/drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.d.ts b/drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.d.ts index 64b5dfb787..abe3d43677 100644 --- a/drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.d.ts +++ b/drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.d.ts @@ -31,6 +31,7 @@ export interface JSImportsActionDict extends BaseActionDict { arg2: IterationNode, arg3: TerminalNode, ) => T; + ImportInner_AllTypes?: (this: NonterminalNode, arg0: TerminalNode, arg1: TerminalNode) => T; ImportInner_Extended?: ( this: NonterminalNode, arg0: TerminalNode, @@ -62,7 +63,7 @@ export interface JSImportsActionDict extends BaseActionDict { arg2: IterationNode, arg3: IterationNode, ) => T; - identifier?: (this: NonterminalNode, arg0: NonterminalNode, arg1: IterationNode) => T; + identifier?: (this: NonterminalNode, arg0: NonterminalNode | TerminalNode, arg1: IterationNode) => T; quote?: (this: NonterminalNode, arg0: TerminalNode) => T; notQuote?: (this: NonterminalNode, arg0: NonterminalNode) => T; importSource?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: TerminalNode) => T; diff --git a/drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.js b/drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.js index 9a889d66f3..c826dad18d 100644 --- a/drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.js +++ b/drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.js @@ -3,7 +3,7 @@ const result = makeRecipe([ 'grammar', { source: - 'JSImports {\n JSImports = (Expr ";"?)*\n\n Expr = \n | comment\n | stringLiteral\n | ImportExpr\n | Rest\n\n ImportExpr =\n | "import" ImportInner "from" importSource -- From\n | "import" importSource -- NoFrom\n\n Rest = (~(ImportExpr | comment | stringLiteral) any)+\n\n ImportInner = \n | ("type" "{" NonemptyListOf ","? "}") -- Type\n | ("{" NonemptyListOf ","? "}") -- Types\n | ("{" NonemptyListOf ","? "}") -- Extended\n | (identifier ("," "type"? "{" NonemptyListOf ","? "}")?) -- Mixed\n | ("*" ("as" identifier)?) -- All\n | (identifier ("as" identifier)?) -- Default\n \n\n ImportExtendedSelection = TypeImport | Import\n ImportExtendedSelectionTypes = TypeImport\n ImportExtendedSelectionTypeless = Import\n\n Import = identifier ("as" identifier)?\n TypeImport = "type" Import ("as" identifier)?\n\n identifier = letter alnum*\n quote = "\\"" | "\'" | "`"\n notQuote = ~quote any\n importSource =\n | "\\"" notQuote+ "\\""\n | "\'" notQuote+ "\'"\n | "`" notQuote+ "`"\n\n lineTerminator = "\\n" | "\\r" | "\\u2028" | "\\u2029"\n lineTerminatorSequence = "\\n" | "\\r" ~"\\n" | "\\u2028" | "\\u2029" | "\\r\\n"\n \n comment = multiLineComment | singleLineComment\n\n multiLineComment = "/*" (~"*/" any)* "*/"\n singleLineComment = "//" (~lineTerminator any)*\n\n stringLiteral =\n | "\\"" doubleStringCharacter* "\\""\n | "\'" singleStringCharacter* "\'"\n | "`" templateStringCharacter* "`"\n doubleStringCharacter =\n | ~("\\"" | "\\\\" | lineTerminator) any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n | lineContinuation -- LineContinuation\n singleStringCharacter =\n | ~("\'" | "\\\\" | lineTerminator) any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n | lineContinuation -- LineContinuation\n templateStringCharacter = \n | ~ ("`" | "\\\\") any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n lineContinuation = "\\\\" lineTerminatorSequence\n escapeSequence = unicodeEscapeSequence | hexEscapeSequence | octalEscapeSequence | characterEscapeSequence\n characterEscapeSequence = singleEscapeCharacter | nonEscapeCharacter\n singleEscapeCharacter = "\'" | "\\"" | "\\\\" | "b" | "f" | "n" | "r" | "t" | "v"\n nonEscapeCharacter = ~(escapeCharacter | lineTerminator) any\n escapeCharacter = singleEscapeCharacter | decimalDigit | "x" | "u"\n octalEscapeSequence =\n | zeroToThree octalDigit octalDigit -- Whole\n | fourToSeven octalDigit -- EightTimesfourToSeven\n | zeroToThree octalDigit ~decimalDigit -- EightTimesZeroToThree\n | octalDigit ~decimalDigit -- Octal\n hexEscapeSequence = "x" hexDigit hexDigit\n unicodeEscapeSequence = "u" hexDigit hexDigit hexDigit hexDigit\n\n zeroToThree = "0".."3"\n fourToSeven = "4".."7"\n decimalDigit = "0".."9"\n nonZeroDigit = "1".."9"\n octalDigit = "0".."7"\n\n regularExpressionLiteral = "/" regularExpressionBody "/" regularExpressionFlags\n regularExpressionBody = regularExpressionFirstChar regularExpressionChar*\n regularExpressionFirstChar =\n | ~("*" | "\\\\" | "/" | "[") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n | regularExpressionClass\n regularExpressionChar = ~("\\\\" | "/" | "[") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n | regularExpressionClass\n regularExpressionBackslashSequence = "\\\\" regularExpressionNonTerminator\n regularExpressionNonTerminator = ~(lineTerminator) any\n regularExpressionClass = "[" regularExpressionClassChar* "]"\n regularExpressionClassChar =\n | ~("]" | "\\\\") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n regularExpressionFlags = identifierPart*\n\n multiLineCommentNoNL = "/*" (~("*/" | lineTerminator) any)* "*/"\n\n identifierStart =\n | letter | "$" | "_"\n | "\\\\" unicodeEscapeSequence -- escaped\n identifierPart =\n | identifierStart | unicodeCombiningMark\n | unicodeDigit | unicodeConnectorPunctuation\n | "\\u200C" | "\\u200D"\n letter += unicodeCategoryNl\n unicodeCategoryNl\n = "\\u2160".."\\u2182" | "\\u3007" | "\\u3021".."\\u3029"\n unicodeDigit (a digit)\n = "\\u0030".."\\u0039" | "\\u0660".."\\u0669" | "\\u06F0".."\\u06F9" | "\\u0966".."\\u096F" | "\\u09E6".."\\u09EF" | "\\u0A66".."\\u0A6F" | "\\u0AE6".."\\u0AEF" | "\\u0B66".."\\u0B6F" | "\\u0BE7".."\\u0BEF" | "\\u0C66".."\\u0C6F" | "\\u0CE6".."\\u0CEF" | "\\u0D66".."\\u0D6F" | "\\u0E50".."\\u0E59" | "\\u0ED0".."\\u0ED9" | "\\u0F20".."\\u0F29" | "\\uFF10".."\\uFF19"\n\n unicodeCombiningMark (a Unicode combining mark)\n = "\\u0300".."\\u0345" | "\\u0360".."\\u0361" | "\\u0483".."\\u0486" | "\\u0591".."\\u05A1" | "\\u05A3".."\\u05B9" | "\\u05BB".."\\u05BD" | "\\u05BF".."\\u05BF" | "\\u05C1".."\\u05C2" | "\\u05C4".."\\u05C4" | "\\u064B".."\\u0652" | "\\u0670".."\\u0670" | "\\u06D6".."\\u06DC" | "\\u06DF".."\\u06E4" | "\\u06E7".."\\u06E8" | "\\u06EA".."\\u06ED" | "\\u0901".."\\u0902" | "\\u093C".."\\u093C" | "\\u0941".."\\u0948" | "\\u094D".."\\u094D" | "\\u0951".."\\u0954" | "\\u0962".."\\u0963" | "\\u0981".."\\u0981" | "\\u09BC".."\\u09BC" | "\\u09C1".."\\u09C4" | "\\u09CD".."\\u09CD" | "\\u09E2".."\\u09E3" | "\\u0A02".."\\u0A02" | "\\u0A3C".."\\u0A3C" | "\\u0A41".."\\u0A42" | "\\u0A47".."\\u0A48" | "\\u0A4B".."\\u0A4D" | "\\u0A70".."\\u0A71" | "\\u0A81".."\\u0A82" | "\\u0ABC".."\\u0ABC" | "\\u0AC1".."\\u0AC5" | "\\u0AC7".."\\u0AC8" | "\\u0ACD".."\\u0ACD" | "\\u0B01".."\\u0B01" | "\\u0B3C".."\\u0B3C" | "\\u0B3F".."\\u0B3F" | "\\u0B41".."\\u0B43" | "\\u0B4D".."\\u0B4D" | "\\u0B56".."\\u0B56" | "\\u0B82".."\\u0B82" | "\\u0BC0".."\\u0BC0" | "\\u0BCD".."\\u0BCD" | "\\u0C3E".."\\u0C40" | "\\u0C46".."\\u0C48" | "\\u0C4A".."\\u0C4D" | "\\u0C55".."\\u0C56" | "\\u0CBF".."\\u0CBF" | "\\u0CC6".."\\u0CC6" | "\\u0CCC".."\\u0CCD" | "\\u0D41".."\\u0D43" | "\\u0D4D".."\\u0D4D" | "\\u0E31".."\\u0E31" | "\\u0E34".."\\u0E3A" | "\\u0E47".."\\u0E4E" | "\\u0EB1".."\\u0EB1" | "\\u0EB4".."\\u0EB9" | "\\u0EBB".."\\u0EBC" | "\\u0EC8".."\\u0ECD" | "\\u0F18".."\\u0F19" | "\\u0F35".."\\u0F35" | "\\u0F37".."\\u0F37" | "\\u0F39".."\\u0F39" | "\\u0F71".."\\u0F7E" | "\\u0F80".."\\u0F84" | "\\u0F86".."\\u0F87" | "\\u0F90".."\\u0F95" | "\\u0F97".."\\u0F97" | "\\u0F99".."\\u0FAD" | "\\u0FB1".."\\u0FB7" | "\\u0FB9".."\\u0FB9" | "\\u20D0".."\\u20DC" | "\\u20E1".."\\u20E1" | "\\u302A".."\\u302F" | "\\u3099".."\\u309A" | "\\uFB1E".."\\uFB1E" | "\\uFE20".."\\uFE23"\n\n unicodeConnectorPunctuation = "\\u005F" | "\\u203F".."\\u2040" | "\\u30FB" | "\\uFE33".."\\uFE34" | "\\uFE4D".."\\uFE4F" | "\\uFF3F" | "\\uFF65"\n unicodeSpaceSeparator = "\\u2000".."\\u200B" | "\\u3000"\n\n}', + 'JSImports {\n JSImports = (Expr ";"?)*\n\n Expr = \n | comment\n | stringLiteral\n | ImportExpr\n | Rest\n\n ImportExpr =\n | "import" ImportInner "from" importSource -- From\n | "import" importSource -- NoFrom\n\n Rest = (~(ImportExpr | comment | stringLiteral) any)+\n\n ImportInner = \n | ("type" "{" NonemptyListOf ","? "}") -- Type\n | ("{" NonemptyListOf ","? "}") -- Types\n | ("type " "*") -- AllTypes\n | ("{" NonemptyListOf ","? "}") -- Extended\n | (identifier ("," "type"? "{" NonemptyListOf ","? "}")?) -- Mixed\n | ("*" ("as" identifier)?) -- All\n | (identifier ("as" identifier)?) -- Default\n \n\n ImportExtendedSelection = TypeImport | Import\n ImportExtendedSelectionTypes = TypeImport\n ImportExtendedSelectionTypeless = Import\n\n Import = identifier ("as" identifier)?\n TypeImport = "type" Import ("as" identifier)?\n\n identifier = (letter | "_" | "$" | "~" ) (alnum | "_" | "$" | "~")*\n quote = "\\"" | "\'" | "`"\n notQuote = ~quote any\n importSource =\n | "\\"" notQuote+ "\\""\n | "\'" notQuote+ "\'"\n | "`" notQuote+ "`"\n\n lineTerminator = "\\n" | "\\r" | "\\u2028" | "\\u2029"\n lineTerminatorSequence = "\\n" | "\\r" ~"\\n" | "\\u2028" | "\\u2029" | "\\r\\n"\n \n comment = multiLineComment | singleLineComment\n\n multiLineComment = "/*" (~"*/" any)* "*/"\n singleLineComment = "//" (~lineTerminator any)*\n\n stringLiteral =\n | "\\"" doubleStringCharacter* "\\""\n | "\'" singleStringCharacter* "\'"\n | "`" templateStringCharacter* "`"\n doubleStringCharacter =\n | ~("\\"" | "\\\\" | lineTerminator) any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n | lineContinuation -- LineContinuation\n singleStringCharacter =\n | ~("\'" | "\\\\" | lineTerminator) any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n | lineContinuation -- LineContinuation\n templateStringCharacter = \n | ~ ("`" | "\\\\") any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n lineContinuation = "\\\\" lineTerminatorSequence\n escapeSequence = unicodeEscapeSequence | hexEscapeSequence | octalEscapeSequence | characterEscapeSequence\n characterEscapeSequence = singleEscapeCharacter | nonEscapeCharacter\n singleEscapeCharacter = "\'" | "\\"" | "\\\\" | "b" | "f" | "n" | "r" | "t" | "v"\n nonEscapeCharacter = ~(escapeCharacter | lineTerminator) any\n escapeCharacter = singleEscapeCharacter | decimalDigit | "x" | "u"\n octalEscapeSequence =\n | zeroToThree octalDigit octalDigit -- Whole\n | fourToSeven octalDigit -- EightTimesfourToSeven\n | zeroToThree octalDigit ~decimalDigit -- EightTimesZeroToThree\n | octalDigit ~decimalDigit -- Octal\n hexEscapeSequence = "x" hexDigit hexDigit\n unicodeEscapeSequence = "u" hexDigit hexDigit hexDigit hexDigit\n\n zeroToThree = "0".."3"\n fourToSeven = "4".."7"\n decimalDigit = "0".."9"\n nonZeroDigit = "1".."9"\n octalDigit = "0".."7"\n\n regularExpressionLiteral = "/" regularExpressionBody "/" regularExpressionFlags\n regularExpressionBody = regularExpressionFirstChar regularExpressionChar*\n regularExpressionFirstChar =\n | ~("*" | "\\\\" | "/" | "[") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n | regularExpressionClass\n regularExpressionChar = ~("\\\\" | "/" | "[") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n | regularExpressionClass\n regularExpressionBackslashSequence = "\\\\" regularExpressionNonTerminator\n regularExpressionNonTerminator = ~(lineTerminator) any\n regularExpressionClass = "[" regularExpressionClassChar* "]"\n regularExpressionClassChar =\n | ~("]" | "\\\\") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n regularExpressionFlags = identifierPart*\n\n multiLineCommentNoNL = "/*" (~("*/" | lineTerminator) any)* "*/"\n\n identifierStart =\n | letter | "$" | "_"\n | "\\\\" unicodeEscapeSequence -- escaped\n identifierPart =\n | identifierStart | unicodeCombiningMark\n | unicodeDigit | unicodeConnectorPunctuation\n | "\\u200C" | "\\u200D"\n letter += unicodeCategoryNl\n unicodeCategoryNl\n = "\\u2160".."\\u2182" | "\\u3007" | "\\u3021".."\\u3029"\n unicodeDigit (a digit)\n = "\\u0030".."\\u0039" | "\\u0660".."\\u0669" | "\\u06F0".."\\u06F9" | "\\u0966".."\\u096F" | "\\u09E6".."\\u09EF" | "\\u0A66".."\\u0A6F" | "\\u0AE6".."\\u0AEF" | "\\u0B66".."\\u0B6F" | "\\u0BE7".."\\u0BEF" | "\\u0C66".."\\u0C6F" | "\\u0CE6".."\\u0CEF" | "\\u0D66".."\\u0D6F" | "\\u0E50".."\\u0E59" | "\\u0ED0".."\\u0ED9" | "\\u0F20".."\\u0F29" | "\\uFF10".."\\uFF19"\n\n unicodeCombiningMark (a Unicode combining mark)\n = "\\u0300".."\\u0345" | "\\u0360".."\\u0361" | "\\u0483".."\\u0486" | "\\u0591".."\\u05A1" | "\\u05A3".."\\u05B9" | "\\u05BB".."\\u05BD" | "\\u05BF".."\\u05BF" | "\\u05C1".."\\u05C2" | "\\u05C4".."\\u05C4" | "\\u064B".."\\u0652" | "\\u0670".."\\u0670" | "\\u06D6".."\\u06DC" | "\\u06DF".."\\u06E4" | "\\u06E7".."\\u06E8" | "\\u06EA".."\\u06ED" | "\\u0901".."\\u0902" | "\\u093C".."\\u093C" | "\\u0941".."\\u0948" | "\\u094D".."\\u094D" | "\\u0951".."\\u0954" | "\\u0962".."\\u0963" | "\\u0981".."\\u0981" | "\\u09BC".."\\u09BC" | "\\u09C1".."\\u09C4" | "\\u09CD".."\\u09CD" | "\\u09E2".."\\u09E3" | "\\u0A02".."\\u0A02" | "\\u0A3C".."\\u0A3C" | "\\u0A41".."\\u0A42" | "\\u0A47".."\\u0A48" | "\\u0A4B".."\\u0A4D" | "\\u0A70".."\\u0A71" | "\\u0A81".."\\u0A82" | "\\u0ABC".."\\u0ABC" | "\\u0AC1".."\\u0AC5" | "\\u0AC7".."\\u0AC8" | "\\u0ACD".."\\u0ACD" | "\\u0B01".."\\u0B01" | "\\u0B3C".."\\u0B3C" | "\\u0B3F".."\\u0B3F" | "\\u0B41".."\\u0B43" | "\\u0B4D".."\\u0B4D" | "\\u0B56".."\\u0B56" | "\\u0B82".."\\u0B82" | "\\u0BC0".."\\u0BC0" | "\\u0BCD".."\\u0BCD" | "\\u0C3E".."\\u0C40" | "\\u0C46".."\\u0C48" | "\\u0C4A".."\\u0C4D" | "\\u0C55".."\\u0C56" | "\\u0CBF".."\\u0CBF" | "\\u0CC6".."\\u0CC6" | "\\u0CCC".."\\u0CCD" | "\\u0D41".."\\u0D43" | "\\u0D4D".."\\u0D4D" | "\\u0E31".."\\u0E31" | "\\u0E34".."\\u0E3A" | "\\u0E47".."\\u0E4E" | "\\u0EB1".."\\u0EB1" | "\\u0EB4".."\\u0EB9" | "\\u0EBB".."\\u0EBC" | "\\u0EC8".."\\u0ECD" | "\\u0F18".."\\u0F19" | "\\u0F35".."\\u0F35" | "\\u0F37".."\\u0F37" | "\\u0F39".."\\u0F39" | "\\u0F71".."\\u0F7E" | "\\u0F80".."\\u0F84" | "\\u0F86".."\\u0F87" | "\\u0F90".."\\u0F95" | "\\u0F97".."\\u0F97" | "\\u0F99".."\\u0FAD" | "\\u0FB1".."\\u0FB7" | "\\u0FB9".."\\u0FB9" | "\\u20D0".."\\u20DC" | "\\u20E1".."\\u20E1" | "\\u302A".."\\u302F" | "\\u3099".."\\u309A" | "\\uFB1E".."\\uFB1E" | "\\uFE20".."\\uFE23"\n\n unicodeConnectorPunctuation = "\\u005F" | "\\u203F".."\\u2040" | "\\u30FB" | "\\uFE33".."\\uFE34" | "\\uFE4D".."\\uFE4F" | "\\uFF3F" | "\\uFF65"\n unicodeSpaceSeparator = "\\u2000".."\\u200B" | "\\u3000"\n\n}', }, 'JSImports', null, @@ -77,676 +77,691 @@ const result = makeRecipe([ ], ['terminal', { sourceInterval: [462, 465] }, ',']]], ['opt', { sourceInterval: [467, 471] }, ['terminal', { sourceInterval: [467, 470], }, ',']], ['terminal', { sourceInterval: [472, 475] }, '}']]], - ImportInner_Extended: ['define', { sourceInterval: [513, 610] }, null, [], ['seq', { sourceInterval: [513, 572] }, [ + ImportInner_AllTypes: ['define', { sourceInterval: [513, 610] }, null, [], ['seq', { sourceInterval: [513, 526] }, [ 'terminal', - { sourceInterval: [514, 517] }, + { sourceInterval: [514, 521] }, + 'type ', + ], ['terminal', { sourceInterval: [522, 525] }, '*']]], + ImportInner_Extended: ['define', { sourceInterval: [617, 714] }, null, [], ['seq', { sourceInterval: [617, 676] }, [ + 'terminal', + { sourceInterval: [618, 621] }, '{', - ], ['app', { sourceInterval: [518, 562] }, 'NonemptyListOf', [[ + ], ['app', { sourceInterval: [622, 666] }, 'NonemptyListOf', [[ 'app', - { sourceInterval: [533, 556] }, + { sourceInterval: [637, 660] }, 'ImportExtendedSelection', [], - ], ['terminal', { sourceInterval: [558, 561] }, ',']]], ['opt', { sourceInterval: [563, 567] }, ['terminal', { - sourceInterval: [563, 566], - }, ',']], ['terminal', { sourceInterval: [568, 571] }, '}']]], - ImportInner_Mixed: ['define', { sourceInterval: [617, 711] }, null, [], ['seq', { sourceInterval: [617, 702] }, [ + ], ['terminal', { sourceInterval: [662, 665] }, ',']]], ['opt', { sourceInterval: [667, 671] }, ['terminal', { + sourceInterval: [667, 670], + }, ',']], ['terminal', { sourceInterval: [672, 675] }, '}']]], + ImportInner_Mixed: ['define', { sourceInterval: [721, 815] }, null, [], ['seq', { sourceInterval: [721, 806] }, [ 'app', - { sourceInterval: [618, 628] }, + { sourceInterval: [722, 732] }, 'identifier', [], - ], ['opt', { sourceInterval: [629, 701] }, [ + ], ['opt', { sourceInterval: [733, 805] }, [ 'seq', - { sourceInterval: [630, 699] }, - ['terminal', { sourceInterval: [630, 633] }, ','], - ['opt', { sourceInterval: [634, 641] }, ['terminal', { sourceInterval: [634, 640] }, 'type']], - ['terminal', { sourceInterval: [642, 645] }, '{'], - ['app', { sourceInterval: [646, 690] }, 'NonemptyListOf', [[ + { sourceInterval: [734, 803] }, + ['terminal', { sourceInterval: [734, 737] }, ','], + ['opt', { sourceInterval: [738, 745] }, ['terminal', { sourceInterval: [738, 744] }, 'type']], + ['terminal', { sourceInterval: [746, 749] }, '{'], + ['app', { sourceInterval: [750, 794] }, 'NonemptyListOf', [[ 'app', - { sourceInterval: [661, 684] }, + { sourceInterval: [765, 788] }, 'ImportExtendedSelection', [], - ], ['terminal', { sourceInterval: [686, 689] }, ',']]], - ['opt', { sourceInterval: [691, 695] }, ['terminal', { sourceInterval: [691, 694] }, ',']], - ['terminal', { sourceInterval: [696, 699] }, '}'], + ], ['terminal', { sourceInterval: [790, 793] }, ',']]], + ['opt', { sourceInterval: [795, 799] }, ['terminal', { sourceInterval: [795, 798] }, ',']], + ['terminal', { sourceInterval: [800, 803] }, '}'], ]]]], - ImportInner_All: ['define', { sourceInterval: [718, 810] }, null, [], ['seq', { sourceInterval: [718, 742] }, [ + ImportInner_All: ['define', { sourceInterval: [822, 914] }, null, [], ['seq', { sourceInterval: [822, 846] }, [ 'terminal', - { sourceInterval: [719, 722] }, + { sourceInterval: [823, 826] }, '*', - ], ['opt', { sourceInterval: [723, 741] }, ['seq', { sourceInterval: [724, 739] }, ['terminal', { - sourceInterval: [724, 728], - }, 'as'], ['app', { sourceInterval: [729, 739] }, 'identifier', []]]]]], - ImportInner_Default: ['define', { sourceInterval: [817, 913] }, null, [], ['seq', { sourceInterval: [817, 848] }, [ + ], ['opt', { sourceInterval: [827, 845] }, ['seq', { sourceInterval: [828, 843] }, ['terminal', { + sourceInterval: [828, 832], + }, 'as'], ['app', { sourceInterval: [833, 843] }, 'identifier', []]]]]], + ImportInner_Default: ['define', { sourceInterval: [921, 1017] }, null, [], ['seq', { sourceInterval: [921, 952] }, [ 'app', - { sourceInterval: [818, 828] }, + { sourceInterval: [922, 932] }, 'identifier', [], - ], ['opt', { sourceInterval: [829, 847] }, ['seq', { sourceInterval: [830, 845] }, ['terminal', { - sourceInterval: [830, 834], - }, 'as'], ['app', { sourceInterval: [835, 845] }, 'identifier', []]]]]], - ImportInner: ['define', { sourceInterval: [291, 913] }, null, [], [ + ], ['opt', { sourceInterval: [933, 951] }, ['seq', { sourceInterval: [934, 949] }, ['terminal', { + sourceInterval: [934, 938], + }, 'as'], ['app', { sourceInterval: [939, 949] }, 'identifier', []]]]]], + ImportInner: ['define', { sourceInterval: [291, 1017] }, null, [], [ 'alt', - { sourceInterval: [310, 913] }, + { sourceInterval: [310, 1017] }, ['app', { sourceInterval: [312, 386] }, 'ImportInner_Type', []], ['app', { sourceInterval: [412, 476] }, 'ImportInner_Types', []], - ['app', { sourceInterval: [513, 572] }, 'ImportInner_Extended', []], - ['app', { sourceInterval: [617, 702] }, 'ImportInner_Mixed', []], - ['app', { sourceInterval: [718, 742] }, 'ImportInner_All', []], - ['app', { sourceInterval: [817, 848] }, 'ImportInner_Default', []], + ['app', { sourceInterval: [513, 526] }, 'ImportInner_AllTypes', []], + ['app', { sourceInterval: [617, 676] }, 'ImportInner_Extended', []], + ['app', { sourceInterval: [721, 806] }, 'ImportInner_Mixed', []], + ['app', { sourceInterval: [822, 846] }, 'ImportInner_All', []], + ['app', { sourceInterval: [921, 952] }, 'ImportInner_Default', []], ]], - ImportExtendedSelection: ['define', { sourceInterval: [924, 969] }, null, [], [ + ImportExtendedSelection: ['define', { sourceInterval: [1028, 1073] }, null, [], [ 'alt', - { sourceInterval: [950, 969] }, - ['app', { sourceInterval: [950, 960] }, 'TypeImport', []], - ['app', { sourceInterval: [963, 969] }, 'Import', []], + { sourceInterval: [1054, 1073] }, + ['app', { sourceInterval: [1054, 1064] }, 'TypeImport', []], + ['app', { sourceInterval: [1067, 1073] }, 'Import', []], ]], - ImportExtendedSelectionTypes: ['define', { sourceInterval: [974, 1015] }, null, [], [ + ImportExtendedSelectionTypes: ['define', { sourceInterval: [1078, 1119] }, null, [], [ 'app', - { sourceInterval: [1005, 1015] }, + { sourceInterval: [1109, 1119] }, 'TypeImport', [], ]], - ImportExtendedSelectionTypeless: ['define', { sourceInterval: [1020, 1060] }, null, [], [ + ImportExtendedSelectionTypeless: ['define', { sourceInterval: [1124, 1164] }, null, [], [ 'app', - { sourceInterval: [1054, 1060] }, + { sourceInterval: [1158, 1164] }, 'Import', [], ]], - Import: ['define', { sourceInterval: [1066, 1104] }, null, [], ['seq', { sourceInterval: [1075, 1104] }, [ + Import: ['define', { sourceInterval: [1170, 1208] }, null, [], ['seq', { sourceInterval: [1179, 1208] }, [ 'app', - { sourceInterval: [1075, 1085] }, + { sourceInterval: [1179, 1189] }, 'identifier', [], - ], ['opt', { sourceInterval: [1086, 1104] }, ['seq', { sourceInterval: [1087, 1102] }, ['terminal', { - sourceInterval: [1087, 1091], - }, 'as'], ['app', { sourceInterval: [1092, 1102] }, 'identifier', []]]]]], - TypeImport: ['define', { sourceInterval: [1109, 1154] }, null, [], [ + ], ['opt', { sourceInterval: [1190, 1208] }, ['seq', { sourceInterval: [1191, 1206] }, ['terminal', { + sourceInterval: [1191, 1195], + }, 'as'], ['app', { sourceInterval: [1196, 1206] }, 'identifier', []]]]]], + TypeImport: ['define', { sourceInterval: [1213, 1258] }, null, [], [ 'seq', - { sourceInterval: [1122, 1154] }, - ['terminal', { sourceInterval: [1122, 1128] }, 'type'], - ['app', { sourceInterval: [1129, 1135] }, 'Import', []], - ['opt', { sourceInterval: [1136, 1154] }, ['seq', { sourceInterval: [1137, 1152] }, ['terminal', { - sourceInterval: [1137, 1141], - }, 'as'], ['app', { sourceInterval: [1142, 1152] }, 'identifier', []]]], - ]], - identifier: ['define', { sourceInterval: [1160, 1186] }, null, [], ['seq', { sourceInterval: [1173, 1186] }, [ - 'app', - { sourceInterval: [1173, 1179] }, - 'letter', - [], - ], ['star', { sourceInterval: [1180, 1186] }, ['app', { sourceInterval: [1180, 1185] }, 'alnum', []]]]], - quote: ['define', { sourceInterval: [1191, 1215] }, null, [], [ + { sourceInterval: [1226, 1258] }, + ['terminal', { sourceInterval: [1226, 1232] }, 'type'], + ['app', { sourceInterval: [1233, 1239] }, 'Import', []], + ['opt', { sourceInterval: [1240, 1258] }, ['seq', { sourceInterval: [1241, 1256] }, ['terminal', { + sourceInterval: [1241, 1245], + }, 'as'], ['app', { sourceInterval: [1246, 1256] }, 'identifier', []]]], + ]], + identifier: ['define', { sourceInterval: [1264, 1331] }, null, [], ['seq', { sourceInterval: [1277, 1331] }, [ + 'alt', + { sourceInterval: [1278, 1302] }, + ['app', { sourceInterval: [1278, 1284] }, 'letter', []], + ['terminal', { sourceInterval: [1287, 1290] }, '_'], + ['terminal', { sourceInterval: [1293, 1296] }, '$'], + ['terminal', { sourceInterval: [1299, 1302] }, '~'], + ], ['star', { sourceInterval: [1305, 1331] }, [ 'alt', - { sourceInterval: [1199, 1215] }, - ['terminal', { sourceInterval: [1199, 1203] }, '"'], - ['terminal', { sourceInterval: [1206, 1209] }, "'"], - ['terminal', { sourceInterval: [1212, 1215] }, '`'], - ]], - notQuote: ['define', { sourceInterval: [1220, 1241] }, null, [], ['seq', { sourceInterval: [1231, 1241] }, ['not', { - sourceInterval: [1231, 1237], - }, ['app', { sourceInterval: [1232, 1237] }, 'quote', []]], ['app', { sourceInterval: [1238, 1241] }, 'any', []]]], - importSource: ['define', { sourceInterval: [1246, 1334] }, null, [], [ + { sourceInterval: [1306, 1329] }, + ['app', { sourceInterval: [1306, 1311] }, 'alnum', []], + ['terminal', { sourceInterval: [1314, 1317] }, '_'], + ['terminal', { sourceInterval: [1320, 1323] }, '$'], + ['terminal', { sourceInterval: [1326, 1329] }, '~'], + ]]]], + quote: ['define', { sourceInterval: [1336, 1360] }, null, [], [ + 'alt', + { sourceInterval: [1344, 1360] }, + ['terminal', { sourceInterval: [1344, 1348] }, '"'], + ['terminal', { sourceInterval: [1351, 1354] }, "'"], + ['terminal', { sourceInterval: [1357, 1360] }, '`'], + ]], + notQuote: ['define', { sourceInterval: [1365, 1386] }, null, [], ['seq', { sourceInterval: [1376, 1386] }, ['not', { + sourceInterval: [1376, 1382], + }, ['app', { sourceInterval: [1377, 1382] }, 'quote', []]], ['app', { sourceInterval: [1383, 1386] }, 'any', []]]], + importSource: ['define', { sourceInterval: [1391, 1479] }, null, [], [ 'alt', - { sourceInterval: [1265, 1334] }, - ['seq', { sourceInterval: [1267, 1286] }, ['terminal', { sourceInterval: [1267, 1271] }, '"'], ['plus', { - sourceInterval: [1272, 1281], - }, ['app', { sourceInterval: [1272, 1280] }, 'notQuote', []]], [ + { sourceInterval: [1410, 1479] }, + ['seq', { sourceInterval: [1412, 1431] }, ['terminal', { sourceInterval: [1412, 1416] }, '"'], ['plus', { + sourceInterval: [1417, 1426], + }, ['app', { sourceInterval: [1417, 1425] }, 'notQuote', []]], [ 'terminal', - { sourceInterval: [1282, 1286] }, + { sourceInterval: [1427, 1431] }, '"', ]], - ['seq', { sourceInterval: [1293, 1310] }, ['terminal', { sourceInterval: [1293, 1296] }, "'"], ['plus', { - sourceInterval: [1297, 1306], - }, ['app', { sourceInterval: [1297, 1305] }, 'notQuote', []]], [ + ['seq', { sourceInterval: [1438, 1455] }, ['terminal', { sourceInterval: [1438, 1441] }, "'"], ['plus', { + sourceInterval: [1442, 1451], + }, ['app', { sourceInterval: [1442, 1450] }, 'notQuote', []]], [ 'terminal', - { sourceInterval: [1307, 1310] }, + { sourceInterval: [1452, 1455] }, "'", ]], - ['seq', { sourceInterval: [1317, 1334] }, ['terminal', { sourceInterval: [1317, 1320] }, '`'], ['plus', { - sourceInterval: [1321, 1330], - }, ['app', { sourceInterval: [1321, 1329] }, 'notQuote', []]], [ + ['seq', { sourceInterval: [1462, 1479] }, ['terminal', { sourceInterval: [1462, 1465] }, '`'], ['plus', { + sourceInterval: [1466, 1475], + }, ['app', { sourceInterval: [1466, 1474] }, 'notQuote', []]], [ 'terminal', - { sourceInterval: [1331, 1334] }, + { sourceInterval: [1476, 1479] }, '`', ]], ]], - lineTerminator: ['define', { sourceInterval: [1340, 1390] }, null, [], [ + lineTerminator: ['define', { sourceInterval: [1485, 1535] }, null, [], [ 'alt', - { sourceInterval: [1357, 1390] }, - ['terminal', { sourceInterval: [1357, 1361] }, '\n'], - ['terminal', { sourceInterval: [1364, 1368] }, '\r'], - ['terminal', { sourceInterval: [1371, 1379] }, '\u2028'], - ['terminal', { sourceInterval: [1382, 1390] }, '\u2029'], + { sourceInterval: [1502, 1535] }, + ['terminal', { sourceInterval: [1502, 1506] }, '\n'], + ['terminal', { sourceInterval: [1509, 1513] }, '\r'], + ['terminal', { sourceInterval: [1516, 1524] }, '\u2028'], + ['terminal', { sourceInterval: [1527, 1535] }, '\u2029'], ]], - lineTerminatorSequence: ['define', { sourceInterval: [1395, 1468] }, null, [], [ + lineTerminatorSequence: ['define', { sourceInterval: [1540, 1613] }, null, [], [ 'alt', - { sourceInterval: [1420, 1468] }, - ['terminal', { sourceInterval: [1420, 1424] }, '\n'], - ['seq', { sourceInterval: [1427, 1437] }, ['terminal', { sourceInterval: [1427, 1431] }, '\r'], ['not', { - sourceInterval: [1432, 1437], - }, ['terminal', { sourceInterval: [1433, 1437] }, '\n']]], - ['terminal', { sourceInterval: [1440, 1448] }, '\u2028'], - ['terminal', { sourceInterval: [1451, 1459] }, '\u2029'], - ['terminal', { sourceInterval: [1462, 1468] }, '\r\n'], - ]], - comment: ['define', { sourceInterval: [1478, 1524] }, null, [], ['alt', { sourceInterval: [1488, 1524] }, [ + { sourceInterval: [1565, 1613] }, + ['terminal', { sourceInterval: [1565, 1569] }, '\n'], + ['seq', { sourceInterval: [1572, 1582] }, ['terminal', { sourceInterval: [1572, 1576] }, '\r'], ['not', { + sourceInterval: [1577, 1582], + }, ['terminal', { sourceInterval: [1578, 1582] }, '\n']]], + ['terminal', { sourceInterval: [1585, 1593] }, '\u2028'], + ['terminal', { sourceInterval: [1596, 1604] }, '\u2029'], + ['terminal', { sourceInterval: [1607, 1613] }, '\r\n'], + ]], + comment: ['define', { sourceInterval: [1623, 1669] }, null, [], ['alt', { sourceInterval: [1633, 1669] }, [ 'app', - { sourceInterval: [1488, 1504] }, + { sourceInterval: [1633, 1649] }, 'multiLineComment', [], - ], ['app', { sourceInterval: [1507, 1524] }, 'singleLineComment', []]]], - multiLineComment: ['define', { sourceInterval: [1530, 1571] }, null, [], ['seq', { sourceInterval: [1549, 1571] }, [ + ], ['app', { sourceInterval: [1652, 1669] }, 'singleLineComment', []]]], + multiLineComment: ['define', { sourceInterval: [1675, 1716] }, null, [], ['seq', { sourceInterval: [1694, 1716] }, [ 'terminal', - { sourceInterval: [1549, 1553] }, + { sourceInterval: [1694, 1698] }, '/*', - ], ['star', { sourceInterval: [1554, 1566] }, ['seq', { sourceInterval: [1555, 1564] }, ['not', { - sourceInterval: [1555, 1560], - }, ['terminal', { sourceInterval: [1556, 1560] }, '*/']], ['app', { sourceInterval: [1561, 1564] }, 'any', []]]], [ + ], ['star', { sourceInterval: [1699, 1711] }, ['seq', { sourceInterval: [1700, 1709] }, ['not', { + sourceInterval: [1700, 1705], + }, ['terminal', { sourceInterval: [1701, 1705] }, '*/']], ['app', { sourceInterval: [1706, 1709] }, 'any', []]]], [ 'terminal', - { sourceInterval: [1567, 1571] }, + { sourceInterval: [1712, 1716] }, '*/', ]]], - singleLineComment: ['define', { sourceInterval: [1576, 1623] }, null, [], [ + singleLineComment: ['define', { sourceInterval: [1721, 1768] }, null, [], [ 'seq', - { sourceInterval: [1596, 1623] }, - ['terminal', { sourceInterval: [1596, 1600] }, '//'], - ['star', { sourceInterval: [1601, 1623] }, ['seq', { sourceInterval: [1602, 1621] }, ['not', { - sourceInterval: [1602, 1617], - }, ['app', { sourceInterval: [1603, 1617] }, 'lineTerminator', []]], [ + { sourceInterval: [1741, 1768] }, + ['terminal', { sourceInterval: [1741, 1745] }, '//'], + ['star', { sourceInterval: [1746, 1768] }, ['seq', { sourceInterval: [1747, 1766] }, ['not', { + sourceInterval: [1747, 1762], + }, ['app', { sourceInterval: [1748, 1762] }, 'lineTerminator', []]], [ 'app', - { sourceInterval: [1618, 1621] }, + { sourceInterval: [1763, 1766] }, 'any', [], ]]], ]], - stringLiteral: ['define', { sourceInterval: [1629, 1759] }, null, [], ['alt', { sourceInterval: [1649, 1759] }, [ + stringLiteral: ['define', { sourceInterval: [1774, 1904] }, null, [], ['alt', { sourceInterval: [1794, 1904] }, [ 'seq', - { sourceInterval: [1651, 1683] }, - ['terminal', { sourceInterval: [1651, 1655] }, '"'], - ['star', { sourceInterval: [1656, 1678] }, [ + { sourceInterval: [1796, 1828] }, + ['terminal', { sourceInterval: [1796, 1800] }, '"'], + ['star', { sourceInterval: [1801, 1823] }, [ 'app', - { sourceInterval: [1656, 1677] }, + { sourceInterval: [1801, 1822] }, 'doubleStringCharacter', [], ]], - ['terminal', { sourceInterval: [1679, 1683] }, '"'], - ], ['seq', { sourceInterval: [1690, 1720] }, ['terminal', { sourceInterval: [1690, 1693] }, "'"], ['star', { - sourceInterval: [1694, 1716], - }, ['app', { sourceInterval: [1694, 1715] }, 'singleStringCharacter', []]], ['terminal', { - sourceInterval: [1717, 1720], - }, "'"]], ['seq', { sourceInterval: [1727, 1759] }, ['terminal', { sourceInterval: [1727, 1730] }, '`'], ['star', { - sourceInterval: [1731, 1755], - }, ['app', { sourceInterval: [1731, 1754] }, 'templateStringCharacter', []]], ['terminal', { - sourceInterval: [1756, 1759], + ['terminal', { sourceInterval: [1824, 1828] }, '"'], + ], ['seq', { sourceInterval: [1835, 1865] }, ['terminal', { sourceInterval: [1835, 1838] }, "'"], ['star', { + sourceInterval: [1839, 1861], + }, ['app', { sourceInterval: [1839, 1860] }, 'singleStringCharacter', []]], ['terminal', { + sourceInterval: [1862, 1865], + }, "'"]], ['seq', { sourceInterval: [1872, 1904] }, ['terminal', { sourceInterval: [1872, 1875] }, '`'], ['star', { + sourceInterval: [1876, 1900], + }, ['app', { sourceInterval: [1876, 1899] }, 'templateStringCharacter', []]], ['terminal', { + sourceInterval: [1901, 1904], }, '`']]]], - doubleStringCharacter_NonEscaped: ['define', { sourceInterval: [1794, 1845] }, null, [], ['seq', { - sourceInterval: [1794, 1829], - }, ['not', { sourceInterval: [1794, 1825] }, [ + doubleStringCharacter_NonEscaped: ['define', { sourceInterval: [1939, 1990] }, null, [], ['seq', { + sourceInterval: [1939, 1974], + }, ['not', { sourceInterval: [1939, 1970] }, [ 'alt', - { sourceInterval: [1796, 1824] }, - ['terminal', { sourceInterval: [1796, 1800] }, '"'], - ['terminal', { sourceInterval: [1803, 1807] }, '\\'], - ['app', { sourceInterval: [1810, 1824] }, 'lineTerminator', []], - ]], ['app', { sourceInterval: [1826, 1829] }, 'any', []]]], - doubleStringCharacter_Escaped: ['define', { sourceInterval: [1852, 1900] }, null, [], [ + { sourceInterval: [1941, 1969] }, + ['terminal', { sourceInterval: [1941, 1945] }, '"'], + ['terminal', { sourceInterval: [1948, 1952] }, '\\'], + ['app', { sourceInterval: [1955, 1969] }, 'lineTerminator', []], + ]], ['app', { sourceInterval: [1971, 1974] }, 'any', []]]], + doubleStringCharacter_Escaped: ['define', { sourceInterval: [1997, 2045] }, null, [], [ 'seq', - { sourceInterval: [1852, 1871] }, - ['terminal', { sourceInterval: [1852, 1856] }, '\\'], - ['app', { sourceInterval: [1857, 1871] }, 'escapeSequence', []], + { sourceInterval: [1997, 2016] }, + ['terminal', { sourceInterval: [1997, 2001] }, '\\'], + ['app', { sourceInterval: [2002, 2016] }, 'escapeSequence', []], ]], - doubleStringCharacter_LineContinuation: ['define', { sourceInterval: [1907, 1964] }, null, [], [ + doubleStringCharacter_LineContinuation: ['define', { sourceInterval: [2052, 2109] }, null, [], [ 'app', - { sourceInterval: [1907, 1923] }, + { sourceInterval: [2052, 2068] }, 'lineContinuation', [], ]], - doubleStringCharacter: ['define', { sourceInterval: [1764, 1964] }, null, [], [ + doubleStringCharacter: ['define', { sourceInterval: [1909, 2109] }, null, [], [ 'alt', - { sourceInterval: [1792, 1964] }, - ['app', { sourceInterval: [1794, 1829] }, 'doubleStringCharacter_NonEscaped', []], - ['app', { sourceInterval: [1852, 1871] }, 'doubleStringCharacter_Escaped', []], - ['app', { sourceInterval: [1907, 1923] }, 'doubleStringCharacter_LineContinuation', []], - ]], - singleStringCharacter_NonEscaped: ['define', { sourceInterval: [1999, 2050] }, null, [], ['seq', { - sourceInterval: [1999, 2033], - }, ['not', { sourceInterval: [1999, 2029] }, [ + { sourceInterval: [1937, 2109] }, + ['app', { sourceInterval: [1939, 1974] }, 'doubleStringCharacter_NonEscaped', []], + ['app', { sourceInterval: [1997, 2016] }, 'doubleStringCharacter_Escaped', []], + ['app', { sourceInterval: [2052, 2068] }, 'doubleStringCharacter_LineContinuation', []], + ]], + singleStringCharacter_NonEscaped: ['define', { sourceInterval: [2144, 2195] }, null, [], ['seq', { + sourceInterval: [2144, 2178], + }, ['not', { sourceInterval: [2144, 2174] }, [ 'alt', - { sourceInterval: [2001, 2028] }, - ['terminal', { sourceInterval: [2001, 2004] }, "'"], - ['terminal', { sourceInterval: [2007, 2011] }, '\\'], - ['app', { sourceInterval: [2014, 2028] }, 'lineTerminator', []], - ]], ['app', { sourceInterval: [2030, 2033] }, 'any', []]]], - singleStringCharacter_Escaped: ['define', { sourceInterval: [2057, 2105] }, null, [], [ + { sourceInterval: [2146, 2173] }, + ['terminal', { sourceInterval: [2146, 2149] }, "'"], + ['terminal', { sourceInterval: [2152, 2156] }, '\\'], + ['app', { sourceInterval: [2159, 2173] }, 'lineTerminator', []], + ]], ['app', { sourceInterval: [2175, 2178] }, 'any', []]]], + singleStringCharacter_Escaped: ['define', { sourceInterval: [2202, 2250] }, null, [], [ 'seq', - { sourceInterval: [2057, 2076] }, - ['terminal', { sourceInterval: [2057, 2061] }, '\\'], - ['app', { sourceInterval: [2062, 2076] }, 'escapeSequence', []], + { sourceInterval: [2202, 2221] }, + ['terminal', { sourceInterval: [2202, 2206] }, '\\'], + ['app', { sourceInterval: [2207, 2221] }, 'escapeSequence', []], ]], - singleStringCharacter_LineContinuation: ['define', { sourceInterval: [2112, 2169] }, null, [], [ + singleStringCharacter_LineContinuation: ['define', { sourceInterval: [2257, 2314] }, null, [], [ 'app', - { sourceInterval: [2112, 2128] }, + { sourceInterval: [2257, 2273] }, 'lineContinuation', [], ]], - singleStringCharacter: ['define', { sourceInterval: [1969, 2169] }, null, [], [ + singleStringCharacter: ['define', { sourceInterval: [2114, 2314] }, null, [], [ 'alt', - { sourceInterval: [1997, 2169] }, - ['app', { sourceInterval: [1999, 2033] }, 'singleStringCharacter_NonEscaped', []], - ['app', { sourceInterval: [2057, 2076] }, 'singleStringCharacter_Escaped', []], - ['app', { sourceInterval: [2112, 2128] }, 'singleStringCharacter_LineContinuation', []], - ]], - templateStringCharacter_NonEscaped: ['define', { sourceInterval: [2207, 2258] }, null, [], ['seq', { - sourceInterval: [2207, 2225], - }, ['not', { sourceInterval: [2207, 2221] }, ['alt', { sourceInterval: [2210, 2220] }, ['terminal', { - sourceInterval: [2210, 2213], - }, '`'], ['terminal', { sourceInterval: [2216, 2220] }, '\\']]], [ + { sourceInterval: [2142, 2314] }, + ['app', { sourceInterval: [2144, 2178] }, 'singleStringCharacter_NonEscaped', []], + ['app', { sourceInterval: [2202, 2221] }, 'singleStringCharacter_Escaped', []], + ['app', { sourceInterval: [2257, 2273] }, 'singleStringCharacter_LineContinuation', []], + ]], + templateStringCharacter_NonEscaped: ['define', { sourceInterval: [2352, 2403] }, null, [], ['seq', { + sourceInterval: [2352, 2370], + }, ['not', { sourceInterval: [2352, 2366] }, ['alt', { sourceInterval: [2355, 2365] }, ['terminal', { + sourceInterval: [2355, 2358], + }, '`'], ['terminal', { sourceInterval: [2361, 2365] }, '\\']]], [ 'app', - { sourceInterval: [2222, 2225] }, + { sourceInterval: [2367, 2370] }, 'any', [], ]]], - templateStringCharacter_Escaped: ['define', { sourceInterval: [2265, 2318] }, null, [], [ + templateStringCharacter_Escaped: ['define', { sourceInterval: [2410, 2463] }, null, [], [ 'seq', - { sourceInterval: [2265, 2284] }, - ['terminal', { sourceInterval: [2265, 2269] }, '\\'], - ['app', { sourceInterval: [2270, 2284] }, 'escapeSequence', []], + { sourceInterval: [2410, 2429] }, + ['terminal', { sourceInterval: [2410, 2414] }, '\\'], + ['app', { sourceInterval: [2415, 2429] }, 'escapeSequence', []], ]], - templateStringCharacter: ['define', { sourceInterval: [2174, 2318] }, null, [], [ + templateStringCharacter: ['define', { sourceInterval: [2319, 2463] }, null, [], [ 'alt', - { sourceInterval: [2205, 2318] }, - ['app', { sourceInterval: [2207, 2225] }, 'templateStringCharacter_NonEscaped', []], - ['app', { sourceInterval: [2265, 2284] }, 'templateStringCharacter_Escaped', []], + { sourceInterval: [2350, 2463] }, + ['app', { sourceInterval: [2352, 2370] }, 'templateStringCharacter_NonEscaped', []], + ['app', { sourceInterval: [2410, 2429] }, 'templateStringCharacter_Escaped', []], ]], - lineContinuation: ['define', { sourceInterval: [2323, 2369] }, null, [], ['seq', { sourceInterval: [2342, 2369] }, [ + lineContinuation: ['define', { sourceInterval: [2468, 2514] }, null, [], ['seq', { sourceInterval: [2487, 2514] }, [ 'terminal', - { sourceInterval: [2342, 2346] }, + { sourceInterval: [2487, 2491] }, '\\', - ], ['app', { sourceInterval: [2347, 2369] }, 'lineTerminatorSequence', []]]], - escapeSequence: ['define', { sourceInterval: [2374, 2480] }, null, [], [ + ], ['app', { sourceInterval: [2492, 2514] }, 'lineTerminatorSequence', []]]], + escapeSequence: ['define', { sourceInterval: [2519, 2625] }, null, [], [ 'alt', - { sourceInterval: [2391, 2480] }, - ['app', { sourceInterval: [2391, 2412] }, 'unicodeEscapeSequence', []], - ['app', { sourceInterval: [2415, 2432] }, 'hexEscapeSequence', []], - ['app', { sourceInterval: [2435, 2454] }, 'octalEscapeSequence', []], - ['app', { sourceInterval: [2457, 2480] }, 'characterEscapeSequence', []], + { sourceInterval: [2536, 2625] }, + ['app', { sourceInterval: [2536, 2557] }, 'unicodeEscapeSequence', []], + ['app', { sourceInterval: [2560, 2577] }, 'hexEscapeSequence', []], + ['app', { sourceInterval: [2580, 2599] }, 'octalEscapeSequence', []], + ['app', { sourceInterval: [2602, 2625] }, 'characterEscapeSequence', []], ]], - characterEscapeSequence: ['define', { sourceInterval: [2485, 2553] }, null, [], [ + characterEscapeSequence: ['define', { sourceInterval: [2630, 2698] }, null, [], [ 'alt', - { sourceInterval: [2511, 2553] }, - ['app', { sourceInterval: [2511, 2532] }, 'singleEscapeCharacter', []], - ['app', { sourceInterval: [2535, 2553] }, 'nonEscapeCharacter', []], + { sourceInterval: [2656, 2698] }, + ['app', { sourceInterval: [2656, 2677] }, 'singleEscapeCharacter', []], + ['app', { sourceInterval: [2680, 2698] }, 'nonEscapeCharacter', []], ]], - singleEscapeCharacter: ['define', { sourceInterval: [2558, 2635] }, null, [], [ + singleEscapeCharacter: ['define', { sourceInterval: [2703, 2780] }, null, [], [ 'alt', - { sourceInterval: [2582, 2635] }, - ['terminal', { sourceInterval: [2582, 2585] }, "'"], - ['terminal', { sourceInterval: [2588, 2592] }, '"'], - ['terminal', { sourceInterval: [2595, 2599] }, '\\'], - ['terminal', { sourceInterval: [2602, 2605] }, 'b'], - ['terminal', { sourceInterval: [2608, 2611] }, 'f'], - ['terminal', { sourceInterval: [2614, 2617] }, 'n'], - ['terminal', { sourceInterval: [2620, 2623] }, 'r'], - ['terminal', { sourceInterval: [2626, 2629] }, 't'], - ['terminal', { sourceInterval: [2632, 2635] }, 'v'], - ]], - nonEscapeCharacter: ['define', { sourceInterval: [2640, 2700] }, null, [], [ + { sourceInterval: [2727, 2780] }, + ['terminal', { sourceInterval: [2727, 2730] }, "'"], + ['terminal', { sourceInterval: [2733, 2737] }, '"'], + ['terminal', { sourceInterval: [2740, 2744] }, '\\'], + ['terminal', { sourceInterval: [2747, 2750] }, 'b'], + ['terminal', { sourceInterval: [2753, 2756] }, 'f'], + ['terminal', { sourceInterval: [2759, 2762] }, 'n'], + ['terminal', { sourceInterval: [2765, 2768] }, 'r'], + ['terminal', { sourceInterval: [2771, 2774] }, 't'], + ['terminal', { sourceInterval: [2777, 2780] }, 'v'], + ]], + nonEscapeCharacter: ['define', { sourceInterval: [2785, 2845] }, null, [], [ 'seq', - { sourceInterval: [2661, 2700] }, - ['not', { sourceInterval: [2661, 2696] }, ['alt', { sourceInterval: [2663, 2695] }, [ + { sourceInterval: [2806, 2845] }, + ['not', { sourceInterval: [2806, 2841] }, ['alt', { sourceInterval: [2808, 2840] }, [ 'app', - { sourceInterval: [2663, 2678] }, + { sourceInterval: [2808, 2823] }, 'escapeCharacter', [], - ], ['app', { sourceInterval: [2681, 2695] }, 'lineTerminator', []]]], - ['app', { sourceInterval: [2697, 2700] }, 'any', []], + ], ['app', { sourceInterval: [2826, 2840] }, 'lineTerminator', []]]], + ['app', { sourceInterval: [2842, 2845] }, 'any', []], ]], - escapeCharacter: ['define', { sourceInterval: [2705, 2771] }, null, [], [ + escapeCharacter: ['define', { sourceInterval: [2850, 2916] }, null, [], [ 'alt', - { sourceInterval: [2723, 2771] }, - ['app', { sourceInterval: [2723, 2744] }, 'singleEscapeCharacter', []], - ['app', { sourceInterval: [2747, 2759] }, 'decimalDigit', []], - ['terminal', { sourceInterval: [2762, 2765] }, 'x'], - ['terminal', { sourceInterval: [2768, 2771] }, 'u'], + { sourceInterval: [2868, 2916] }, + ['app', { sourceInterval: [2868, 2889] }, 'singleEscapeCharacter', []], + ['app', { sourceInterval: [2892, 2904] }, 'decimalDigit', []], + ['terminal', { sourceInterval: [2907, 2910] }, 'x'], + ['terminal', { sourceInterval: [2913, 2916] }, 'u'], ]], - octalEscapeSequence_Whole: ['define', { sourceInterval: [2804, 2850] }, null, [], [ + octalEscapeSequence_Whole: ['define', { sourceInterval: [2949, 2995] }, null, [], [ 'seq', - { sourceInterval: [2804, 2837] }, - ['app', { sourceInterval: [2804, 2815] }, 'zeroToThree', []], - ['app', { sourceInterval: [2816, 2826] }, 'octalDigit', []], - ['app', { sourceInterval: [2827, 2837] }, 'octalDigit', []], + { sourceInterval: [2949, 2982] }, + ['app', { sourceInterval: [2949, 2960] }, 'zeroToThree', []], + ['app', { sourceInterval: [2961, 2971] }, 'octalDigit', []], + ['app', { sourceInterval: [2972, 2982] }, 'octalDigit', []], ]], - octalEscapeSequence_EightTimesfourToSeven: ['define', { sourceInterval: [2857, 2919] }, null, [], [ + octalEscapeSequence_EightTimesfourToSeven: ['define', { sourceInterval: [3002, 3064] }, null, [], [ 'seq', - { sourceInterval: [2857, 2879] }, - ['app', { sourceInterval: [2857, 2868] }, 'fourToSeven', []], - ['app', { sourceInterval: [2869, 2879] }, 'octalDigit', []], + { sourceInterval: [3002, 3024] }, + ['app', { sourceInterval: [3002, 3013] }, 'fourToSeven', []], + ['app', { sourceInterval: [3014, 3024] }, 'octalDigit', []], ]], - octalEscapeSequence_EightTimesZeroToThree: ['define', { sourceInterval: [2926, 2988] }, null, [], [ + octalEscapeSequence_EightTimesZeroToThree: ['define', { sourceInterval: [3071, 3133] }, null, [], [ 'seq', - { sourceInterval: [2926, 2962] }, - ['app', { sourceInterval: [2926, 2937] }, 'zeroToThree', []], - ['app', { sourceInterval: [2938, 2948] }, 'octalDigit', []], - ['not', { sourceInterval: [2949, 2962] }, ['app', { sourceInterval: [2950, 2962] }, 'decimalDigit', []]], + { sourceInterval: [3071, 3107] }, + ['app', { sourceInterval: [3071, 3082] }, 'zeroToThree', []], + ['app', { sourceInterval: [3083, 3093] }, 'octalDigit', []], + ['not', { sourceInterval: [3094, 3107] }, ['app', { sourceInterval: [3095, 3107] }, 'decimalDigit', []]], ]], - octalEscapeSequence_Octal: ['define', { sourceInterval: [2995, 3041] }, null, [], [ + octalEscapeSequence_Octal: ['define', { sourceInterval: [3140, 3186] }, null, [], [ 'seq', - { sourceInterval: [2995, 3019] }, - ['app', { sourceInterval: [2995, 3005] }, 'octalDigit', []], - ['not', { sourceInterval: [3006, 3019] }, ['app', { sourceInterval: [3007, 3019] }, 'decimalDigit', []]], + { sourceInterval: [3140, 3164] }, + ['app', { sourceInterval: [3140, 3150] }, 'octalDigit', []], + ['not', { sourceInterval: [3151, 3164] }, ['app', { sourceInterval: [3152, 3164] }, 'decimalDigit', []]], ]], - octalEscapeSequence: ['define', { sourceInterval: [2776, 3041] }, null, [], [ + octalEscapeSequence: ['define', { sourceInterval: [2921, 3186] }, null, [], [ 'alt', - { sourceInterval: [2802, 3041] }, - ['app', { sourceInterval: [2804, 2837] }, 'octalEscapeSequence_Whole', []], - ['app', { sourceInterval: [2857, 2879] }, 'octalEscapeSequence_EightTimesfourToSeven', []], - ['app', { sourceInterval: [2926, 2962] }, 'octalEscapeSequence_EightTimesZeroToThree', []], - ['app', { sourceInterval: [2995, 3019] }, 'octalEscapeSequence_Octal', []], + { sourceInterval: [2947, 3186] }, + ['app', { sourceInterval: [2949, 2982] }, 'octalEscapeSequence_Whole', []], + ['app', { sourceInterval: [3002, 3024] }, 'octalEscapeSequence_EightTimesfourToSeven', []], + ['app', { sourceInterval: [3071, 3107] }, 'octalEscapeSequence_EightTimesZeroToThree', []], + ['app', { sourceInterval: [3140, 3164] }, 'octalEscapeSequence_Octal', []], ]], - hexEscapeSequence: ['define', { sourceInterval: [3046, 3087] }, null, [], [ + hexEscapeSequence: ['define', { sourceInterval: [3191, 3232] }, null, [], [ 'seq', - { sourceInterval: [3066, 3087] }, - ['terminal', { sourceInterval: [3066, 3069] }, 'x'], - ['app', { sourceInterval: [3070, 3078] }, 'hexDigit', []], - ['app', { sourceInterval: [3079, 3087] }, 'hexDigit', []], + { sourceInterval: [3211, 3232] }, + ['terminal', { sourceInterval: [3211, 3214] }, 'x'], + ['app', { sourceInterval: [3215, 3223] }, 'hexDigit', []], + ['app', { sourceInterval: [3224, 3232] }, 'hexDigit', []], ]], - unicodeEscapeSequence: ['define', { sourceInterval: [3092, 3155] }, null, [], [ + unicodeEscapeSequence: ['define', { sourceInterval: [3237, 3300] }, null, [], [ 'seq', - { sourceInterval: [3116, 3155] }, - ['terminal', { sourceInterval: [3116, 3119] }, 'u'], - ['app', { sourceInterval: [3120, 3128] }, 'hexDigit', []], - ['app', { sourceInterval: [3129, 3137] }, 'hexDigit', []], - ['app', { sourceInterval: [3138, 3146] }, 'hexDigit', []], - ['app', { sourceInterval: [3147, 3155] }, 'hexDigit', []], - ]], - zeroToThree: ['define', { sourceInterval: [3161, 3183] }, null, [], [ + { sourceInterval: [3261, 3300] }, + ['terminal', { sourceInterval: [3261, 3264] }, 'u'], + ['app', { sourceInterval: [3265, 3273] }, 'hexDigit', []], + ['app', { sourceInterval: [3274, 3282] }, 'hexDigit', []], + ['app', { sourceInterval: [3283, 3291] }, 'hexDigit', []], + ['app', { sourceInterval: [3292, 3300] }, 'hexDigit', []], + ]], + zeroToThree: ['define', { sourceInterval: [3306, 3328] }, null, [], [ 'range', - { sourceInterval: [3175, 3183] }, + { sourceInterval: [3320, 3328] }, '0', '3', ]], - fourToSeven: ['define', { sourceInterval: [3188, 3210] }, null, [], [ + fourToSeven: ['define', { sourceInterval: [3333, 3355] }, null, [], [ 'range', - { sourceInterval: [3202, 3210] }, + { sourceInterval: [3347, 3355] }, '4', '7', ]], - decimalDigit: ['define', { sourceInterval: [3215, 3238] }, null, [], [ + decimalDigit: ['define', { sourceInterval: [3360, 3383] }, null, [], [ 'range', - { sourceInterval: [3230, 3238] }, + { sourceInterval: [3375, 3383] }, '0', '9', ]], - nonZeroDigit: ['define', { sourceInterval: [3243, 3266] }, null, [], [ + nonZeroDigit: ['define', { sourceInterval: [3388, 3411] }, null, [], [ 'range', - { sourceInterval: [3258, 3266] }, + { sourceInterval: [3403, 3411] }, '1', '9', ]], - octalDigit: ['define', { sourceInterval: [3271, 3292] }, null, [], [ + octalDigit: ['define', { sourceInterval: [3416, 3437] }, null, [], [ 'range', - { sourceInterval: [3284, 3292] }, + { sourceInterval: [3429, 3437] }, '0', '7', ]], - regularExpressionLiteral: ['define', { sourceInterval: [3298, 3377] }, null, [], [ + regularExpressionLiteral: ['define', { sourceInterval: [3443, 3522] }, null, [], [ 'seq', - { sourceInterval: [3325, 3377] }, - ['terminal', { sourceInterval: [3325, 3328] }, '/'], - ['app', { sourceInterval: [3329, 3350] }, 'regularExpressionBody', []], - ['terminal', { sourceInterval: [3351, 3354] }, '/'], - ['app', { sourceInterval: [3355, 3377] }, 'regularExpressionFlags', []], + { sourceInterval: [3470, 3522] }, + ['terminal', { sourceInterval: [3470, 3473] }, '/'], + ['app', { sourceInterval: [3474, 3495] }, 'regularExpressionBody', []], + ['terminal', { sourceInterval: [3496, 3499] }, '/'], + ['app', { sourceInterval: [3500, 3522] }, 'regularExpressionFlags', []], ]], - regularExpressionBody: ['define', { sourceInterval: [3382, 3455] }, null, [], [ + regularExpressionBody: ['define', { sourceInterval: [3527, 3600] }, null, [], [ 'seq', - { sourceInterval: [3406, 3455] }, - ['app', { sourceInterval: [3406, 3432] }, 'regularExpressionFirstChar', []], - ['star', { sourceInterval: [3433, 3455] }, [ + { sourceInterval: [3551, 3600] }, + ['app', { sourceInterval: [3551, 3577] }, 'regularExpressionFirstChar', []], + ['star', { sourceInterval: [3578, 3600] }, [ 'app', - { sourceInterval: [3433, 3454] }, + { sourceInterval: [3578, 3599] }, 'regularExpressionChar', [], ]], ]], - regularExpressionFirstChar: ['define', { sourceInterval: [3460, 3621] }, null, [], ['alt', { - sourceInterval: [3493, 3621], - }, ['seq', { sourceInterval: [3495, 3551] }, ['not', { sourceInterval: [3495, 3520] }, [ + regularExpressionFirstChar: ['define', { sourceInterval: [3605, 3766] }, null, [], ['alt', { + sourceInterval: [3638, 3766], + }, ['seq', { sourceInterval: [3640, 3696] }, ['not', { sourceInterval: [3640, 3665] }, [ 'alt', - { sourceInterval: [3497, 3519] }, - ['terminal', { sourceInterval: [3497, 3500] }, '*'], - ['terminal', { sourceInterval: [3503, 3507] }, '\\'], - ['terminal', { sourceInterval: [3510, 3513] }, '/'], - ['terminal', { sourceInterval: [3516, 3519] }, '['], - ]], ['app', { sourceInterval: [3521, 3551] }, 'regularExpressionNonTerminator', []]], [ + { sourceInterval: [3642, 3664] }, + ['terminal', { sourceInterval: [3642, 3645] }, '*'], + ['terminal', { sourceInterval: [3648, 3652] }, '\\'], + ['terminal', { sourceInterval: [3655, 3658] }, '/'], + ['terminal', { sourceInterval: [3661, 3664] }, '['], + ]], ['app', { sourceInterval: [3666, 3696] }, 'regularExpressionNonTerminator', []]], [ 'app', - { sourceInterval: [3558, 3592] }, + { sourceInterval: [3703, 3737] }, 'regularExpressionBackslashSequence', [], - ], ['app', { sourceInterval: [3599, 3621] }, 'regularExpressionClass', []]]], - regularExpressionChar: ['define', { sourceInterval: [3626, 3770] }, null, [], ['alt', { - sourceInterval: [3650, 3770], - }, ['seq', { sourceInterval: [3650, 3700] }, ['not', { sourceInterval: [3650, 3669] }, [ + ], ['app', { sourceInterval: [3744, 3766] }, 'regularExpressionClass', []]]], + regularExpressionChar: ['define', { sourceInterval: [3771, 3915] }, null, [], ['alt', { + sourceInterval: [3795, 3915], + }, ['seq', { sourceInterval: [3795, 3845] }, ['not', { sourceInterval: [3795, 3814] }, [ 'alt', - { sourceInterval: [3652, 3668] }, - ['terminal', { sourceInterval: [3652, 3656] }, '\\'], - ['terminal', { sourceInterval: [3659, 3662] }, '/'], - ['terminal', { sourceInterval: [3665, 3668] }, '['], - ]], ['app', { sourceInterval: [3670, 3700] }, 'regularExpressionNonTerminator', []]], [ + { sourceInterval: [3797, 3813] }, + ['terminal', { sourceInterval: [3797, 3801] }, '\\'], + ['terminal', { sourceInterval: [3804, 3807] }, '/'], + ['terminal', { sourceInterval: [3810, 3813] }, '['], + ]], ['app', { sourceInterval: [3815, 3845] }, 'regularExpressionNonTerminator', []]], [ 'app', - { sourceInterval: [3707, 3741] }, + { sourceInterval: [3852, 3886] }, 'regularExpressionBackslashSequence', [], - ], ['app', { sourceInterval: [3748, 3770] }, 'regularExpressionClass', []]]], - regularExpressionBackslashSequence: ['define', { sourceInterval: [3775, 3847] }, null, [], [ + ], ['app', { sourceInterval: [3893, 3915] }, 'regularExpressionClass', []]]], + regularExpressionBackslashSequence: ['define', { sourceInterval: [3920, 3992] }, null, [], [ 'seq', - { sourceInterval: [3812, 3847] }, - ['terminal', { sourceInterval: [3812, 3816] }, '\\'], - ['app', { sourceInterval: [3817, 3847] }, 'regularExpressionNonTerminator', []], + { sourceInterval: [3957, 3992] }, + ['terminal', { sourceInterval: [3957, 3961] }, '\\'], + ['app', { sourceInterval: [3962, 3992] }, 'regularExpressionNonTerminator', []], ]], - regularExpressionNonTerminator: ['define', { sourceInterval: [3852, 3906] }, null, [], [ + regularExpressionNonTerminator: ['define', { sourceInterval: [3997, 4051] }, null, [], [ 'seq', - { sourceInterval: [3885, 3906] }, - ['not', { sourceInterval: [3885, 3902] }, ['app', { sourceInterval: [3887, 3901] }, 'lineTerminator', []]], - ['app', { sourceInterval: [3903, 3906] }, 'any', []], + { sourceInterval: [4030, 4051] }, + ['not', { sourceInterval: [4030, 4047] }, ['app', { sourceInterval: [4032, 4046] }, 'lineTerminator', []]], + ['app', { sourceInterval: [4048, 4051] }, 'any', []], ]], - regularExpressionClass: ['define', { sourceInterval: [3911, 3971] }, null, [], [ + regularExpressionClass: ['define', { sourceInterval: [4056, 4116] }, null, [], [ 'seq', - { sourceInterval: [3936, 3971] }, - ['terminal', { sourceInterval: [3936, 3939] }, '['], - ['star', { sourceInterval: [3940, 3967] }, [ + { sourceInterval: [4081, 4116] }, + ['terminal', { sourceInterval: [4081, 4084] }, '['], + ['star', { sourceInterval: [4085, 4112] }, [ 'app', - { sourceInterval: [3940, 3966] }, + { sourceInterval: [4085, 4111] }, 'regularExpressionClassChar', [], ]], - ['terminal', { sourceInterval: [3968, 3971] }, ']'], + ['terminal', { sourceInterval: [4113, 4116] }, ']'], ]], - regularExpressionClassChar: ['define', { sourceInterval: [3976, 4096] }, null, [], ['alt', { - sourceInterval: [4009, 4096], - }, ['seq', { sourceInterval: [4011, 4055] }, ['not', { sourceInterval: [4011, 4024] }, [ + regularExpressionClassChar: ['define', { sourceInterval: [4121, 4241] }, null, [], ['alt', { + sourceInterval: [4154, 4241], + }, ['seq', { sourceInterval: [4156, 4200] }, ['not', { sourceInterval: [4156, 4169] }, [ 'alt', - { sourceInterval: [4013, 4023] }, - ['terminal', { sourceInterval: [4013, 4016] }, ']'], - ['terminal', { sourceInterval: [4019, 4023] }, '\\'], - ]], ['app', { sourceInterval: [4025, 4055] }, 'regularExpressionNonTerminator', []]], [ + { sourceInterval: [4158, 4168] }, + ['terminal', { sourceInterval: [4158, 4161] }, ']'], + ['terminal', { sourceInterval: [4164, 4168] }, '\\'], + ]], ['app', { sourceInterval: [4170, 4200] }, 'regularExpressionNonTerminator', []]], [ 'app', - { sourceInterval: [4062, 4096] }, + { sourceInterval: [4207, 4241] }, 'regularExpressionBackslashSequence', [], ]]], - regularExpressionFlags: ['define', { sourceInterval: [4101, 4141] }, null, [], ['star', { - sourceInterval: [4126, 4141], - }, ['app', { sourceInterval: [4126, 4140] }, 'identifierPart', []]]], - multiLineCommentNoNL: ['define', { sourceInterval: [4147, 4211] }, null, [], [ + regularExpressionFlags: ['define', { sourceInterval: [4246, 4286] }, null, [], ['star', { + sourceInterval: [4271, 4286], + }, ['app', { sourceInterval: [4271, 4285] }, 'identifierPart', []]]], + multiLineCommentNoNL: ['define', { sourceInterval: [4292, 4356] }, null, [], [ 'seq', - { sourceInterval: [4170, 4211] }, - ['terminal', { sourceInterval: [4170, 4174] }, '/*'], - ['star', { sourceInterval: [4175, 4206] }, ['seq', { sourceInterval: [4176, 4204] }, ['not', { - sourceInterval: [4176, 4200], - }, ['alt', { sourceInterval: [4178, 4199] }, ['terminal', { sourceInterval: [4178, 4182] }, '*/'], [ + { sourceInterval: [4315, 4356] }, + ['terminal', { sourceInterval: [4315, 4319] }, '/*'], + ['star', { sourceInterval: [4320, 4351] }, ['seq', { sourceInterval: [4321, 4349] }, ['not', { + sourceInterval: [4321, 4345], + }, ['alt', { sourceInterval: [4323, 4344] }, ['terminal', { sourceInterval: [4323, 4327] }, '*/'], [ 'app', - { sourceInterval: [4185, 4199] }, + { sourceInterval: [4330, 4344] }, 'lineTerminator', [], - ]]], ['app', { sourceInterval: [4201, 4204] }, 'any', []]]], - ['terminal', { sourceInterval: [4207, 4211] }, '*/'], + ]]], ['app', { sourceInterval: [4346, 4349] }, 'any', []]]], + ['terminal', { sourceInterval: [4352, 4356] }, '*/'], ]], - identifierStart_escaped: ['define', { sourceInterval: [4266, 4303] }, null, [], [ + identifierStart_escaped: ['define', { sourceInterval: [4411, 4448] }, null, [], [ 'seq', - { sourceInterval: [4266, 4292] }, - ['terminal', { sourceInterval: [4266, 4270] }, '\\'], - ['app', { sourceInterval: [4271, 4292] }, 'unicodeEscapeSequence', []], + { sourceInterval: [4411, 4437] }, + ['terminal', { sourceInterval: [4411, 4415] }, '\\'], + ['app', { sourceInterval: [4416, 4437] }, 'unicodeEscapeSequence', []], ]], - identifierStart: ['define', { sourceInterval: [4217, 4303] }, null, [], [ + identifierStart: ['define', { sourceInterval: [4362, 4448] }, null, [], [ 'alt', - { sourceInterval: [4239, 4303] }, - ['app', { sourceInterval: [4241, 4247] }, 'letter', []], - ['terminal', { sourceInterval: [4250, 4253] }, '$'], - ['terminal', { sourceInterval: [4256, 4259] }, '_'], - ['app', { sourceInterval: [4266, 4292] }, 'identifierStart_escaped', []], + { sourceInterval: [4384, 4448] }, + ['app', { sourceInterval: [4386, 4392] }, 'letter', []], + ['terminal', { sourceInterval: [4395, 4398] }, '$'], + ['terminal', { sourceInterval: [4401, 4404] }, '_'], + ['app', { sourceInterval: [4411, 4437] }, 'identifierStart_escaped', []], ]], - identifierPart: ['define', { sourceInterval: [4308, 4444] }, null, [], [ + identifierPart: ['define', { sourceInterval: [4453, 4589] }, null, [], [ 'alt', - { sourceInterval: [4329, 4444] }, - ['app', { sourceInterval: [4331, 4346] }, 'identifierStart', []], - ['app', { sourceInterval: [4349, 4369] }, 'unicodeCombiningMark', []], - ['app', { sourceInterval: [4376, 4388] }, 'unicodeDigit', []], - ['app', { sourceInterval: [4391, 4418] }, 'unicodeConnectorPunctuation', []], - ['terminal', { sourceInterval: [4425, 4433] }, '‌'], - ['terminal', { sourceInterval: [4436, 4444] }, '‍'], - ]], - letter: ['extend', { sourceInterval: [4449, 4476] }, null, [], [ + { sourceInterval: [4474, 4589] }, + ['app', { sourceInterval: [4476, 4491] }, 'identifierStart', []], + ['app', { sourceInterval: [4494, 4514] }, 'unicodeCombiningMark', []], + ['app', { sourceInterval: [4521, 4533] }, 'unicodeDigit', []], + ['app', { sourceInterval: [4536, 4563] }, 'unicodeConnectorPunctuation', []], + ['terminal', { sourceInterval: [4570, 4578] }, '‌'], + ['terminal', { sourceInterval: [4581, 4589] }, '‍'], + ]], + letter: ['extend', { sourceInterval: [4594, 4621] }, null, [], [ 'app', - { sourceInterval: [4459, 4476] }, + { sourceInterval: [4604, 4621] }, 'unicodeCategoryNl', [], ]], - unicodeCategoryNl: ['define', { sourceInterval: [4481, 4555] }, null, [], [ + unicodeCategoryNl: ['define', { sourceInterval: [4626, 4700] }, null, [], [ 'alt', - { sourceInterval: [4505, 4555] }, - ['range', { sourceInterval: [4505, 4523] }, 'Ⅰ', 'ↂ'], - ['terminal', { sourceInterval: [4526, 4534] }, '〇'], - ['range', { sourceInterval: [4537, 4555] }, '〡', '〩'], + { sourceInterval: [4650, 4700] }, + ['range', { sourceInterval: [4650, 4668] }, 'Ⅰ', 'ↂ'], + ['terminal', { sourceInterval: [4671, 4679] }, '〇'], + ['range', { sourceInterval: [4682, 4700] }, '〡', '〩'], ]], - unicodeDigit: ['define', { sourceInterval: [4560, 4922] }, 'a digit', [], [ + unicodeDigit: ['define', { sourceInterval: [4705, 5067] }, 'a digit', [], [ 'alt', - { sourceInterval: [4589, 4922] }, - ['range', { sourceInterval: [4589, 4607] }, '0', '9'], - ['range', { sourceInterval: [4610, 4628] }, '٠', '٩'], - ['range', { sourceInterval: [4631, 4649] }, '۰', '۹'], - ['range', { sourceInterval: [4652, 4670] }, '०', '९'], - ['range', { sourceInterval: [4673, 4691] }, '০', '৯'], - ['range', { sourceInterval: [4694, 4712] }, '੦', '੯'], - ['range', { sourceInterval: [4715, 4733] }, '૦', '૯'], - ['range', { sourceInterval: [4736, 4754] }, '୦', '୯'], - ['range', { sourceInterval: [4757, 4775] }, '௧', '௯'], - ['range', { sourceInterval: [4778, 4796] }, '౦', '౯'], - ['range', { sourceInterval: [4799, 4817] }, '೦', '೯'], - ['range', { sourceInterval: [4820, 4838] }, '൦', '൯'], - ['range', { sourceInterval: [4841, 4859] }, '๐', '๙'], - ['range', { sourceInterval: [4862, 4880] }, '໐', '໙'], - ['range', { sourceInterval: [4883, 4901] }, '༠', '༩'], - ['range', { sourceInterval: [4904, 4922] }, '0', '9'], - ]], - unicodeCombiningMark: ['define', { sourceInterval: [4928, 6659] }, 'a Unicode combining mark', [], [ + { sourceInterval: [4734, 5067] }, + ['range', { sourceInterval: [4734, 4752] }, '0', '9'], + ['range', { sourceInterval: [4755, 4773] }, '٠', '٩'], + ['range', { sourceInterval: [4776, 4794] }, '۰', '۹'], + ['range', { sourceInterval: [4797, 4815] }, '०', '९'], + ['range', { sourceInterval: [4818, 4836] }, '০', '৯'], + ['range', { sourceInterval: [4839, 4857] }, '੦', '੯'], + ['range', { sourceInterval: [4860, 4878] }, '૦', '૯'], + ['range', { sourceInterval: [4881, 4899] }, '୦', '୯'], + ['range', { sourceInterval: [4902, 4920] }, '௧', '௯'], + ['range', { sourceInterval: [4923, 4941] }, '౦', '౯'], + ['range', { sourceInterval: [4944, 4962] }, '೦', '೯'], + ['range', { sourceInterval: [4965, 4983] }, '൦', '൯'], + ['range', { sourceInterval: [4986, 5004] }, '๐', '๙'], + ['range', { sourceInterval: [5007, 5025] }, '໐', '໙'], + ['range', { sourceInterval: [5028, 5046] }, '༠', '༩'], + ['range', { sourceInterval: [5049, 5067] }, '0', '9'], + ]], + unicodeCombiningMark: ['define', { sourceInterval: [5073, 6804] }, 'a Unicode combining mark', [], [ 'alt', - { sourceInterval: [4982, 6659] }, - ['range', { sourceInterval: [4982, 5000] }, '̀', 'ͅ'], - ['range', { sourceInterval: [5003, 5021] }, '͠', '͡'], - ['range', { sourceInterval: [5024, 5042] }, '҃', '҆'], - ['range', { sourceInterval: [5045, 5063] }, '֑', '֡'], - ['range', { sourceInterval: [5066, 5084] }, '֣', 'ֹ'], - ['range', { sourceInterval: [5087, 5105] }, 'ֻ', 'ֽ'], - ['range', { sourceInterval: [5108, 5126] }, 'ֿ', 'ֿ'], - ['range', { sourceInterval: [5129, 5147] }, 'ׁ', 'ׂ'], - ['range', { sourceInterval: [5150, 5168] }, 'ׄ', 'ׄ'], - ['range', { sourceInterval: [5171, 5189] }, 'ً', 'ْ'], - ['range', { sourceInterval: [5192, 5210] }, 'ٰ', 'ٰ'], - ['range', { sourceInterval: [5213, 5231] }, 'ۖ', 'ۜ'], - ['range', { sourceInterval: [5234, 5252] }, '۟', 'ۤ'], - ['range', { sourceInterval: [5255, 5273] }, 'ۧ', 'ۨ'], - ['range', { sourceInterval: [5276, 5294] }, '۪', 'ۭ'], - ['range', { sourceInterval: [5297, 5315] }, 'ँ', 'ं'], - ['range', { sourceInterval: [5318, 5336] }, '़', '़'], - ['range', { sourceInterval: [5339, 5357] }, 'ु', 'ै'], - ['range', { sourceInterval: [5360, 5378] }, '्', '्'], - ['range', { sourceInterval: [5381, 5399] }, '॑', '॔'], - ['range', { sourceInterval: [5402, 5420] }, 'ॢ', 'ॣ'], - ['range', { sourceInterval: [5423, 5441] }, 'ঁ', 'ঁ'], - ['range', { sourceInterval: [5444, 5462] }, '়', '়'], - ['range', { sourceInterval: [5465, 5483] }, 'ু', 'ৄ'], - ['range', { sourceInterval: [5486, 5504] }, '্', '্'], - ['range', { sourceInterval: [5507, 5525] }, 'ৢ', 'ৣ'], - ['range', { sourceInterval: [5528, 5546] }, 'ਂ', 'ਂ'], - ['range', { sourceInterval: [5549, 5567] }, '਼', '਼'], - ['range', { sourceInterval: [5570, 5588] }, 'ੁ', 'ੂ'], - ['range', { sourceInterval: [5591, 5609] }, 'ੇ', 'ੈ'], - ['range', { sourceInterval: [5612, 5630] }, 'ੋ', '੍'], - ['range', { sourceInterval: [5633, 5651] }, 'ੰ', 'ੱ'], - ['range', { sourceInterval: [5654, 5672] }, 'ઁ', 'ં'], - ['range', { sourceInterval: [5675, 5693] }, '઼', '઼'], - ['range', { sourceInterval: [5696, 5714] }, 'ુ', 'ૅ'], - ['range', { sourceInterval: [5717, 5735] }, 'ે', 'ૈ'], - ['range', { sourceInterval: [5738, 5756] }, '્', '્'], - ['range', { sourceInterval: [5759, 5777] }, 'ଁ', 'ଁ'], - ['range', { sourceInterval: [5780, 5798] }, '଼', '଼'], - ['range', { sourceInterval: [5801, 5819] }, 'ି', 'ି'], - ['range', { sourceInterval: [5822, 5840] }, 'ୁ', 'ୃ'], - ['range', { sourceInterval: [5843, 5861] }, '୍', '୍'], - ['range', { sourceInterval: [5864, 5882] }, 'ୖ', 'ୖ'], - ['range', { sourceInterval: [5885, 5903] }, 'ஂ', 'ஂ'], - ['range', { sourceInterval: [5906, 5924] }, 'ீ', 'ீ'], - ['range', { sourceInterval: [5927, 5945] }, '்', '்'], - ['range', { sourceInterval: [5948, 5966] }, 'ా', 'ీ'], - ['range', { sourceInterval: [5969, 5987] }, 'ె', 'ై'], - ['range', { sourceInterval: [5990, 6008] }, 'ొ', '్'], - ['range', { sourceInterval: [6011, 6029] }, 'ౕ', 'ౖ'], - ['range', { sourceInterval: [6032, 6050] }, 'ಿ', 'ಿ'], - ['range', { sourceInterval: [6053, 6071] }, 'ೆ', 'ೆ'], - ['range', { sourceInterval: [6074, 6092] }, 'ೌ', '್'], - ['range', { sourceInterval: [6095, 6113] }, 'ു', 'ൃ'], - ['range', { sourceInterval: [6116, 6134] }, '്', '്'], - ['range', { sourceInterval: [6137, 6155] }, 'ั', 'ั'], - ['range', { sourceInterval: [6158, 6176] }, 'ิ', 'ฺ'], - ['range', { sourceInterval: [6179, 6197] }, '็', '๎'], - ['range', { sourceInterval: [6200, 6218] }, 'ັ', 'ັ'], - ['range', { sourceInterval: [6221, 6239] }, 'ິ', 'ູ'], - ['range', { sourceInterval: [6242, 6260] }, 'ົ', 'ຼ'], - ['range', { sourceInterval: [6263, 6281] }, '່', 'ໍ'], - ['range', { sourceInterval: [6284, 6302] }, '༘', '༙'], - ['range', { sourceInterval: [6305, 6323] }, '༵', '༵'], - ['range', { sourceInterval: [6326, 6344] }, '༷', '༷'], - ['range', { sourceInterval: [6347, 6365] }, '༹', '༹'], - ['range', { sourceInterval: [6368, 6386] }, 'ཱ', 'ཾ'], - ['range', { sourceInterval: [6389, 6407] }, 'ྀ', '྄'], - ['range', { sourceInterval: [6410, 6428] }, '྆', '྇'], - ['range', { sourceInterval: [6431, 6449] }, 'ྐ', 'ྕ'], - ['range', { sourceInterval: [6452, 6470] }, 'ྗ', 'ྗ'], - ['range', { sourceInterval: [6473, 6491] }, 'ྙ', 'ྭ'], - ['range', { sourceInterval: [6494, 6512] }, 'ྱ', 'ྷ'], - ['range', { sourceInterval: [6515, 6533] }, 'ྐྵ', 'ྐྵ'], - ['range', { sourceInterval: [6536, 6554] }, '⃐', '⃜'], - ['range', { sourceInterval: [6557, 6575] }, '⃡', '⃡'], - ['range', { sourceInterval: [6578, 6596] }, '〪', '〯'], - ['range', { sourceInterval: [6599, 6617] }, '゙', '゚'], - ['range', { sourceInterval: [6620, 6638] }, 'ﬞ', 'ﬞ'], - ['range', { sourceInterval: [6641, 6659] }, '︠', '︣'], - ]], - unicodeConnectorPunctuation: ['define', { sourceInterval: [6665, 6799] }, null, [], [ + { sourceInterval: [5127, 6804] }, + ['range', { sourceInterval: [5127, 5145] }, '̀', 'ͅ'], + ['range', { sourceInterval: [5148, 5166] }, '͠', '͡'], + ['range', { sourceInterval: [5169, 5187] }, '҃', '҆'], + ['range', { sourceInterval: [5190, 5208] }, '֑', '֡'], + ['range', { sourceInterval: [5211, 5229] }, '֣', 'ֹ'], + ['range', { sourceInterval: [5232, 5250] }, 'ֻ', 'ֽ'], + ['range', { sourceInterval: [5253, 5271] }, 'ֿ', 'ֿ'], + ['range', { sourceInterval: [5274, 5292] }, 'ׁ', 'ׂ'], + ['range', { sourceInterval: [5295, 5313] }, 'ׄ', 'ׄ'], + ['range', { sourceInterval: [5316, 5334] }, 'ً', 'ْ'], + ['range', { sourceInterval: [5337, 5355] }, 'ٰ', 'ٰ'], + ['range', { sourceInterval: [5358, 5376] }, 'ۖ', 'ۜ'], + ['range', { sourceInterval: [5379, 5397] }, '۟', 'ۤ'], + ['range', { sourceInterval: [5400, 5418] }, 'ۧ', 'ۨ'], + ['range', { sourceInterval: [5421, 5439] }, '۪', 'ۭ'], + ['range', { sourceInterval: [5442, 5460] }, 'ँ', 'ं'], + ['range', { sourceInterval: [5463, 5481] }, '़', '़'], + ['range', { sourceInterval: [5484, 5502] }, 'ु', 'ै'], + ['range', { sourceInterval: [5505, 5523] }, '्', '्'], + ['range', { sourceInterval: [5526, 5544] }, '॑', '॔'], + ['range', { sourceInterval: [5547, 5565] }, 'ॢ', 'ॣ'], + ['range', { sourceInterval: [5568, 5586] }, 'ঁ', 'ঁ'], + ['range', { sourceInterval: [5589, 5607] }, '়', '়'], + ['range', { sourceInterval: [5610, 5628] }, 'ু', 'ৄ'], + ['range', { sourceInterval: [5631, 5649] }, '্', '্'], + ['range', { sourceInterval: [5652, 5670] }, 'ৢ', 'ৣ'], + ['range', { sourceInterval: [5673, 5691] }, 'ਂ', 'ਂ'], + ['range', { sourceInterval: [5694, 5712] }, '਼', '਼'], + ['range', { sourceInterval: [5715, 5733] }, 'ੁ', 'ੂ'], + ['range', { sourceInterval: [5736, 5754] }, 'ੇ', 'ੈ'], + ['range', { sourceInterval: [5757, 5775] }, 'ੋ', '੍'], + ['range', { sourceInterval: [5778, 5796] }, 'ੰ', 'ੱ'], + ['range', { sourceInterval: [5799, 5817] }, 'ઁ', 'ં'], + ['range', { sourceInterval: [5820, 5838] }, '઼', '઼'], + ['range', { sourceInterval: [5841, 5859] }, 'ુ', 'ૅ'], + ['range', { sourceInterval: [5862, 5880] }, 'ે', 'ૈ'], + ['range', { sourceInterval: [5883, 5901] }, '્', '્'], + ['range', { sourceInterval: [5904, 5922] }, 'ଁ', 'ଁ'], + ['range', { sourceInterval: [5925, 5943] }, '଼', '଼'], + ['range', { sourceInterval: [5946, 5964] }, 'ି', 'ି'], + ['range', { sourceInterval: [5967, 5985] }, 'ୁ', 'ୃ'], + ['range', { sourceInterval: [5988, 6006] }, '୍', '୍'], + ['range', { sourceInterval: [6009, 6027] }, 'ୖ', 'ୖ'], + ['range', { sourceInterval: [6030, 6048] }, 'ஂ', 'ஂ'], + ['range', { sourceInterval: [6051, 6069] }, 'ீ', 'ீ'], + ['range', { sourceInterval: [6072, 6090] }, '்', '்'], + ['range', { sourceInterval: [6093, 6111] }, 'ా', 'ీ'], + ['range', { sourceInterval: [6114, 6132] }, 'ె', 'ై'], + ['range', { sourceInterval: [6135, 6153] }, 'ొ', '్'], + ['range', { sourceInterval: [6156, 6174] }, 'ౕ', 'ౖ'], + ['range', { sourceInterval: [6177, 6195] }, 'ಿ', 'ಿ'], + ['range', { sourceInterval: [6198, 6216] }, 'ೆ', 'ೆ'], + ['range', { sourceInterval: [6219, 6237] }, 'ೌ', '್'], + ['range', { sourceInterval: [6240, 6258] }, 'ു', 'ൃ'], + ['range', { sourceInterval: [6261, 6279] }, '്', '്'], + ['range', { sourceInterval: [6282, 6300] }, 'ั', 'ั'], + ['range', { sourceInterval: [6303, 6321] }, 'ิ', 'ฺ'], + ['range', { sourceInterval: [6324, 6342] }, '็', '๎'], + ['range', { sourceInterval: [6345, 6363] }, 'ັ', 'ັ'], + ['range', { sourceInterval: [6366, 6384] }, 'ິ', 'ູ'], + ['range', { sourceInterval: [6387, 6405] }, 'ົ', 'ຼ'], + ['range', { sourceInterval: [6408, 6426] }, '່', 'ໍ'], + ['range', { sourceInterval: [6429, 6447] }, '༘', '༙'], + ['range', { sourceInterval: [6450, 6468] }, '༵', '༵'], + ['range', { sourceInterval: [6471, 6489] }, '༷', '༷'], + ['range', { sourceInterval: [6492, 6510] }, '༹', '༹'], + ['range', { sourceInterval: [6513, 6531] }, 'ཱ', 'ཾ'], + ['range', { sourceInterval: [6534, 6552] }, 'ྀ', '྄'], + ['range', { sourceInterval: [6555, 6573] }, '྆', '྇'], + ['range', { sourceInterval: [6576, 6594] }, 'ྐ', 'ྕ'], + ['range', { sourceInterval: [6597, 6615] }, 'ྗ', 'ྗ'], + ['range', { sourceInterval: [6618, 6636] }, 'ྙ', 'ྭ'], + ['range', { sourceInterval: [6639, 6657] }, 'ྱ', 'ྷ'], + ['range', { sourceInterval: [6660, 6678] }, 'ྐྵ', 'ྐྵ'], + ['range', { sourceInterval: [6681, 6699] }, '⃐', '⃜'], + ['range', { sourceInterval: [6702, 6720] }, '⃡', '⃡'], + ['range', { sourceInterval: [6723, 6741] }, '〪', '〯'], + ['range', { sourceInterval: [6744, 6762] }, '゙', '゚'], + ['range', { sourceInterval: [6765, 6783] }, 'ﬞ', 'ﬞ'], + ['range', { sourceInterval: [6786, 6804] }, '︠', '︣'], + ]], + unicodeConnectorPunctuation: ['define', { sourceInterval: [6810, 6944] }, null, [], [ 'alt', - { sourceInterval: [6695, 6799] }, - ['terminal', { sourceInterval: [6695, 6703] }, '_'], - ['range', { sourceInterval: [6706, 6724] }, '‿', '⁀'], - ['terminal', { sourceInterval: [6727, 6735] }, '・'], - ['range', { sourceInterval: [6738, 6756] }, '︳', '︴'], - ['range', { sourceInterval: [6759, 6777] }, '﹍', '﹏'], - ['terminal', { sourceInterval: [6780, 6788] }, '_'], - ['terminal', { sourceInterval: [6791, 6799] }, '・'], - ]], - unicodeSpaceSeparator: ['define', { sourceInterval: [6804, 6857] }, null, [], [ + { sourceInterval: [6840, 6944] }, + ['terminal', { sourceInterval: [6840, 6848] }, '_'], + ['range', { sourceInterval: [6851, 6869] }, '‿', '⁀'], + ['terminal', { sourceInterval: [6872, 6880] }, '・'], + ['range', { sourceInterval: [6883, 6901] }, '︳', '︴'], + ['range', { sourceInterval: [6904, 6922] }, '﹍', '﹏'], + ['terminal', { sourceInterval: [6925, 6933] }, '_'], + ['terminal', { sourceInterval: [6936, 6944] }, '・'], + ]], + unicodeSpaceSeparator: ['define', { sourceInterval: [6949, 7002] }, null, [], [ 'alt', - { sourceInterval: [6828, 6857] }, - ['range', { sourceInterval: [6828, 6846] }, ' ', '​'], - ['terminal', { sourceInterval: [6849, 6857] }, ' '], + { sourceInterval: [6973, 7002] }, + ['range', { sourceInterval: [6973, 6991] }, ' ', '​'], + ['terminal', { sourceInterval: [6994, 7002] }, ' '], ]], }, ]); diff --git a/drizzle-kit/imports-checker/index.ts b/drizzle-kit/imports-checker/index.ts index 7a4e908382..c25fa09c4a 100644 --- a/drizzle-kit/imports-checker/index.ts +++ b/drizzle-kit/imports-checker/index.ts @@ -1,5 +1,6 @@ import chalk from 'chalk'; -import { analyzeImports, ChainLink } from './checker'; +import type { ChainLink } from './checker'; +import { analyzeImports } from './checker'; const issues = analyzeImports({ basePath: './drizzle-kit', diff --git a/drizzle-kit/package.json b/drizzle-kit/package.json index c51472d662..266f785ece 100644 --- a/drizzle-kit/package.json +++ b/drizzle-kit/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-kit", - "version": "1.0.0-beta.1", + "version": "1.0.0-beta.2", "homepage": "https://orm.drizzle.team", "keywords": [ "drizzle", @@ -35,31 +35,39 @@ "api": "tsx ./dev/api.ts", "migrate:old": "drizzle-kit generate:mysql", "cli": "tsx ./src/cli/index.ts", - "test": "pnpm tsc && TEST_CONFIG_PATH_PREFIX=./tests/cli/ vitest", - "build": "rm -rf ./dist && tsx build.ts && cp package.json dist/ && attw --pack dist", + "test": "TEST_CONFIG_PATH_PREFIX=./tests/cli/ vitest run", + "test:types": "pnpm tsc -p ./tsconfig.typetest.json", + "build": "rm -rf ./dist && tsx build.ts && cp package.json dist/", + "build:artifact": "pnpm run build", + "build:cli": "rm -rf ./dist && tsx build.cli.ts && cp package.json dist/", "build:dev": "rm -rf ./dist && tsx build.dev.ts && tsc -p tsconfig.cli-types.json && chmod +x ./dist/index.cjs", + "build:ext": "rm -rf ./dist && vitest run bin.test && vitest run ./tests/postgres/ && vitest run ./tests/sqlite && vitest run ./tests/mysql && tsx build.ext.ts", "pack": "cp package.json README.md dist/ && (cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", - "tsc": "tsc -p tsconfig.build.json --noEmit", - "publish": "npm publish package.tgz" + "pack:artifact": "pnpm run pack", + "publish": "npm publish package.tgz", + "test:postgres": "vitest run ./postgres/", + "test:other": "vitest run ./mysql/ ./sqlite/ ./other", + "test:cockroach": "vitest run ./cockroach", + "test:mssql": "vitest run ./mssql", + "test:gel": "vitest run ./gel", + "test:singlestore": "vitest run ./singlestore" }, "dependencies": { - "@drizzle-team/brocli": "^0.10.2", - "@esbuild-kit/esm-loader": "^2.5.5", - "esbuild": "^0.25.4", - "esbuild-register": "^3.5.0" + "@drizzle-team/brocli": "^0.11.0", + "@js-temporal/polyfill": "^0.5.1", + "esbuild": "^0.25.10", + "esbuild-register": "^3.6.0" }, "devDependencies": { - "@arethetypeswrong/cli": "^0.15.3", "@aws-sdk/client-rds-data": "^3.556.0", "@cloudflare/workers-types": "^4.20230518.0", "@electric-sql/pglite": "^0.2.12", - "@hono/bun-compress": "^0.1.0", "@hono/node-server": "^1.9.0", "@hono/zod-validator": "^0.2.1", "@libsql/client": "^0.10.0", - "@neondatabase/serverless": "^0.9.1", + "@neondatabase/serverless": "^1.0.2", "@originjs/vite-plugin-commonjs": "^1.0.3", - "@planetscale/database": "^1.16.0", + "@planetscale/database": "^1.19.0", "@sqlitecloud/drivers": "^1.0.653", "@tursodatabase/database": "0.2.1", "@types/better-sqlite3": "^7.6.13", @@ -69,14 +77,13 @@ "@types/json-diff": "^1.0.3", "@types/micromatch": "^4.0.9", "@types/minimatch": "^5.1.2", - "@types/node": "^18.11.15", + "@types/mssql": "^9.1.4", + "@types/node": "^24.7.2", "@types/pg": "^8.10.7", "@types/pluralize": "^0.0.33", "@types/semver": "^7.5.5", "@types/uuid": "^9.0.8", "@types/ws": "^8.5.10", - "@typescript-eslint/eslint-plugin": "^7.2.0", - "@typescript-eslint/parser": "^7.2.0", "@vercel/postgres": "^0.8.0", "ava": "^5.1.0", "better-sqlite3": "^11.9.1", @@ -86,13 +93,10 @@ "commander": "^12.1.0", "dockerode": "^4.0.6", "dotenv": "^16.0.3", - "drizzle-kit": "0.25.0-b1faa33", + "drizzle-kit": "^0.31.6", "drizzle-orm": "workspace:./drizzle-orm/dist", "env-paths": "^3.0.0", "esbuild-node-externals": "^1.9.0", - "eslint": "^8.57.0", - "eslint-config-prettier": "^9.1.0", - "eslint-plugin-prettier": "^5.1.3", "gel": "^2.0.0", "get-port": "^6.1.2", "glob": "^8.1.0", @@ -101,21 +105,20 @@ "json-diff": "1.0.6", "micromatch": "^4.0.8", "minimatch": "^7.4.3", + "mssql": "^12.0.0", "mysql2": "3.14.1", "node-fetch": "^3.3.2", "ohm-js": "^17.1.0", + "orm044": "npm:drizzle-orm@0.44.1", "pg": "^8.11.5", "pluralize": "^8.0.0", "postgres": "^3.4.4", "prettier": "^3.5.3", "semver": "^7.7.2", - "superjson": "^2.2.1", "tsup": "^8.3.5", - "tsx": "^3.12.1", - "typescript": "^5.9.2", + "tsx": "^4.20.6", + "typescript": "^5.9.3", "uuid": "^9.0.1", - "vite-tsconfig-paths": "^4.3.2", - "vitest": "^3.1.3", "ws": "^8.18.2", "zod": "^3.20.2", "zx": "^8.3.2" @@ -133,17 +136,17 @@ "types": "./index.d.mts", "default": "./index.mjs" }, - "./api": { + "./api-postgres": { "import": { - "types": "./api.d.mts", - "default": "./api.mjs" + "types": "./api-postgres.d.mts", + "default": "./api-postgres.mjs" }, "require": { - "types": "./api.d.ts", - "default": "./api.js" + "types": "./api-postgres.d.ts", + "default": "./api-postgres.js" }, - "types": "./api.d.mts", - "default": "./api.mjs" + "types": "./api-postgres.d.mts", + "default": "./api-postgres.mjs" } } } diff --git a/drizzle-kit/src/@types/utils.ts b/drizzle-kit/src/@types/utils.ts index e71d45b896..4210b44801 100644 --- a/drizzle-kit/src/@types/utils.ts +++ b/drizzle-kit/src/@types/utils.ts @@ -5,7 +5,6 @@ declare global { capitalise(): string; camelCase(): string; snake_case(): string; - concatIf(it: string, condition: boolean): string; } diff --git a/drizzle-kit/src/api.ts b/drizzle-kit/src/api.ts deleted file mode 100644 index 8fed70796f..0000000000 --- a/drizzle-kit/src/api.ts +++ /dev/null @@ -1,664 +0,0 @@ -import type { PGlite } from '@electric-sql/pglite'; -import { randomUUID } from 'crypto'; -import { is } from 'drizzle-orm'; -import { Relations } from 'drizzle-orm/_relations'; -import { LibSQLDatabase } from 'drizzle-orm/libsql'; -import { AnyMySqlTable, getTableConfig as mysqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { AnyPgTable, getTableConfig as pgTableConfig, PgDatabase, PgTable } from 'drizzle-orm/pg-core'; -import { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; -import { - AnySingleStoreTable, - getTableConfig as singlestoreTableConfig, - SingleStoreTable, -} from 'drizzle-orm/singlestore-core'; -import { AnySQLiteTable, SQLiteTable } from 'drizzle-orm/sqlite-core'; -import { - columnsResolver, - enumsResolver, - indPolicyResolver, - mySqlViewsResolver, - policyResolver, - roleResolver, - schemasResolver, - sequencesResolver, - sqliteViewsResolver, - tablesResolver, - viewsResolver, -} from './cli/commands/migrate'; -import { pgPushIntrospect } from './cli/commands/pgIntrospect'; -import { pgSuggestions } from './cli/commands/pgPushUtils'; -import { updateUpToV6 as upPgV6, updateUpToV7 as upPgV7 } from './cli/commands/pgUp'; -import { sqlitePushIntrospect } from './cli/commands/sqliteIntrospect'; -import { logSuggestionsAndReturn } from './cli/commands/sqlitePushUtils'; -import type { CasingType } from './cli/validations/common'; -import type { MysqlCredentials } from './cli/validations/mysql'; -import type { PostgresCredentials } from './cli/validations/postgres'; -import type { SingleStoreCredentials } from './cli/validations/singlestore'; -import type { SqliteCredentials } from './cli/validations/sqlite'; -import { getTablesFilterByExtensions } from './extensions/getTablesFilterByExtensions'; -import { originUUID } from './global'; -import type { Config } from './index'; -import { MySqlSchema as MySQLSchemaKit, mysqlSchema, squashMysqlScheme } from './serializer/mysqlSchema'; -import { generateMySqlSnapshot } from './serializer/mysqlSerializer'; -import { prepareFromExports } from './serializer/pgImports'; -import { PgSchema as PgSchemaKit, pgSchema, squashPgScheme } from './serializer/pgSchema'; -import { generatePgSnapshot } from './serializer/pgSerializer'; -import { - SingleStoreSchema as SingleStoreSchemaKit, - singlestoreSchema, - squashSingleStoreScheme, -} from './serializer/singlestoreSchema'; -import { generateSingleStoreSnapshot } from './serializer/singlestoreSerializer'; -import { SQLiteSchema as SQLiteSchemaKit, sqliteSchema, squashSqliteScheme } from './serializer/sqliteSchema'; -import { generateSqliteSnapshot } from './serializer/sqliteSerializer'; -import type { Setup } from './serializer/studio'; -import type { DB, SQLiteDB } from './utils'; -import { certs } from './utils/certs'; -export type DrizzleSnapshotJSON = PgSchemaKit; -export type DrizzleSQLiteSnapshotJSON = SQLiteSchemaKit; -export type DrizzleMySQLSnapshotJSON = MySQLSchemaKit; -export type DrizzleSingleStoreSnapshotJSON = SingleStoreSchemaKit; - -export const generateDrizzleJson = ( - imports: Record, - prevId?: string, - schemaFilters?: string[], - casing?: CasingType, -): PgSchemaKit => { - const prepared = prepareFromExports(imports); - - const id = randomUUID(); - - const snapshot = generatePgSnapshot( - prepared.tables, - prepared.enums, - prepared.schemas, - prepared.sequences, - prepared.roles, - prepared.policies, - prepared.views, - prepared.matViews, - casing, - schemaFilters, - ); - - return { - ...snapshot, - id, - prevId: prevId ?? originUUID, - }; -}; - -export const generateMigration = async ( - prev: DrizzleSnapshotJSON, - cur: DrizzleSnapshotJSON, -) => { - const { applyPgSnapshotsDiff } = await import('./snapshotsDiffer'); - - const validatedPrev = pgSchema.parse(prev); - const validatedCur = pgSchema.parse(cur); - - const squashedPrev = squashPgScheme(validatedPrev); - const squashedCur = squashPgScheme(validatedCur); - - const { sqlStatements, _meta } = await applyPgSnapshotsDiff( - squashedPrev, - squashedCur, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, - validatedPrev, - validatedCur, - ); - - return sqlStatements; -}; - -export const pushSchema = async ( - imports: Record, - drizzleInstance: PgDatabase, - schemaFilters?: string[], - tablesFilter?: string[], - extensionsFilters?: Config['extensionsFilters'], -) => { - const { applyPgSnapshotsDiff } = await import('./snapshotsDiffer'); - const { sql } = await import('drizzle-orm'); - const filters = (tablesFilter ?? []).concat( - getTablesFilterByExtensions({ extensionsFilters, dialect: 'postgresql' }), - ); - - const db: DB = { - query: async (query: string, params?: any[]) => { - const res = await drizzleInstance.execute(sql.raw(query)); - return res.rows; - }, - }; - - const cur = generateDrizzleJson(imports); - const { schema: prev } = await pgPushIntrospect( - db, - filters, - schemaFilters ?? ['public'], - undefined, - ); - - const validatedPrev = pgSchema.parse(prev); - const validatedCur = pgSchema.parse(cur); - - const squashedPrev = squashPgScheme(validatedPrev, 'push'); - const squashedCur = squashPgScheme(validatedCur, 'push'); - - const { statements } = await applyPgSnapshotsDiff( - squashedPrev, - squashedCur, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - - const { shouldAskForApprove, statementsToExecute, infoToPrint } = await pgSuggestions(db, statements); - - return { - hasDataLoss: shouldAskForApprove, - warnings: infoToPrint, - statementsToExecute, - apply: async () => { - for (const dStmnt of statementsToExecute) { - await db.query(dStmnt); - } - }, - }; -}; - -export const startStudioPostgresServer = async ( - imports: Record, - credentials: PostgresCredentials | { - driver: 'pglite'; - client: PGlite; - }, - options?: { - host?: string; - port?: number; - casing?: CasingType; - }, -) => { - const { drizzleForPostgres } = await import('./serializer/studio'); - - const pgSchema: Record> = {}; - const relations: Record = {}; - - Object.entries(imports).forEach(([k, t]) => { - if (is(t, PgTable)) { - const schema = pgTableConfig(t).schema || 'public'; - pgSchema[schema] = pgSchema[schema] || {}; - pgSchema[schema][k] = t; - } - - if (is(t, Relations)) { - relations[k] = t; - } - }); - - const setup = await drizzleForPostgres(credentials, pgSchema, relations, [], options?.casing); - await startServerFromSetup(setup, options); -}; - -// SQLite - -export const generateSQLiteDrizzleJson = async ( - imports: Record, - prevId?: string, - casing?: CasingType, -): Promise => { - const { prepareFromExports } = await import('./serializer/sqliteImports'); - - const prepared = prepareFromExports(imports); - - const id = randomUUID(); - - const snapshot = generateSqliteSnapshot(prepared.tables, prepared.views, casing); - - return { - ...snapshot, - id, - prevId: prevId ?? originUUID, - }; -}; - -export const generateSQLiteMigration = async ( - prev: DrizzleSQLiteSnapshotJSON, - cur: DrizzleSQLiteSnapshotJSON, -) => { - const { applySqliteSnapshotsDiff } = await import('./snapshotsDiffer'); - - const validatedPrev = sqliteSchema.parse(prev); - const validatedCur = sqliteSchema.parse(cur); - - const squashedPrev = squashSqliteScheme(validatedPrev); - const squashedCur = squashSqliteScheme(validatedCur); - - const { sqlStatements } = await applySqliteSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - validatedPrev, - validatedCur, - ); - - return sqlStatements; -}; - -export const pushSQLiteSchema = async ( - imports: Record, - drizzleInstance: LibSQLDatabase, -) => { - const { applySqliteSnapshotsDiff } = await import('./snapshotsDiffer'); - const { sql } = await import('drizzle-orm'); - - const db: SQLiteDB = { - query: async (query: string, params?: any[]) => { - const res = drizzleInstance.all(sql.raw(query)); - return res; - }, - run: async (query: string) => { - return Promise.resolve(drizzleInstance.run(sql.raw(query))).then( - () => {}, - ); - }, - }; - - const cur = await generateSQLiteDrizzleJson(imports); - const { schema: prev } = await sqlitePushIntrospect(db, []); - - const validatedPrev = sqliteSchema.parse(prev); - const validatedCur = sqliteSchema.parse(cur); - - const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); - const squashedCur = squashSqliteScheme(validatedCur, 'push'); - - const { statements, _meta } = await applySqliteSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - - const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( - db, - statements, - squashedPrev, - squashedCur, - _meta!, - ); - - return { - hasDataLoss: shouldAskForApprove, - warnings: infoToPrint, - statementsToExecute, - apply: async () => { - for (const dStmnt of statementsToExecute) { - await db.query(dStmnt); - } - }, - }; -}; - -export const startStudioSQLiteServer = async ( - imports: Record, - credentials: SqliteCredentials, - options?: { - host?: string; - port?: number; - casing?: CasingType; - }, -) => { - const { drizzleForSQLite } = await import('./serializer/studio'); - - const sqliteSchema: Record> = {}; - const relations: Record = {}; - - Object.entries(imports).forEach(([k, t]) => { - if (is(t, SQLiteTable)) { - const schema = 'public'; // sqlite does not have schemas - sqliteSchema[schema] = sqliteSchema[schema] || {}; - sqliteSchema[schema][k] = t; - } - - if (is(t, Relations)) { - relations[k] = t; - } - }); - - const setup = await drizzleForSQLite(credentials, sqliteSchema, relations, [], options?.casing); - await startServerFromSetup(setup, options); -}; - -// MySQL - -export const generateMySQLDrizzleJson = async ( - imports: Record, - prevId?: string, - casing?: CasingType, -): Promise => { - const { prepareFromExports } = await import('./serializer/mysqlImports'); - - const prepared = prepareFromExports(imports); - - const id = randomUUID(); - - const snapshot = generateMySqlSnapshot(prepared.tables, prepared.views, casing); - - return { - ...snapshot, - id, - prevId: prevId ?? originUUID, - }; -}; - -export const generateMySQLMigration = async ( - prev: DrizzleMySQLSnapshotJSON, - cur: DrizzleMySQLSnapshotJSON, -) => { - const { applyMysqlSnapshotsDiff } = await import('./snapshotsDiffer'); - - const validatedPrev = mysqlSchema.parse(prev); - const validatedCur = mysqlSchema.parse(cur); - - const squashedPrev = squashMysqlScheme(validatedPrev); - const squashedCur = squashMysqlScheme(validatedCur); - - const { sqlStatements } = await applyMysqlSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - mySqlViewsResolver, - validatedPrev, - validatedCur, - ); - - return sqlStatements; -}; - -export const pushMySQLSchema = async ( - imports: Record, - drizzleInstance: MySql2Database, - databaseName: string, -) => { - const { applyMysqlSnapshotsDiff } = await import('./snapshotsDiffer'); - const { logSuggestionsAndReturn } = await import( - './cli/commands/mysqlPushUtils' - ); - const { mysqlPushIntrospect } = await import( - './cli/commands/mysqlIntrospect' - ); - const { sql } = await import('drizzle-orm'); - - const db: DB = { - query: async (query: string, params?: any[]) => { - const res = await drizzleInstance.execute(sql.raw(query)); - return res[0] as unknown as any[]; - }, - }; - const cur = await generateMySQLDrizzleJson(imports); - const { schema: prev } = await mysqlPushIntrospect(db, databaseName, []); - - const validatedPrev = mysqlSchema.parse(prev); - const validatedCur = mysqlSchema.parse(cur); - - const squashedPrev = squashMysqlScheme(validatedPrev); - const squashedCur = squashMysqlScheme(validatedCur); - - const { statements } = await applyMysqlSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - mySqlViewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - - const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( - db, - statements, - validatedCur, - ); - - return { - hasDataLoss: shouldAskForApprove, - warnings: infoToPrint, - statementsToExecute, - apply: async () => { - for (const dStmnt of statementsToExecute) { - await db.query(dStmnt); - } - }, - }; -}; - -export const startStudioMySQLServer = async ( - imports: Record, - credentials: MysqlCredentials, - options?: { - host?: string; - port?: number; - casing?: CasingType; - }, -) => { - const { drizzleForMySQL } = await import('./serializer/studio'); - - const mysqlSchema: Record> = {}; - const relations: Record = {}; - - Object.entries(imports).forEach(([k, t]) => { - if (is(t, MySqlTable)) { - const schema = mysqlTableConfig(t).schema || 'public'; - mysqlSchema[schema] = mysqlSchema[schema] || {}; - mysqlSchema[schema][k] = t; - } - - if (is(t, Relations)) { - relations[k] = t; - } - }); - - const setup = await drizzleForMySQL(credentials, mysqlSchema, relations, [], options?.casing); - await startServerFromSetup(setup, options); -}; - -// SingleStore - -export const generateSingleStoreDrizzleJson = async ( - imports: Record, - prevId?: string, - casing?: CasingType, -): Promise => { - const { prepareFromExports } = await import('./serializer/singlestoreImports'); - - const prepared = prepareFromExports(imports); - - const id = randomUUID(); - - const snapshot = generateSingleStoreSnapshot(prepared.tables, /* prepared.views, */ casing); - - return { - ...snapshot, - id, - prevId: prevId ?? originUUID, - }; -}; - -export const generateSingleStoreMigration = async ( - prev: DrizzleSingleStoreSnapshotJSON, - cur: DrizzleSingleStoreSnapshotJSON, -) => { - const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer'); - - const validatedPrev = singlestoreSchema.parse(prev); - const validatedCur = singlestoreSchema.parse(cur); - - const squashedPrev = squashSingleStoreScheme(validatedPrev); - const squashedCur = squashSingleStoreScheme(validatedCur); - - const { sqlStatements } = await applySingleStoreSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - /* singleStoreViewsResolver, */ - validatedPrev, - validatedCur, - 'push', - ); - - return sqlStatements; -}; - -export const pushSingleStoreSchema = async ( - imports: Record, - drizzleInstance: SingleStoreDriverDatabase, - databaseName: string, -) => { - const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer'); - const { logSuggestionsAndReturn } = await import( - './cli/commands/singlestorePushUtils' - ); - const { singlestorePushIntrospect } = await import( - './cli/commands/singlestoreIntrospect' - ); - const { sql } = await import('drizzle-orm'); - - const db: DB = { - query: async (query: string) => { - const res = await drizzleInstance.execute(sql.raw(query)); - return res[0] as unknown as any[]; - }, - }; - const cur = await generateSingleStoreDrizzleJson(imports); - const { schema: prev } = await singlestorePushIntrospect(db, databaseName, []); - - const validatedPrev = singlestoreSchema.parse(prev); - const validatedCur = singlestoreSchema.parse(cur); - - const squashedPrev = squashSingleStoreScheme(validatedPrev); - const squashedCur = squashSingleStoreScheme(validatedCur); - - const { statements } = await applySingleStoreSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - /* singleStoreViewsResolver, */ - validatedPrev, - validatedCur, - 'push', - ); - - const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( - db, - statements, - validatedCur, - validatedPrev, - ); - - return { - hasDataLoss: shouldAskForApprove, - warnings: infoToPrint, - statementsToExecute, - apply: async () => { - for (const dStmnt of statementsToExecute) { - await db.query(dStmnt); - } - }, - }; -}; - -export const startStudioSingleStoreServer = async ( - imports: Record, - credentials: SingleStoreCredentials, - options?: { - host?: string; - port?: number; - casing?: CasingType; - }, -) => { - const { drizzleForSingleStore } = await import('./serializer/studio'); - - const singleStoreSchema: Record> = {}; - const relations: Record = {}; - - Object.entries(imports).forEach(([k, t]) => { - if (is(t, SingleStoreTable)) { - const schema = singlestoreTableConfig(t).schema || 'public'; - singleStoreSchema[schema] = singleStoreSchema[schema] || {}; - singleStoreSchema[schema][k] = t; - } - - if (is(t, Relations)) { - relations[k] = t; - } - }); - - const setup = await drizzleForSingleStore(credentials, singleStoreSchema, relations, [], options?.casing); - await startServerFromSetup(setup, options); -}; - -const startServerFromSetup = async (setup: Setup, options?: { - host?: string; - port?: number; -}) => { - const { prepareServer } = await import('./serializer/studio'); - - const server = await prepareServer(setup); - - const host = options?.host || '127.0.0.1'; - const port = options?.port || 4983; - const { key, cert } = (await certs()) || {}; - server.start({ - host, - port, - key, - cert, - cb: (err) => { - if (err) { - console.error(err); - } else { - console.log(`Studio is running at ${key ? 'https' : 'http'}://${host}:${port}`); - } - }, - }); -}; - -export const upPgSnapshot = (snapshot: Record) => { - if (snapshot.version === '5') { - return upPgV7(upPgV6(snapshot)); - } - if (snapshot.version === '6') { - return upPgV7(snapshot); - } - return snapshot; -}; diff --git a/drizzle-kit/src/cli/commands/check.ts b/drizzle-kit/src/cli/commands/check.ts index 092057372d..5e6464ddc2 100644 --- a/drizzle-kit/src/cli/commands/check.ts +++ b/drizzle-kit/src/cli/commands/check.ts @@ -1,52 +1,52 @@ -import { Dialect } from '../../schemaValidator'; -import { prepareOutFolder, validateWithReport } from '../../utils'; - -export const checkHandler = (out: string, dialect: Dialect) => { - const { snapshots } = prepareOutFolder(out, dialect); - const report = validateWithReport(snapshots, dialect); - - if (report.nonLatest.length > 0) { - console.log( - report.nonLatest - .map((it) => { - return `${it} is not of the latest version, please run "drizzle-kit up"`; - }) - .join('\n'), - ); - process.exit(1); - } - - if (report.malformed.length) { - const message = report.malformed - .map((it) => { - return `${it} data is malformed`; - }) - .join('\n'); - console.log(message); - } - - const collisionEntries = Object.entries(report.idsMap).filter( - (it) => it[1].snapshots.length > 1, - ); - - const message = collisionEntries - .map((it) => { - const data = it[1]; - return `[${ - data.snapshots.join( - ', ', - ) - }] are pointing to a parent snapshot: ${data.parent}/snapshot.json which is a collision.`; - }) - .join('\n'); - - if (message) { - console.log(message); +import { readFileSync } from 'fs'; +import type { Dialect } from '../../utils/schemaValidator'; +import { prepareOutFolder, validatorForDialect } from '../../utils/utils-node'; +import { info } from '../views'; + +export const checkHandler = async (out: string, dialect: Dialect) => { + const { snapshots } = prepareOutFolder(out); + const validator = validatorForDialect(dialect); + + // const snapshotsData: PostgresSnapshot[] = []; + + for (const snapshot of snapshots) { + const raw = JSON.parse(readFileSync(`./${snapshot}`).toString()); + + // snapshotsData.push(raw); + + const res = validator(raw); + if (res.status === 'unsupported') { + console.log( + info( + `${snapshot} snapshot is of unsupported version, please update drizzle-kit`, + ), + ); + process.exit(0); + } + if (res.status === 'malformed') { + // more explanation + console.log(`${snapshot} data is malformed`); + process.exit(1); + } + + if (res.status === 'nonLatest') { + console.log(`${snapshot} is not of the latest version, please run "drizzle-kit up"`); + process.exit(1); + } } - const abort = report.malformed.length!! || collisionEntries.length > 0; - - if (abort) { - process.exit(1); - } + // try { + // const response = await detectNonCommutative(snapshots, dialect); + // if (response!.conflicts.length > 0) { + // console.log('\nNon-commutative migration branches detected:'); + // for (const c of response!.conflicts) { + // console.log(`- Parent ${c.parentId}${c.parentPath ? ` (${c.parentPath})` : ''}`); + // console.log(` A: ${c.branchA.headId} (${c.branchA.path})`); + // console.log(` B: ${c.branchB.headId} (${c.branchB.path})`); + // // for (const r of c.reasons) console.log(` • ${r}`); + // } + // } + // } catch (e) { + // console.error(e); + // } }; diff --git a/drizzle-kit/src/cli/commands/drop.ts b/drizzle-kit/src/cli/commands/drop.ts deleted file mode 100644 index 183e9459d2..0000000000 --- a/drizzle-kit/src/cli/commands/drop.ts +++ /dev/null @@ -1,60 +0,0 @@ -import chalk from 'chalk'; -import { readFileSync, rmSync, writeFileSync } from 'fs'; -import fs from 'fs'; -import { render } from 'hanji'; -import { join } from 'path'; -import { Journal } from '../../utils'; -import { DropMigrationView } from '../views'; -import { embeddedMigrations } from './migrate'; - -export const dropMigration = async ({ - out, - bundle, -}: { - out: string; - bundle: boolean; -}) => { - const metaFilePath = join(out, 'meta', '_journal.json'); - const journal = JSON.parse(readFileSync(metaFilePath, 'utf-8')) as Journal; - - if (journal.entries.length === 0) { - console.log( - `[${chalk.blue('i')}] no migration entries found in ${metaFilePath}`, - ); - return; - } - - const result = await render(new DropMigrationView(journal.entries)); - if (result.status === 'aborted') return; - - delete journal.entries[journal.entries.indexOf(result.data!)]; - - const resultJournal: Journal = { - ...journal, - entries: journal.entries.filter(Boolean), - }; - const sqlFilePath = join(out, `${result.data.tag}.sql`); - const snapshotFilePath = join( - out, - 'meta', - `${result.data.tag.split('_')[0]}_snapshot.json`, - ); - rmSync(sqlFilePath); - rmSync(snapshotFilePath); - writeFileSync(metaFilePath, JSON.stringify(resultJournal, null, 2)); - - if (bundle) { - fs.writeFileSync( - join(out, `migrations.js`), - embeddedMigrations(resultJournal), - ); - } - - console.log( - `[${chalk.green('✓')}] ${ - chalk.bold( - result.data.tag, - ) - } migration successfully dropped`, - ); -}; diff --git a/drizzle-kit/src/cli/commands/generate-cockroach.ts b/drizzle-kit/src/cli/commands/generate-cockroach.ts new file mode 100644 index 0000000000..645fb49e8b --- /dev/null +++ b/drizzle-kit/src/cli/commands/generate-cockroach.ts @@ -0,0 +1,82 @@ +import { fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/cockroach/drizzle'; +import { prepareFilenames, prepareOutFolder } from 'src/utils/utils-node'; +import type { + CheckConstraint, + CockroachEntities, + Column, + Enum, + ForeignKey, + Index, + Policy, + PrimaryKey, + Schema, + Sequence, + View, +} from '../../dialects/cockroach/ddl'; +import { createDDL, interimToDDL } from '../../dialects/cockroach/ddl'; +import { ddlDiff, ddlDiffDry } from '../../dialects/cockroach/diff'; +import { prepareSnapshot } from '../../dialects/cockroach/serializer'; +import { resolver } from '../prompts'; +import { writeResult } from './generate-common'; +import type { ExportConfig, GenerateConfig } from './utils'; + +export const handle = async (config: GenerateConfig) => { + const { out: outFolder, schema: schemaPath, casing } = config; + + const { snapshots } = prepareOutFolder(outFolder); + const { ddlCur, ddlPrev, snapshot, custom } = await prepareSnapshot(snapshots, schemaPath, casing); + if (config.custom) { + writeResult({ + snapshot: custom, + sqlStatements: [], + outFolder, + name: config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + renames: [], + snapshots, + }); + return; + } + + const { sqlStatements, renames } = await ddlDiff( + ddlPrev, + ddlCur, + resolver('schema'), + resolver('enum'), + resolver('sequence'), + resolver('policy'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('index'), + resolver('check'), + resolver('primary key'), + resolver('foreign key'), + 'default', + ); + + writeResult({ + snapshot: snapshot, + sqlStatements, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + renames, + snapshots, + }); +}; + +export const handleExport = async (config: ExportConfig) => { + const filenames = prepareFilenames(config.schema); + const res = await prepareFromSchemaFiles(filenames); + + // TODO: do we wanna respect entity filter while exporting to sql? + // cc: @AleksandrSherman + const { schema } = fromDrizzleSchema(res, config.casing, () => true); + const { ddl } = interimToDDL(schema); + const { sqlStatements } = await ddlDiffDry(createDDL(), ddl, 'default'); + console.log(sqlStatements.join('\n')); +}; diff --git a/drizzle-kit/src/cli/commands/generate-common.ts b/drizzle-kit/src/cli/commands/generate-common.ts new file mode 100644 index 0000000000..0054a29c50 --- /dev/null +++ b/drizzle-kit/src/cli/commands/generate-common.ts @@ -0,0 +1,133 @@ +import chalk from 'chalk'; +import fs from 'fs'; +import { render } from 'hanji'; +import path, { join } from 'path'; +import type { CockroachSnapshot } from 'src/dialects/cockroach/snapshot'; +import type { MssqlSnapshot } from 'src/dialects/mssql/snapshot'; +import type { PostgresSnapshot } from 'src/dialects/postgres/snapshot'; +import type { SingleStoreSnapshot } from 'src/dialects/singlestore/snapshot'; +import type { MysqlSnapshot } from '../../dialects/mysql/snapshot'; +import type { SqliteSnapshot } from '../../dialects/sqlite/snapshot'; +import { BREAKPOINT } from '../../utils'; +import { prepareMigrationMetadata } from '../../utils/words'; +import type { Driver, Prefix } from '../validations/common'; + +export const writeResult = (config: { + snapshot: SqliteSnapshot | PostgresSnapshot | MysqlSnapshot | MssqlSnapshot | CockroachSnapshot | SingleStoreSnapshot; + sqlStatements: string[]; + outFolder: string; + breakpoints: boolean; + prefixMode: Prefix; + name?: string; + bundle?: boolean; + type?: 'introspect' | 'custom' | 'none'; + driver?: Driver; + renames: string[]; + snapshots: string[]; +}) => { + const { + snapshot, + sqlStatements, + outFolder, + breakpoints, + name, + renames, + bundle = false, + type = 'none', + driver, + snapshots, + } = config; + + if (type === 'none') { + if (sqlStatements.length === 0) { + console.log('No schema changes, nothing to migrate 😴'); + return; + } + } + + const { tag } = prepareMigrationMetadata(name); + + snapshot.renames = renames; + + fs.mkdirSync(join(outFolder, tag)); + fs.writeFileSync( + join(outFolder, `${tag}/snapshot.json`), + JSON.stringify(JSON.parse(JSON.stringify(snapshot)), null, 2), + ); + + const sqlDelimiter = breakpoints ? BREAKPOINT : '\n'; + let sql = sqlStatements.join(sqlDelimiter); + + if (type === 'introspect') { + sql = + `-- Current sql file was generated after introspecting the database\n-- If you want to run this migration please uncomment this code before executing migrations\n/*\n${sql}\n*/`; + } + + if (type === 'custom') { + console.log('Prepared empty file for your custom SQL migration!'); + sql = '-- Custom SQL migration file, put your code below! --'; + } + + fs.writeFileSync(join(outFolder, `${tag}/migration.sql`), sql); + + // js file with .sql imports for React Native / Expo and Durable Sqlite Objects + if (bundle) { + // adding new migration to the list of all migrations + const js = embeddedMigrations([...snapshots || [], join(outFolder, `${tag}/snapshot.json`)], driver); + fs.writeFileSync(`${outFolder}/migrations.js`, js); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] Your SQL migration ➜ ${ + chalk.bold.underline.blue( + path.join(`${outFolder}/${tag}`), + ) + } 🚀`, + ); +}; + +export const embeddedMigrations = (snapshots: string[], driver?: Driver) => { + let content = driver === 'expo' + ? '// This file is required for Expo/React Native SQLite migrations - https://orm.drizzle.team/quick-sqlite/expo\n\n' + : ''; + + const migrations: Record = {}; + + snapshots.forEach((entry, idx) => { + const prefix = entry.split('/')[entry.split('/').length - 2]; + const importName = idx.toString().padStart(4, '0'); + content += `import m${importName} from './${prefix}/migration.sql';\n`; + migrations[prefix] = importName; + }); + + content += ` + export default { + migrations: { + ${Object.entries(migrations).map(([key, query]) => `"${key}": m${query}`).join(',\n')} +} + } + `; + + return content; +}; + +export const prepareSnapshotFolderName = (ms?: number) => { + const now = ms ? new Date(ms) : new Date(); + return `${now.getFullYear()}${two(now.getUTCMonth() + 1)}${ + two( + now.getUTCDate(), + ) + }${two(now.getUTCHours())}${two(now.getUTCMinutes())}${ + two( + now.getUTCSeconds(), + ) + }`; +}; + +const two = (input: number): string => { + return input.toString().padStart(2, '0'); +}; diff --git a/drizzle-kit/src/cli/commands/generate-libsql.ts b/drizzle-kit/src/cli/commands/generate-libsql.ts new file mode 100644 index 0000000000..0b3f7c9bab --- /dev/null +++ b/drizzle-kit/src/cli/commands/generate-libsql.ts @@ -0,0 +1 @@ +export { handle, handleExport } from './generate-sqlite'; diff --git a/drizzle-kit/src/cli/commands/generate-mssql.ts b/drizzle-kit/src/cli/commands/generate-mssql.ts new file mode 100644 index 0000000000..dddf9b0611 --- /dev/null +++ b/drizzle-kit/src/cli/commands/generate-mssql.ts @@ -0,0 +1,112 @@ +import chalk from 'chalk'; +import { ddlDiff, ddlDiffDry } from 'src/dialects/mssql/diff'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/mssql/drizzle'; +import { prepareSnapshot } from 'src/dialects/mssql/serializer'; +import { prepareFilenames, prepareOutFolder } from 'src/utils/utils-node'; +import { createDDL, type DefaultConstraint, interimToDDL } from '../../dialects/mssql/ddl'; +import type { + CheckConstraint, + Column, + ForeignKey, + Index, + MssqlEntities, + PrimaryKey, + Schema, + UniqueConstraint, + View, +} from '../../dialects/mssql/ddl'; +import { resolver } from '../prompts'; +import { withStyle } from '../validations/outputs'; +import { mssqlSchemaError } from '../views'; +import { writeResult } from './generate-common'; +import type { ExportConfig, GenerateConfig } from './utils'; + +export const handle = async (config: GenerateConfig) => { + const { out: outFolder, schema: schemaPath, casing } = config; + + const { snapshots } = prepareOutFolder(outFolder); + const { ddlCur, ddlPrev, snapshot, custom } = await prepareSnapshot(snapshots, schemaPath, casing); + + if (config.custom) { + writeResult({ + snapshot: custom, + sqlStatements: [], + outFolder, + name: config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + renames: [], + snapshots, + }); + return; + } + + const { sqlStatements, renames, statements } = await ddlDiff( + ddlPrev, + ddlCur, + resolver('schema', 'dbo'), + resolver('table', 'dbo'), + resolver('column', 'dbo'), + resolver('view', 'dbo'), + resolver('unique', 'dbo'), // uniques + resolver('index', 'dbo'), // indexes + resolver('check', 'dbo'), // checks + resolver('primary key', 'dbo'), // pks + resolver('foreign key', 'dbo'), // fks + resolver('default', 'dbo'), // fks + 'default', + ); + + const recreateIdentity = statements.find((it) => it.type === 'recreate_identity_column'); + if ( + recreateIdentity && Boolean(recreateIdentity.column.identity?.to) + && !recreateIdentity.column.identity?.from + ) { + console.log( + withStyle.warning( + chalk.red.bold('You are about to add an identity property to an existing column.') + + '\n' + + chalk.red( + 'This operation may result in data loss as the column must be recreated. Identity columns cannot be added to existing ones and do not permit manual value insertion.', + ) + + '\n' + + chalk.red('All existing data in the column will be overwritten with new identity values'), + ), + ); + } + + writeResult({ + snapshot: snapshot, + sqlStatements, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + renames, + snapshots, + }); +}; + +export const handleExport = async (config: ExportConfig) => { + const filenames = prepareFilenames(config.schema); + const res = await prepareFromSchemaFiles(filenames); + + // TODO: do we want to respect config filter here? + // cc: @AleksandrSherman + const { schema, errors } = fromDrizzleSchema(res, config.casing, () => true); + + if (errors.length > 0) { + console.log(errors.map((it) => mssqlSchemaError(it)).join('\n')); + process.exit(1); + } + + const { ddl, errors: errors2 } = interimToDDL(schema); + if (errors2.length > 0) { + console.log(errors.map((it) => mssqlSchemaError(it)).join('\n')); + process.exit(1); + } + + const { sqlStatements } = await ddlDiffDry(createDDL(), ddl, 'default'); + console.log(sqlStatements.join('\n')); +}; diff --git a/drizzle-kit/src/cli/commands/generate-mysql.ts b/drizzle-kit/src/cli/commands/generate-mysql.ts new file mode 100644 index 0000000000..53d7a369fd --- /dev/null +++ b/drizzle-kit/src/cli/commands/generate-mysql.ts @@ -0,0 +1,65 @@ +import { fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/mysql/drizzle'; +import { prepareSnapshot } from 'src/dialects/mysql/serializer'; +import { prepareFilenames, prepareOutFolder } from 'src/utils/utils-node'; +import { type Column, createDDL, interimToDDL, type Table, type View } from '../../dialects/mysql/ddl'; +import { ddlDiff, ddlDiffDry } from '../../dialects/mysql/diff'; +import { resolver } from '../prompts'; +import { explain } from '../views'; +import { writeResult } from './generate-common'; +import type { ExportConfig, GenerateConfig } from './utils'; + +export const handle = async (config: GenerateConfig) => { + const outFolder = config.out; + const schemaPath = config.schema; + const casing = config.casing; + + const { snapshots } = prepareOutFolder(outFolder); + const { ddlCur, ddlPrev, snapshot, custom } = await prepareSnapshot(snapshots, schemaPath, casing); + + if (config.custom) { + writeResult({ + snapshot: custom, + sqlStatements: [], + outFolder, + name: config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + renames: [], + snapshots, + }); + return; + } + + const { sqlStatements, renames, groupedStatements } = await ddlDiff( + ddlPrev, + ddlCur, + resolver('table'), + resolver('column'), + resolver('view'), + 'default', + ); + + const explainMessage = explain('mysql', groupedStatements, false, []); + if (explainMessage) console.log(explainMessage); + + writeResult({ + snapshot, + sqlStatements, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + renames, + snapshots, + }); +}; + +export const handleExport = async (config: ExportConfig) => { + const filenames = prepareFilenames(config.schema); + const res = await prepareFromSchemaFiles(filenames); + const schema = fromDrizzleSchema(res.tables, res.views, config.casing); + const { ddl } = interimToDDL(schema); + const { sqlStatements } = await ddlDiffDry(createDDL(), ddl, 'default'); + console.log(sqlStatements.join('\n')); +}; diff --git a/drizzle-kit/src/cli/commands/generate-postgres.ts b/drizzle-kit/src/cli/commands/generate-postgres.ts new file mode 100644 index 0000000000..b66ba5a6cd --- /dev/null +++ b/drizzle-kit/src/cli/commands/generate-postgres.ts @@ -0,0 +1,91 @@ +import { fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/postgres/drizzle'; +import { prepareFilenames, prepareOutFolder } from 'src/utils/utils-node'; +import type { + CheckConstraint, + Column, + Enum, + ForeignKey, + Index, + Policy, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, +} from '../../dialects/postgres/ddl'; +import { createDDL, interimToDDL } from '../../dialects/postgres/ddl'; +import { ddlDiff, ddlDiffDry } from '../../dialects/postgres/diff'; +import { prepareSnapshot } from '../../dialects/postgres/serializer'; +import { resolver } from '../prompts'; +import { explain } from '../views'; +import { writeResult } from './generate-common'; +import type { ExportConfig, GenerateConfig } from './utils'; + +export const handle = async (config: GenerateConfig) => { + const { out: outFolder, schema: schemaPath, casing } = config; + + const { snapshots } = prepareOutFolder(outFolder); + const { ddlCur, ddlPrev, snapshot, custom } = await prepareSnapshot(snapshots, schemaPath, casing); + + if (config.custom) { + writeResult({ + snapshot: custom, + sqlStatements: [], + outFolder, + name: config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + renames: [], + snapshots, + }); + return; + } + + const { sqlStatements, renames, groupedStatements } = await ddlDiff( + ddlPrev, + ddlCur, + resolver('schema'), + resolver('enum'), + resolver('sequence'), + resolver('policy'), + resolver('role'), + resolver('privilege'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('unique'), + resolver('index'), + resolver('check'), + resolver('primary key'), + resolver('foreign key'), + 'default', + ); + + const explainMessage = explain('mysql', groupedStatements, false, []); + if (explainMessage) console.log(explainMessage); + + writeResult({ + snapshot: snapshot, + sqlStatements, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + renames, + snapshots, + }); +}; + +export const handleExport = async (config: ExportConfig) => { + const filenames = prepareFilenames(config.schema); + const res = await prepareFromSchemaFiles(filenames); + // TODO: do we wan't to export everything or ignore .existing and respect entity filters in config + const { schema } = fromDrizzleSchema(res, config.casing, () => true); + const { ddl } = interimToDDL(schema); + const { sqlStatements } = await ddlDiffDry(createDDL(), ddl, 'default'); + console.log(sqlStatements.join('\n')); +}; diff --git a/drizzle-kit/src/cli/commands/generate-singlestore.ts b/drizzle-kit/src/cli/commands/generate-singlestore.ts new file mode 100644 index 0000000000..c840758cfd --- /dev/null +++ b/drizzle-kit/src/cli/commands/generate-singlestore.ts @@ -0,0 +1,62 @@ +import type { Column, Table, View } from 'src/dialects/mysql/ddl'; +import { createDDL, interimToDDL } from 'src/dialects/mysql/ddl'; +import { ddlDiff, ddlDiffDry } from 'src/dialects/singlestore/diff'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/singlestore/drizzle'; +import { prepareSnapshot } from 'src/dialects/singlestore/serializer'; +import { prepareFilenames, prepareOutFolder } from 'src/utils/utils-node'; +import { resolver } from '../prompts'; +import { writeResult } from './generate-common'; +import type { ExportConfig, GenerateConfig } from './utils'; + +export const handle = async (config: GenerateConfig) => { + const outFolder = config.out; + const schemaPath = config.schema; + const casing = config.casing; + + const { snapshots } = prepareOutFolder(outFolder); + const { ddlCur, ddlPrev, snapshot, custom } = await prepareSnapshot(snapshots, schemaPath, casing); + + if (config.custom) { + writeResult({ + snapshot: custom, + sqlStatements: [], + outFolder, + name: config.name, + breakpoints: config.breakpoints, + type: 'custom', + prefixMode: config.prefix, + renames: [], + snapshots, + }); + return; + } + + const { sqlStatements, renames } = await ddlDiff( + ddlPrev, + ddlCur, + resolver
('table'), + resolver('column'), + resolver('view'), + 'default', + ); + + writeResult({ + snapshot, + sqlStatements, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + prefixMode: config.prefix, + renames, + snapshots, + }); +}; + +export const handleExport = async (config: ExportConfig) => { + const filenames = prepareFilenames(config.schema); + const res = await prepareFromSchemaFiles(filenames); + const schema = fromDrizzleSchema(res.tables, config.casing); + const { ddl } = interimToDDL(schema); + const { sqlStatements } = await ddlDiffDry(createDDL(), ddl); + console.log(sqlStatements.join('\n')); +}; diff --git a/drizzle-kit/src/cli/commands/generate-sqlite.ts b/drizzle-kit/src/cli/commands/generate-sqlite.ts new file mode 100644 index 0000000000..e789d8846e --- /dev/null +++ b/drizzle-kit/src/cli/commands/generate-sqlite.ts @@ -0,0 +1,80 @@ +import { ddlDiff, ddlDiffDry } from 'src/dialects/sqlite/diff'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/sqlite/drizzle'; +import { prepareFilenames, prepareOutFolder } from 'src/utils/utils-node'; +import { type Column, createDDL, interimToDDL, type SqliteEntities } from '../../dialects/sqlite/ddl'; +import { prepareSqliteSnapshot } from '../../dialects/sqlite/serializer'; +import { resolver } from '../prompts'; +import { warning } from '../views'; +import { writeResult } from './generate-common'; +import type { ExportConfig, GenerateConfig } from './utils'; + +export const handle = async (config: GenerateConfig) => { + const outFolder = config.out; + const schemaPath = config.schema; + const casing = config.casing; + + try { + const { snapshots } = prepareOutFolder(outFolder); + const { ddlCur, ddlPrev, snapshot, custom } = await prepareSqliteSnapshot( + snapshots, + schemaPath, + casing, + ); + if (config.custom) { + writeResult({ + snapshot: custom, + sqlStatements: [], + outFolder, + name: config.name, + breakpoints: config.breakpoints, + bundle: config.bundle, + type: 'custom', + prefixMode: config.prefix, + renames: [], + snapshots, + }); + return; + } + const { sqlStatements, warnings, renames } = await ddlDiff( + ddlPrev, + ddlCur, + resolver('table'), + resolver('column'), + 'default', + ); + + // for (const { jsonStatement } of groupedStatements) { + // const msg = sqliteExplain(jsonStatement); + // console.log(msg?.title); + // console.log(msg?.cause); + // } + + for (const w of warnings) { + warning(w); + } + + writeResult({ + snapshot: snapshot, + sqlStatements, + renames, + outFolder, + name: config.name, + breakpoints: config.breakpoints, + bundle: config.bundle, + prefixMode: config.prefix, + driver: config.driver, + snapshots, + }); + } catch (e) { + console.error(e); + } +}; + +export const handleExport = async (config: ExportConfig) => { + const filenames = prepareFilenames(config.schema); + const res = await prepareFromSchemaFiles(filenames); + const schema = fromDrizzleSchema(res.tables, res.views, config.casing); + const { ddl } = interimToDDL(schema); + const { sqlStatements } = await ddlDiffDry(createDDL(), ddl, 'default'); + console.log(sqlStatements.join('\n')); +}; diff --git a/drizzle-kit/src/cli/commands/introspect.ts b/drizzle-kit/src/cli/commands/introspect.ts deleted file mode 100644 index ea672331f3..0000000000 --- a/drizzle-kit/src/cli/commands/introspect.ts +++ /dev/null @@ -1,1114 +0,0 @@ -import chalk from 'chalk'; -import { writeFileSync } from 'fs'; -import { render, renderWithTask } from 'hanji'; -import { Minimatch } from 'minimatch'; -import { join } from 'path'; -import { plural, singular } from 'pluralize'; -import { GelSchema } from 'src/serializer/gelSchema'; -import { drySingleStore, SingleStoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; -import { assertUnreachable, originUUID } from '../../global'; -import { schemaToTypeScript as gelSchemaToTypeScript } from '../../introspect-gel'; -import { schemaToTypeScript as mysqlSchemaToTypeScript } from '../../introspect-mysql'; -import { paramNameFor, schemaToTypeScript as postgresSchemaToTypeScript } from '../../introspect-pg'; -import { schemaToTypeScript as singlestoreSchemaToTypeScript } from '../../introspect-singlestore'; -import { schemaToTypeScript as sqliteSchemaToTypeScript } from '../../introspect-sqlite'; -import { fromDatabase as fromGelDatabase } from '../../serializer/gelSerializer'; -import { dryMySql, MySqlSchema, squashMysqlScheme } from '../../serializer/mysqlSchema'; -import { fromDatabase as fromMysqlDatabase } from '../../serializer/mysqlSerializer'; -import { dryPg, type PgSchema, squashPgScheme } from '../../serializer/pgSchema'; -import { fromDatabase as fromPostgresDatabase } from '../../serializer/pgSerializer'; -import { fromDatabase as fromSingleStoreDatabase } from '../../serializer/singlestoreSerializer'; -import { drySQLite, type SQLiteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema'; -import { fromDatabase as fromSqliteDatabase } from '../../serializer/sqliteSerializer'; -import { - applyLibSQLSnapshotsDiff, - applyMysqlSnapshotsDiff, - applyPgSnapshotsDiff, - applySingleStoreSnapshotsDiff, - applySqliteSnapshotsDiff, -} from '../../snapshotsDiffer'; -import { prepareOutFolder } from '../../utils'; -import { connectToMySQL, connectToSQLite } from '../connections'; -import { Entities } from '../validations/cli'; -import type { Casing, Prefix } from '../validations/common'; -import { GelCredentials } from '../validations/gel'; -import { LibSQLCredentials } from '../validations/libsql'; -import type { MysqlCredentials } from '../validations/mysql'; -import type { PostgresCredentials } from '../validations/postgres'; -import { SingleStoreCredentials } from '../validations/singlestore'; -import type { SqliteCredentials } from '../validations/sqlite'; -import { IntrospectProgress } from '../views'; -import { - columnsResolver, - enumsResolver, - indPolicyResolver, - mySqlViewsResolver, - policyResolver, - roleResolver, - schemasResolver, - sequencesResolver, - sqliteViewsResolver, - tablesResolver, - viewsResolver, - writeResult, -} from './migrate'; - -function postgresToRelationsPull(schema: PgSchema): SchemaForPull { - return Object.values(schema.tables).map((table) => ({ - schema: table.schema, - foreignKeys: Object.values(table.foreignKeys), - uniques: [ - ...Object.values(table.uniqueConstraints).map((unq) => ({ - columns: unq.columns, - })), - ...Object.values(table.indexes).map((idx) => ({ - columns: idx.columns.map((idxc) => { - if (!idxc.isExpression && idx.isUnique) { - return idxc.expression; - } - }).filter((item) => item !== undefined), - })), - ], - })); -} - -export const introspectPostgres = async ( - casing: Casing, - out: string, - breakpoints: boolean, - credentials: PostgresCredentials, - tablesFilter: string[], - schemasFilter: string[], - prefix: Prefix, - entities: Entities, - db?: Awaited>, -) => { - if (!db) { - const { preparePostgresDB } = await import('../connections'); - db = await preparePostgresDB(credentials); - } - - const matchers = tablesFilter.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new IntrospectProgress(true); - - const res = await renderWithTask( - progress, - fromPostgresDatabase( - db, - filter, - schemasFilter, - entities, - (stage, count, status) => { - progress.update(stage, count, status); - }, - ), - ); - - const schema = { id: originUUID, prevId: '', ...res } as PgSchema; - const ts = postgresSchemaToTypeScript(schema, casing); - - const relationsTs = relationsToTypeScript(postgresToRelationsPull(schema), casing); - const { internal, ...schemaWithoutInternals } = schema; - - const schemaFile = join(out, 'schema.ts'); - writeFileSync(schemaFile, ts.file); - const relationsFile = join(out, 'relations.ts'); - writeFileSync(relationsFile, relationsTs.file); - console.log(); - - const { snapshots, journal } = prepareOutFolder(out, 'postgresql'); - - if (snapshots.length === 0) { - const { sqlStatements, _meta } = await applyPgSnapshotsDiff( - squashPgScheme(dryPg), - squashPgScheme(schema), - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, - dryPg, - schema, - ); - - writeResult({ - cur: schema, - sqlStatements, - journal, - _meta, - outFolder: out, - breakpoints, - type: 'introspect', - prefixMode: prefix, - }); - } else { - render( - `[${ - chalk.blue( - 'i', - ) - }] No SQL generated, you already have migrations in project`, - ); - } - - render( - `[${ - chalk.green( - '✓', - ) - }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, - ); - render( - `[${ - chalk.green( - '✓', - ) - }] Your relations file is ready ➜ ${ - chalk.bold.underline.blue( - relationsFile, - ) - } 🚀`, - ); -}; - -function gelToRelationsPull(schema: GelSchema): SchemaForPull { - return Object.values(schema.tables).map((table) => ({ - schema: table.schema, - foreignKeys: Object.values(table.foreignKeys), - uniques: [ - ...Object.values(table.uniqueConstraints).map((unq) => ({ - columns: unq.columns, - })), - ...Object.values(table.indexes).map((idx) => ({ - columns: idx.columns.map((idxc) => { - if (!idxc.isExpression && idx.isUnique) { - return idxc.expression; - } - }).filter((item) => item !== undefined), - })), - ], - })); -} - -export const introspectGel = async ( - casing: Casing, - out: string, - breakpoints: boolean, - credentials: GelCredentials | undefined, - tablesFilter: string[], - schemasFilter: string[], - prefix: Prefix, - entities: Entities, - db?: Awaited>, -) => { - if (!db) { - const { prepareGelDB } = await import('../connections'); - db = await prepareGelDB(credentials); - } - - const matchers = tablesFilter.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new IntrospectProgress(true); - - const res = await renderWithTask( - progress, - fromGelDatabase( - db, - filter, - schemasFilter, - entities, - (stage, count, status) => { - progress.update(stage, count, status); - }, - ), - ); - - const schema = { id: originUUID, prevId: '', ...res } as GelSchema; - const ts = gelSchemaToTypeScript(schema, casing); - // TODO - const relationsTs = relationsToTypeScript(gelToRelationsPull(schema), casing); - const { internal, ...schemaWithoutInternals } = schema; - - const schemaFile = join(out, 'schema.ts'); - writeFileSync(schemaFile, ts.file); - const relationsFile = join(out, 'relations.ts'); - writeFileSync(relationsFile, relationsTs.file); - console.log(); - - // const { snapshots, journal } = prepareOutFolder(out, 'gel'); - - // if (snapshots.length === 0) { - // const { sqlStatements, _meta } = await applyGelSnapshotsDiff( - // squashGelScheme(dryGel), - // squashGelScheme(schema), - // schemasResolver, - // enumsResolver, - // sequencesResolver, - // policyResolver, - // indPolicyResolver, - // roleResolver, - // tablesResolver, - // columnsResolver, - // viewsResolver, - // dryPg, - // schema, - // ); - - // writeResult({ - // cur: schema, - // sqlStatements, - // journal, - // _meta, - // outFolder: out, - // breakpoints, - // type: 'introspect', - // prefixMode: prefix, - // }); - // } else { - // render( - // `[${ - // chalk.blue( - // 'i', - // ) - // }] No SQL generated, you already have migrations in project`, - // ); - // } - - render( - `[${ - chalk.green( - '✓', - ) - }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, - ); - render( - `[${ - chalk.green( - '✓', - ) - }] Your relations file is ready ➜ ${ - chalk.bold.underline.blue( - relationsFile, - ) - } 🚀`, - ); -}; - -function mysqlToRelationsPull(schema: MySqlSchema): SchemaForPull { - return Object.values(schema.tables).map((table) => ({ - schema: undefined, - foreignKeys: Object.values(table.foreignKeys), - uniques: [ - ...Object.values(table.uniqueConstraints).map((unq) => ({ - columns: unq.columns, - })), - ...Object.values(table.indexes).map((idx) => ({ - columns: idx.columns, - })), - ], - })); -} - -export const introspectMysql = async ( - casing: Casing, - out: string, - breakpoints: boolean, - credentials: MysqlCredentials, - tablesFilter: string[], - prefix: Prefix, - connection?: Awaited>, -) => { - if (!connection) { - const { connectToMySQL } = await import('../connections'); - connection = await connectToMySQL(credentials); - } - - const matchers = tablesFilter.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new IntrospectProgress(); - const res = await renderWithTask( - progress, - fromMysqlDatabase(connection.db, connection.database, filter, (stage, count, status) => { - progress.update(stage, count, status); - }), - ); - - const schema = { id: originUUID, prevId: '', ...res } as MySqlSchema; - const ts = mysqlSchemaToTypeScript(schema, casing); - // TODO - const relationsTs = relationsToTypeScript(mysqlToRelationsPull(schema), casing); - const { internal, ...schemaWithoutInternals } = schema; - - const schemaFile = join(out, 'schema.ts'); - writeFileSync(schemaFile, ts.file); - const relationsFile = join(out, 'relations.ts'); - writeFileSync(relationsFile, relationsTs.file); - console.log(); - - const { snapshots, journal } = prepareOutFolder(out, 'mysql'); - - if (snapshots.length === 0) { - const { sqlStatements, _meta } = await applyMysqlSnapshotsDiff( - squashMysqlScheme(dryMySql), - squashMysqlScheme(schema), - tablesResolver, - columnsResolver, - mySqlViewsResolver, - dryMySql, - schema, - ); - - writeResult({ - cur: schema, - sqlStatements, - journal, - _meta, - outFolder: out, - breakpoints, - type: 'introspect', - prefixMode: prefix, - }); - } else { - render( - `[${ - chalk.blue( - 'i', - ) - }] No SQL generated, you already have migrations in project`, - ); - } - - render( - `[${ - chalk.green( - '✓', - ) - }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, - ); - render( - `[${ - chalk.green( - '✓', - ) - }] Your relations file is ready ➜ ${ - chalk.bold.underline.blue( - relationsFile, - ) - } 🚀`, - ); -}; - -export const introspectSingleStore = async ( - casing: Casing, - out: string, - breakpoints: boolean, - credentials: SingleStoreCredentials, - tablesFilter: string[], - prefix: Prefix, - connection?: Awaited>, -) => { - if (!connection) { - const { connectToSingleStore } = await import('../connections'); - connection = await connectToSingleStore(credentials); - } - - const matchers = tablesFilter.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new IntrospectProgress(); - const res = await renderWithTask( - progress, - fromSingleStoreDatabase(connection.db, connection.database, filter, (stage, count, status) => { - progress.update(stage, count, status); - }), - ); - - const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema; - const ts = singlestoreSchemaToTypeScript(schema, casing); - const { internal, ...schemaWithoutInternals } = schema; - - const schemaFile = join(out, 'schema.ts'); - writeFileSync(schemaFile, ts.file); - console.log(); - - const { snapshots, journal } = prepareOutFolder(out, 'postgresql'); - - if (snapshots.length === 0) { - const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( - squashSingleStoreScheme(drySingleStore), - squashSingleStoreScheme(schema), - tablesResolver, - columnsResolver, - /* singleStoreViewsResolver, */ - drySingleStore, - schema, - ); - - writeResult({ - cur: schema, - sqlStatements, - journal, - _meta, - outFolder: out, - breakpoints, - type: 'introspect', - prefixMode: prefix, - }); - } else { - render( - `[${ - chalk.blue( - 'i', - ) - }] No SQL generated, you already have migrations in project`, - ); - } - - render( - `[${ - chalk.green( - '✓', - ) - }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, - ); -}; - -function sqliteToRelationsPull(schema: SQLiteSchema): SchemaForPull { - return Object.values(schema.tables).map((table) => ({ - schema: undefined, - foreignKeys: Object.values(table.foreignKeys), - uniques: [ - ...Object.values(table.uniqueConstraints).map((unq) => ({ - columns: unq.columns, - })), - ...Object.values(table.indexes).map((idx) => ({ - columns: idx.columns, - })), - ], - })); -} - -export const introspectSqlite = async ( - casing: Casing, - out: string, - breakpoints: boolean, - credentials: SqliteCredentials, - tablesFilter: string[], - prefix: Prefix, - db?: Awaited>, -) => { - if (!db) { - const { connectToSQLite } = await import('../connections'); - db = await connectToSQLite(credentials); - } - const matchers = tablesFilter.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new IntrospectProgress(); - const res = await renderWithTask( - progress, - fromSqliteDatabase(db, filter, (stage, count, status) => { - progress.update(stage, count, status); - }), - ); - - const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; - const ts = sqliteSchemaToTypeScript(schema, casing); - // TODO - const relationsTs = relationsToTypeScript(sqliteToRelationsPull(schema), casing); - - // check orm and orm-pg api version - - const schemaFile = join(out, 'schema.ts'); - writeFileSync(schemaFile, ts.file); - const relationsFile = join(out, 'relations.ts'); - writeFileSync(relationsFile, relationsTs.file); - console.log(); - - const { snapshots, journal } = prepareOutFolder(out, 'sqlite'); - - if (snapshots.length === 0) { - const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( - squashSqliteScheme(drySQLite), - squashSqliteScheme(schema), - tablesResolver, - columnsResolver, - sqliteViewsResolver, - drySQLite, - schema, - ); - - writeResult({ - cur: schema, - sqlStatements, - journal, - _meta, - outFolder: out, - breakpoints, - type: 'introspect', - prefixMode: prefix, - }); - } else { - render( - `[${ - chalk.blue( - 'i', - ) - }] No SQL generated, you already have migrations in project`, - ); - } - - render( - `[${ - chalk.green( - '✓', - ) - }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, - ); - render( - `[${ - chalk.green( - '✓', - ) - }] You relations file is ready ➜ ${ - chalk.bold.underline.blue( - relationsFile, - ) - } 🚀`, - ); -}; - -export const introspectLibSQL = async ( - casing: Casing, - out: string, - breakpoints: boolean, - credentials: LibSQLCredentials, - tablesFilter: string[], - prefix: Prefix, - db?: Awaited>, -) => { - if (!db) { - const { connectToLibSQL } = await import('../connections'); - db = await connectToLibSQL(credentials); - } - - const matchers = tablesFilter.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new IntrospectProgress(); - const res = await renderWithTask( - progress, - fromSqliteDatabase(db, filter, (stage, count, status) => { - progress.update(stage, count, status); - }), - ); - - const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; - const ts = sqliteSchemaToTypeScript(schema, casing); - // TODO - const relationsTs = relationsToTypeScript(sqliteToRelationsPull(schema), casing); - - // check orm and orm-pg api version - - const schemaFile = join(out, 'schema.ts'); - writeFileSync(schemaFile, ts.file); - const relationsFile = join(out, 'relations.ts'); - writeFileSync(relationsFile, relationsTs.file); - console.log(); - - const { snapshots, journal } = prepareOutFolder(out, 'sqlite'); - - if (snapshots.length === 0) { - const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff( - squashSqliteScheme(drySQLite), - squashSqliteScheme(schema), - tablesResolver, - columnsResolver, - sqliteViewsResolver, - drySQLite, - schema, - ); - - writeResult({ - cur: schema, - sqlStatements, - journal, - _meta, - outFolder: out, - breakpoints, - type: 'introspect', - prefixMode: prefix, - }); - } else { - render( - `[${ - chalk.blue( - 'i', - ) - }] No SQL generated, you already have migrations in project`, - ); - } - - render( - `[${ - chalk.green( - '✓', - ) - }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, - ); - render( - `[${ - chalk.green( - '✓', - ) - }] Your relations file is ready ➜ ${ - chalk.bold.underline.blue( - relationsFile, - ) - } 🚀`, - ); -}; - -const withCasing = (value: string, casing: Casing) => { - if (casing === 'preserve') { - return value; - } - if (casing === 'camel') { - return value.camelCase(); - } - - assertUnreachable(casing); -}; - -export type SchemaForPull = { - schema?: string; - foreignKeys: { - name: string; - tableFrom: string; - columnsFrom: string[]; - tableTo: string; - schemaTo?: string; - columnsTo: string[]; - onUpdate?: string | undefined; - onDelete?: string | undefined; - }[]; - // both unique constraints and unique indexes - uniques: { - columns: string[]; - }[]; -}[]; - -export const relationsToTypeScript = ( - schema: SchemaForPull, - casing: Casing, -) => { - const imports: string[] = []; - const tableRelations: Record< - string, - { - name: string; - type: 'one' | 'many' | 'through' | 'many-through' | 'one-one'; - tableFrom: string; - schemaFrom?: string; - columnsFrom: string[]; - tableTo: string; - schemaTo?: string; - columnsTo: string[]; - relationName?: string; - tableThrough?: string; - columnsThroughFrom?: string[]; - columnsThroughTo?: string[]; - }[] - > = {}; - - // Process all foreign keys as before. - schema.forEach((table) => { - const fks = Object.values(table.foreignKeys); - - if (fks.length === 2) { - const [fk1, fk2] = fks; - // reference to different tables, means it can be through many-many - const toTable1 = withCasing(paramNameFor(fk1.tableTo, fk1.schemaTo), casing); - const columnsTo1 = fk1.columnsTo.map((it) => withCasing(it, casing)); - - const toTable2 = withCasing(paramNameFor(fk2.tableTo, fk2.schemaTo), casing); - const columnsTo2 = fk2.columnsTo.map((it) => withCasing(it, casing)); - - const tableThrough = withCasing(paramNameFor(fk1.tableFrom, table.schema), casing); - const tableFrom2 = withCasing(paramNameFor(fk2.tableFrom, table.schema), casing); - const columnsThroughFrom = fk1.columnsFrom.map((it) => withCasing(it, casing)); - const columnsThroughTo = fk2.columnsFrom.map((it) => withCasing(it, casing)); - - if ( - toTable1 !== toTable2 - ) { - if (!tableRelations[toTable1]) { - tableRelations[toTable1] = []; - } - - tableRelations[toTable1].push({ - name: plural(toTable2), - type: 'through', - tableFrom: toTable1, - columnsFrom: columnsTo1, - tableTo: toTable2, - columnsTo: columnsTo2, - tableThrough, - columnsThroughFrom, - columnsThroughTo, - }); - - if (!tableRelations[toTable2]) { - tableRelations[toTable2] = []; - } - - tableRelations[toTable2].push({ - name: plural(toTable1), - // this type is used for .many() side of relation, when another side has .through() with from and to fields - type: 'many-through', - tableFrom: toTable2, - columnsFrom: fk2.columnsTo, - tableTo: toTable1, - columnsTo: columnsTo2, - tableThrough, - columnsThroughFrom, - columnsThroughTo, - }); - } - } else { - fks.forEach((fk) => { - const tableNameFrom = paramNameFor(fk.tableFrom, table.schema); - const tableNameTo = paramNameFor(fk.tableTo, fk.schemaTo); - const tableFrom = withCasing(tableNameFrom.replace(/:+/g, ''), casing); - const tableTo = withCasing(tableNameTo.replace(/:+/g, ''), casing); - const columnsFrom = fk.columnsFrom.map((it) => withCasing(it, casing)); - const columnsTo = fk.columnsTo.map((it) => withCasing(it, casing)); - - imports.push(tableTo, tableFrom); - - const keyFrom = tableFrom; - if (!tableRelations[keyFrom]) { - tableRelations[keyFrom] = []; - } - - tableRelations[keyFrom].push({ - name: singular(tableTo), - type: 'one', - tableFrom, - columnsFrom, - tableTo, - columnsTo, - }); - - const keyTo = tableTo; - if (!tableRelations[keyTo]) { - tableRelations[keyTo] = []; - } - - // if this table has a unique on a column, that is used for 1-m, then we can assume that it's 1-1 relation - // we will check that all of the fk columns are unique, so we can assume it's 1-1 - // not matter if it's 1 column, 2 columns or more - if ( - table.uniques.find((constraint) => - constraint.columns.length === columnsFrom.length - && constraint.columns.every((col, i) => col === columnsFrom[i]) - ) - ) { - // the difference between one and one-one is that one-one won't contain from and to - // maybe it can be done by introducing some sort of flag or just not providing columnsFrom and columnsTo - // but I decided just to have a different type field here - tableRelations[keyTo].push({ - name: plural(tableFrom), - type: 'one-one', - tableFrom: tableTo, - columnsFrom: columnsTo, - tableTo: tableFrom, - columnsTo: columnsFrom, - }); - } else { - tableRelations[keyTo].push({ - name: plural(tableFrom), - type: 'many', - tableFrom: tableTo, - columnsFrom: columnsTo, - tableTo: tableFrom, - columnsTo: columnsFrom, - }); - } - }); - } - }); - - const importsTs = `import { defineRelations } from "drizzle-orm";\nimport * as schema from "./schema";\n\n`; - - let relationString = `export const relations = defineRelations(schema, (r) => ({`; - - Object.entries(tableRelations).forEach(([table, relations]) => { - // Adjust duplicate names if needed. - const preparedRelations = relations.map( - (relation, relationIndex, originArray) => { - let name = relation.name; - let relationName; - const hasMultipleRelations = originArray.some( - (it, originIndex) => relationIndex !== originIndex && it.tableTo === relation.tableTo, - ); - if (hasMultipleRelations) { - // if one relation - we need to name a relation from this table to "many" table - if (relation.type === 'one') { - relationName = `${relation.tableFrom}_${relation.columnsFrom.join('_')}_${relation.tableTo}_${ - relation.columnsTo.join('_') - }`; - // if many relation - name in in different order, so alias names will match - } else if (relation.type === 'many' || relation.type === 'one-one') { - relationName = `${relation.tableTo}_${relation.columnsTo.join('_')}_${relation.tableFrom}_${ - relation.columnsFrom.join('_') - }`; - // if through relation - we need to name a relation from this table to "many" table and include "via" - } else if (relation.type === 'through') { - relationName = `${relation.tableFrom}_${relation.columnsFrom.join('_')}_${relation.tableTo}_${ - relation.columnsTo.join('_') - }_via_${relation.tableThrough}`; - // else is for many-through, meaning we need to reverse the order for tables and columns, but leave "via" the same - } else { - relationName = `${relation.tableTo}_${relation.columnsTo.join('_')}_${relation.tableFrom}_${ - relation.columnsFrom.join('_') - }_via_${relation.tableThrough}`; - } - } - const hasDuplicatedRelation = originArray.some( - (it, originIndex) => relationIndex !== originIndex && it.name === relation.name, - ); - if (hasDuplicatedRelation) { - name = `${relation.name}_${ - relation.type === 'through' - ? `via_${relation.tableThrough}` - : relation.type === 'many-through' - ? `via_${relation.tableThrough}` - : relation.type === 'one' - ? relation.columnsFrom.join('_') - : relation.columnsTo.join('_') - }`; - } - return { - ...relation, - name: withCasing(name, casing), - relationName, - }; - }, - ); - - relationString += `\n\t${table}: {`; - preparedRelations.forEach((relation) => { - if (relation.type === 'one') { - const from = relation.columnsFrom.length === 1 - ? `r.${relation.tableFrom}.${relation.columnsFrom[0]}` - : `[${ - relation.columnsFrom - .map((it) => `r.${relation.tableFrom}.${it}`) - .join(', ') - }]`; - const to = relation.columnsTo.length === 1 - ? `r.${relation.tableTo}.${relation.columnsTo[0]}` - : `[${ - relation.columnsTo - .map((it) => `r.${relation.tableTo}.${it}`) - .join(', ') - }]`; - - relationString += `\n\t\t${relation.name}: r.one.${relation.tableTo}({\n\t\t\tfrom: ${from},\n\t\t\tto: ${to}` - + (relation.relationName ? `,\n\t\t\talias: "${relation.relationName}"` : '') - + `\n\t\t}),`; - } else if (relation.type === 'many' || relation.type === 'many-through') { - relationString += `\n\t\t${relation.name}: r.many.${relation.tableTo}(` - + (relation.relationName ? `{\n\t\t\talias: "${relation.relationName}"\n\t\t}` : '') - + `),`; - } else if (relation.type === 'one-one') { - relationString += `\n\t\t${relation.name}: r.one.${relation.tableTo}(` - + (relation.relationName ? `{\n\t\t\talias: "${relation.relationName}"\n\t\t}` : '') - + `),`; - } else { - const from = relation.columnsThroughFrom!.length === 1 - ? `r.${relation.tableFrom}.${relation.columnsFrom[0]}.through(r.${relation.tableThrough}.${ - relation.columnsThroughFrom![0] - })` - : `[${ - relation.columnsThroughFrom! - .map((it) => `r.${relation.tableFrom}.${it}.through(${relation.tableThrough}.${it})`) - .join(', ') - }]`; - const to = relation.columnsThroughTo!.length === 1 - ? `r.${relation.tableTo}.${relation.columnsTo![0]}.through(r.${relation.tableThrough}.${ - relation.columnsThroughTo![0] - })` - : `[${ - relation.columnsThroughTo! - .map((it) => `r.${relation.tableTo}.${it}.through(${relation.tableThrough}.${it})`) - .join(', ') - }]`; - - relationString += `\n\t\t${relation.name}: r.many.${relation.tableTo}({\n\t\t\tfrom: ${from},\n\t\t\tto: ${to}` - + (relation.relationName ? `,\n\t\t\talias: "${relation.relationName}"` : '') - + `\n\t\t}),`; - } - }); - relationString += `\n\t},`; - }); - - relationString += `\n}))`; - - return { - file: importsTs + relationString, - }; -}; diff --git a/drizzle-kit/src/cli/commands/libSqlPushUtils.ts b/drizzle-kit/src/cli/commands/libSqlPushUtils.ts deleted file mode 100644 index 7084d94f4a..0000000000 --- a/drizzle-kit/src/cli/commands/libSqlPushUtils.ts +++ /dev/null @@ -1,355 +0,0 @@ -import chalk from 'chalk'; - -import { JsonStatement } from 'src/jsonStatements'; -import { findAddedAndRemoved, type SQLiteDB } from 'src/utils'; -import { SQLiteSchemaInternal, SQLiteSchemaSquashed, SQLiteSquasher } from '../../serializer/sqliteSchema'; -import { - CreateSqliteIndexConvertor, - fromJson, - LibSQLModifyColumn, - SQLiteCreateTableConvertor, - SQLiteDropTableConvertor, - SqliteRenameTableConvertor, -} from '../../sqlgenerator'; - -export const getOldTableName = ( - tableName: string, - meta: SQLiteSchemaInternal['_meta'], -) => { - for (const key of Object.keys(meta.tables)) { - const value = meta.tables[key]; - if (`"${tableName}"` === value) { - return key.substring(1, key.length - 1); - } - } - return tableName; -}; - -export const _moveDataStatements = ( - tableName: string, - json: SQLiteSchemaSquashed, - dataLoss: boolean = false, -) => { - const statements: string[] = []; - - const newTableName = `__new_${tableName}`; - - // create table statement from a new json2 with proper name - const tableColumns = Object.values(json.tables[tableName].columns); - const referenceData = Object.values(json.tables[tableName].foreignKeys); - const compositePKs = Object.values( - json.tables[tableName].compositePrimaryKeys, - ).map((it) => SQLiteSquasher.unsquashPK(it)); - const checkConstraints = Object.values(json.tables[tableName].checkConstraints); - - const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it)); - - const mappedCheckConstraints: string[] = checkConstraints.map((it) => - it.replaceAll(`"${tableName}".`, `"${newTableName}".`) - .replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) - .replaceAll(`${tableName}.`, `${newTableName}.`) - .replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) - ); - - // create new table - statements.push( - new SQLiteCreateTableConvertor().convert({ - type: 'sqlite_create_table', - tableName: newTableName, - columns: tableColumns, - referenceData: fks, - compositePKs, - checkConstraints: mappedCheckConstraints, - }), - ); - - // move data - if (!dataLoss) { - const columns = Object.keys(json.tables[tableName].columns).map( - (c) => `"${c}"`, - ); - - statements.push( - `INSERT INTO \`${newTableName}\`(${ - columns.join( - ', ', - ) - }) SELECT ${columns.join(', ')} FROM \`${tableName}\`;`, - ); - } - - statements.push( - new SQLiteDropTableConvertor().convert({ - type: 'drop_table', - tableName: tableName, - schema: '', - }), - ); - - // rename table - statements.push( - new SqliteRenameTableConvertor().convert({ - fromSchema: '', - tableNameFrom: newTableName, - tableNameTo: tableName, - toSchema: '', - type: 'rename_table', - }), - ); - - for (const idx of Object.values(json.tables[tableName].indexes)) { - statements.push( - new CreateSqliteIndexConvertor().convert({ - type: 'create_index', - tableName: tableName, - schema: '', - data: idx, - }), - ); - } - return statements; -}; - -export const libSqlLogSuggestionsAndReturn = async ( - connection: SQLiteDB, - statements: JsonStatement[], - json1: SQLiteSchemaSquashed, - json2: SQLiteSchemaSquashed, - meta: SQLiteSchemaInternal['_meta'], -) => { - let shouldAskForApprove = false; - const statementsToExecute: string[] = []; - const infoToPrint: string[] = []; - - const tablesToRemove: string[] = []; - const columnsToRemove: string[] = []; - const tablesToTruncate: string[] = []; - - for (const statement of statements) { - if (statement.type === 'drop_table') { - const res = await connection.query<{ count: string }>( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.tableName, - ) - } table with ${count} items`, - ); - tablesToRemove.push(statement.tableName); - shouldAskForApprove = true; - } - const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } else if (statement.type === 'alter_table_drop_column') { - const tableName = statement.tableName; - - const res = await connection.query<{ count: string }>( - `select count(*) as count from \`${tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.columnName, - ) - } column in ${tableName} table with ${count} items`, - ); - columnsToRemove.push(`${tableName}_${statement.columnName}`); - shouldAskForApprove = true; - } - - const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } else if ( - statement.type === 'sqlite_alter_table_add_column' - && statement.column.notNull - && !statement.column.default - ) { - const newTableName = statement.tableName; - const res = await connection.query<{ count: string }>( - `select count(*) as count from \`${newTableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to add not-null ${ - chalk.underline( - statement.column.name, - ) - } column without default value, which contains ${count} items`, - ); - - tablesToTruncate.push(newTableName); - statementsToExecute.push(`delete from ${newTableName};`); - - shouldAskForApprove = true; - } - - const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } else if (statement.type === 'alter_table_alter_column_set_notnull') { - const tableName = statement.tableName; - - if ( - statement.type === 'alter_table_alter_column_set_notnull' - && typeof statement.columnDefault === 'undefined' - ) { - const res = await connection.query<{ count: string }>( - `select count(*) as count from \`${tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to add not-null constraint to ${ - chalk.underline( - statement.columnName, - ) - } column without default value, which contains ${count} items`, - ); - - tablesToTruncate.push(tableName); - statementsToExecute.push(`delete from \`${tableName}\``); - shouldAskForApprove = true; - } - } - - const modifyStatements = new LibSQLModifyColumn().convert(statement, json2); - - statementsToExecute.push( - ...(Array.isArray(modifyStatements) ? modifyStatements : [modifyStatements]), - ); - } else if (statement.type === 'recreate_table') { - const tableName = statement.tableName; - - let dataLoss = false; - - const oldTableName = getOldTableName(tableName, meta); - - const prevColumnNames = Object.keys(json1.tables[oldTableName].columns); - const currentColumnNames = Object.keys(json2.tables[tableName].columns); - const { removedColumns, addedColumns } = findAddedAndRemoved( - prevColumnNames, - currentColumnNames, - ); - - if (removedColumns.length) { - for (const removedColumn of removedColumns) { - const res = await connection.query<{ count: string }>( - `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``, - ); - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - removedColumn, - ) - } column in ${tableName} table with ${count} items`, - ); - columnsToRemove.push(removedColumn); - shouldAskForApprove = true; - } - } - } - - if (addedColumns.length) { - for (const addedColumn of addedColumns) { - const [res] = await connection.query<{ count: string }>( - `select count(*) as count from \`${tableName}\``, - ); - - const columnConf = json2.tables[tableName].columns[addedColumn]; - - const count = Number(res.count); - if (count > 0 && columnConf.notNull && !columnConf.default) { - dataLoss = true; - - infoToPrint.push( - `· You're about to add not-null ${ - chalk.underline( - addedColumn, - ) - } column without default value to table, which contains ${count} items`, - ); - shouldAskForApprove = true; - tablesToTruncate.push(tableName); - - statementsToExecute.push(`DELETE FROM \`${tableName}\`;`); - } - } - } - - // check if some tables referencing current for pragma - const tablesReferencingCurrent: string[] = []; - - for (const table of Object.values(json2.tables)) { - const tablesRefs = Object.values(json2.tables[table.name].foreignKeys) - .filter((t) => SQLiteSquasher.unsquashPushFK(t).tableTo === tableName) - .map((it) => SQLiteSquasher.unsquashPushFK(it).tableFrom); - - tablesReferencingCurrent.push(...tablesRefs); - } - - if (!tablesReferencingCurrent.length) { - statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss)); - continue; - } - - // recreate table - statementsToExecute.push( - ..._moveDataStatements(tableName, json2, dataLoss), - ); - } else if ( - statement.type === 'alter_table_alter_column_set_generated' - || statement.type === 'alter_table_alter_column_drop_generated' - ) { - const tableName = statement.tableName; - - const res = await connection.query<{ count: string }>( - `select count("${statement.columnName}") as count from \`${tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.columnName, - ) - } column in ${tableName} table with ${count} items`, - ); - columnsToRemove.push(`${tableName}_${statement.columnName}`); - shouldAskForApprove = true; - } - const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } else { - const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } - } - - return { - statementsToExecute: [...new Set(statementsToExecute)], - shouldAskForApprove, - infoToPrint, - columnsToRemove: [...new Set(columnsToRemove)], - tablesToTruncate: [...new Set(tablesToTruncate)], - tablesToRemove: [...new Set(tablesToRemove)], - }; -}; diff --git a/drizzle-kit/src/cli/commands/migrate.ts b/drizzle-kit/src/cli/commands/migrate.ts deleted file mode 100644 index f209af1c62..0000000000 --- a/drizzle-kit/src/cli/commands/migrate.ts +++ /dev/null @@ -1,1490 +0,0 @@ -import fs from 'fs'; -import { - prepareMySqlDbPushSnapshot, - prepareMySqlMigrationSnapshot, - preparePgDbPushSnapshot, - preparePgMigrationSnapshot, - prepareSingleStoreDbPushSnapshot, - prepareSingleStoreMigrationSnapshot, - prepareSQLiteDbPushSnapshot, - prepareSqliteMigrationSnapshot, -} from '../../migrationPreparator'; - -import chalk from 'chalk'; -import { render } from 'hanji'; -import path, { join } from 'path'; -import { SingleStoreSchema, singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; -import { TypeOf } from 'zod'; -import type { CommonSchema } from '../../schemaValidator'; -import { MySqlSchema, mysqlSchema, squashMysqlScheme, ViewSquashed } from '../../serializer/mysqlSchema'; -import { PgSchema, pgSchema, Policy, Role, squashPgScheme, View } from '../../serializer/pgSchema'; -import { SQLiteSchema, sqliteSchema, squashSqliteScheme, View as SQLiteView } from '../../serializer/sqliteSchema'; -import { - applyLibSQLSnapshotsDiff, - applyMysqlSnapshotsDiff, - applyPgSnapshotsDiff, - applySingleStoreSnapshotsDiff, - applySqliteSnapshotsDiff, - Column, - ColumnsResolverInput, - ColumnsResolverOutput, - Enum, - PolicyResolverInput, - PolicyResolverOutput, - ResolverInput, - ResolverOutput, - ResolverOutputWithMoved, - RolesResolverInput, - RolesResolverOutput, - Sequence, - Table, - TablePolicyResolverInput, - TablePolicyResolverOutput, -} from '../../snapshotsDiffer'; -import { assertV1OutFolder, Journal, prepareMigrationFolder } from '../../utils'; -import { prepareMigrationMetadata } from '../../utils/words'; -import { CasingType, Driver, Prefix } from '../validations/common'; -import { withStyle } from '../validations/outputs'; -import { - isRenamePromptItem, - RenamePropmtItem, - ResolveColumnSelect, - ResolveSchemasSelect, - ResolveSelect, - ResolveSelectNamed, - schema, -} from '../views'; -import { ExportConfig, GenerateConfig } from './utils'; - -export type Named = { - name: string; -}; - -export type NamedWithSchema = { - name: string; - schema: string; -}; - -export const schemasResolver = async ( - input: ResolverInput
, -): Promise> => { - try { - const { created, deleted, renamed } = await promptSchemasConflict( - input.created, - input.deleted, - ); - - return { created: created, deleted: deleted, renamed: renamed }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const tablesResolver = async ( - input: ResolverInput
, -): Promise> => { - try { - const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( - input.created, - input.deleted, - 'table', - ); - - return { - created: created, - deleted: deleted, - moved: moved, - renamed: renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const viewsResolver = async ( - input: ResolverInput, -): Promise> => { - try { - const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( - input.created, - input.deleted, - 'view', - ); - - return { - created: created, - deleted: deleted, - moved: moved, - renamed: renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const mySqlViewsResolver = async ( - input: ResolverInput, -): Promise> => { - try { - const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( - input.created, - input.deleted, - 'view', - ); - - return { - created: created, - deleted: deleted, - moved: moved, - renamed: renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -/* export const singleStoreViewsResolver = async ( - input: ResolverInput, -): Promise> => { - try { - const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( - input.created, - input.deleted, - 'view', - ); - - return { - created: created, - deleted: deleted, - moved: moved, - renamed: renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; */ - -export const sqliteViewsResolver = async ( - input: ResolverInput, -): Promise> => { - try { - const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( - input.created, - input.deleted, - 'view', - ); - - return { - created: created, - deleted: deleted, - moved: moved, - renamed: renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const sequencesResolver = async ( - input: ResolverInput, -): Promise> => { - try { - const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( - input.created, - input.deleted, - 'sequence', - ); - - return { - created: created, - deleted: deleted, - moved: moved, - renamed: renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const roleResolver = async ( - input: RolesResolverInput, -): Promise> => { - const result = await promptNamedConflict( - input.created, - input.deleted, - 'role', - ); - return { - created: result.created, - deleted: result.deleted, - renamed: result.renamed, - }; -}; - -export const policyResolver = async ( - input: TablePolicyResolverInput, -): Promise> => { - const result = await promptColumnsConflicts( - input.tableName, - input.created, - input.deleted, - ); - return { - tableName: input.tableName, - schema: input.schema, - created: result.created, - deleted: result.deleted, - renamed: result.renamed, - }; -}; - -export const indPolicyResolver = async ( - input: PolicyResolverInput, -): Promise> => { - const result = await promptNamedConflict( - input.created, - input.deleted, - 'policy', - ); - return { - created: result.created, - deleted: result.deleted, - renamed: result.renamed, - }; -}; - -export const enumsResolver = async ( - input: ResolverInput, -): Promise> => { - try { - const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( - input.created, - input.deleted, - 'enum', - ); - - return { - created: created, - deleted: deleted, - moved: moved, - renamed: renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const columnsResolver = async ( - input: ColumnsResolverInput, -): Promise> => { - const result = await promptColumnsConflicts( - input.tableName, - input.created, - input.deleted, - ); - return { - tableName: input.tableName, - schema: input.schema, - created: result.created, - deleted: result.deleted, - renamed: result.renamed, - }; -}; - -export const prepareAndMigratePg = async (config: GenerateConfig) => { - const outFolder = config.out; - const schemaPath = config.schema; - const casing = config.casing; - - try { - assertV1OutFolder(outFolder); - - const { snapshots, journal } = prepareMigrationFolder( - outFolder, - 'postgresql', - ); - - const { prev, cur, custom } = await preparePgMigrationSnapshot( - snapshots, - schemaPath, - casing, - ); - - const validatedPrev = pgSchema.parse(prev); - const validatedCur = pgSchema.parse(cur); - - if (config.custom) { - writeResult({ - cur: custom, - sqlStatements: [], - journal, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - type: 'custom', - prefixMode: config.prefix, - }); - return; - } - - const squashedPrev = squashPgScheme(validatedPrev); - const squashedCur = squashPgScheme(validatedCur); - - const { sqlStatements, _meta } = await applyPgSnapshotsDiff( - squashedPrev, - squashedCur, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, - validatedPrev, - validatedCur, - ); - - writeResult({ - cur, - sqlStatements, - journal, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - prefixMode: config.prefix, - }); - } catch (e) { - console.error(e); - } -}; - -export const prepareAndExportPg = async (config: ExportConfig) => { - const schemaPath = config.schema; - - try { - const { prev, cur } = await preparePgMigrationSnapshot( - [], // no snapshots before - schemaPath, - undefined, - ); - - const validatedPrev = pgSchema.parse(prev); - const validatedCur = pgSchema.parse(cur); - - const squashedPrev = squashPgScheme(validatedPrev); - const squashedCur = squashPgScheme(validatedCur); - - const { sqlStatements } = await applyPgSnapshotsDiff( - squashedPrev, - squashedCur, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, - validatedPrev, - validatedCur, - ); - - console.log(sqlStatements.join('\n')); - } catch (e) { - console.error(e); - } -}; - -export const preparePgPush = async ( - cur: PgSchema, - prev: PgSchema, -) => { - const validatedPrev = pgSchema.parse(prev); - const validatedCur = pgSchema.parse(cur); - - const squashedPrev = squashPgScheme(validatedPrev, 'push'); - const squashedCur = squashPgScheme(validatedCur, 'push'); - - const { sqlStatements, statements, _meta } = await applyPgSnapshotsDiff( - squashedPrev, - squashedCur, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - - return { sqlStatements, statements, squashedPrev, squashedCur }; -}; - -// Not needed for now -function mysqlSchemaSuggestions( - curSchema: TypeOf, - prevSchema: TypeOf, -) { - const suggestions: string[] = []; - const usedSuggestions: string[] = []; - const suggestionTypes = { - serial: withStyle.errorWarning( - `We deprecated the use of 'serial' for MySQL starting from version 0.20.0. In MySQL, 'serial' is simply an alias for 'bigint unsigned not null auto_increment unique,' which creates all constraints and indexes for you. This may make the process less explicit for both users and drizzle-kit push commands`, - ), - }; - - for (const table of Object.values(curSchema.tables)) { - for (const column of Object.values(table.columns)) { - if (column.type === 'serial') { - if (!usedSuggestions.includes('serial')) { - suggestions.push(suggestionTypes['serial']); - } - - const uniqueForSerial = Object.values( - prevSchema.tables[table.name].uniqueConstraints, - ).find((it) => it.columns[0] === column.name); - - suggestions.push( - `\n` - + withStyle.suggestion( - `We are suggesting to change ${ - chalk.blue( - column.name, - ) - } column in ${ - chalk.blueBright( - table.name, - ) - } table from serial to bigint unsigned\n\n${ - chalk.blueBright( - `bigint("${column.name}", { mode: "number", unsigned: true }).notNull().autoincrement().unique(${ - uniqueForSerial?.name ? `"${uniqueForSerial?.name}"` : '' - })`, - ) - }`, - ), - ); - } - } - } - - return suggestions; -} - -// Intersect with prepareAnMigrate -export const prepareMySQLPush = async ( - schemaPath: string | string[], - snapshot: MySqlSchema, - casing: CasingType | undefined, -) => { - const { prev, cur } = await prepareMySqlDbPushSnapshot( - snapshot, - schemaPath, - casing, - ); - - const validatedPrev = mysqlSchema.parse(prev); - const validatedCur = mysqlSchema.parse(cur); - - const squashedPrev = squashMysqlScheme(validatedPrev); - const squashedCur = squashMysqlScheme(validatedCur); - - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - mySqlViewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - - return { sqlStatements, statements, validatedCur, validatedPrev }; -}; - -export const prepareAndMigrateMysql = async (config: GenerateConfig) => { - const outFolder = config.out; - const schemaPath = config.schema; - const casing = config.casing; - - try { - // TODO: remove - assertV1OutFolder(outFolder); - - const { snapshots, journal } = prepareMigrationFolder(outFolder, 'mysql'); - const { prev, cur, custom } = await prepareMySqlMigrationSnapshot( - snapshots, - schemaPath, - casing, - ); - - const validatedPrev = mysqlSchema.parse(prev); - const validatedCur = mysqlSchema.parse(cur); - - if (config.custom) { - writeResult({ - cur: custom, - sqlStatements: [], - journal, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - type: 'custom', - prefixMode: config.prefix, - }); - return; - } - - const squashedPrev = squashMysqlScheme(validatedPrev); - const squashedCur = squashMysqlScheme(validatedCur); - - const { sqlStatements, statements, _meta } = await applyMysqlSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - mySqlViewsResolver, - validatedPrev, - validatedCur, - ); - - writeResult({ - cur, - sqlStatements, - journal, - _meta, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - prefixMode: config.prefix, - }); - } catch (e) { - console.error(e); - } -}; - -// Not needed for now -function singleStoreSchemaSuggestions( - curSchema: TypeOf, - prevSchema: TypeOf, -) { - const suggestions: string[] = []; - const usedSuggestions: string[] = []; - const suggestionTypes = { - // TODO: Check if SingleStore has serial type - serial: withStyle.errorWarning( - `We deprecated the use of 'serial' for SingleStore starting from version 0.20.0. In SingleStore, 'serial' is simply an alias for 'bigint unsigned not null auto_increment unique,' which creates all constraints and indexes for you. This may make the process less explicit for both users and drizzle-kit push commands`, - ), - }; - - for (const table of Object.values(curSchema.tables)) { - for (const column of Object.values(table.columns)) { - if (column.type === 'serial') { - if (!usedSuggestions.includes('serial')) { - suggestions.push(suggestionTypes['serial']); - } - - const uniqueForSerial = Object.values( - prevSchema.tables[table.name].uniqueConstraints, - ).find((it) => it.columns[0] === column.name); - - suggestions.push( - `\n` - + withStyle.suggestion( - `We are suggesting to change ${ - chalk.blue( - column.name, - ) - } column in ${ - chalk.blueBright( - table.name, - ) - } table from serial to bigint unsigned\n\n${ - chalk.blueBright( - `bigint("${column.name}", { mode: "number", unsigned: true }).notNull().autoincrement().unique(${ - uniqueForSerial?.name ? `"${uniqueForSerial?.name}"` : '' - })`, - ) - }`, - ), - ); - } - } - } - - return suggestions; -} - -// Intersect with prepareAnMigrate -export const prepareSingleStorePush = async ( - schemaPath: string | string[], - snapshot: SingleStoreSchema, - casing: CasingType | undefined, -) => { - const { prev, cur } = await prepareSingleStoreDbPushSnapshot( - snapshot, - schemaPath, - casing, - ); - - const validatedPrev = singlestoreSchema.parse(prev); - const validatedCur = singlestoreSchema.parse(cur); - - const squashedPrev = squashSingleStoreScheme(validatedPrev); - const squashedCur = squashSingleStoreScheme(validatedCur); - - const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - /* singleStoreViewsResolver, */ - validatedPrev, - validatedCur, - 'push', - ); - - return { sqlStatements, statements, validatedCur, validatedPrev }; -}; - -export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => { - const outFolder = config.out; - const schemaPath = config.schema; - const casing = config.casing; - - try { - // TODO: remove - assertV1OutFolder(outFolder); - - const { snapshots, journal } = prepareMigrationFolder(outFolder, 'singlestore'); - const { prev, cur, custom } = await prepareSingleStoreMigrationSnapshot( - snapshots, - schemaPath, - casing, - ); - - const validatedPrev = singlestoreSchema.parse(prev); - const validatedCur = singlestoreSchema.parse(cur); - - if (config.custom) { - writeResult({ - cur: custom, - sqlStatements: [], - journal, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - type: 'custom', - prefixMode: config.prefix, - }); - return; - } - - const squashedPrev = squashSingleStoreScheme(validatedPrev); - const squashedCur = squashSingleStoreScheme(validatedCur); - - const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - /* singleStoreViewsResolver, */ - validatedPrev, - validatedCur, - ); - - writeResult({ - cur, - sqlStatements, - journal, - _meta, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - prefixMode: config.prefix, - }); - } catch (e) { - console.error(e); - } -}; - -export const prepareAndExportSinglestore = async (config: ExportConfig) => { - const schemaPath = config.schema; - - try { - const { prev, cur } = await prepareSingleStoreMigrationSnapshot( - [], - schemaPath, - undefined, - ); - - const validatedPrev = singlestoreSchema.parse(prev); - const validatedCur = singlestoreSchema.parse(cur); - - const squashedPrev = squashSingleStoreScheme(validatedPrev); - const squashedCur = squashSingleStoreScheme(validatedCur); - - const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - /* singleStoreViewsResolver, */ - validatedPrev, - validatedCur, - ); - - console.log(sqlStatements.join('\n')); - } catch (e) { - console.error(e); - } -}; - -export const prepareAndExportMysql = async (config: ExportConfig) => { - const schemaPath = config.schema; - - try { - const { prev, cur, custom } = await prepareMySqlMigrationSnapshot( - [], - schemaPath, - undefined, - ); - - const validatedPrev = mysqlSchema.parse(prev); - const validatedCur = mysqlSchema.parse(cur); - - const squashedPrev = squashMysqlScheme(validatedPrev); - const squashedCur = squashMysqlScheme(validatedCur); - - const { sqlStatements, statements, _meta } = await applyMysqlSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - mySqlViewsResolver, - validatedPrev, - validatedCur, - ); - - console.log(sqlStatements.join('\n')); - } catch (e) { - console.error(e); - } -}; - -export const prepareAndMigrateSqlite = async (config: GenerateConfig) => { - const outFolder = config.out; - const schemaPath = config.schema; - const casing = config.casing; - - try { - assertV1OutFolder(outFolder); - - const { snapshots, journal } = prepareMigrationFolder(outFolder, 'sqlite'); - const { prev, cur, custom } = await prepareSqliteMigrationSnapshot( - snapshots, - schemaPath, - casing, - ); - - const validatedPrev = sqliteSchema.parse(prev); - const validatedCur = sqliteSchema.parse(cur); - - if (config.custom) { - writeResult({ - cur: custom, - sqlStatements: [], - journal, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - bundle: config.bundle, - type: 'custom', - prefixMode: config.prefix, - }); - return; - } - - const squashedPrev = squashSqliteScheme(validatedPrev); - const squashedCur = squashSqliteScheme(validatedCur); - - const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - validatedPrev, - validatedCur, - ); - - writeResult({ - cur, - sqlStatements, - journal, - _meta, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - bundle: config.bundle, - prefixMode: config.prefix, - driver: config.driver, - }); - } catch (e) { - console.error(e); - } -}; - -export const prepareAndExportSqlite = async (config: ExportConfig) => { - const schemaPath = config.schema; - - try { - const { prev, cur } = await prepareSqliteMigrationSnapshot( - [], - schemaPath, - undefined, - ); - - const validatedPrev = sqliteSchema.parse(prev); - const validatedCur = sqliteSchema.parse(cur); - - const squashedPrev = squashSqliteScheme(validatedPrev); - const squashedCur = squashSqliteScheme(validatedCur); - - const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - validatedPrev, - validatedCur, - ); - - console.log(sqlStatements.join('\n')); - } catch (e) { - console.error(e); - } -}; - -export const prepareAndMigrateLibSQL = async (config: GenerateConfig) => { - const outFolder = config.out; - const schemaPath = config.schema; - const casing = config.casing; - - try { - assertV1OutFolder(outFolder); - - const { snapshots, journal } = prepareMigrationFolder(outFolder, 'sqlite'); - const { prev, cur, custom } = await prepareSqliteMigrationSnapshot( - snapshots, - schemaPath, - casing, - ); - - const validatedPrev = sqliteSchema.parse(prev); - const validatedCur = sqliteSchema.parse(cur); - - if (config.custom) { - writeResult({ - cur: custom, - sqlStatements: [], - journal, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - bundle: config.bundle, - type: 'custom', - prefixMode: config.prefix, - }); - return; - } - - const squashedPrev = squashSqliteScheme(validatedPrev); - const squashedCur = squashSqliteScheme(validatedCur); - - const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - validatedPrev, - validatedCur, - ); - - writeResult({ - cur, - sqlStatements, - journal, - _meta, - outFolder, - name: config.name, - breakpoints: config.breakpoints, - bundle: config.bundle, - prefixMode: config.prefix, - }); - } catch (e) { - console.error(e); - } -}; - -export const prepareAndExportLibSQL = async (config: ExportConfig) => { - const schemaPath = config.schema; - - try { - const { prev, cur, custom } = await prepareSqliteMigrationSnapshot( - [], - schemaPath, - undefined, - ); - - const validatedPrev = sqliteSchema.parse(prev); - const validatedCur = sqliteSchema.parse(cur); - - const squashedPrev = squashSqliteScheme(validatedPrev); - const squashedCur = squashSqliteScheme(validatedCur); - - const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - validatedPrev, - validatedCur, - ); - - console.log(sqlStatements.join('\n')); - } catch (e) { - console.error(e); - } -}; - -export const prepareSQLitePush = async ( - schemaPath: string | string[], - snapshot: SQLiteSchema, - casing: CasingType | undefined, -) => { - const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing); - - const validatedPrev = sqliteSchema.parse(prev); - const validatedCur = sqliteSchema.parse(cur); - - const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); - const squashedCur = squashSqliteScheme(validatedCur, 'push'); - - const { sqlStatements, statements, _meta } = await applySqliteSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - - return { - sqlStatements, - statements, - squashedPrev, - squashedCur, - meta: _meta, - }; -}; - -export const prepareLibSQLPush = async ( - schemaPath: string | string[], - snapshot: SQLiteSchema, - casing: CasingType | undefined, -) => { - const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing); - - const validatedPrev = sqliteSchema.parse(prev); - const validatedCur = sqliteSchema.parse(cur); - - const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); - const squashedCur = squashSqliteScheme(validatedCur, 'push'); - - const { sqlStatements, statements, _meta } = await applyLibSQLSnapshotsDiff( - squashedPrev, - squashedCur, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - - return { - sqlStatements, - statements, - squashedPrev, - squashedCur, - meta: _meta, - }; -}; - -const freeeeeeze = (obj: any) => { - Object.freeze(obj); - for (let key in obj) { - if (obj.hasOwnProperty(key) && typeof obj[key] === 'object') { - freeeeeeze(obj[key]); - } - } -}; - -export const promptColumnsConflicts = async ( - tableName: string, - newColumns: T[], - missingColumns: T[], -) => { - if (newColumns.length === 0 || missingColumns.length === 0) { - return { created: newColumns, renamed: [], deleted: missingColumns }; - } - const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[] } = { - created: [], - renamed: [], - deleted: [], - }; - - let index = 0; - let leftMissing = [...missingColumns]; - - do { - const created = newColumns[index]; - - const renames: RenamePropmtItem[] = leftMissing.map((it) => { - return { from: it, to: created }; - }); - - const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; - - const { status, data } = await render( - new ResolveColumnSelect(tableName, created, promptData), - ); - if (status === 'aborted') { - console.error('ERROR'); - process.exit(1); - } - - if (isRenamePromptItem(data)) { - console.log( - `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ - chalk.gray( - 'column will be renamed', - ) - }`, - ); - result.renamed.push(data); - // this will make [item1, undefined, item2] - delete leftMissing[leftMissing.indexOf(data.from)]; - // this will make [item1, item2] - leftMissing = leftMissing.filter(Boolean); - } else { - console.log( - `${chalk.green('+')} ${data.name} ${ - chalk.gray( - 'column will be created', - ) - }`, - ); - result.created.push(created); - } - index += 1; - } while (index < newColumns.length); - console.log( - chalk.gray(`--- all columns conflicts in ${tableName} table resolved ---\n`), - ); - - result.deleted.push(...leftMissing); - return result; -}; - -export const promptNamedConflict = async ( - newItems: T[], - missingItems: T[], - entity: 'role' | 'policy', -): Promise<{ - created: T[]; - renamed: { from: T; to: T }[]; - deleted: T[]; -}> => { - if (missingItems.length === 0 || newItems.length === 0) { - return { - created: newItems, - renamed: [], - deleted: missingItems, - }; - } - - const result: { - created: T[]; - renamed: { from: T; to: T }[]; - deleted: T[]; - } = { created: [], renamed: [], deleted: [] }; - let index = 0; - let leftMissing = [...missingItems]; - do { - const created = newItems[index]; - const renames: RenamePropmtItem[] = leftMissing.map((it) => { - return { from: it, to: created }; - }); - - const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; - - const { status, data } = await render( - new ResolveSelectNamed(created, promptData, entity), - ); - if (status === 'aborted') { - console.error('ERROR'); - process.exit(1); - } - - if (isRenamePromptItem(data)) { - console.log( - `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ - chalk.gray( - `${entity} will be renamed/moved`, - ) - }`, - ); - - if (data.from.name !== data.to.name) { - result.renamed.push(data); - } - - delete leftMissing[leftMissing.indexOf(data.from)]; - leftMissing = leftMissing.filter(Boolean); - } else { - console.log( - `${chalk.green('+')} ${data.name} ${ - chalk.gray( - `${entity} will be created`, - ) - }`, - ); - result.created.push(created); - } - index += 1; - } while (index < newItems.length); - console.log(chalk.gray(`--- all ${entity} conflicts resolved ---\n`)); - result.deleted.push(...leftMissing); - return result; -}; - -export const promptNamedWithSchemasConflict = async ( - newItems: T[], - missingItems: T[], - entity: 'table' | 'enum' | 'sequence' | 'view', -): Promise<{ - created: T[]; - renamed: { from: T; to: T }[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - deleted: T[]; -}> => { - if (missingItems.length === 0 || newItems.length === 0) { - return { - created: newItems, - renamed: [], - moved: [], - deleted: missingItems, - }; - } - - const result: { - created: T[]; - renamed: { from: T; to: T }[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - deleted: T[]; - } = { created: [], renamed: [], moved: [], deleted: [] }; - let index = 0; - let leftMissing = [...missingItems]; - do { - const created = newItems[index]; - const renames: RenamePropmtItem[] = leftMissing.map((it) => { - return { from: it, to: created }; - }); - - const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; - - const { status, data } = await render( - new ResolveSelect(created, promptData, entity), - ); - if (status === 'aborted') { - console.error('ERROR'); - process.exit(1); - } - - if (isRenamePromptItem(data)) { - const schemaFromPrefix = !data.from.schema || data.from.schema === 'public' - ? '' - : `${data.from.schema}.`; - const schemaToPrefix = !data.to.schema || data.to.schema === 'public' - ? '' - : `${data.to.schema}.`; - - console.log( - `${chalk.yellow('~')} ${schemaFromPrefix}${data.from.name} › ${schemaToPrefix}${data.to.name} ${ - chalk.gray( - `${entity} will be renamed/moved`, - ) - }`, - ); - - if (data.from.name !== data.to.name) { - result.renamed.push(data); - } - - if (data.from.schema !== data.to.schema) { - result.moved.push({ - name: data.from.name, - schemaFrom: data.from.schema || 'public', - schemaTo: data.to.schema || 'public', - }); - } - - delete leftMissing[leftMissing.indexOf(data.from)]; - leftMissing = leftMissing.filter(Boolean); - } else { - console.log( - `${chalk.green('+')} ${data.name} ${ - chalk.gray( - `${entity} will be created`, - ) - }`, - ); - result.created.push(created); - } - index += 1; - } while (index < newItems.length); - console.log(chalk.gray(`--- all ${entity} conflicts resolved ---\n`)); - result.deleted.push(...leftMissing); - return result; -}; - -export const promptSchemasConflict = async ( - newSchemas: T[], - missingSchemas: T[], -): Promise<{ created: T[]; renamed: { from: T; to: T }[]; deleted: T[] }> => { - if (missingSchemas.length === 0 || newSchemas.length === 0) { - return { created: newSchemas, renamed: [], deleted: missingSchemas }; - } - - const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[] } = { - created: [], - renamed: [], - deleted: [], - }; - let index = 0; - let leftMissing = [...missingSchemas]; - do { - const created = newSchemas[index]; - const renames: RenamePropmtItem[] = leftMissing.map((it) => { - return { from: it, to: created }; - }); - - const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; - - const { status, data } = await render( - new ResolveSchemasSelect(created, promptData), - ); - if (status === 'aborted') { - console.error('ERROR'); - process.exit(1); - } - - if (isRenamePromptItem(data)) { - console.log( - `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ - chalk.gray( - 'schema will be renamed', - ) - }`, - ); - result.renamed.push(data); - delete leftMissing[leftMissing.indexOf(data.from)]; - leftMissing = leftMissing.filter(Boolean); - } else { - console.log( - `${chalk.green('+')} ${data.name} ${ - chalk.gray( - 'schema will be created', - ) - }`, - ); - result.created.push(created); - } - index += 1; - } while (index < newSchemas.length); - console.log(chalk.gray('--- all schemas conflicts resolved ---\n')); - result.deleted.push(...leftMissing); - return result; -}; - -export const BREAKPOINT = '--> statement-breakpoint\n'; - -export const writeResult = ({ - cur, - sqlStatements, - journal, - _meta = { - columns: {}, - schemas: {}, - tables: {}, - }, - outFolder, - breakpoints, - name, - bundle = false, - type = 'none', - prefixMode, - driver, -}: { - cur: CommonSchema; - sqlStatements: string[]; - journal: Journal; - _meta?: any; - outFolder: string; - breakpoints: boolean; - prefixMode: Prefix; - name?: string; - bundle?: boolean; - type?: 'introspect' | 'custom' | 'none'; - driver?: Driver; -}) => { - if (type === 'none') { - console.log(schema(cur)); - - if (sqlStatements.length === 0) { - console.log('No schema changes, nothing to migrate 😴'); - return; - } - } - - // append entry to _migrations.json - // append entry to _journal.json->entries - // dialect in _journal.json - // append sql file to out folder - // append snapshot file to meta folder - const lastEntryInJournal = journal.entries[journal.entries.length - 1]; - const idx = typeof lastEntryInJournal === 'undefined' ? 0 : lastEntryInJournal.idx + 1; - - const { prefix, tag } = prepareMigrationMetadata(idx, prefixMode, name); - - const toSave = JSON.parse(JSON.stringify(cur)); - toSave['_meta'] = _meta; - - // todo: save results to a new migration folder - const metaFolderPath = join(outFolder, 'meta'); - const metaJournal = join(metaFolderPath, '_journal.json'); - - fs.writeFileSync( - join(metaFolderPath, `${prefix}_snapshot.json`), - JSON.stringify(toSave, null, 2), - ); - - const sqlDelimiter = breakpoints ? BREAKPOINT : '\n'; - let sql = sqlStatements.join(sqlDelimiter); - - if (type === 'introspect') { - sql = - `-- Current sql file was generated after introspecting the database\n-- If you want to run this migration please uncomment this code before executing migrations\n/*\n${sql}\n*/`; - } - - if (type === 'custom') { - console.log('Prepared empty file for your custom SQL migration!'); - sql = '-- Custom SQL migration file, put your code below! --'; - } - - journal.entries.push({ - idx, - version: cur.version, - when: +new Date(), - tag, - breakpoints: breakpoints, - }); - - fs.writeFileSync(metaJournal, JSON.stringify(journal, null, 2)); - - fs.writeFileSync(`${outFolder}/${tag}.sql`, sql); - - // js file with .sql imports for React Native / Expo and Durable Sqlite Objects - if (bundle) { - const js = embeddedMigrations(journal, driver); - fs.writeFileSync(`${outFolder}/migrations.js`, js); - } - - render( - `[${ - chalk.green( - '✓', - ) - }] Your SQL migration file ➜ ${ - chalk.bold.underline.blue( - path.join(`${outFolder}/${tag}.sql`), - ) - } 🚀`, - ); -}; - -export const embeddedMigrations = (journal: Journal, driver?: Driver) => { - let content = driver === 'expo' - ? '// This file is required for Expo/React Native SQLite migrations - https://orm.drizzle.team/quick-sqlite/expo\n\n' - : ''; - - content += "import journal from './meta/_journal.json';\n"; - journal.entries.forEach((entry) => { - content += `import m${entry.idx.toString().padStart(4, '0')} from './${entry.tag}.sql';\n`; - }); - - content += ` - export default { - journal, - migrations: { - ${ - journal.entries - .map((it) => `m${it.idx.toString().padStart(4, '0')}`) - .join(',\n') - } - } - } - `; - return content; -}; - -export const prepareSnapshotFolderName = () => { - const now = new Date(); - return `${now.getFullYear()}${two(now.getUTCMonth() + 1)}${ - two( - now.getUTCDate(), - ) - }${two(now.getUTCHours())}${two(now.getUTCMinutes())}${ - two( - now.getUTCSeconds(), - ) - }`; -}; - -const two = (input: number): string => { - return input.toString().padStart(2, '0'); -}; diff --git a/drizzle-kit/src/cli/commands/mysqlIntrospect.ts b/drizzle-kit/src/cli/commands/mysqlIntrospect.ts deleted file mode 100644 index f0132be647..0000000000 --- a/drizzle-kit/src/cli/commands/mysqlIntrospect.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { renderWithTask } from 'hanji'; -import { Minimatch } from 'minimatch'; -import { originUUID } from '../../global'; -import type { MySqlSchema } from '../../serializer/mysqlSchema'; -import { fromDatabase } from '../../serializer/mysqlSerializer'; -import type { DB } from '../../utils'; -import { ProgressView } from '../views'; - -export const mysqlPushIntrospect = async ( - db: DB, - databaseName: string, - filters: string[], -) => { - const matchers = filters.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new ProgressView( - 'Pulling schema from database...', - 'Pulling schema from database...', - ); - const res = await renderWithTask( - progress, - fromDatabase(db, databaseName, filter), - ); - - const schema = { id: originUUID, prevId: '', ...res } as MySqlSchema; - const { internal, ...schemaWithoutInternals } = schema; - return { schema: schemaWithoutInternals }; -}; diff --git a/drizzle-kit/src/cli/commands/mysqlPushUtils.ts b/drizzle-kit/src/cli/commands/mysqlPushUtils.ts deleted file mode 100644 index db1134e63b..0000000000 --- a/drizzle-kit/src/cli/commands/mysqlPushUtils.ts +++ /dev/null @@ -1,352 +0,0 @@ -import chalk from 'chalk'; -import { render } from 'hanji'; -import { TypeOf } from 'zod'; -import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements'; -import { mysqlSchema, MySqlSquasher } from '../../serializer/mysqlSchema'; -import type { DB } from '../../utils'; -import { Select } from '../selector-ui'; -import { withStyle } from '../validations/outputs'; - -export const filterStatements = ( - statements: JsonStatement[], - currentSchema: TypeOf, - prevSchema: TypeOf, -) => { - return statements.filter((statement) => { - if (statement.type === 'alter_table_alter_column_set_type') { - // Don't need to handle it on migrations step and introspection - // but for both it should be skipped - if ( - statement.oldDataType.startsWith('tinyint') - && statement.newDataType.startsWith('boolean') - ) { - return false; - } - - if ( - statement.oldDataType.startsWith('bigint unsigned') - && statement.newDataType.startsWith('serial') - ) { - return false; - } - - if ( - statement.oldDataType.startsWith('serial') - && statement.newDataType.startsWith('bigint unsigned') - ) { - return false; - } - } else if (statement.type === 'alter_table_alter_column_set_default') { - if ( - statement.newDefaultValue === false - && statement.oldDefaultValue === 0 - && statement.newDataType === 'boolean' - ) { - return false; - } - if ( - statement.newDefaultValue === true - && statement.oldDefaultValue === 1 - && statement.newDataType === 'boolean' - ) { - return false; - } - } else if (statement.type === 'delete_unique_constraint') { - const unsquashed = MySqlSquasher.unsquashUnique(statement.data); - // only if constraint was removed from a serial column, than treat it as removed - // const serialStatement = statements.find( - // (it) => it.type === "alter_table_alter_column_set_type" - // ) as JsonAlterColumnTypeStatement; - // if ( - // serialStatement?.oldDataType.startsWith("bigint unsigned") && - // serialStatement?.newDataType.startsWith("serial") && - // serialStatement.columnName === - // MySqlSquasher.unsquashUnique(statement.data).columns[0] - // ) { - // return false; - // } - // Check if uniqueindex was only on this column, that is serial - - // if now serial and was not serial and was unique index - if ( - unsquashed.columns.length === 1 - && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] - .type === 'serial' - && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]] - .type === 'serial' - && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] - .name === unsquashed.columns[0] - ) { - return false; - } - } else if (statement.type === 'alter_table_alter_column_drop_notnull') { - // only if constraint was removed from a serial column, than treat it as removed - const serialStatement = statements.find( - (it) => it.type === 'alter_table_alter_column_set_type', - ) as JsonAlterColumnTypeStatement; - if ( - serialStatement?.oldDataType.startsWith('bigint unsigned') - && serialStatement?.newDataType.startsWith('serial') - && serialStatement.columnName === statement.columnName - && serialStatement.tableName === statement.tableName - ) { - return false; - } - if (statement.newDataType === 'serial' && !statement.columnNotNull) { - return false; - } - if (statement.columnAutoIncrement) { - return false; - } - } - - return true; - }); -}; - -export const logSuggestionsAndReturn = async ( - db: DB, - statements: JsonStatement[], - json2: TypeOf, -) => { - let shouldAskForApprove = false; - const statementsToExecute: string[] = []; - const infoToPrint: string[] = []; - - const tablesToRemove: string[] = []; - const columnsToRemove: string[] = []; - const schemasToRemove: string[] = []; - const tablesToTruncate: string[] = []; - - for (const statement of statements) { - if (statement.type === 'drop_table') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.tableName, - ) - } table with ${count} items`, - ); - tablesToRemove.push(statement.tableName); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_drop_column') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.columnName, - ) - } column in ${statement.tableName} table with ${count} items`, - ); - columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); - shouldAskForApprove = true; - } - } else if (statement.type === 'drop_schema') { - const res = await db.query( - `select count(*) as count from information_schema.tables where table_schema = \`${statement.name}\`;`, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.name, - ) - } schema with ${count} tables`, - ); - schemasToRemove.push(statement.name); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_alter_column_set_type') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to change ${ - chalk.underline( - statement.columnName, - ) - } column type from ${ - chalk.underline( - statement.oldDataType, - ) - } to ${chalk.underline(statement.newDataType)} with ${count} items`, - ); - statementsToExecute.push(`truncate table ${statement.tableName};`); - tablesToTruncate.push(statement.tableName); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_alter_column_drop_default') { - if (statement.columnNotNull) { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to remove default value from ${ - chalk.underline( - statement.columnName, - ) - } not-null column with ${count} items`, - ); - - tablesToTruncate.push(statement.tableName); - statementsToExecute.push(`truncate table ${statement.tableName};`); - - shouldAskForApprove = true; - } - } - // shouldAskForApprove = true; - } else if (statement.type === 'alter_table_alter_column_set_notnull') { - if (typeof statement.columnDefault === 'undefined') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to set not-null constraint to ${ - chalk.underline( - statement.columnName, - ) - } column without default, which contains ${count} items`, - ); - - tablesToTruncate.push(statement.tableName); - statementsToExecute.push(`truncate table ${statement.tableName};`); - - shouldAskForApprove = true; - } - } - } else if (statement.type === 'alter_table_alter_column_drop_pk') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - - // if drop pk and json2 has autoincrement in table -> exit process with error - if ( - Object.values(json2.tables[statement.tableName].columns).filter( - (column) => column.autoincrement, - ).length > 0 - ) { - console.log( - `${ - withStyle.errorWarning( - `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, - ) - }`, - ); - process.exit(1); - } - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to change ${ - chalk.underline( - statement.tableName, - ) - } primary key. This statements may fail and you table may left without primary key`, - ); - - tablesToTruncate.push(statement.tableName); - shouldAskForApprove = true; - } - } else if (statement.type === 'delete_composite_pk') { - // if drop pk and json2 has autoincrement in table -> exit process with error - if ( - Object.values(json2.tables[statement.tableName].columns).filter( - (column) => column.autoincrement, - ).length > 0 - ) { - console.log( - `${ - withStyle.errorWarning( - `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, - ) - }`, - ); - process.exit(1); - } - } else if (statement.type === 'alter_table_add_column') { - if ( - statement.column.notNull - && typeof statement.column.default === 'undefined' - ) { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to add not-null ${ - chalk.underline( - statement.column.name, - ) - } column without default value, which contains ${count} items`, - ); - - tablesToTruncate.push(statement.tableName); - statementsToExecute.push(`truncate table ${statement.tableName};`); - - shouldAskForApprove = true; - } - } - } else if (statement.type === 'create_unique_constraint') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - const unsquashedUnique = MySqlSquasher.unsquashUnique(statement.data); - console.log( - `· You're about to add ${ - chalk.underline( - unsquashedUnique.name, - ) - } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${ - chalk.underline( - statement.tableName, - ) - } table?\n`, - ); - const { status, data } = await render( - new Select([ - 'No, add the constraint without truncating the table', - `Yes, truncate the table`, - ]), - ); - if (data?.index === 1) { - tablesToTruncate.push(statement.tableName); - statementsToExecute.push(`truncate table ${statement.tableName};`); - shouldAskForApprove = true; - } - } - } - } - - return { - statementsToExecute, - shouldAskForApprove, - infoToPrint, - columnsToRemove: [...new Set(columnsToRemove)], - schemasToRemove: [...new Set(schemasToRemove)], - tablesToTruncate: [...new Set(tablesToTruncate)], - tablesToRemove: [...new Set(tablesToRemove)], - }; -}; diff --git a/drizzle-kit/src/cli/commands/mysqlUp.ts b/drizzle-kit/src/cli/commands/mysqlUp.ts index 8b467090b6..7d21b03348 100644 --- a/drizzle-kit/src/cli/commands/mysqlUp.ts +++ b/drizzle-kit/src/cli/commands/mysqlUp.ts @@ -1,10 +1,30 @@ -import chalk from 'chalk'; -import fs, { writeFileSync } from 'fs'; -import path from 'path'; -import { Column, MySqlSchema, MySqlSchemaV4, MySqlSchemaV5, mysqlSchemaV5, Table } from '../../serializer/mysqlSchema'; -import { prepareOutFolder, validateWithReport } from '../../utils'; +import { existsSync, readFileSync, rmSync, unlinkSync, writeFileSync } from 'fs'; +import { join } from 'path'; +import type { Column, MySqlSchemaV4, MySqlSchemaV5, Table } from '../../legacy/mysql-v5/mysqlSchema'; +import type { Journal } from '../../utils'; -export const upMysqlHandler = (out: string) => {}; +export const upMysqlHandler = (out: string) => { + // if there is meta folder - and there is a journal - it's version <8 + const metaPath = join(out, 'meta'); + const journalPath = join(metaPath, '_journal.json'); + if (existsSync(metaPath) && existsSync(journalPath)) { + const journal: Journal = JSON.parse(readFileSync(journalPath).toString()); + if (Number(journal.version) < 8) { + for (const entry of journal.entries) { + const snapshotPrefix = entry.tag.split('_')[0]; + const oldSnapshot = readFileSync(join(metaPath, `${snapshotPrefix}_snapshot.json`)); + const oldSql = readFileSync(join(out, `${entry.tag}.sql`)); + + writeFileSync(join(out, `${entry.tag}/snapshot.json`), oldSnapshot); + writeFileSync(join(out, `${entry.tag}/migration.sql`), oldSql); + + unlinkSync(join(out, `${entry.tag}.sql`)); + } + + rmSync(metaPath); + } + } +}; export const upMySqlHandlerV4toV5 = (obj: MySqlSchemaV4): MySqlSchemaV5 => { const mappedTables: Record = {}; diff --git a/drizzle-kit/src/cli/commands/pgIntrospect.ts b/drizzle-kit/src/cli/commands/pgIntrospect.ts deleted file mode 100644 index 02867fae9f..0000000000 --- a/drizzle-kit/src/cli/commands/pgIntrospect.ts +++ /dev/null @@ -1,55 +0,0 @@ -import { renderWithTask } from 'hanji'; -import { Minimatch } from 'minimatch'; -import { originUUID } from '../../global'; -import type { PgSchema, PgSchemaInternal } from '../../serializer/pgSchema'; -import { fromDatabase } from '../../serializer/pgSerializer'; -import type { DB } from '../../utils'; -import { Entities } from '../validations/cli'; -import { ProgressView } from '../views'; - -export const pgPushIntrospect = async ( - db: DB, - filters: string[], - schemaFilters: string[], - entities: Entities, - tsSchema?: PgSchemaInternal, -) => { - const matchers = filters.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - const progress = new ProgressView( - 'Pulling schema from database...', - 'Pulling schema from database...', - ); - const res = await renderWithTask( - progress, - fromDatabase(db, filter, schemaFilters, entities, undefined, tsSchema), - ); - - const schema = { id: originUUID, prevId: '', ...res } as PgSchema; - const { internal, ...schemaWithoutInternals } = schema; - return { schema: schemaWithoutInternals }; -}; diff --git a/drizzle-kit/src/cli/commands/pgPushUtils.ts b/drizzle-kit/src/cli/commands/pgPushUtils.ts deleted file mode 100644 index 05322f738f..0000000000 --- a/drizzle-kit/src/cli/commands/pgPushUtils.ts +++ /dev/null @@ -1,269 +0,0 @@ -import chalk from 'chalk'; -import { render } from 'hanji'; -import type { JsonStatement } from '../../jsonStatements'; -import { PgSquasher } from '../../serializer/pgSchema'; -import { fromJson } from '../../sqlgenerator'; -import type { DB } from '../../utils'; -import { Select } from '../selector-ui'; - -// export const filterStatements = (statements: JsonStatement[]) => { -// return statements.filter((statement) => { -// if (statement.type === "alter_table_alter_column_set_type") { -// // Don't need to handle it on migrations step and introspection -// // but for both it should be skipped -// if ( -// statement.oldDataType.startsWith("tinyint") && -// statement.newDataType.startsWith("boolean") -// ) { -// return false; -// } -// } else if (statement.type === "alter_table_alter_column_set_default") { -// if ( -// statement.newDefaultValue === false && -// statement.oldDefaultValue === 0 && -// statement.newDataType === "boolean" -// ) { -// return false; -// } -// if ( -// statement.newDefaultValue === true && -// statement.oldDefaultValue === 1 && -// statement.newDataType === "boolean" -// ) { -// return false; -// } -// } -// return true; -// }); -// }; - -function concatSchemaAndTableName(schema: string | undefined, table: string) { - return schema ? `"${schema}"."${table}"` : `"${table}"`; -} - -function tableNameWithSchemaFrom( - schema: string | undefined, - tableName: string, - renamedSchemas: Record, - renamedTables: Record, -) { - const newSchemaName = schema ? (renamedSchemas[schema] ? renamedSchemas[schema] : schema) : undefined; - - const newTableName = renamedTables[concatSchemaAndTableName(newSchemaName, tableName)] - ? renamedTables[concatSchemaAndTableName(newSchemaName, tableName)] - : tableName; - - return concatSchemaAndTableName(newSchemaName, newTableName); -} - -export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { - let shouldAskForApprove = false; - const statementsToExecute: string[] = []; - const infoToPrint: string[] = []; - - const tablesToRemove: string[] = []; - const columnsToRemove: string[] = []; - const schemasToRemove: string[] = []; - const tablesToTruncate: string[] = []; - const matViewsToRemove: string[] = []; - - let renamedSchemas: Record = {}; - let renamedTables: Record = {}; - - for (const statement of statements) { - if (statement.type === 'rename_schema') { - renamedSchemas[statement.to] = statement.from; - } else if (statement.type === 'rename_table') { - renamedTables[concatSchemaAndTableName(statement.toSchema, statement.tableNameTo)] = statement.tableNameFrom; - } else if (statement.type === 'drop_table') { - const res = await db.query( - `select count(*) as count from ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - }`, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push(`· You're about to delete ${chalk.underline(statement.tableName)} table with ${count} items`); - // statementsToExecute.push( - // `truncate table ${tableNameWithSchemaFrom(statement)} cascade;` - // ); - tablesToRemove.push(statement.tableName); - shouldAskForApprove = true; - } - } else if (statement.type === 'drop_view' && statement.materialized) { - const res = await db.query(`select count(*) as count from "${statement.schema ?? 'public'}"."${statement.name}"`); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete "${chalk.underline(statement.name)}" materialized view with ${count} items`, - ); - - matViewsToRemove.push(statement.name); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_drop_column') { - const res = await db.query( - `select count(*) as count from ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - }`, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline(statement.columnName) - } column in ${statement.tableName} table with ${count} items`, - ); - columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); - shouldAskForApprove = true; - } - } else if (statement.type === 'drop_schema') { - const res = await db.query( - `select count(*) as count from information_schema.tables where table_schema = '${statement.name}';`, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push(`· You're about to delete ${chalk.underline(statement.name)} schema with ${count} tables`); - schemasToRemove.push(statement.name); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_alter_column_set_type') { - const res = await db.query( - `select count(*) as count from ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - }`, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to change ${chalk.underline(statement.columnName)} column type from ${ - chalk.underline(statement.oldDataType) - } to ${ - chalk.underline( - statement.newDataType, - ) - } with ${count} items`, - ); - statementsToExecute.push( - `truncate table ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - } cascade;`, - ); - tablesToTruncate.push(statement.tableName); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_alter_column_drop_pk') { - const res = await db.query( - `select count(*) as count from ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - }`, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to change ${ - chalk.underline(statement.tableName) - } primary key. This statements may fail and you table may left without primary key`, - ); - - tablesToTruncate.push(statement.tableName); - shouldAskForApprove = true; - } - - const tableNameWithSchema = tableNameWithSchemaFrom( - statement.schema, - statement.tableName, - renamedSchemas, - renamedTables, - ); - - const pkNameResponse = await db.query( - `SELECT constraint_name FROM information_schema.table_constraints - WHERE table_schema = '${ - typeof statement.schema === 'undefined' || statement.schema === '' ? 'public' : statement.schema - }' - AND table_name = '${statement.tableName}' - AND constraint_type = 'PRIMARY KEY';`, - ); - - statementsToExecute.push( - `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${pkNameResponse[0].constraint_name}"`, - ); - // we will generate statement for drop pk here and not after all if-else statements - continue; - } else if (statement.type === 'alter_table_add_column') { - if (statement.column.notNull && typeof statement.column.default === 'undefined') { - const res = await db.query( - `select count(*) as count from ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - }`, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to add not-null ${ - chalk.underline(statement.column.name) - } column without default value, which contains ${count} items`, - ); - - tablesToTruncate.push(statement.tableName); - statementsToExecute.push( - `truncate table ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - } cascade;`, - ); - - shouldAskForApprove = true; - } - } - } else if (statement.type === 'create_unique_constraint') { - const res = await db.query( - `select count(*) as count from ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - }`, - ); - const count = Number(res[0].count); - if (count > 0) { - const unsquashedUnique = PgSquasher.unsquashUnique(statement.data); - console.log( - `· You're about to add ${ - chalk.underline( - unsquashedUnique.name, - ) - } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${ - chalk.underline( - statement.tableName, - ) - } table?\n`, - ); - const { status, data } = await render( - new Select(['No, add the constraint without truncating the table', `Yes, truncate the table`]), - ); - if (data?.index === 1) { - tablesToTruncate.push(statement.tableName); - statementsToExecute.push( - `truncate table ${ - tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) - } cascade;`, - ); - shouldAskForApprove = true; - } - } - } - const stmnt = fromJson([statement], 'postgresql', 'push'); - if (typeof stmnt !== 'undefined') { - statementsToExecute.push(...stmnt); - } - } - - return { - statementsToExecute: [...new Set(statementsToExecute)], - shouldAskForApprove, - infoToPrint, - matViewsToRemove: [...new Set(matViewsToRemove)], - columnsToRemove: [...new Set(columnsToRemove)], - schemasToRemove: [...new Set(schemasToRemove)], - tablesToTruncate: [...new Set(tablesToTruncate)], - tablesToRemove: [...new Set(tablesToRemove)], - }; -}; diff --git a/drizzle-kit/src/cli/commands/pgUp.ts b/drizzle-kit/src/cli/commands/pgUp.ts deleted file mode 100644 index 52a2fc4a11..0000000000 --- a/drizzle-kit/src/cli/commands/pgUp.ts +++ /dev/null @@ -1,179 +0,0 @@ -import chalk from 'chalk'; -import { writeFileSync } from 'fs'; -import { - Column, - Index, - PgSchema, - PgSchemaV4, - PgSchemaV5, - pgSchemaV5, - PgSchemaV6, - pgSchemaV6, - Table, - TableV5, -} from '../../serializer/pgSchema'; -import { prepareOutFolder, validateWithReport } from '../../utils'; - -export const upPgHandler = (out: string) => { - const { snapshots } = prepareOutFolder(out, 'postgresql'); - const report = validateWithReport(snapshots, 'postgresql'); - - report.nonLatest - .map((it) => ({ - path: it, - raw: report.rawMap[it]!! as Record, - })) - .forEach((it) => { - const path = it.path; - - let resultV6 = it.raw; - if (it.raw.version === '5') { - resultV6 = updateUpToV6(it.raw); - } - - const result = updateUpToV7(resultV6); - - console.log(`[${chalk.green('✓')}] ${path}`); - - writeFileSync(path, JSON.stringify(result, null, 2)); - }); - - console.log("Everything's fine 🐶🔥"); -}; - -export const updateUpToV6 = (json: Record): PgSchemaV6 => { - const schema = pgSchemaV5.parse(json); - const tables = Object.fromEntries( - Object.entries(schema.tables).map((it) => { - const table = it[1]; - const schema = table.schema || 'public'; - return [`${schema}.${table.name}`, table]; - }), - ); - const enums = Object.fromEntries( - Object.entries(schema.enums).map((it) => { - const en = it[1]; - return [ - `public.${en.name}`, - { - name: en.name, - schema: 'public', - values: Object.values(en.values), - }, - ]; - }), - ); - return { - ...schema, - version: '6', - dialect: 'postgresql', - tables: tables, - enums, - }; -}; - -// Changed index format stored in snapshot for PostgreSQL in 0.22.0 -export const updateUpToV7 = (json: Record): PgSchema => { - const schema = pgSchemaV6.parse(json); - const tables = Object.fromEntries( - Object.entries(schema.tables).map((it) => { - const table = it[1]; - const mappedIndexes = Object.fromEntries( - Object.entries(table.indexes).map((idx) => { - const { columns, ...rest } = idx[1]; - const mappedColumns = columns.map((it) => { - return { - expression: it, - isExpression: false, - asc: true, - nulls: 'last', - opClass: undefined, - }; - }); - return [idx[0], { columns: mappedColumns, with: {}, ...rest }]; - }), - ); - return [it[0], { ...table, indexes: mappedIndexes, policies: {}, isRLSEnabled: false, checkConstraints: {} }]; - }), - ); - - return { - ...schema, - version: '7', - dialect: 'postgresql', - sequences: {}, - tables: tables, - policies: {}, - views: {}, - roles: {}, - }; -}; - -// major migration with of folder structure, etc... -export const upPgHandlerV4toV5 = (obj: PgSchemaV4): PgSchemaV5 => { - const mappedTables: Record = {}; - - for (const [key, table] of Object.entries(obj.tables)) { - const mappedColumns: Record = {}; - for (const [ckey, column] of Object.entries(table.columns)) { - let newDefault: any = column.default; - let newType: string = column.type; - if (column.type.toLowerCase() === 'date') { - if (typeof column.default !== 'undefined') { - if (column.default.startsWith("'") && column.default.endsWith("'")) { - newDefault = `'${ - column.default - .substring(1, column.default.length - 1) - .split('T')[0] - }'`; - } else { - newDefault = column.default.split('T')[0]; - } - } - } else if (column.type.toLowerCase().startsWith('timestamp')) { - if (typeof column.default !== 'undefined') { - if (column.default.startsWith("'") && column.default.endsWith("'")) { - newDefault = `'${ - column.default - .substring(1, column.default.length - 1) - .replace('T', ' ') - .slice(0, 23) - }'`; - } else { - newDefault = column.default.replace('T', ' ').slice(0, 23); - } - } - newType = column.type - .toLowerCase() - .replace('timestamp (', 'timestamp('); - } else if (column.type.toLowerCase().startsWith('time')) { - newType = column.type.toLowerCase().replace('time (', 'time('); - } else if (column.type.toLowerCase().startsWith('interval')) { - newType = column.type.toLowerCase().replace(' (', '('); - } - mappedColumns[ckey] = { ...column, default: newDefault, type: newType }; - } - - mappedTables[key] = { - ...table, - columns: mappedColumns, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }; - } - - return { - version: '5', - dialect: obj.dialect, - id: obj.id, - prevId: obj.prevId, - tables: mappedTables, - enums: obj.enums, - schemas: obj.schemas, - _meta: { - schemas: {} as Record, - tables: {} as Record, - columns: {} as Record, - }, - }; -}; diff --git a/drizzle-kit/src/cli/commands/pull-cockroach.ts b/drizzle-kit/src/cli/commands/pull-cockroach.ts new file mode 100644 index 0000000000..dfddac067a --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-cockroach.ts @@ -0,0 +1,143 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import type { TaskView } from 'hanji'; +import { render, renderWithTask } from 'hanji'; +import { join } from 'path'; +import { toJsonSnapshot } from 'src/dialects/cockroach/snapshot'; +import type { EntityFilter } from 'src/dialects/pull-utils'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import type { + CheckConstraint, + CockroachEntities, + Column, + Enum, + ForeignKey, + Index, + Policy, + PrimaryKey, + Schema, + Sequence, + View, +} from '../../dialects/cockroach/ddl'; +import { cockroachToRelationsPull, createDDL, interimToDDL } from '../../dialects/cockroach/ddl'; +import { ddlDiff } from '../../dialects/cockroach/diff'; +import { fromDatabaseForDrizzle } from '../../dialects/cockroach/introspect'; +import { ddlToTypeScript as cockroachSequenceSchemaToTypeScript } from '../../dialects/cockroach/typescript'; +import { originUUID } from '../../utils'; +import type { DB } from '../../utils'; +import { prepareOutFolder } from '../../utils/utils-node'; +import type { prepareCockroach } from '../connections'; +import { resolver } from '../prompts'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { CockroachCredentials } from '../validations/cockroach'; +import type { Casing, Prefix } from '../validations/common'; +import { IntrospectProgress } from '../views'; +import { writeResult } from './generate-common'; +import { relationsToTypeScript } from './pull-common'; + +export const handle = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: CockroachCredentials, + filters: EntitiesFilterConfig, + prefix: Prefix, + db?: Awaited>, +) => { + if (!db) { + const { prepareCockroach } = await import('../connections'); + db = await prepareCockroach(credentials); + } + + const filter = prepareEntityFilter('cockroach', filters, []); + + const progress = new IntrospectProgress(true); + const task = fromDatabaseForDrizzle(db, filter, (stage, count, status) => { + progress.update(stage, count, status); + }); + const res = await renderWithTask(progress, task); + + const { ddl: ddl2, errors } = interimToDDL(res); + + if (errors.length > 0) { + // TODO: print errors + console.error(errors); + process.exit(1); + } + + const ts = cockroachSequenceSchemaToTypeScript(ddl2, res.viewColumns, casing); + const relationsTs = relationsToTypeScript(cockroachToRelationsPull(ddl2), casing); + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relationsTs.file); + console.log(); + + const { snapshots } = prepareOutFolder(out); + if (snapshots.length === 0) { + const { sqlStatements, renames } = await ddlDiff( + createDDL(), // dry ddl + ddl2, + resolver('schema'), + resolver('enum'), + resolver('sequence'), + resolver('policy'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('index'), + resolver('check'), + resolver('primary key'), + resolver('foreign key'), + 'push', + ); + + writeResult({ + snapshot: toJsonSnapshot(ddl2, [originUUID], renames), + sqlStatements, + renames, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + snapshots, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] Your relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); +}; + +export const introspect = async ( + db: DB, + filter: EntityFilter, + progress: TaskView, +) => { + const schema = await renderWithTask(progress, fromDatabaseForDrizzle(db, filter)); + return { schema }; +}; diff --git a/drizzle-kit/src/cli/commands/pull-common.ts b/drizzle-kit/src/cli/commands/pull-common.ts new file mode 100644 index 0000000000..85061e3f0a --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-common.ts @@ -0,0 +1,299 @@ +import { plural, singular } from 'pluralize'; +import { paramNameFor } from '../../dialects/postgres/typescript'; +import { assertUnreachable } from '../../utils'; +import type { Casing } from '../validations/common'; + +const withCasing = (value: string, casing: Casing) => { + if (casing === 'preserve') { + return value; + } + if (casing === 'camel') { + return value.camelCase(); + } + + assertUnreachable(casing); +}; + +export type SchemaForPull = { + schema?: string; + foreignKeys: { + schema?: string; + table: string; + nameExplicit: boolean; + columns: string[]; + schemaTo?: string; + tableTo: string; + columnsTo: string[]; + onUpdate?: 'NO ACTION' | 'RESTRICT' | 'SET NULL' | 'CASCADE' | 'SET DEFAULT' | string | null; + onDelete?: 'NO ACTION' | 'RESTRICT' | 'SET NULL' | 'CASCADE' | 'SET DEFAULT' | string | null; + name: string; + entityType: 'fks'; + }[]; + // both unique constraints and unique indexes + uniques: { + columns: string[]; + }[]; +}[]; + +export const relationsToTypeScript = ( + schema: SchemaForPull, + casing: Casing, +) => { + const imports: string[] = []; + const tableRelations: Record< + string, + { + name: string; + type: 'one' | 'many' | 'through' | 'many-through' | 'one-one'; + tableFrom: string; + schemaFrom?: string; + columnsFrom: string[]; + tableTo: string; + schemaTo?: string; + columnsTo: string[]; + relationName?: string; + tableThrough?: string; + columnsThroughFrom?: string[]; + columnsThroughTo?: string[]; + }[] + > = {}; + + // Process all foreign keys as before. + schema.forEach((table) => { + const fks = Object.values(table.foreignKeys); + + if (fks.length === 2) { + const [fk1, fk2] = fks; + // reference to different tables, means it can be through many-many + const toTable1 = withCasing(paramNameFor(fk1.tableTo, fk1.schemaTo), casing); + const columnsTo1 = fk1.columnsTo.map((it) => withCasing(it, casing)); + + const toTable2 = withCasing(paramNameFor(fk2.tableTo, fk2.schemaTo), casing); + const columnsTo2 = fk2.columnsTo.map((it) => withCasing(it, casing)); + + const tableThrough = withCasing(paramNameFor(fk1.table, table.schema), casing); + // const tableFrom2 = withCasing(paramNameFor(fk2.table, table.schema), casing); + const columnsThroughFrom = fk1.columns.map((it) => withCasing(it, casing)); + const columnsThroughTo = fk2.columns.map((it) => withCasing(it, casing)); + + if ( + toTable1 !== toTable2 + ) { + if (!tableRelations[toTable1]) { + tableRelations[toTable1] = []; + } + + tableRelations[toTable1].push({ + name: plural(toTable2), + type: 'through', + tableFrom: toTable1, + columnsFrom: columnsTo1, + tableTo: toTable2, + columnsTo: columnsTo2, + tableThrough, + columnsThroughFrom, + columnsThroughTo, + }); + + if (!tableRelations[toTable2]) { + tableRelations[toTable2] = []; + } + + tableRelations[toTable2].push({ + name: plural(toTable1), + // this type is used for .many() side of relation, when another side has .through() with from and to fields + type: 'many-through', + tableFrom: toTable2, + columnsFrom: fk2.columnsTo, + tableTo: toTable1, + columnsTo: columnsTo2, + tableThrough, + columnsThroughFrom, + columnsThroughTo, + }); + } + } else { + fks.forEach((fk) => { + const tableNameFrom = paramNameFor(fk.table, table.schema); + const tableNameTo = paramNameFor(fk.tableTo, fk.schemaTo); + const tableFrom = withCasing(tableNameFrom.replace(/:+/g, ''), casing); + const tableTo = withCasing(tableNameTo.replace(/:+/g, ''), casing); + const columnsFrom = fk.columns.map((it) => withCasing(it, casing)); + const columnsTo = fk.columnsTo.map((it) => withCasing(it, casing)); + + imports.push(tableTo, tableFrom); + + const keyFrom = tableFrom; + if (!tableRelations[keyFrom]) { + tableRelations[keyFrom] = []; + } + + tableRelations[keyFrom].push({ + name: singular(tableTo), + type: 'one', + tableFrom, + columnsFrom, + tableTo, + columnsTo, + }); + + const keyTo = tableTo; + if (!tableRelations[keyTo]) { + tableRelations[keyTo] = []; + } + + // if this table has a unique on a column, that is used for 1-m, then we can assume that it's 1-1 relation + // we will check that all of the fk columns are unique, so we can assume it's 1-1 + // not matter if it's 1 column, 2 columns or more + if ( + table.uniques.find((constraint) => + constraint.columns.length === columnsFrom.length + && constraint.columns.every((col, i) => col === columnsFrom[i]) + ) + ) { + // the difference between one and one-one is that one-one won't contain from and to + // maybe it can be done by introducing some sort of flag or just not providing columnsFrom and columnsTo + // but I decided just to have a different type field here + tableRelations[keyTo].push({ + name: plural(tableFrom), + type: 'one-one', + tableFrom: tableTo, + columnsFrom: columnsTo, + tableTo: tableFrom, + columnsTo: columnsFrom, + }); + } else { + tableRelations[keyTo].push({ + name: plural(tableFrom), + type: 'many', + tableFrom: tableTo, + columnsFrom: columnsTo, + tableTo: tableFrom, + columnsTo: columnsFrom, + }); + } + }); + } + }); + + const importsTs = `import { defineRelations } from "drizzle-orm";\nimport * as schema from "./schema";\n\n`; + + let relationString = `export const relations = defineRelations(schema, (r) => ({`; + + Object.entries(tableRelations).forEach(([table, relations]) => { + // Adjust duplicate names if needed. + const preparedRelations = relations.map( + (relation, relationIndex, originArray) => { + let name = relation.name; + let relationName; + const hasMultipleRelations = originArray.some( + (it, originIndex) => relationIndex !== originIndex && it.tableTo === relation.tableTo, + ); + if (hasMultipleRelations) { + // if one relation - we need to name a relation from this table to "many" table + if (relation.type === 'one') { + relationName = `${relation.tableFrom}_${relation.columnsFrom.join('_')}_${relation.tableTo}_${ + relation.columnsTo.join('_') + }`; + // if many relation - name in in different order, so alias names will match + } else if (relation.type === 'many' || relation.type === 'one-one') { + relationName = `${relation.tableTo}_${relation.columnsTo.join('_')}_${relation.tableFrom}_${ + relation.columnsFrom.join('_') + }`; + // if through relation - we need to name a relation from this table to "many" table and include "via" + } else if (relation.type === 'through') { + relationName = `${relation.tableFrom}_${relation.columnsFrom.join('_')}_${relation.tableTo}_${ + relation.columnsTo.join('_') + }_via_${relation.tableThrough}`; + // else is for many-through, meaning we need to reverse the order for tables and columns, but leave "via" the same + } else { + relationName = `${relation.tableTo}_${relation.columnsTo.join('_')}_${relation.tableFrom}_${ + relation.columnsFrom.join('_') + }_via_${relation.tableThrough}`; + } + } + const hasDuplicatedRelation = originArray.some( + (it, originIndex) => relationIndex !== originIndex && it.name === relation.name, + ); + if (hasDuplicatedRelation) { + name = `${relation.name}_${ + relation.type === 'through' + ? `via_${relation.tableThrough}` + : relation.type === 'many-through' + ? `via_${relation.tableThrough}` + : relation.type === 'one' + ? relation.columnsFrom.join('_') + : relation.columnsTo.join('_') + }`; + } + return { + ...relation, + name: withCasing(name, casing), + relationName, + }; + }, + ); + + relationString += `\n\t${table}: {`; + preparedRelations.forEach((relation) => { + if (relation.type === 'one') { + const from = relation.columnsFrom.length === 1 + ? `r.${relation.tableFrom}.${relation.columnsFrom[0]}` + : `[${ + relation.columnsFrom + .map((it) => `r.${relation.tableFrom}.${it}`) + .join(', ') + }]`; + const to = relation.columnsTo.length === 1 + ? `r.${relation.tableTo}.${relation.columnsTo[0]}` + : `[${ + relation.columnsTo + .map((it) => `r.${relation.tableTo}.${it}`) + .join(', ') + }]`; + + relationString += `\n\t\t${relation.name}: r.one.${relation.tableTo}({\n\t\t\tfrom: ${from},\n\t\t\tto: ${to}` + + (relation.relationName ? `,\n\t\t\talias: "${relation.relationName}"` : '') + + `\n\t\t}),`; + } else if (relation.type === 'many' || relation.type === 'many-through') { + relationString += `\n\t\t${relation.name}: r.many.${relation.tableTo}(` + + (relation.relationName ? `{\n\t\t\talias: "${relation.relationName}"\n\t\t}` : '') + + `),`; + } else if (relation.type === 'one-one') { + relationString += `\n\t\t${relation.name}: r.one.${relation.tableTo}(` + + (relation.relationName ? `{\n\t\t\talias: "${relation.relationName}"\n\t\t}` : '') + + `),`; + } else { + const from = relation.columnsThroughFrom!.length === 1 + ? `r.${relation.tableFrom}.${relation.columnsFrom[0]}.through(r.${relation.tableThrough}.${ + relation.columnsThroughFrom![0] + })` + : `[${ + relation.columnsThroughFrom! + .map((it) => `r.${relation.tableFrom}.${it}.through(${relation.tableThrough}.${it})`) + .join(', ') + }]`; + const to = relation.columnsThroughTo!.length === 1 + ? `r.${relation.tableTo}.${relation.columnsTo![0]}.through(r.${relation.tableThrough}.${ + relation.columnsThroughTo![0] + })` + : `[${ + relation.columnsThroughTo! + .map((it) => `r.${relation.tableTo}.${it}.through(${relation.tableThrough}.${it})`) + .join(', ') + }]`; + + relationString += `\n\t\t${relation.name}: r.many.${relation.tableTo}({\n\t\t\tfrom: ${from},\n\t\t\tto: ${to}` + + (relation.relationName ? `,\n\t\t\talias: "${relation.relationName}"` : '') + + `\n\t\t}),`; + } + }); + relationString += `\n\t},`; + }); + + relationString += `\n}))`; + + return { + file: importsTs + relationString, + }; +}; diff --git a/drizzle-kit/src/cli/commands/pull-gel.ts b/drizzle-kit/src/cli/commands/pull-gel.ts new file mode 100644 index 0000000000..0a8662e5b4 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-gel.ts @@ -0,0 +1,73 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import { render, renderWithTask } from 'hanji'; +import { join } from 'path'; +import { interimToDDL, postgresToRelationsPull } from 'src/dialects/postgres/ddl'; +import { ddlToTypeScript } from 'src/dialects/postgres/typescript'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { fromDatabase } from '../../dialects/postgres/introspect'; +import type { prepareGelDB } from '../connections'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { Casing, Prefix } from '../validations/common'; +import type { GelCredentials } from '../validations/gel'; +import { IntrospectProgress } from '../views'; +import { relationsToTypeScript } from './pull-common'; + +export const handle = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: GelCredentials | undefined, + filters: EntitiesFilterConfig, + _prefix: Prefix, + db?: Awaited>, +) => { + if (!db) { + const { prepareGelDB } = await import('../connections'); + db = await prepareGelDB(credentials); + } + + const progress = new IntrospectProgress(true); + const entityFilter = prepareEntityFilter('gel', filters, []); + + const task = fromDatabase(db, entityFilter, (stage, count, status) => { + progress.update(stage, count, status); + }); + + const res = await renderWithTask(progress, task); + + const { ddl: ddl2, errors } = interimToDDL(res); + + if (errors.length > 0) { + // TODO: print errors + process.exit(1); + } + + const ts = ddlToTypeScript(ddl2, res.viewColumns, casing, 'gel'); + const relationsTs = relationsToTypeScript(postgresToRelationsPull(ddl2), casing); + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relationsTs.file); + console.log(); + + render( + `[${ + chalk.green( + '✓', + ) + }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] Your relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); +}; diff --git a/drizzle-kit/src/cli/commands/pull-libsql.ts b/drizzle-kit/src/cli/commands/pull-libsql.ts new file mode 100644 index 0000000000..6cb4a6a432 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-libsql.ts @@ -0,0 +1 @@ +export { handle } from './pull-sqlite'; diff --git a/drizzle-kit/src/cli/commands/pull-mssql.ts b/drizzle-kit/src/cli/commands/pull-mssql.ts new file mode 100644 index 0000000000..403ff3e9c4 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-mssql.ts @@ -0,0 +1,129 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import type { TaskView } from 'hanji'; +import { render, renderWithTask } from 'hanji'; +import { join } from 'path'; +import { toJsonSnapshot } from 'src/dialects/mssql/snapshot'; +import type { EntityFilter } from 'src/dialects/pull-utils'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { prepareOutFolder } from 'src/utils/utils-node'; +import type { + CheckConstraint, + Column, + DefaultConstraint, + ForeignKey, + Index, + MssqlEntities, + PrimaryKey, + Schema, + UniqueConstraint, + View, +} from '../../dialects/mssql/ddl'; +import { createDDL, interimToDDL } from '../../dialects/mssql/ddl'; +import { ddlDiff } from '../../dialects/mssql/diff'; +import { fromDatabaseForDrizzle } from '../../dialects/mssql/introspect'; +import { ddlToTypeScript } from '../../dialects/mssql/typescript'; +import { type DB, originUUID } from '../../utils'; +import type { connectToMsSQL } from '../connections'; +import { resolver } from '../prompts'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { Casing, Prefix } from '../validations/common'; +import type { MssqlCredentials } from '../validations/mssql'; +import { IntrospectProgress, mssqlSchemaError } from '../views'; +import { writeResult } from './generate-common'; + +export const handle = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: MssqlCredentials, + filters: EntitiesFilterConfig, + prefix: Prefix, + db?: Awaited>, +) => { + if (!db) { + const { connectToMsSQL } = await import('../connections'); + db = await connectToMsSQL(credentials); + } + + const filter = prepareEntityFilter('mssql', filters, []); + + const progress = new IntrospectProgress(true); + const task = fromDatabaseForDrizzle(db.db, filter, (stage, count, status) => { + progress.update(stage, count, status); + }); + + const res = await renderWithTask(progress, task); + + const { ddl: ddl2, errors } = interimToDDL(res); + + if (errors.length > 0) { + console.log(errors.map((it) => mssqlSchemaError(it)).join('\n')); + process.exit(1); + } + + const ts = ddlToTypeScript(ddl2, res.viewColumns, casing); + // const relationsTs = relationsToTypeScript(ddl2.fks.list(), casing); + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + // const relationsFile = join(out, 'relations.ts'); + // writeFileSync(relationsFile, relationsTs.file); + console.log(); + + const { snapshots } = prepareOutFolder(out); + if (snapshots.length === 0) { + const { sqlStatements, renames } = await ddlDiff( + createDDL(), // dry ddl + ddl2, + resolver('schema'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('unique', 'dbo'), // uniques + resolver('index', 'dbo'), // indexes + resolver('check', 'dbo'), // checks + resolver('primary key', 'dbo'), // pks + resolver('foreign key', 'dbo'), // fks + resolver('default', 'dbo'), // defaults + 'default', + ); + + writeResult({ + snapshot: toJsonSnapshot(ddl2, [originUUID], renames), + sqlStatements, + renames, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + snapshots, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); +}; + +export const introspect = async ( + db: DB, + filter: EntityFilter, + progress: TaskView, +) => { + const schema = await renderWithTask(progress, fromDatabaseForDrizzle(db, filter)); + + return { schema }; +}; diff --git a/drizzle-kit/src/cli/commands/pull-mysql.ts b/drizzle-kit/src/cli/commands/pull-mysql.ts new file mode 100644 index 0000000000..edafb9d293 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-mysql.ts @@ -0,0 +1,131 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import type { TaskView } from 'hanji'; +import { renderWithTask } from 'hanji'; +import { render } from 'hanji'; +import { join } from 'path'; +import type { EntityFilter } from 'src/dialects/pull-utils'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { createDDL, interimToDDL, mysqlToRelationsPull } from '../../dialects/mysql/ddl'; +import { ddlDiff } from '../../dialects/mysql/diff'; +import { fromDatabaseForDrizzle } from '../../dialects/mysql/introspect'; +import { toJsonSnapshot } from '../../dialects/mysql/snapshot'; +import { ddlToTypeScript } from '../../dialects/mysql/typescript'; +import type { DB } from '../../utils'; +import { mockResolver } from '../../utils/mocks'; +import { prepareOutFolder } from '../../utils/utils-node'; +import type { connectToMySQL } from '../connections'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { Casing, Prefix } from '../validations/common'; +import type { MysqlCredentials } from '../validations/mysql'; +import type { IntrospectStage, IntrospectStatus } from '../views'; +import { IntrospectProgress } from '../views'; +import { writeResult } from './generate-common'; +import { relationsToTypeScript } from './pull-common'; + +export const handle = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: MysqlCredentials, + filters: EntitiesFilterConfig, + prefix: Prefix, + db?: Awaited>, +) => { + if (!db) { + const { connectToMySQL } = await import('../connections'); + db = await connectToMySQL(credentials); + } + + const filter = prepareEntityFilter('mysql', filters, []); + const progress = new IntrospectProgress(); + const { schema } = await introspect({ + db: db.db, + database: db.database, + progress, + progressCallback: (stage, count, status) => { + progress.update(stage, count, status); + }, + filter, + }); + const { ddl } = interimToDDL(schema); + + const ts = ddlToTypeScript(ddl, schema.viewColumns, casing, 'mysql'); + const relations = relationsToTypeScript(mysqlToRelationsPull(ddl), casing); + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relations.file); + console.log(); + + const { snapshots } = prepareOutFolder(out); + + if (snapshots.length === 0) { + const { sqlStatements } = await ddlDiff( + createDDL(), + ddl, + mockResolver(new Set()), + mockResolver(new Set()), + mockResolver(new Set()), + 'push', + ); + + writeResult({ + snapshot: toJsonSnapshot(ddl, [], []), + sqlStatements, + renames: [], + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + snapshots, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] Your relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); +}; + +export const introspect = async (props: { + db: DB; + database: string; + filter: EntityFilter; + progress: TaskView; + progressCallback?: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void; +}) => { + const { db, database, progress, filter } = props; + const pcb = props.progressCallback ?? (() => {}); + + const res = await renderWithTask(progress, fromDatabaseForDrizzle(db, database, filter, pcb)); + return { schema: res }; +}; diff --git a/drizzle-kit/src/cli/commands/pull-postgres.ts b/drizzle-kit/src/cli/commands/pull-postgres.ts new file mode 100644 index 0000000000..2f8cf0ffd9 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-postgres.ts @@ -0,0 +1,154 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import type { TaskView } from 'hanji'; +import { render, renderWithTask } from 'hanji'; +import { join } from 'path'; +import { toJsonSnapshot } from 'src/dialects/postgres/snapshot'; +import type { EntityFilter } from 'src/dialects/pull-utils'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import type { + CheckConstraint, + Column, + Enum, + ForeignKey, + Index, + Policy, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, +} from '../../dialects/postgres/ddl'; +import { createDDL, interimToDDL, postgresToRelationsPull } from '../../dialects/postgres/ddl'; +import { ddlDiff } from '../../dialects/postgres/diff'; +import { fromDatabaseForDrizzle } from '../../dialects/postgres/introspect'; +import { ddlToTypeScript as postgresSchemaToTypeScript } from '../../dialects/postgres/typescript'; +import { originUUID } from '../../utils'; +import type { DB } from '../../utils'; +import { prepareOutFolder } from '../../utils/utils-node'; +import type { preparePostgresDB } from '../connections'; +import { resolver } from '../prompts'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { Casing, Prefix } from '../validations/common'; +import type { PostgresCredentials } from '../validations/postgres'; +import type { IntrospectStage, IntrospectStatus } from '../views'; +import { IntrospectProgress } from '../views'; +import { writeResult } from './generate-common'; +import { relationsToTypeScript } from './pull-common'; + +export const handle = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: PostgresCredentials, + filtersConfig: EntitiesFilterConfig, + prefix: Prefix, + db?: Awaited>, +) => { + if (!db) { + const { preparePostgresDB } = await import('../connections'); + db = await preparePostgresDB(credentials); + } + + const progress = new IntrospectProgress(true); + const entityFilter = prepareEntityFilter('postgresql', filtersConfig, []); + + const { schema: res } = await renderWithTask( + progress, + introspect(db, entityFilter, progress, (stage, count, status) => { + progress.update(stage, count, status); + }), + ); + + const { ddl: ddl2, errors } = interimToDDL(res); + + if (errors.length > 0) { + // TODO: print errors + console.error(errors); + process.exit(1); + } + + const ts = postgresSchemaToTypeScript(ddl2, res.viewColumns, casing, 'pg'); + const relationsTs = relationsToTypeScript(postgresToRelationsPull(ddl2), casing); + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relationsTs.file); + console.log(); + + const { snapshots } = prepareOutFolder(out); + if (snapshots.length === 0) { + // const blanks = new Set(); + const { sqlStatements, renames } = await ddlDiff( + createDDL(), // dry ddl + ddl2, + resolver('schema'), + resolver('enum'), + resolver('sequence'), + resolver('policy'), + resolver('role'), + resolver('privilege'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('unique'), + resolver('index'), + resolver('check'), + resolver('primary key'), + resolver('foreign key'), + 'push', + ); + + writeResult({ + snapshot: toJsonSnapshot(ddl2, [originUUID], renames), + sqlStatements, + renames, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + snapshots, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] Your relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); +}; + +export const introspect = async ( + db: DB, + filter: EntityFilter, + progress: TaskView, + callback?: (stage: IntrospectStage, count: number, status: IntrospectStatus) => void, +) => { + const schema = await renderWithTask(progress, fromDatabaseForDrizzle(db, filter, callback)); + return { schema }; +}; diff --git a/drizzle-kit/src/cli/commands/pull-singlestore.ts b/drizzle-kit/src/cli/commands/pull-singlestore.ts new file mode 100644 index 0000000000..d0257a7ee3 --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-singlestore.ts @@ -0,0 +1,105 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import { render, renderWithTask } from 'hanji'; +import { join } from 'path'; +import { createDDL, interimToDDL, mysqlToRelationsPull } from 'src/dialects/mysql/ddl'; +import { fromDatabaseForDrizzle } from 'src/dialects/mysql/introspect'; +import { ddlToTypeScript } from 'src/dialects/mysql/typescript'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { ddlDiff } from 'src/dialects/singlestore/diff'; +import { toJsonSnapshot } from 'src/dialects/singlestore/snapshot'; +import { mockResolver } from 'src/utils/mocks'; +import { prepareOutFolder } from '../../utils/utils-node'; +import type { connectToSingleStore } from '../connections'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { Casing, Prefix } from '../validations/common'; +import type { SingleStoreCredentials } from '../validations/singlestore'; +import { IntrospectProgress } from '../views'; +import { writeResult } from './generate-common'; +import { relationsToTypeScript } from './pull-common'; + +export const handle = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: SingleStoreCredentials, + filters: EntitiesFilterConfig, + prefix: Prefix, + db?: Awaited>, +) => { + if (!db) { + const { connectToSingleStore } = await import('../connections'); + db = await connectToSingleStore(credentials); + } + + const filter = prepareEntityFilter('singlestore', filters, []); + + const progress = new IntrospectProgress(); + const task = fromDatabaseForDrizzle(db.db, db.database, filter, (stage, count, status) => { + progress.update(stage, count, status); + }); + const res = await renderWithTask(progress, task); + + const { ddl } = interimToDDL(res); + + const ts = ddlToTypeScript(ddl, res.viewColumns, casing, 'singlestore'); + const relations = relationsToTypeScript(mysqlToRelationsPull(ddl), casing); + + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relations.file); + console.log(); + + const { snapshots } = prepareOutFolder(out); + + if (snapshots.length === 0) { + const { sqlStatements } = await ddlDiff( + createDDL(), + ddl, + mockResolver(new Set()), + mockResolver(new Set()), + mockResolver(new Set()), + 'push', + ); + + writeResult({ + snapshot: toJsonSnapshot(ddl, [], []), + sqlStatements, + renames: [], + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + snapshots, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] Your relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); +}; diff --git a/drizzle-kit/src/cli/commands/pull-sqlite.ts b/drizzle-kit/src/cli/commands/pull-sqlite.ts new file mode 100644 index 0000000000..57de7dbb7e --- /dev/null +++ b/drizzle-kit/src/cli/commands/pull-sqlite.ts @@ -0,0 +1,114 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import type { TaskView } from 'hanji'; +import { render, renderWithTask } from 'hanji'; +import { join } from 'path'; +import type { EntityFilter } from 'src/dialects/pull-utils'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { createDDL, interimToDDL, sqliteToRelationsPull } from 'src/dialects/sqlite/ddl'; +import { toJsonSnapshot } from 'src/dialects/sqlite/snapshot'; +import { ddlDiffDry } from '../../dialects/sqlite/diff'; +import { fromDatabaseForDrizzle } from '../../dialects/sqlite/introspect'; +import { ddlToTypeScript } from '../../dialects/sqlite/typescript'; +import { originUUID } from '../../utils'; +import type { SQLiteDB } from '../../utils'; +import { prepareOutFolder } from '../../utils/utils-node'; +import type { connectToSQLite } from '../connections'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { Casing, Prefix } from '../validations/common'; +import type { SqliteCredentials } from '../validations/sqlite'; +import { IntrospectProgress, type IntrospectStage, type IntrospectStatus } from '../views'; +import { writeResult } from './generate-common'; +import { relationsToTypeScript } from './pull-common'; + +export const handle = async ( + casing: Casing, + out: string, + breakpoints: boolean, + credentials: SqliteCredentials, + filters: EntitiesFilterConfig, + prefix: Prefix, + type: 'sqlite' | 'libsql' = 'sqlite', + db?: Awaited>, +) => { + if (!db) { + const { connectToSQLite } = await import('../connections'); + db = await connectToSQLite(credentials); + } + + const progress = new IntrospectProgress(); + const filter = prepareEntityFilter('sqlite', filters, []); + const { ddl, viewColumns } = await introspect(db, filter, progress, (stage, count, status) => { + progress.update(stage, count, status); + }); + + const ts = ddlToTypeScript(ddl, casing, viewColumns, type); + const relationsTs = relationsToTypeScript(sqliteToRelationsPull(ddl), casing); + + // check orm and orm-pg api version + const schemaFile = join(out, 'schema.ts'); + writeFileSync(schemaFile, ts.file); + + const relationsFile = join(out, 'relations.ts'); + writeFileSync(relationsFile, relationsTs.file); + + console.log(); + const { snapshots } = prepareOutFolder(out); + + if (snapshots.length === 0) { + const { sqlStatements, renames } = await ddlDiffDry(createDDL(), ddl, 'default'); + + writeResult({ + snapshot: toJsonSnapshot(ddl, originUUID, [], renames), + sqlStatements, + renames, + outFolder: out, + breakpoints, + type: 'introspect', + prefixMode: prefix, + snapshots, + }); + } else { + render( + `[${ + chalk.blue( + 'i', + ) + }] No SQL generated, you already have migrations in project`, + ); + } + + render( + `[${ + chalk.green( + '✓', + ) + }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, + ); + render( + `[${ + chalk.green( + '✓', + ) + }] You relations file is ready ➜ ${ + chalk.bold.underline.blue( + relationsFile, + ) + } 🚀`, + ); +}; + +export const introspect = async ( + db: SQLiteDB, + filter: EntityFilter, + taskView: TaskView, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, +) => { + const schema = await renderWithTask(taskView, fromDatabaseForDrizzle(db, filter, progressCallback)); + const res = interimToDDL(schema); + return { ...res, viewColumns: schema.viewsToColumns }; +}; diff --git a/drizzle-kit/src/cli/commands/push-cockroach.ts b/drizzle-kit/src/cli/commands/push-cockroach.ts new file mode 100644 index 0000000000..b0e5c4863b --- /dev/null +++ b/drizzle-kit/src/cli/commands/push-cockroach.ts @@ -0,0 +1,289 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { extractCrdbExisting } from 'src/dialects/drizzle'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import type { + CheckConstraint, + CockroachEntities, + Column, + Enum, + ForeignKey, + Index, + Policy, + PrimaryKey, + Schema, + Sequence, + View, +} from '../../dialects/cockroach/ddl'; +import { interimToDDL } from '../../dialects/cockroach/ddl'; +import { ddlDiff } from '../../dialects/cockroach/diff'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from '../../dialects/cockroach/drizzle'; +import type { JsonStatement } from '../../dialects/cockroach/statements'; +import type { DB } from '../../utils'; +import { prepareFilenames } from '../../utils/utils-node'; +import { resolver } from '../prompts'; +import { Select } from '../selector-ui'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { CockroachCredentials } from '../validations/cockroach'; +import type { CasingType } from '../validations/common'; +import { withStyle } from '../validations/outputs'; +import { postgresSchemaError, postgresSchemaWarning, ProgressView } from '../views'; + +export const handle = async ( + schemaPath: string | string[], + verbose: boolean, + credentials: CockroachCredentials, + filters: EntitiesFilterConfig, + force: boolean, + casing: CasingType | undefined, +) => { + const { prepareCockroach } = await import('../connections'); + const { introspect: cockroachPushIntrospect } = await import('./pull-cockroach'); + + const db = await prepareCockroach(credentials); + const filenames = prepareFilenames(schemaPath); + const res = await prepareFromSchemaFiles(filenames); + + const existing = extractCrdbExisting(res.schemas, res.views, res.matViews); + const filter = prepareEntityFilter('cockroach', filters, existing); + + const { schema: schemaTo, errors, warnings } = fromDrizzleSchema(res, casing, filter); + + if (warnings.length > 0) { + console.log(warnings.map((it) => postgresSchemaWarning(it)).join('\n\n')); + } + + if (errors.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + const progress = new ProgressView('Pulling schema from database...', 'Pulling schema from database...'); + const { schema: schemaFrom } = await cockroachPushIntrospect(db, filter, progress); + + const { ddl: ddl1, errors: errors1 } = interimToDDL(schemaFrom); + const { ddl: ddl2 } = interimToDDL(schemaTo); + // todo: handle errors? + + if (errors1.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + // const blanks = new Set(); + const { sqlStatements, statements: jsonStatements } = await ddlDiff( + ddl1, + ddl2, + resolver('schema'), + resolver('enum'), + resolver('sequence'), + resolver('policy'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('index'), + resolver('check'), + resolver('primary key'), + resolver('foreign key'), + 'push', + ); + + if (sqlStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + return; + } + + const { losses, hints } = await suggestions(db, jsonStatements); + + if (verbose) { + console.log(); + console.log(withStyle.warning('You are about to execute these statements:')); + console.log(); + console.log(losses.map((s) => chalk.blue(s)).join('\n')); + console.log(); + } + + if (!force && hints.length === 0) { + const { data } = await render(new Select(['No, abort', 'Yes, I want to execute all statements'])); + + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + if (!force && hints.length > 0) { + console.log(withStyle.warning('Found data-loss statements:')); + console.log(hints.join('\n')); + console.log(); + console.log( + chalk.red.bold( + 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', + ), + ); + + console.log(chalk.white('Do you still want to push changes?')); + + const { data } = await render(new Select(['No, abort', `Yes, proceed`])); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + for (const statement of [...losses, ...sqlStatements]) { + await db.query(statement); + } + + render(`[${chalk.green('✓')}] Changes applied`); +}; + +const identifier = (it: { schema?: string; name: string }) => { + const { schema, name } = it; + const schemakey = schema && schema !== 'public' ? `"${schema}".` : ''; + return `${schemakey}"${name}"`; +}; + +export const suggestions = async (db: DB, jsonStatements: JsonStatement[]) => { + const statements: string[] = []; + const hints = [] as string[]; + + const filtered = jsonStatements.filter((it) => { + // discussion - + if (it.type === 'recreate_view') return false; + + /* + drizzle-kit push does not handle alternations of views definitions + just like with check constraints we can only reliably handle this with introduction of shadow db + + for now we encourage developers to `remove view from drizzle schema -> push -> add view to drizzle schema -> push` + */ + if (it.type === 'alter_column' && it.diff.generated) return false; + + /* + [Update] it does now, we have origin of creation + + drizzle-kit push does not handle alternation of check constraints + that's a limitation due to a nature of in-database way of persisting check constraints values + + in order to properly support one - we'd need to either fully implement in-database DDL, + or implement proper commutativity checks or use shadow DB for push command(the most reasonable way) + */ + // if (it.type === 'alter_column') return false; + + return true; + }); + + for (const statement of filtered) { + if (statement.type === 'drop_table') { + const res = await db.query(`select 1 from ${statement.key} limit 1`); + + if (res.length > 0) hints.push(`· You're about to delete non-empty ${statement.key} table`); + continue; + } + + if (statement.type === 'drop_view' && statement.view.materialized) { + const id = identifier(statement.view); + const res = await db.query(`select 1 from ${id} limit 1`); + if (res.length === 0) continue; + + hints.push(`· You're about to delete non-empty ${id} materialized view`); + continue; + } + + if (statement.type === 'drop_column') { + const column = statement.column; + const id = identifier({ schema: column.schema, name: column.table }); + const res = await db.query(`select 1 from ${id} limit 1`); + if (res.length === 0) continue; + + hints.push(`· You're about to delete non-empty ${column.name} column in ${id} table`); + continue; + } + + if (statement.type === 'drop_schema') { + // count tables in schema + const res = await db.query( + `select count(*) as count from information_schema.tables where table_schema = '${statement.name}';`, + ); + const count = Number(res[0].count); + if (count === 0) continue; + + hints.push(`· You're about to delete ${chalk.underline(statement.name)} schema with ${count} tables`); + continue; + } + + // drop pk + if (statement.type === 'drop_pk') { + const schema = statement.pk.schema ?? 'public'; + const table = statement.pk.table; + const id = `"${schema}"."${table}"`; + const res = await db.query( + `select 1 from ${id} limit 1`, + ); + + if (res.length > 0) { + hints.push( + `· You're about to drop ${ + chalk.underline(id) + } primary key, this statements may fail and your table may loose primary key`, + ); + } + + const [{ name: pkName }] = await db.query<{ name: string }>(` + SELECT constraint_name as name + FROM information_schema.table_constraints + WHERE + table_schema = '${schema}' + AND table_name = '${table}' + AND constraint_type = 'PRIMARY KEY';`); + + statements.push(`ALTER TABLE ${id} DROP CONSTRAINT "${pkName}"`); + continue; + } + + if (statement.type === 'add_column' && statement.column.notNull && statement.column.default === null) { + const column = statement.column; + const id = identifier({ schema: column.schema, name: column.table }); + const res = await db.query(`select 1 from ${id} limit 1`); + + if (res.length === 0) continue; + hints.push( + `· You're about to add not-null ${ + chalk.underline(statement.column.name) + } column without default value to a non-empty ${id} table`, + ); + + // statementsToExecute.push(`truncate table ${id} cascade;`); + continue; + } + + if (statement.type === 'create_index' && statement.index.isUnique && !statement.newTable) { + const unique = statement.index; + const id = identifier({ schema: unique.schema, name: unique.table }); + + const res = await db.query(`select 1 from ${id} limit 1`); + if (res.length === 0) continue; + + console.log( + `· You're about to add ${chalk.underline(unique.name)} unique index to a non-empty ${id} table which may fail`, + ); + // const { status, data } = await render( + // new Select(['No, add the constraint without truncating the table', `Yes, truncate the table`]), + // ); + // if (data?.index === 1) { + // statementsToExecute.push( + // `truncate table ${ + // tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) + // } cascade;`, + // ); + // } + continue; + } + } + + return { + losses: statements, + hints, + }; +}; diff --git a/drizzle-kit/src/cli/commands/push-libsql.ts b/drizzle-kit/src/cli/commands/push-libsql.ts new file mode 100644 index 0000000000..9248c93f94 --- /dev/null +++ b/drizzle-kit/src/cli/commands/push-libsql.ts @@ -0,0 +1,18 @@ +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { CasingType } from '../validations/common'; +import type { LibSQLCredentials } from '../validations/libsql'; +import { handle as sqliteHandle } from './push-sqlite'; + +export const handle = async ( + schemaPath: string | string[], + verbose: boolean, + credentials: LibSQLCredentials, + filters: EntitiesFilterConfig, + force: boolean, + casing: CasingType | undefined, + explainFlag: boolean, +) => { + const { connectToLibSQL } = await import('../connections'); + const db = await connectToLibSQL(credentials); + return sqliteHandle(schemaPath, verbose, credentials, filters, force, casing, explainFlag, db); +}; diff --git a/drizzle-kit/src/cli/commands/push-mssql.ts b/drizzle-kit/src/cli/commands/push-mssql.ts new file mode 100644 index 0000000000..314c72e799 --- /dev/null +++ b/drizzle-kit/src/cli/commands/push-mssql.ts @@ -0,0 +1,326 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { extractMssqlExisting } from 'src/dialects/drizzle'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { prepareFilenames } from 'src/utils/utils-node'; +import type { + CheckConstraint, + Column, + DefaultConstraint, + ForeignKey, + Index, + MssqlDDL, + MssqlEntities, + PrimaryKey, + Schema, + UniqueConstraint, + View, +} from '../../dialects/mssql/ddl'; +import { interimToDDL } from '../../dialects/mssql/ddl'; +import { ddlDiff } from '../../dialects/mssql/diff'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from '../../dialects/mssql/drizzle'; +import type { JsonStatement } from '../../dialects/mssql/statements'; +import type { DB } from '../../utils'; +import { resolver } from '../prompts'; +import { Select } from '../selector-ui'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { CasingType } from '../validations/common'; +import type { MssqlCredentials } from '../validations/mssql'; +import { withStyle } from '../validations/outputs'; +import { mssqlSchemaError, ProgressView } from '../views'; + +export const handle = async ( + schemaPath: string | string[], + verbose: boolean, + credentials: MssqlCredentials, + filters: EntitiesFilterConfig, + force: boolean, + casing: CasingType | undefined, +) => { + const { connectToMsSQL } = await import('../connections'); + const { introspect } = await import('./pull-mssql'); + + const { db } = await connectToMsSQL(credentials); + const filenames = prepareFilenames(schemaPath); + const res = await prepareFromSchemaFiles(filenames); + + const existing = extractMssqlExisting(res.schemas, res.views); + const filter = prepareEntityFilter('mssql', filters, existing); + + const { schema: schemaTo, errors } = fromDrizzleSchema(res, casing, filter); + + if (errors.length > 0) { + console.log(errors.map((it) => mssqlSchemaError(it)).join('\n')); + process.exit(1); + } + + const progress = new ProgressView('Pulling schema from database...', 'Pulling schema from database...'); + const { schema: schemaFrom } = await introspect(db, filter, progress); + + const { ddl: ddl1, errors: errors1 } = interimToDDL(schemaFrom); + const { ddl: ddl2, errors: errors2 } = interimToDDL(schemaTo); + + if (errors1.length > 0) { + console.log(errors.map((it) => mssqlSchemaError(it)).join('\n')); + process.exit(1); + } + + if (errors2.length > 0) { + console.log(errors.map((it) => mssqlSchemaError(it)).join('\n')); + process.exit(1); + } + + const { sqlStatements, statements: jsonStatements } = await ddlDiff( + ddl1, + ddl2, + resolver('schema', 'dbo'), + resolver('table', 'dbo'), + resolver('column', 'dbo'), + resolver('view', 'dbo'), + resolver('unique', 'dbo'), // uniques + resolver('index', 'dbo'), // indexes + resolver('check', 'dbo'), // checks + resolver('primary key', 'dbo'), // pks + resolver('foreign key', 'dbo'), // fks + resolver('default', 'dbo'), // fks + 'push', + ); + + if (sqlStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + return; + } + + const { losses, hints } = await suggestions(db, jsonStatements, ddl2); + + const statementsToExecute = [...losses, ...sqlStatements]; + if (verbose) { + console.log(); + console.log(withStyle.warning('You are about to execute these statements:')); + console.log(); + console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); + console.log(); + } + + if (!force && hints.length === 0) { + const { data } = await render(new Select(['No, abort', 'Yes, I want to execute all statements'])); + + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + if (!force && hints.length > 0) { + console.log(withStyle.warning('Found data-loss statements:')); + console.log(hints.join('\n')); + console.log(); + console.log( + chalk.red.bold( + 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', + ), + ); + + console.log(chalk.white('Do you still want to push changes?')); + + const { data } = await render(new Select(['No, abort', `Yes, proceed`])); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + for (const statement of statementsToExecute) { + await db.query(statement); + } + + render(`[${chalk.green('✓')}] Changes applied`); +}; + +const identifier = (it: { schema?: string; table: string }) => { + const { schema, table } = it; + + const schemaKey = schema && schema !== 'dbo' ? `[${schema}].` : ''; + const tableKey = `[${table}]`; + + return `${schemaKey}${tableKey}`; +}; + +export const suggestions = async (db: DB, jsonStatements: JsonStatement[], ddl2: MssqlDDL) => { + const losses: string[] = []; + const hints = [] as string[]; + + const filtered = jsonStatements.filter((it) => { + if (it.type === 'alter_column' && it.diff.generated) return false; + + return true; + }); + + for (const statement of filtered) { + if (statement.type === 'drop_table') { + const tableName = identifier({ schema: statement.table.schema, table: statement.table.name }); + const res = await db.query(`select top(1) 1 from ${tableName};`); + + if (res.length > 0) hints.push(`· You're about to delete non-empty [${statement.table.name}] table`); + continue; + } + + if (statement.type === 'drop_column') { + const column = statement.column; + + const key = identifier({ schema: column.schema, table: column.table }); + + const res = await db.query(`SELECT TOP(1) 1 FROM ${key} WHERE [${column.name}] IS NOT NULL;`); + if (res.length === 0) continue; + + hints.push(`· You're about to delete non-empty [${column.name}] column in [${column.table}] table`); + continue; + } + + if (statement.type === 'drop_schema') { + // count tables in schema + const res = await db.query( + `select count(*) as count from information_schema.tables where table_schema = '${statement.name}';`, + ); + const count = Number(res[0].count); + if (count === 0) continue; + + const tableGrammar = count === 1 ? 'table' : 'tables'; + hints.push( + `· You're about to delete [${statement.name}] schema with ${count} ${tableGrammar}`, + ); + continue; + } + + // add column with not null without default + if ( + statement.type === 'add_column' && statement.column.notNull + && !ddl2.defaults.one({ + column: statement.column.name, + schema: statement.column.schema, + table: statement.column.table, + }) + ) { + const column = statement.column; + const key = identifier({ schema: column.schema, table: column.table }); + const res = await db.query(`select top(1) 1 from ${key}`); + + if (res.length === 0) continue; + + hints.push( + `· You're about to add not-null [${column.name}] column without default value to a non-empty ${key} table`, + ); + + losses.push(`DELETE FROM ${key};`); + + continue; + } + + // add not null without default + if ( + statement.type === 'alter_column' && statement.diff.$right.notNull + && !ddl2.defaults.one({ + column: statement.diff.$right.name, + schema: statement.diff.$right.schema, + table: statement.diff.$right.table, + }) + ) { + const column = statement.diff.$right; + const key = identifier({ schema: column.schema, table: column.table }); + const res = await db.query(`select top(1) 1 from ${key};`); + + if (res.length === 0) continue; + hints.push( + `· You're about to add not-null to [${statement.diff.$right.name}] column without default value to a non-empty ${key} table`, + ); + + losses.push(`DELETE FROM ${key};`); + + continue; + } + + if (statement.type === 'drop_pk') { + const schema = statement.pk.schema ?? 'dbo'; + const table = statement.pk.table; + const id = identifier({ table: table, schema: schema }); + const res = await db.query( + `select top(1) 1 from ${id};`, + ); + + if (res.length > 0) { + hints.push( + `· You're about to drop ${ + chalk.underline(id) + } primary key, this statements may fail and your table may loose primary key`, + ); + } + + continue; + } + + if (statement.type === 'add_unique') { + const unique = statement.unique; + const id = identifier({ schema: unique.schema, table: unique.table }); + + const res = await db.query(`select top(1) 1 from ${id};`); + if (res.length === 0) continue; + + hints.push( + `· You're about to add ${ + chalk.underline(unique.name) + } unique constraint to a non-empty ${id} table which may fail`, + ); + + continue; + } + + // TODO should we abort process here? + if ( + statement.type === 'rename_column' + && ddl2.checks.one({ schema: statement.to.schema, table: statement.to.table }) + ) { + const left = statement.from; + const right = statement.to; + + hints.push( + `· You are trying to rename column from ${left.name} to ${right.name}, but it is not possible to rename a column if it is used in a check constraint on the table. +To rename the column, first drop the check constraint, then rename the column, and finally recreate the check constraint`, + ); + + continue; + } + + if (statement.type === 'rename_schema') { + const left = statement.from; + const right = statement.to; + + hints.push( + `· You are trying to rename schema ${left.name} to ${right.name}, but it is not supported to rename a schema in mssql. +You should create new schema and transfer everything to it`, + ); + + continue; + } + + // TODO add this in future for corner cases + // Probably we should add `isDrizzleSql` field to grammar.ts types + // This will help us to validate that if drizzle sql changed to other drizzle sql + // Then we should hint user that database can store this in different format and that probably can be same, but diff will be found anyway + // ex: drizzleSql: 10 + 10 + 10 => db: ((10) + (10)) + (10) + // if (statement.type === 'recreate_default' && statement.from.default && statement.to.default && statement.baseType) { + // hints.push( + // `· You are about to drop and recreate a DEFAULT constraint. + // Your current value: ${statement.to.default} + // Value returned from the database: ${statement.from.default} + + // If both values are the same for you, it's recommended to replace your SQL with the value returned from the database to avoid unnecessary changes`, + // ); + // continue; + // } + } + + return { + losses, + hints, + }; +}; diff --git a/drizzle-kit/src/cli/commands/push-mysql.ts b/drizzle-kit/src/cli/commands/push-mysql.ts new file mode 100644 index 0000000000..f5219094c9 --- /dev/null +++ b/drizzle-kit/src/cli/commands/push-mysql.ts @@ -0,0 +1,243 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { extractMysqlExisting } from 'src/dialects/drizzle'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import type { Column, Table, View } from '../../dialects/mysql/ddl'; +import { interimToDDL } from '../../dialects/mysql/ddl'; +import { ddlDiff } from '../../dialects/mysql/diff'; +import type { JsonStatement } from '../../dialects/mysql/statements'; +import type { DB } from '../../utils'; +import { prepareFilenames } from '../../utils/utils-node'; +import { connectToMySQL } from '../connections'; +import { highlightSQL } from '../highlighter'; +import { resolver } from '../prompts'; +import { Select } from '../selector-ui'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { CasingType } from '../validations/common'; +import type { MysqlCredentials } from '../validations/mysql'; +import { explain, ProgressView } from '../views'; +import { introspect } from './pull-mysql'; + +export const handle = async ( + schemaPath: string | string[], + credentials: MysqlCredentials, + verbose: boolean, + force: boolean, + casing: CasingType | undefined, + filters: EntitiesFilterConfig, + explainFlag: boolean, +) => { + const { prepareFromSchemaFiles, fromDrizzleSchema } = await import('../../dialects/mysql/drizzle'); + + const filenames = prepareFilenames(schemaPath); + console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); + const res = await prepareFromSchemaFiles(filenames); + + const existing = extractMysqlExisting(res.views); + const filter = prepareEntityFilter('mysql', filters, existing); + + const { db, database } = await connectToMySQL(credentials); + const progress = new ProgressView( + 'Pulling schema from database...', + 'Pulling schema from database...', + ); + + const { schema: interimFromDB } = await introspect({ db, database, progress, filter }); + + const interimFromFiles = fromDrizzleSchema(res.tables, res.views, casing); + + const { ddl: ddl1 } = interimToDDL(interimFromDB); + const { ddl: ddl2 } = interimToDDL(interimFromFiles); + // TODO: handle errors + + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + resolver
('table'), + resolver('column'), + resolver('view'), + 'push', + ); + + const filteredStatements = statements; + if (filteredStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + } + + const hints = await suggestions(db, filteredStatements); + const explainMessage = explain('mysql', groupedStatements, explainFlag, hints); + + if (explainMessage) console.log(explainMessage); + if (explainFlag) return; + + if (!force && hints.length > 0) { + const { data } = await render(new Select(['No, abort', 'Yes, I want to execute all statements'])); + + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + const lossStatements = hints.map((x) => x.statement).filter((x) => typeof x !== 'undefined'); + + for (const statement of [...lossStatements, ...sqlStatements]) { + if (verbose) console.log(highlightSQL(statement)); + + await db.query(statement); + } + + if (filteredStatements.length > 0) { + render(`[${chalk.green('✓')}] Changes applied`); + } else { + render(`[${chalk.blue('i')}] No changes detected`); + } +}; + +const identifier = ({ table, column }: { table?: string; column?: string }) => { + return [table, column].filter(Boolean).map((t) => `\`${t}\``).join('.'); +}; +export const suggestions = async (db: DB, jsonStatements: JsonStatement[]) => { + const grouped: { hint: string; statement?: string }[] = []; + + const filtered = jsonStatements.filter((it) => { + if (it.type === 'alter_column' && it.diff.generated) return false; + + return true; + }); + + for (const statement of filtered) { + if (statement.type === 'drop_table') { + const res = await db.query(`select 1 from ${identifier({ table: statement.table })} limit 1`); + + if (res.length > 0) { + grouped.push({ hint: `· You're about to delete non-empty ${chalk.underline(statement.table)} table` }); + } + continue; + } + + if (statement.type === 'drop_column') { + const column = statement.column; + const res = await db.query(`select 1 from ${identifier({ table: column.table })} limit 1`); + if (res.length === 0) continue; + + grouped.push({ + hint: `· You're about to delete non-empty ${chalk.underline(column.name)} column in ${ + chalk.underline(column.table) + } table`, + }); + continue; + } + + // drop pk + if (statement.type === 'drop_pk') { + const table = statement.pk.table; + const id = identifier({ table }); + const res = await db.query( + `select 1 from ${id} limit 1`, + ); + + if (res.length === 0) continue; + + const hint = `· You're about to drop ${ + chalk.underline(table) + } primary key, this statements may fail and your table may loose primary key`; + + grouped.push({ hint }); + continue; + } + + if ( + statement.type === 'add_column' && statement.column.notNull && statement.column.default === null + && !statement.column.generated + ) { + const column = statement.column; + const id = identifier({ table: column.table }); + const res = await db.query(`select 1 from ${id} limit 1`); + + if (res.length === 0) continue; + const hint = `· You're about to add not-null ${ + chalk.underline(statement.column.name) + } column without default value to a non-empty ${chalk.underline(statement.column.table)} table`; + + grouped.push({ hint }); + continue; + } + + if (statement.type === 'alter_column') { + const tableName = identifier({ table: statement.origin.table }); + const columnName = identifier({ column: statement.origin.column }); + + // add not null without default or generated + if ( + statement.diff.notNull && statement.diff.notNull.to && statement.column.default === null + && !statement.column.generated + ) { + const columnRes = await db.query(`select ${columnName} from ${tableName} WHERE ${columnName} IS NULL limit 1`); + + if (columnRes.length > 0) { + const hint = `· You're about to add not-null to a non-empty ${ + chalk.underline(columnName) + } column without default value in ${chalk.underline(statement.column.table)} table`; + + grouped.push({ hint }); + } + } + + // Do not think that dropping default in not empty column could somehow break something + // author: @AlexSherman + + // if ( + // statement.diff.default && statement.diff.default.to === null && statement.column.notNull + // && !statement.column.generated + // ) { + // const column = statement.column; + // const tableName = identifier({ table: column.table }); + // const columnName = identifier({ column: column.name }); + // const res = await db.query(`select ${columnName} from ${tableName} WHERE ${columnName} IS NULL limit 1`); + + // if (res.length > 0) { + // const hint = + // `· You're about to drop default from ${columnName} column with not null in a non-empty ${tableName} table`; + + // grouped.push({ hint }); + // } + // } + + if (statement.diff.type) { + const hint = `· You're about to change ${ + chalk.underline( + columnName, + ) + } column type in ${tableName} from ${ + chalk.underline( + statement.diff.type.from, + ) + } to ${chalk.underline(statement.diff.type.to)}`; + + grouped.push({ hint }); + } + + continue; + } + + if (statement.type === 'create_index') { + if (!statement.index.isUnique) continue; + + const unique = statement.index; + const id = identifier({ table: unique.table }); + + const res = await db.query(`select 1 from ${id} limit 1`); + if (res.length === 0) continue; + + grouped.push({ + hint: `· You're about to add ${chalk.underline(unique.name)} unique index to a non-empty ${ + chalk.underline(unique.table) + } table which may fail`, + }); + continue; + } + } + + return grouped; +}; diff --git a/drizzle-kit/src/cli/commands/push-postgres.ts b/drizzle-kit/src/cli/commands/push-postgres.ts new file mode 100644 index 0000000000..2496960c3c --- /dev/null +++ b/drizzle-kit/src/cli/commands/push-postgres.ts @@ -0,0 +1,284 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { extractPostgresExisting } from 'src/dialects/drizzle'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import type { + CheckConstraint, + Column, + Enum, + ForeignKey, + Index, + Policy, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, +} from '../../dialects/postgres/ddl'; +import { interimToDDL } from '../../dialects/postgres/ddl'; +import { ddlDiff } from '../../dialects/postgres/diff'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from '../../dialects/postgres/drizzle'; +import type { JsonStatement } from '../../dialects/postgres/statements'; +import type { DB } from '../../utils'; +import { prepareFilenames } from '../../utils/utils-node'; +import { highlightSQL } from '../highlighter'; +import { resolver } from '../prompts'; +import { Select } from '../selector-ui'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { CasingType } from '../validations/common'; +import type { PostgresCredentials } from '../validations/postgres'; +import { explain, postgresSchemaError, postgresSchemaWarning, ProgressView } from '../views'; + +export const handle = async ( + schemaPath: string | string[], + verbose: boolean, + credentials: PostgresCredentials, + filters: EntitiesFilterConfig, + force: boolean, + casing: CasingType | undefined, + explainFlag: boolean, +) => { + const { preparePostgresDB } = await import('../connections'); + const { introspect } = await import('./pull-postgres'); + + const db = await preparePostgresDB(credentials); + const filenames = prepareFilenames(schemaPath); + const res = await prepareFromSchemaFiles(filenames); + + const existing = extractPostgresExisting(res.schemas, res.views, res.matViews); + const entityFilter = prepareEntityFilter('postgresql', filters, existing); + + const { schema: schemaTo, errors, warnings } = fromDrizzleSchema(res, casing, entityFilter); + + if (warnings.length > 0) { + console.log(warnings.map((it) => postgresSchemaWarning(it)).join('\n\n')); + } + + if (errors.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + const progress = new ProgressView('Pulling schema from database...', 'Pulling schema from database...'); + + const { schema: schemaFrom } = await introspect(db, entityFilter, progress); + + const { ddl: ddl1, errors: errors1 } = interimToDDL(schemaFrom); + const { ddl: ddl2 } = interimToDDL(schemaTo); + // TODO: handle errors? + + if (errors1.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + // const blanks = new Set(); + const { sqlStatements, statements: jsonStatements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + resolver('schema'), + resolver('enum'), + resolver('sequence'), + resolver('policy'), + resolver('role'), + resolver('privilege'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('unique'), + resolver('index'), + resolver('check'), + resolver('primary key'), + resolver('foreign key'), + 'push', + ); + + if (sqlStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + return; + } + + const hints = await suggestions(db, jsonStatements); + const explainMessage = explain('postgres', groupedStatements, explainFlag, hints); + + if (explainMessage) console.log(explainMessage); + if (explainFlag) return; + + if (!force && hints.length > 0) { + const { data } = await render(new Select(['No, abort', 'Yes, I want to execute all statements'])); + + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + const lossStatements = hints.map((x) => x.statement).filter((x) => typeof x !== 'undefined'); + + for (const statement of [...lossStatements, ...sqlStatements]) { + if (verbose) console.log(highlightSQL(statement)); + + await db.query(statement); + } + + render(`[${chalk.green('✓')}] Changes applied`); +}; + +const identifier = (it: { schema?: string; name: string }) => { + const { schema, name } = it; + const schemakey = schema && schema !== 'public' ? `"${schema}".` : ''; + return `${schemakey}"${name}"`; +}; + +export const suggestions = async (db: DB, jsonStatements: JsonStatement[]) => { + const grouped: { hint: string; statement?: string }[] = []; + + const filtered = jsonStatements.filter((it) => { + // discussion - + if (it.type === 'recreate_view') return false; + + /* + drizzle-kit push does not handle alternations of postgres views definitions + just like with check constraints we can only reliably handle this with introduction of shadow db + + for now we encourage developers to `remove view from drizzle schema -> push -> add view to drizzle schema -> push` + */ + if (it.type === 'alter_column' && it.diff.generated) return false; + + /* + [Update] it does now, we have origin of creation + + drizzle-kit push does not handle alternation of check constraints + that's a limitation due to a nature of in-database way of persisting check constraints values + + in order to properly support one - we'd need to either fully implement in-database DDL, + or implement proper commutativity checks or use shadow DB for push command(the most reasonable way) + */ + // if (it.type === 'alter_column') return false; + + return true; + }); + + for (const statement of filtered) { + if (statement.type === 'drop_table') { + const res = await db.query(`select 1 from ${statement.key} limit 1`); + + if (res.length > 0) { + grouped.push({ hint: `· You're about to delete non-empty ${statement.key} table` }); + } + continue; + } + + if (statement.type === 'drop_view' && statement.view.materialized) { + const id = identifier(statement.view); + const res = await db.query(`select 1 from ${id} limit 1`); + if (res.length === 0) continue; + + grouped.push({ hint: `· You're about to delete non-empty ${id} materialized view` }); + continue; + } + + if (statement.type === 'drop_column') { + const column = statement.column; + const id = identifier({ schema: column.schema, name: column.table }); + const res = await db.query(`select 1 from ${id} limit 1`); + if (res.length === 0) continue; + + grouped.push({ hint: `· You're about to delete non-empty ${column.name} column in ${id} table` }); + continue; + } + + if (statement.type === 'drop_schema') { + // count tables in schema + const res = await db.query( + `select count(*) as count from information_schema.tables where table_schema = '${statement.name}';`, + ); + const count = Number(res[0].count); + if (count === 0) continue; + + grouped.push({ hint: `· You're about to delete ${chalk.underline(statement.name)} schema with ${count} tables` }); + continue; + } + + // drop pk + if (statement.type === 'drop_pk') { + const schema = statement.pk.schema ?? 'public'; + const table = statement.pk.table; + const id = `"${schema}"."${table}"`; + const res = await db.query( + `select 1 from ${id} limit 1`, + ); + + if (res.length === 0) continue; + + const hint = `· You're about to drop ${ + chalk.underline(id) + } primary key, this statements may fail and your table may loose primary key`; + + if (statement.pk.nameExplicit) { + grouped.push({ hint }); + continue; + } + + const [{ name: pkName }] = await db.query<{ name: string }>(` + SELECT constraint_name as name + FROM information_schema.table_constraints + WHERE + table_schema = '${schema}' + AND table_name = '${table}' + AND constraint_type = 'PRIMARY KEY';`); + + grouped.push({ hint, statement: `ALTER TABLE ${id} DROP CONSTRAINT "${pkName}"` }); + continue; + } + + // todo: alter column to not null no default + if ( + statement.type === 'add_column' && statement.column.notNull && statement.column.default === null + && !statement.column.generated && !statement.column.identity + ) { + const column = statement.column; + const id = identifier({ schema: column.schema, name: column.table }); + const res = await db.query(`select 1 from ${id} limit 1`); + + if (res.length === 0) continue; + const hint = `· You're about to add not-null ${ + chalk.underline(statement.column.name) + } column without default value to a non-empty ${id} table`; + + grouped.push({ hint }); + // statementsToExecute.push(`truncate table ${id} cascade;`); + continue; + } + + if (statement.type === 'add_unique') { + const unique = statement.unique; + const id = identifier({ schema: unique.schema, name: unique.table }); + + const res = await db.query(`select 1 from ${id} limit 1`); + if (res.length === 0) continue; + + grouped.push({ + hint: `· You're about to add ${ + chalk.underline(unique.name) + } unique constraint to a non-empty ${id} table which may fail`, + }); + // const { status, data } = await render( + // new Select(['No, add the constraint without truncating the table', `Yes, truncate the table`]), + // ); + // if (data?.index === 1) { + // statementsToExecute.push( + // `truncate table ${ + // tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) + // } cascade;`, + // ); + // } + continue; + } + } + + return grouped; +}; diff --git a/drizzle-kit/src/cli/commands/push-singlestore.ts b/drizzle-kit/src/cli/commands/push-singlestore.ts new file mode 100644 index 0000000000..99b5623014 --- /dev/null +++ b/drizzle-kit/src/cli/commands/push-singlestore.ts @@ -0,0 +1,318 @@ +import chalk from 'chalk'; +import { render, renderWithTask } from 'hanji'; +import type { Column, Table, View } from 'src/dialects/mysql/ddl'; +import { interimToDDL } from 'src/dialects/mysql/ddl'; +import type { JsonStatement } from 'src/dialects/mysql/statements'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { prepareFilenames } from 'src/utils/utils-node'; +import { ddlDiff } from '../../dialects/singlestore/diff'; +import type { DB } from '../../utils'; +import { resolver } from '../prompts'; +import { Select } from '../selector-ui'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { CasingType } from '../validations/common'; +import type { MysqlCredentials } from '../validations/mysql'; +import { withStyle } from '../validations/outputs'; +import { ProgressView } from '../views'; + +export const handle = async ( + schemaPath: string | string[], + credentials: MysqlCredentials, + filters: EntitiesFilterConfig, + verbose: boolean, + force: boolean, + casing: CasingType | undefined, +) => { + const { connectToSingleStore } = await import('../connections'); + const { fromDatabaseForDrizzle } = await import('../../dialects/mysql/introspect'); + + /* + schemas in singlestore are ignored just like in mysql + there're now views in singlestore either, so no entities with .existing() for now + */ + const filter = prepareEntityFilter('singlestore', filters, []); + + const { db, database } = await connectToSingleStore(credentials); + const progress = new ProgressView( + 'Pulling schema from database...', + 'Pulling schema from database...', + ); + const interimFromDB = await renderWithTask( + progress, + fromDatabaseForDrizzle(db, database, filter), + ); + + const filenames = prepareFilenames(schemaPath); + + console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); + + const { prepareFromSchemaFiles, fromDrizzleSchema } = await import('../../dialects/singlestore/drizzle'); + + const res = await prepareFromSchemaFiles(filenames); + const interimFromFiles = fromDrizzleSchema(res.tables, casing); + + const { ddl: ddl1 } = interimToDDL(interimFromDB); + const { ddl: ddl2 } = interimToDDL(interimFromFiles); + // TODO: handle errors + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + resolver
('table'), + resolver('column'), + resolver('view'), + 'push', + ); + + const filteredStatements = statements; + if (filteredStatements.length === 0) { + render(`[${chalk.blue('i')}] No changes detected`); + } else { + const { hints, truncates } = await suggestions(db, filteredStatements); + + const combinedStatements = [...truncates, ...sqlStatements]; + if (verbose) { + console.log(); + console.log( + withStyle.warning('You are about to execute current statements:'), + ); + console.log(); + console.log(combinedStatements.map((s) => chalk.blue(s)).join('\n')); + console.log(); + } + + if (!force && hints.length > 0) { + const { data } = await render( + new Select(['No, abort', `Yes, I want to execute all statements`]), + ); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + if (!force && hints.length > 0) { + console.log(withStyle.warning('Found data-loss statements:')); + console.log(truncates.join('\n')); + console.log(); + console.log( + chalk.red.bold( + 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', + ), + ); + + console.log(chalk.white('Do you still want to push changes?')); + + const { data } = await render(new Select(['No, abort', `Yes, execute`])); + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + for (const st of combinedStatements) { + await db.query(st); + } + + if (filteredStatements.length > 0) { + render(`[${chalk.green('✓')}] Changes applied`); + } else { + render(`[${chalk.blue('i')}] No changes detected`); + } + } +}; + +// TODO: check +// export const filterStatements = ( +// statements: JsonStatement[], +// currentSchema: TypeOf, +// prevSchema: TypeOf, +// ) => { +// return statements.filter((statement) => { +// if (statement.type === 'alter_table_alter_column_set_type') { +// // Don't need to handle it on migrations step and introspection +// // but for both it should be skipped +// if ( +// statement.oldDataType.startsWith('tinyint') +// && statement.newDataType.startsWith('boolean') +// ) { +// return false; +// } + +// if ( +// statement.oldDataType.startsWith('bigint unsigned') +// && statement.newDataType.startsWith('serial') +// ) { +// return false; +// } + +// if ( +// statement.oldDataType.startsWith('serial') +// && statement.newDataType.startsWith('bigint unsigned') +// ) { +// return false; +// } +// } else if (statement.type === 'alter_table_alter_column_set_default') { +// if ( +// statement.newDefaultValue === false +// && statement.oldDefaultValue === 0 +// && statement.newDataType === 'boolean' +// ) { +// return false; +// } +// if ( +// statement.newDefaultValue === true +// && statement.oldDefaultValue === 1 +// && statement.newDataType === 'boolean' +// ) { +// return false; +// } +// } else if (statement.type === 'delete_unique_constraint') { +// const unsquashed = MySqlSquasher.unsquashUnique(statement.data); +// // only if constraint was removed from a serial column, than treat it as removed +// // const serialStatement = statements.find( +// // (it) => it.type === "alter_table_alter_column_set_type" +// // ) as JsonAlterColumnTypeStatement; +// // if ( +// // serialStatement?.oldDataType.startsWith("bigint unsigned") && +// // serialStatement?.newDataType.startsWith("serial") && +// // serialStatement.columnName === +// // MySqlSquasher.unsquashUnique(statement.data).columns[0] +// // ) { +// // return false; +// // } +// // Check if uniqueindex was only on this column, that is serial + +// // if now serial and was not serial and was unique index +// if ( +// unsquashed.columns.length === 1 +// && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] +// .type === 'serial' +// && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]] +// .type === 'serial' +// && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] +// .name === unsquashed.columns[0] +// ) { +// return false; +// } +// } else if (statement.type === 'alter_table_alter_column_drop_notnull') { +// // only if constraint was removed from a serial column, than treat it as removed +// const serialStatement = statements.find( +// (it) => it.type === 'alter_table_alter_column_set_type', +// ) as JsonAlterColumnTypeStatement; +// if ( +// serialStatement?.oldDataType.startsWith('bigint unsigned') +// && serialStatement?.newDataType.startsWith('serial') +// && serialStatement.columnName === statement.columnName +// && serialStatement.tableName === statement.tableName +// ) { +// return false; +// } +// if (statement.newDataType === 'serial' && !statement.columnNotNull) { +// return false; +// } +// if (statement.columnAutoIncrement) { +// return false; +// } +// } + +// return true; +// }); +// }; + +export const suggestions = async (_db: DB, _statements: JsonStatement[]) => { + const hints: string[] = []; + const truncates: string[] = []; + + return { hints, truncates }; + + // TODO: update and implement + // for (const statement of statements) { + // if (statement.type === 'drop_table') { + // const res = await db.query(`select 1 from \`${statement.table}\` limit 1`); + // if (res.length > 0) { + // hints.push(`· You're about to delete non-empty ${chalk.underline(statement.table)} table`); + // } + // } else if (statement.type === 'drop_column') { + // const res = await db.query( + // `select 1 from \`${statement.column.table}\` limit 1`, + // ); + // if (res.length > 0) { + // hints.push( + // `· You're about to delete ${ + // chalk.underline( + // statement.column.name, + // ) + // } column in a non-empty ${statement.column.table} table with`, + // ); + // } + // } else if (statement.type === 'alter_column') { + // // alter column set type + // // alter column set not null + // `· You're about to set not-null constraint to ${ + // chalk.underline(statement.columnName) + // } column without default, which contains ${count} items`; + // `· You're about to remove default value from ${ + // chalk.underline(statement.columnName) + // } not-null column with ${count} items`; + + // // if drop pk and json2 has autoincrement in table -> exit process with error + // `${ + // withStyle.errorWarning( + // `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, + // ) + // }`; + // `· You're about to change ${ + // chalk.underline(statement.tableName) + // } primary key. This statements may fail and you table may left without primary key`; + + // // if drop pk and json2 has autoincrement in table -> exit process with error + // `· You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`; + // `· You're about to add not-null ${ + // chalk.underline(statement.column.name) + // } column without default value, which contains ${count} items`; + + // const res = await db.query( + // `select count(*) as count from \`${statement.tableName}\``, + // ); + // const count = Number(res[0].count); + // if (count > 0) { + // `· You're about to change ${ + // chalk.underline( + // statement.columnName, + // ) + // } column type from ${ + // chalk.underline( + // statement.oldDataType, + // ) + // } to ${chalk.underline(statement.newDataType)} with ${count} items`; + // } + // } else if (statement.type === 'create_index' && statement.index.unique) { + // const res = await db.query( + // `select 1 from \`${statement.index.table}\` limit 1`, + // ); + // const count = Number(res[0].count); + // if (count > 0) { + // console.log( + // `· You're about to add ${ + // chalk.underline( + // statement.index.name, + // ) + // } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${ + // chalk.underline( + // statement.index.table, + // ) + // } table?\n`, + // ); + // const { status, data } = await render( + // new Select([ + // 'No, add the constraint without truncating the table', + // `Yes, truncate the table`, + // ]), + // ); + // } + // } + // } + + // return { hints, truncates }; +}; diff --git a/drizzle-kit/src/cli/commands/push-sqlite.ts b/drizzle-kit/src/cli/commands/push-sqlite.ts new file mode 100644 index 0000000000..af48804d29 --- /dev/null +++ b/drizzle-kit/src/cli/commands/push-sqlite.ts @@ -0,0 +1,167 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import { extractSqliteExisting } from 'src/dialects/drizzle'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import type { Column, Table } from 'src/dialects/sqlite/ddl'; +import { interimToDDL } from 'src/dialects/sqlite/ddl'; +import { ddlDiff } from 'src/dialects/sqlite/diff'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/sqlite/drizzle'; +import type { JsonStatement } from 'src/dialects/sqlite/statements'; +import type { SQLiteDB } from '../../utils'; +import { prepareFilenames } from '../../utils/utils-node'; +import { highlightSQL } from '../highlighter'; +import { resolver } from '../prompts'; +import { Select } from '../selector-ui'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import type { CasingType } from '../validations/common'; +import type { SqliteCredentials } from '../validations/sqlite'; +import { explain, ProgressView } from '../views'; + +export const handle = async ( + schemaPath: string | string[], + verbose: boolean, + credentials: SqliteCredentials, + filters: EntitiesFilterConfig, + force: boolean, + casing: CasingType | undefined, + explainFlag: boolean, + sqliteDB?: SQLiteDB, +) => { + const { connectToSQLite } = await import('../connections'); + const { introspect: sqliteIntrospect } = await import('./pull-sqlite'); + + const db = sqliteDB ?? await connectToSQLite(credentials); + const files = prepareFilenames(schemaPath); + const res = await prepareFromSchemaFiles(files); + + const existing = extractSqliteExisting(res.views); + const filter = prepareEntityFilter('sqlite', filters, existing); + + const { ddl: ddl2 } = interimToDDL(fromDrizzleSchema(res.tables, res.views, casing)); + const progress = new ProgressView( + 'Pulling schema from database...', + 'Pulling schema from database...', + ); + + const { ddl: ddl1 } = await sqliteIntrospect(db, filter, progress); + + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + resolver
('table'), + resolver('column'), + 'push', + ); + + if (sqlStatements.length === 0) { + render(`\n[${chalk.blue('i')}] No changes detected`); + return; + } + + const hints = await suggestions(db, statements); + + const explainMessage = explain('sqlite', groupedStatements, explainFlag, hints); + + if (explainMessage) console.log(explainMessage); + if (explainFlag) return; + + if (!force && hints.length > 0) { + const { data } = await render(new Select(['No, abort', 'Yes, I want to execute all statements'])); + + if (data?.index === 0) { + render(`[${chalk.red('x')}] All changes were aborted`); + process.exit(0); + } + } + + const lossStatements = hints.map((x) => x.statement).filter((x) => typeof x !== 'undefined'); + + if (sqlStatements.length === 0) { + render(`\n[${chalk.blue('i')}] No changes detected`); + } else { + if (!('driver' in credentials)) { + // D1-HTTP does not support transactions + // there might a be a better way to fix this + // in the db connection itself + const isD1 = 'driver' in credentials && credentials.driver === 'd1-http'; + if (!isD1) await db.run('begin'); + try { + for (const statement of [...lossStatements, ...sqlStatements]) { + if (verbose) console.log(highlightSQL(statement)); + + await db.run(statement); + } + if (!isD1) await db.run('commit'); + } catch (e) { + console.error(e); + + if (!isD1) await db.run('rollback'); + process.exit(1); + } + } + render(`[${chalk.green('✓')}] Changes applied`); + } +}; + +export const suggestions = async ( + connection: SQLiteDB, + jsonStatements: JsonStatement[], +) => { + const grouped: { hint: string; statement?: string }[] = []; + + // TODO: generate truncations/recreates ?? + for (const statement of jsonStatements) { + if (statement.type === 'drop_table') { + const name = statement.tableName; + const res = await connection.query(`select 1 from "${name}" limit 1;`); + + if (res.length > 0) grouped.push({ hint: `· You're about to delete non-empty '${name}' table` }); + continue; + } + + if (statement.type === 'drop_column') { + const { table, name } = statement.column; + + const res = await connection.query(`select 1 from "${table}" limit 1;`); + if (res.length > 0) { + grouped.push({ hint: `· You're about to delete '${name}' column in a non-empty '${table}' table` }); + } + continue; + } + + if (statement.type === 'add_column' && (statement.column.notNull && !statement.column.default)) { + const { table, name } = statement.column; + const res = await connection.query(`select 1 from "${table}" limit 1`); + if (res.length > 0) { + grouped.push( + { + hint: `· You're about to add not-null '${name}' column without default value to non-empty '${table}' table`, + statement: `DELETE FROM "${table}" where true;`, + }, + ); + } + + continue; + } + + if (statement.type === 'recreate_table') { + const droppedColumns = statement.from.columns.filter((col) => + !statement.to.columns.some((c) => c.name === col.name) + ); + if (droppedColumns.length === 0) continue; + + const res = await connection.query(`select 1 from "${statement.from.name}" limit 1`); + if (res.length > 0) { + grouped.push( + { + hint: `· You're about to drop ${ + droppedColumns.map((col) => `'${col.name}'`).join(', ') + } column(s) in a non-empty '${statement.from.name}' table`, + }, + ); + } + } + } + + return grouped; +}; diff --git a/drizzle-kit/src/cli/commands/push.ts b/drizzle-kit/src/cli/commands/push.ts deleted file mode 100644 index 31dd78722e..0000000000 --- a/drizzle-kit/src/cli/commands/push.ts +++ /dev/null @@ -1,639 +0,0 @@ -import chalk from 'chalk'; -import { randomUUID } from 'crypto'; -import { render } from 'hanji'; -import { serializePg } from 'src/serializer'; -import { fromJson } from '../../sqlgenerator'; -import { Select } from '../selector-ui'; -import { Entities } from '../validations/cli'; -import { CasingType } from '../validations/common'; -import { LibSQLCredentials } from '../validations/libsql'; -import type { MysqlCredentials } from '../validations/mysql'; -import { withStyle } from '../validations/outputs'; -import type { PostgresCredentials } from '../validations/postgres'; -import { SingleStoreCredentials } from '../validations/singlestore'; -import type { SqliteCredentials } from '../validations/sqlite'; -import { libSqlLogSuggestionsAndReturn } from './libSqlPushUtils'; -import { - filterStatements as mySqlFilterStatements, - logSuggestionsAndReturn as mySqlLogSuggestionsAndReturn, -} from './mysqlPushUtils'; -import { pgSuggestions } from './pgPushUtils'; -import { - filterStatements as singleStoreFilterStatements, - logSuggestionsAndReturn as singleStoreLogSuggestionsAndReturn, -} from './singlestorePushUtils'; -import { logSuggestionsAndReturn as sqliteSuggestions } from './sqlitePushUtils'; - -export const mysqlPush = async ( - schemaPath: string | string[], - credentials: MysqlCredentials, - tablesFilter: string[], - strict: boolean, - verbose: boolean, - force: boolean, - casing: CasingType | undefined, -) => { - const { connectToMySQL } = await import('../connections'); - const { mysqlPushIntrospect } = await import('./mysqlIntrospect'); - - const { db, database } = await connectToMySQL(credentials); - - const { schema } = await mysqlPushIntrospect(db, database, tablesFilter); - const { prepareMySQLPush } = await import('./migrate'); - - const statements = await prepareMySQLPush(schemaPath, schema, casing); - - const filteredStatements = mySqlFilterStatements( - statements.statements ?? [], - statements.validatedCur, - statements.validatedPrev, - ); - - try { - if (filteredStatements.length === 0) { - render(`[${chalk.blue('i')}] No changes detected`); - } else { - const { - shouldAskForApprove, - statementsToExecute, - columnsToRemove, - tablesToRemove, - tablesToTruncate, - infoToPrint, - } = await mySqlLogSuggestionsAndReturn( - db, - filteredStatements, - statements.validatedCur, - ); - - const filteredSqlStatements = fromJson(filteredStatements, 'mysql'); - - const uniqueSqlStatementsToExecute: string[] = []; - statementsToExecute.forEach((ss) => { - if (!uniqueSqlStatementsToExecute.includes(ss)) { - uniqueSqlStatementsToExecute.push(ss); - } - }); - const uniqueFilteredSqlStatements: string[] = []; - filteredSqlStatements.forEach((ss) => { - if (!uniqueFilteredSqlStatements.includes(ss)) { - uniqueFilteredSqlStatements.push(ss); - } - }); - - if (verbose) { - console.log(); - console.log( - withStyle.warning('You are about to execute current statements:'), - ); - console.log(); - console.log( - [...uniqueSqlStatementsToExecute, ...uniqueFilteredSqlStatements] - .map((s) => chalk.blue(s)) - .join('\n'), - ); - console.log(); - } - - if (!force && strict) { - if (!shouldAskForApprove) { - const { status, data } = await render( - new Select(['No, abort', `Yes, I want to execute all statements`]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - } - - if (!force && shouldAskForApprove) { - console.log(withStyle.warning('Found data-loss statements:')); - console.log(infoToPrint.join('\n')); - console.log(); - console.log( - chalk.red.bold( - 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', - ), - ); - - console.log(chalk.white('Do you still want to push changes?')); - - const { status, data } = await render( - new Select([ - 'No, abort', - `Yes, I want to${ - tablesToRemove.length > 0 - ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` - : ' ' - }${ - columnsToRemove.length > 0 - ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` - : ' ' - }${ - tablesToTruncate.length > 0 - ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` - : '' - }` - .replace(/(^,)|(,$)/g, '') - .replace(/ +(?= )/g, ''), - ]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - - for (const dStmnt of uniqueSqlStatementsToExecute) { - await db.query(dStmnt); - } - - for (const statement of uniqueFilteredSqlStatements) { - await db.query(statement); - } - if (filteredStatements.length > 0) { - render(`[${chalk.green('✓')}] Changes applied`); - } else { - render(`[${chalk.blue('i')}] No changes detected`); - } - } - } catch (e) { - console.log(e); - } -}; - -export const singlestorePush = async ( - schemaPath: string | string[], - credentials: SingleStoreCredentials, - tablesFilter: string[], - strict: boolean, - verbose: boolean, - force: boolean, - casing: CasingType | undefined, -) => { - const { connectToSingleStore } = await import('../connections'); - const { singlestorePushIntrospect } = await import('./singlestoreIntrospect'); - - const { db, database } = await connectToSingleStore(credentials); - - const { schema } = await singlestorePushIntrospect( - db, - database, - tablesFilter, - ); - const { prepareSingleStorePush } = await import('./migrate'); - - const statements = await prepareSingleStorePush(schemaPath, schema, casing); - - const filteredStatements = singleStoreFilterStatements( - statements.statements ?? [], - statements.validatedCur, - statements.validatedPrev, - ); - - try { - if (filteredStatements.length === 0) { - render(`[${chalk.blue('i')}] No changes detected`); - } else { - const { - shouldAskForApprove, - statementsToExecute, - columnsToRemove, - tablesToRemove, - tablesToTruncate, - infoToPrint, - schemasToRemove, - } = await singleStoreLogSuggestionsAndReturn( - db, - filteredStatements, - statements.validatedCur, - statements.validatedPrev, - ); - - if (verbose) { - console.log(); - console.log( - withStyle.warning('You are about to execute current statements:'), - ); - console.log(); - console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); - console.log(); - } - - if (!force && strict) { - if (!shouldAskForApprove) { - const { status, data } = await render( - new Select(['No, abort', `Yes, I want to execute all statements`]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - } - - if (!force && shouldAskForApprove) { - console.log(withStyle.warning('Found data-loss statements:')); - console.log(infoToPrint.join('\n')); - console.log(); - console.log( - chalk.red.bold( - 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', - ), - ); - - console.log(chalk.white('Do you still want to push changes?')); - - const { status, data } = await render( - new Select([ - 'No, abort', - `Yes, I want to${ - tablesToRemove.length > 0 - ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` - : ' ' - }${ - columnsToRemove.length > 0 - ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` - : ' ' - }${ - tablesToTruncate.length > 0 - ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` - : '' - }` - .replace(/(^,)|(,$)/g, '') - .replace(/ +(?= )/g, ''), - ]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - - for (const dStmnt of statementsToExecute) { - await db.query(dStmnt); - } - - if (filteredStatements.length > 0) { - render(`[${chalk.green('✓')}] Changes applied`); - } else { - render(`[${chalk.blue('i')}] No changes detected`); - } - } - } catch (e) { - console.log(e); - } -}; - -export const pgPush = async ( - schemaPath: string | string[], - verbose: boolean, - strict: boolean, - credentials: PostgresCredentials, - tablesFilter: string[], - schemasFilter: string[], - entities: Entities, - force: boolean, - casing: CasingType | undefined, -) => { - const { preparePostgresDB } = await import('../connections'); - const { pgPushIntrospect } = await import('./pgIntrospect'); - - const db = await preparePostgresDB(credentials); - const serialized = await serializePg(schemaPath, casing, schemasFilter); - - const { schema } = await pgPushIntrospect(db, tablesFilter, schemasFilter, entities, serialized); - - const { preparePgPush } = await import('./migrate'); - - const statements = await preparePgPush( - { id: randomUUID(), prevId: schema.id, ...serialized }, - schema, - ); - - try { - if (statements.sqlStatements.length === 0) { - render(`[${chalk.blue('i')}] No changes detected`); - } else { - // const filteredStatements = filterStatements(statements.statements); - const { - shouldAskForApprove, - statementsToExecute, - columnsToRemove, - tablesToRemove, - matViewsToRemove, - tablesToTruncate, - infoToPrint, - schemasToRemove, - } = await pgSuggestions(db, statements.statements); - - if (verbose) { - console.log(); - // console.log(chalk.gray('Verbose logs:')); - console.log( - withStyle.warning('You are about to execute current statements:'), - ); - console.log(); - console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); - console.log(); - } - - if (!force && strict) { - if (!shouldAskForApprove) { - const { status, data } = await render( - new Select(['No, abort', `Yes, I want to execute all statements`]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - } - - if (!force && shouldAskForApprove) { - console.log(withStyle.warning('Found data-loss statements:')); - console.log(infoToPrint.join('\n')); - console.log(); - console.log( - chalk.red.bold( - 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', - ), - ); - - console.log(chalk.white('Do you still want to push changes?')); - - const { status, data } = await render( - new Select([ - 'No, abort', - `Yes, I want to${ - tablesToRemove.length > 0 - ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` - : ' ' - }${ - columnsToRemove.length > 0 - ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` - : ' ' - }${ - tablesToTruncate.length > 0 - ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` - : '' - }${ - matViewsToRemove.length > 0 - ? ` remove ${matViewsToRemove.length} ${ - matViewsToRemove.length > 1 ? 'materialized views' : 'materialize view' - },` - : ' ' - }` - .replace(/(^,)|(,$)/g, '') - .replace(/ +(?= )/g, ''), - ]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - - for (const dStmnt of statementsToExecute) { - await db.query(dStmnt); - } - - if (statements.statements.length > 0) { - render(`[${chalk.green('✓')}] Changes applied`); - } else { - render(`[${chalk.blue('i')}] No changes detected`); - } - } - } catch (e) { - console.error(e); - } -}; - -export const sqlitePush = async ( - schemaPath: string | string[], - verbose: boolean, - strict: boolean, - credentials: SqliteCredentials, - tablesFilter: string[], - force: boolean, - casing: CasingType | undefined, -) => { - const { connectToSQLite } = await import('../connections'); - const { sqlitePushIntrospect } = await import('./sqliteIntrospect'); - - const db = await connectToSQLite(credentials); - const { schema } = await sqlitePushIntrospect(db, tablesFilter); - const { prepareSQLitePush } = await import('./migrate'); - - const statements = await prepareSQLitePush(schemaPath, schema, casing); - - if (statements.sqlStatements.length === 0) { - render(`\n[${chalk.blue('i')}] No changes detected`); - } else { - const { - shouldAskForApprove, - statementsToExecute, - columnsToRemove, - tablesToRemove, - tablesToTruncate, - infoToPrint, - schemasToRemove, - } = await sqliteSuggestions( - db, - statements.statements, - statements.squashedPrev, - statements.squashedCur, - statements.meta!, - ); - - if (verbose && statementsToExecute.length > 0) { - console.log(); - console.log( - withStyle.warning('You are about to execute current statements:'), - ); - console.log(); - console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); - console.log(); - } - - if (!force && strict) { - if (!shouldAskForApprove) { - const { status, data } = await render( - new Select(['No, abort', `Yes, I want to execute all statements`]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - } - - if (!force && shouldAskForApprove) { - console.log(withStyle.warning('Found data-loss statements:')); - console.log(infoToPrint.join('\n')); - console.log(); - console.log( - chalk.red.bold( - 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', - ), - ); - - console.log(chalk.white('Do you still want to push changes?')); - - const { status, data } = await render( - new Select([ - 'No, abort', - `Yes, I want to${ - tablesToRemove.length > 0 - ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` - : ' ' - }${ - columnsToRemove.length > 0 - ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` - : ' ' - }${ - tablesToTruncate.length > 0 - ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` - : '' - }` - .trimEnd() - .replace(/(^,)|(,$)/g, '') - .replace(/ +(?= )/g, ''), - ]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - - if (statementsToExecute.length === 0) { - render(`\n[${chalk.blue('i')}] No changes detected`); - } else { - // D1-HTTP does not support transactions - // there might a be a better way to fix this - // in the db connection itself - const isNotD1 = !('driver' in credentials && credentials.driver === 'd1-http'); - isNotD1 ?? await db.run('begin'); - try { - for (const dStmnt of statementsToExecute) { - await db.run(dStmnt); - } - isNotD1 ?? await db.run('commit'); - } catch (e) { - console.error(e); - isNotD1 ?? await db.run('rollback'); - process.exit(1); - } - render(`[${chalk.green('✓')}] Changes applied`); - } - } -}; - -export const libSQLPush = async ( - schemaPath: string | string[], - verbose: boolean, - strict: boolean, - credentials: LibSQLCredentials, - tablesFilter: string[], - force: boolean, - casing: CasingType | undefined, -) => { - const { connectToLibSQL } = await import('../connections'); - const { sqlitePushIntrospect } = await import('./sqliteIntrospect'); - - const db = await connectToLibSQL(credentials); - const { schema } = await sqlitePushIntrospect(db, tablesFilter); - - const { prepareLibSQLPush } = await import('./migrate'); - - const statements = await prepareLibSQLPush(schemaPath, schema, casing); - - if (statements.sqlStatements.length === 0) { - render(`\n[${chalk.blue('i')}] No changes detected`); - } else { - const { - shouldAskForApprove, - statementsToExecute, - columnsToRemove, - tablesToRemove, - tablesToTruncate, - infoToPrint, - } = await libSqlLogSuggestionsAndReturn( - db, - statements.statements, - statements.squashedPrev, - statements.squashedCur, - statements.meta!, - ); - - if (verbose && statementsToExecute.length > 0) { - console.log(); - console.log( - withStyle.warning('You are about to execute current statements:'), - ); - console.log(); - console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); - console.log(); - } - - if (!force && strict) { - if (!shouldAskForApprove) { - const { status, data } = await render( - new Select(['No, abort', `Yes, I want to execute all statements`]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - } - - if (!force && shouldAskForApprove) { - console.log(withStyle.warning('Found data-loss statements:')); - console.log(infoToPrint.join('\n')); - console.log(); - console.log( - chalk.red.bold( - 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', - ), - ); - - console.log(chalk.white('Do you still want to push changes?')); - - const { status, data } = await render( - new Select([ - 'No, abort', - `Yes, I want to${ - tablesToRemove.length > 0 - ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` - : ' ' - }${ - columnsToRemove.length > 0 - ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` - : ' ' - }${ - tablesToTruncate.length > 0 - ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` - : '' - }` - .trimEnd() - .replace(/(^,)|(,$)/g, '') - .replace(/ +(?= )/g, ''), - ]), - ); - if (data?.index === 0) { - render(`[${chalk.red('x')}] All changes were aborted`); - process.exit(0); - } - } - - if (statementsToExecute.length === 0) { - render(`\n[${chalk.blue('i')}] No changes detected`); - } else { - await db.batchWithPragma!(statementsToExecute); - render(`[${chalk.green('✓')}] Changes applied`); - } - } -}; diff --git a/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts b/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts deleted file mode 100644 index 27d8c59c50..0000000000 --- a/drizzle-kit/src/cli/commands/singlestoreIntrospect.ts +++ /dev/null @@ -1,53 +0,0 @@ -import { renderWithTask } from 'hanji'; -import { Minimatch } from 'minimatch'; -import { originUUID } from '../../global'; -import type { SingleStoreSchema } from '../../serializer/singlestoreSchema'; -import { fromDatabase } from '../../serializer/singlestoreSerializer'; -import type { DB } from '../../utils'; -import { ProgressView } from '../views'; - -export const singlestorePushIntrospect = async ( - db: DB, - databaseName: string, - filters: string[], -) => { - const matchers = filters.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new ProgressView( - 'Pulling schema from database...', - 'Pulling schema from database...', - ); - const res = await renderWithTask( - progress, - fromDatabase(db, databaseName, filter), - ); - - const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema; - const { internal, ...schemaWithoutInternals } = schema; - return { schema: schemaWithoutInternals }; -}; diff --git a/drizzle-kit/src/cli/commands/singlestorePushUtils.ts b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts deleted file mode 100644 index 5a550a2397..0000000000 --- a/drizzle-kit/src/cli/commands/singlestorePushUtils.ts +++ /dev/null @@ -1,456 +0,0 @@ -import chalk from 'chalk'; -import { render } from 'hanji'; -import { fromJson } from 'src/sqlgenerator'; -import { TypeOf } from 'zod'; -import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements'; -import { Column, SingleStoreSchemaSquashed, SingleStoreSquasher } from '../../serializer/singlestoreSchema'; -import { singlestoreSchema } from '../../serializer/singlestoreSchema'; -import { type DB, findAddedAndRemoved } from '../../utils'; -import { Select } from '../selector-ui'; -import { withStyle } from '../validations/outputs'; - -export const filterStatements = ( - statements: JsonStatement[], - currentSchema: TypeOf, - prevSchema: TypeOf, -) => { - return statements.filter((statement) => { - if (statement.type === 'alter_table_alter_column_set_type') { - // Don't need to handle it on migrations step and introspection - // but for both it should be skipped - if ( - statement.oldDataType.startsWith('tinyint') - && statement.newDataType.startsWith('boolean') - ) { - return false; - } - - if ( - statement.oldDataType.startsWith('bigint unsigned') - && statement.newDataType.startsWith('serial') - ) { - return false; - } - - if ( - statement.oldDataType.startsWith('serial') - && statement.newDataType.startsWith('bigint unsigned') - ) { - return false; - } - } else if (statement.type === 'alter_table_alter_column_set_default') { - if ( - statement.newDefaultValue === false - && statement.oldDefaultValue === 0 - && statement.newDataType === 'boolean' - ) { - return false; - } - if ( - statement.newDefaultValue === true - && statement.oldDefaultValue === 1 - && statement.newDataType === 'boolean' - ) { - return false; - } - } else if (statement.type === 'delete_unique_constraint') { - const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); - // only if constraint was removed from a serial column, than treat it as removed - // const serialStatement = statements.find( - // (it) => it.type === "alter_table_alter_column_set_type" - // ) as JsonAlterColumnTypeStatement; - // if ( - // serialStatement?.oldDataType.startsWith("bigint unsigned") && - // serialStatement?.newDataType.startsWith("serial") && - // serialStatement.columnName === - // SingleStoreSquasher.unsquashUnique(statement.data).columns[0] - // ) { - // return false; - // } - // Check if uniqueindex was only on this column, that is serial - - // if now serial and was not serial and was unique index - if ( - unsquashed.columns.length === 1 - && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] - .type === 'serial' - && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]] - .type === 'serial' - && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] - .name === unsquashed.columns[0] - ) { - return false; - } - } else if (statement.type === 'alter_table_alter_column_drop_notnull') { - // only if constraint was removed from a serial column, than treat it as removed - const serialStatement = statements.find( - (it) => it.type === 'alter_table_alter_column_set_type', - ) as JsonAlterColumnTypeStatement; - if ( - serialStatement?.oldDataType.startsWith('bigint unsigned') - && serialStatement?.newDataType.startsWith('serial') - && serialStatement.columnName === statement.columnName - && serialStatement.tableName === statement.tableName - ) { - return false; - } - if (statement.newDataType === 'serial' && !statement.columnNotNull) { - return false; - } - if (statement.columnAutoIncrement) { - return false; - } - } - - return true; - }); -}; - -export function findColumnTypeAlternations( - columns1: Record, - columns2: Record, -): string[] { - const changes: string[] = []; - - for (const key in columns1) { - if (columns1.hasOwnProperty(key) && columns2.hasOwnProperty(key)) { - const col1 = columns1[key]; - const col2 = columns2[key]; - if (col1.type !== col2.type) { - changes.push(col2.name); - } - } - } - - return changes; -} - -export const logSuggestionsAndReturn = async ( - db: DB, - statements: JsonStatement[], - json2: TypeOf, - json1: TypeOf, -) => { - let shouldAskForApprove = false; - const statementsToExecute: string[] = []; - const infoToPrint: string[] = []; - - const tablesToRemove: string[] = []; - const columnsToRemove: string[] = []; - const schemasToRemove: string[] = []; - const tablesToTruncate: string[] = []; - - for (const statement of statements) { - if (statement.type === 'drop_table') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.tableName, - ) - } table with ${count} items`, - ); - tablesToRemove.push(statement.tableName); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_drop_column') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.columnName, - ) - } column in ${statement.tableName} table with ${count} items`, - ); - columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); - shouldAskForApprove = true; - } - } else if (statement.type === 'drop_schema') { - const res = await db.query( - `select count(*) as count from information_schema.tables where table_schema = \`${statement.name}\`;`, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.name, - ) - } schema with ${count} tables`, - ); - schemasToRemove.push(statement.name); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_alter_column_set_type') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to change ${ - chalk.underline( - statement.columnName, - ) - } column type from ${ - chalk.underline( - statement.oldDataType, - ) - } to ${chalk.underline(statement.newDataType)} with ${count} items`, - ); - statementsToExecute.push(`truncate table ${statement.tableName};`); - tablesToTruncate.push(statement.tableName); - shouldAskForApprove = true; - } - } else if (statement.type === 'alter_table_alter_column_drop_default') { - if (statement.columnNotNull) { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to remove default value from ${ - chalk.underline( - statement.columnName, - ) - } not-null column with ${count} items`, - ); - - tablesToTruncate.push(statement.tableName); - statementsToExecute.push(`truncate table ${statement.tableName};`); - - shouldAskForApprove = true; - } - } - // shouldAskForApprove = true; - } else if (statement.type === 'alter_table_alter_column_set_notnull') { - if (typeof statement.columnDefault === 'undefined') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to set not-null constraint to ${ - chalk.underline( - statement.columnName, - ) - } column without default, which contains ${count} items`, - ); - - tablesToTruncate.push(statement.tableName); - statementsToExecute.push(`truncate table ${statement.tableName};`); - - shouldAskForApprove = true; - } - } - } else if (statement.type === 'alter_table_alter_column_drop_pk') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - - // if drop pk and json2 has autoincrement in table -> exit process with error - if ( - Object.values(json2.tables[statement.tableName].columns).filter( - (column) => column.autoincrement, - ).length > 0 - ) { - console.log( - `${ - withStyle.errorWarning( - `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, - ) - }`, - ); - process.exit(1); - } - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to change ${ - chalk.underline( - statement.tableName, - ) - } primary key. This statements may fail and you table may left without primary key`, - ); - - tablesToTruncate.push(statement.tableName); - shouldAskForApprove = true; - } - } else if (statement.type === 'delete_composite_pk') { - // if drop pk and json2 has autoincrement in table -> exit process with error - if ( - Object.values(json2.tables[statement.tableName].columns).filter( - (column) => column.autoincrement, - ).length > 0 - ) { - console.log( - `${ - withStyle.errorWarning( - `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, - ) - }`, - ); - process.exit(1); - } - } else if (statement.type === 'alter_table_add_column') { - if ( - statement.column.notNull - && typeof statement.column.default === 'undefined' - ) { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to add not-null ${ - chalk.underline( - statement.column.name, - ) - } column without default value, which contains ${count} items`, - ); - - tablesToTruncate.push(statement.tableName); - statementsToExecute.push(`truncate table ${statement.tableName};`); - - shouldAskForApprove = true; - } - } - } else if (statement.type === 'create_unique_constraint') { - const res = await db.query( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - const unsquashedUnique = SingleStoreSquasher.unsquashUnique(statement.data); - console.log( - `· You're about to add ${ - chalk.underline( - unsquashedUnique.name, - ) - } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${ - chalk.underline( - statement.tableName, - ) - } table?\n`, - ); - const { status, data } = await render( - new Select([ - 'No, add the constraint without truncating the table', - `Yes, truncate the table`, - ]), - ); - if (data?.index === 1) { - tablesToTruncate.push(statement.tableName); - statementsToExecute.push(`truncate table ${statement.tableName};`); - shouldAskForApprove = true; - } - } - } else if (statement.type === 'singlestore_recreate_table') { - const tableName = statement.tableName; - - const prevColumns = json1.tables[tableName].columns; - const currentColumns = json2.tables[tableName].columns; - const { removedColumns, addedColumns } = findAddedAndRemoved( - Object.keys(prevColumns), - Object.keys(currentColumns), - ); - - if (removedColumns.length) { - for (const removedColumn of removedColumns) { - const res = await db.query<{ count: string }>( - `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``, - ); - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - removedColumn, - ) - } column in ${tableName} table with ${count} items`, - ); - columnsToRemove.push(removedColumn); - shouldAskForApprove = true; - } - } - } - - if (addedColumns.length) { - for (const addedColumn of addedColumns) { - const [res] = await db.query<{ count: string }>( - `select count(*) as count from \`${tableName}\``, - ); - - const columnConf = json2.tables[tableName].columns[addedColumn]; - - const count = Number(res.count); - if (count > 0 && columnConf.notNull && !columnConf.default) { - infoToPrint.push( - `· You're about to add not-null ${ - chalk.underline( - addedColumn, - ) - } column without default value to table, which contains ${count} items`, - ); - shouldAskForApprove = true; - tablesToTruncate.push(tableName); - - statementsToExecute.push(`TRUNCATE TABLE \`${tableName}\`;`); - } - } - } - - const columnWithChangedType = findColumnTypeAlternations(prevColumns, currentColumns); - for (const column of columnWithChangedType) { - const [res] = await db.query<{ count: string }>( - `select count(*) as count from \`${tableName}\` WHERE \`${tableName}\`.\`${column}\` IS NOT NULL;`, - ); - - const count = Number(res.count); - if (count > 0) { - infoToPrint.push( - `· You're about recreate ${chalk.underline(tableName)} table with data type changing for ${ - chalk.underline( - column, - ) - } column, which contains ${count} items`, - ); - shouldAskForApprove = true; - tablesToTruncate.push(tableName); - - statementsToExecute.push(`TRUNCATE TABLE \`${tableName}\`;`); - } - } - } - - const stmnt = fromJson([statement], 'singlestore', 'push'); - if (typeof stmnt !== 'undefined') { - statementsToExecute.push(...stmnt); - } - } - - return { - statementsToExecute, - shouldAskForApprove, - infoToPrint, - columnsToRemove: [...new Set(columnsToRemove)], - schemasToRemove: [...new Set(schemasToRemove)], - tablesToTruncate: [...new Set(tablesToTruncate)], - tablesToRemove: [...new Set(tablesToRemove)], - }; -}; diff --git a/drizzle-kit/src/cli/commands/singlestoreUp.ts b/drizzle-kit/src/cli/commands/singlestoreUp.ts index dc5004ed09..8868dd8023 100644 --- a/drizzle-kit/src/cli/commands/singlestoreUp.ts +++ b/drizzle-kit/src/cli/commands/singlestoreUp.ts @@ -1 +1,26 @@ -export const upSinglestoreHandler = (out: string) => {}; +import { existsSync, readFileSync, rmSync, unlinkSync, writeFileSync } from 'fs'; +import { join } from 'path'; +import type { Journal } from 'src/utils'; + +export const upSinglestoreHandler = (out: string) => { + // if there is meta folder - and there is a journal - it's version <8 + const metaPath = join(out, 'meta'); + const journalPath = join(metaPath, '_journal.json'); + if (existsSync(metaPath) && existsSync(journalPath)) { + const journal: Journal = JSON.parse(readFileSync(journalPath).toString()); + if (Number(journal.version) < 8) { + for (const entry of journal.entries) { + const snapshotPrefix = entry.tag.split('_')[0]; + const oldSnapshot = readFileSync(join(metaPath, `${snapshotPrefix}_snapshot.json`)); + const oldSql = readFileSync(join(out, `${entry.tag}.sql`)); + + writeFileSync(join(out, `${entry.tag}/snapshot.json`), oldSnapshot); + writeFileSync(join(out, `${entry.tag}/migration.sql`), oldSql); + + unlinkSync(join(out, `${entry.tag}.sql`)); + } + + rmSync(metaPath); + } + } +}; diff --git a/drizzle-kit/src/cli/commands/sqliteIntrospect.ts b/drizzle-kit/src/cli/commands/sqliteIntrospect.ts deleted file mode 100644 index 90a5b241e1..0000000000 --- a/drizzle-kit/src/cli/commands/sqliteIntrospect.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { renderWithTask } from 'hanji'; -import { Minimatch } from 'minimatch'; -import type { SQLiteDB } from 'src/utils'; -import { originUUID } from '../../global'; -import { schemaToTypeScript } from '../../introspect-sqlite'; -import type { SQLiteSchema } from '../../serializer/sqliteSchema'; -import { fromDatabase } from '../../serializer/sqliteSerializer'; -import { Casing } from '../validations/common'; -import type { SqliteCredentials } from '../validations/sqlite'; -import { IntrospectProgress, ProgressView } from '../views'; - -export const sqliteIntrospect = async ( - credentials: SqliteCredentials, - filters: string[], - casing: Casing, -) => { - const { connectToSQLite } = await import('../connections'); - const db = await connectToSQLite(credentials); - - const matchers = filters.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new IntrospectProgress(); - const res = await renderWithTask( - progress, - fromDatabase(db, filter, (stage, count, status) => { - progress.update(stage, count, status); - }), - ); - - const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; - const ts = schemaToTypeScript(schema, casing); - return { schema, ts }; -}; - -export const sqlitePushIntrospect = async (db: SQLiteDB, filters: string[]) => { - const matchers = filters.map((it) => { - return new Minimatch(it); - }); - - const filter = (tableName: string) => { - if (matchers.length === 0) return true; - - let flags: boolean[] = []; - - for (let matcher of matchers) { - if (matcher.negate) { - if (!matcher.match(tableName)) { - flags.push(false); - } - } - - if (matcher.match(tableName)) { - flags.push(true); - } - } - - if (flags.length > 0) { - return flags.every(Boolean); - } - return false; - }; - - const progress = new ProgressView( - 'Pulling schema from database...', - 'Pulling schema from database...', - ); - const res = await renderWithTask(progress, fromDatabase(db, filter)); - - const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; - return { schema }; -}; diff --git a/drizzle-kit/src/cli/commands/sqlitePushUtils.ts b/drizzle-kit/src/cli/commands/sqlitePushUtils.ts deleted file mode 100644 index a18b369451..0000000000 --- a/drizzle-kit/src/cli/commands/sqlitePushUtils.ts +++ /dev/null @@ -1,322 +0,0 @@ -import chalk from 'chalk'; - -import { SQLiteSchemaInternal, SQLiteSchemaSquashed, SQLiteSquasher } from '../../serializer/sqliteSchema'; -import { - CreateSqliteIndexConvertor, - fromJson, - SQLiteCreateTableConvertor, - SQLiteDropTableConvertor, - SqliteRenameTableConvertor, -} from '../../sqlgenerator'; - -import type { JsonStatement } from '../../jsonStatements'; -import { findAddedAndRemoved, type SQLiteDB } from '../../utils'; - -export const _moveDataStatements = ( - tableName: string, - json: SQLiteSchemaSquashed, - dataLoss: boolean = false, -) => { - const statements: string[] = []; - - const newTableName = `__new_${tableName}`; - - // create table statement from a new json2 with proper name - const tableColumns = Object.values(json.tables[tableName].columns); - const referenceData = Object.values(json.tables[tableName].foreignKeys); - const compositePKs = Object.values( - json.tables[tableName].compositePrimaryKeys, - ).map((it) => SQLiteSquasher.unsquashPK(it)); - const checkConstraints = Object.values(json.tables[tableName].checkConstraints); - - const mappedCheckConstraints: string[] = checkConstraints.map((it) => - it.replaceAll(`"${tableName}".`, `"${newTableName}".`) - .replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) - .replaceAll(`${tableName}.`, `${newTableName}.`) - .replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) - ); - - const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it)); - - // create new table - statements.push( - new SQLiteCreateTableConvertor().convert({ - type: 'sqlite_create_table', - tableName: newTableName, - columns: tableColumns, - referenceData: fks, - compositePKs, - checkConstraints: mappedCheckConstraints, - }), - ); - - // move data - if (!dataLoss) { - const columns = Object.keys(json.tables[tableName].columns).map( - (c) => `"${c}"`, - ); - - statements.push( - `INSERT INTO \`${newTableName}\`(${ - columns.join( - ', ', - ) - }) SELECT ${columns.join(', ')} FROM \`${tableName}\`;`, - ); - } - - statements.push( - new SQLiteDropTableConvertor().convert({ - type: 'drop_table', - tableName: tableName, - schema: '', - }), - ); - - // rename table - statements.push( - new SqliteRenameTableConvertor().convert({ - fromSchema: '', - tableNameFrom: newTableName, - tableNameTo: tableName, - toSchema: '', - type: 'rename_table', - }), - ); - - for (const idx of Object.values(json.tables[tableName].indexes)) { - statements.push( - new CreateSqliteIndexConvertor().convert({ - type: 'create_index', - tableName: tableName, - schema: '', - data: idx, - }), - ); - } - - return statements; -}; - -export const getOldTableName = ( - tableName: string, - meta: SQLiteSchemaInternal['_meta'], -) => { - for (const key of Object.keys(meta.tables)) { - const value = meta.tables[key]; - if (`"${tableName}"` === value) { - return key.substring(1, key.length - 1); - } - } - return tableName; -}; - -export const getNewTableName = ( - tableName: string, - meta: SQLiteSchemaInternal['_meta'], -) => { - if (typeof meta.tables[`"${tableName}"`] !== 'undefined') { - return meta.tables[`"${tableName}"`].substring( - 1, - meta.tables[`"${tableName}"`].length - 1, - ); - } - return tableName; -}; - -export const logSuggestionsAndReturn = async ( - connection: SQLiteDB, - statements: JsonStatement[], - json1: SQLiteSchemaSquashed, - json2: SQLiteSchemaSquashed, - meta: SQLiteSchemaInternal['_meta'], -) => { - let shouldAskForApprove = false; - const statementsToExecute: string[] = []; - const infoToPrint: string[] = []; - - const tablesToRemove: string[] = []; - const columnsToRemove: string[] = []; - const schemasToRemove: string[] = []; - const tablesToTruncate: string[] = []; - - for (const statement of statements) { - if (statement.type === 'drop_table') { - const res = await connection.query<{ count: string }>( - `select count(*) as count from \`${statement.tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - statement.tableName, - ) - } table with ${count} items`, - ); - tablesToRemove.push(statement.tableName); - shouldAskForApprove = true; - } - - const fromJsonStatement = fromJson([statement], 'sqlite', 'push'); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } else if (statement.type === 'alter_table_drop_column') { - const tableName = statement.tableName; - const columnName = statement.columnName; - - const res = await connection.query<{ count: string }>( - `select count(\`${tableName}\`.\`${columnName}\`) as count from \`${tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - columnName, - ) - } column in ${tableName} table with ${count} items`, - ); - columnsToRemove.push(`${tableName}_${statement.columnName}`); - shouldAskForApprove = true; - } - - const fromJsonStatement = fromJson([statement], 'sqlite', 'push'); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } else if ( - statement.type === 'sqlite_alter_table_add_column' - && (statement.column.notNull && !statement.column.default) - ) { - const tableName = statement.tableName; - const columnName = statement.column.name; - const res = await connection.query<{ count: string }>( - `select count(*) as count from \`${tableName}\``, - ); - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to add not-null ${ - chalk.underline( - columnName, - ) - } column without default value, which contains ${count} items`, - ); - - tablesToTruncate.push(tableName); - statementsToExecute.push(`delete from ${tableName};`); - - shouldAskForApprove = true; - } - - const fromJsonStatement = fromJson([statement], 'sqlite', 'push'); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } else if (statement.type === 'recreate_table') { - const tableName = statement.tableName; - const oldTableName = getOldTableName(tableName, meta); - - let dataLoss = false; - - const prevColumnNames = Object.keys(json1.tables[oldTableName].columns); - const currentColumnNames = Object.keys(json2.tables[tableName].columns); - const { removedColumns, addedColumns } = findAddedAndRemoved( - prevColumnNames, - currentColumnNames, - ); - - if (removedColumns.length) { - for (const removedColumn of removedColumns) { - const res = await connection.query<{ count: string }>( - `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``, - ); - - const count = Number(res[0].count); - if (count > 0) { - infoToPrint.push( - `· You're about to delete ${ - chalk.underline( - removedColumn, - ) - } column in ${tableName} table with ${count} items`, - ); - columnsToRemove.push(removedColumn); - shouldAskForApprove = true; - } - } - } - - if (addedColumns.length) { - for (const addedColumn of addedColumns) { - const [res] = await connection.query<{ count: string }>( - `select count(*) as count from \`${tableName}\``, - ); - - const columnConf = json2.tables[tableName].columns[addedColumn]; - - const count = Number(res.count); - if (count > 0 && columnConf.notNull && !columnConf.default) { - dataLoss = true; - infoToPrint.push( - `· You're about to add not-null ${ - chalk.underline( - addedColumn, - ) - } column without default value to table, which contains ${count} items`, - ); - shouldAskForApprove = true; - tablesToTruncate.push(tableName); - - statementsToExecute.push(`DELETE FROM \`${tableName}\`;`); - } - } - } - - // check if some tables referencing current for pragma - const tablesReferencingCurrent: string[] = []; - - for (const table of Object.values(json2.tables)) { - const tablesRefs = Object.values(json2.tables[table.name].foreignKeys) - .filter((t) => SQLiteSquasher.unsquashPushFK(t).tableTo === tableName) - .map((it) => SQLiteSquasher.unsquashPushFK(it).tableFrom); - - tablesReferencingCurrent.push(...tablesRefs); - } - - if (!tablesReferencingCurrent.length) { - statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss)); - continue; - } - - const [{ foreign_keys: pragmaState }] = await connection.query<{ - foreign_keys: number; - }>(`PRAGMA foreign_keys;`); - - if (pragmaState) { - statementsToExecute.push(`PRAGMA foreign_keys=OFF;`); - } - statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss)); - if (pragmaState) { - statementsToExecute.push(`PRAGMA foreign_keys=ON;`); - } - } else { - const fromJsonStatement = fromJson([statement], 'sqlite', 'push'); - statementsToExecute.push( - ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), - ); - } - } - - return { - statementsToExecute, - shouldAskForApprove, - infoToPrint, - columnsToRemove: [...new Set(columnsToRemove)], - schemasToRemove: [...new Set(schemasToRemove)], - tablesToTruncate: [...new Set(tablesToTruncate)], - tablesToRemove: [...new Set(tablesToRemove)], - }; -}; diff --git a/drizzle-kit/src/cli/commands/sqliteUp.ts b/drizzle-kit/src/cli/commands/sqliteUp.ts deleted file mode 100644 index aaa1fa7b91..0000000000 --- a/drizzle-kit/src/cli/commands/sqliteUp.ts +++ /dev/null @@ -1,52 +0,0 @@ -import chalk from 'chalk'; -import { writeFileSync } from 'fs'; -import { mapEntries } from 'src/global'; -import { SQLiteSchema, sqliteSchemaV5 } from 'src/serializer/sqliteSchema'; -import { prepareOutFolder, validateWithReport } from 'src/utils'; - -export const upSqliteHandler = (out: string) => { - const { snapshots } = prepareOutFolder(out, 'sqlite'); - const report = validateWithReport(snapshots, 'sqlite'); - - report.nonLatest - .map((it) => ({ - path: it, - raw: report.rawMap[it]!! as Record, - })) - .forEach((it) => { - const path = it.path; - const result = updateUpToV6(it.raw); - - console.log(`[${chalk.green('✓')}] ${path}`); - - writeFileSync(path, JSON.stringify(result, null, 2)); - }); - - console.log("Everything's fine 🐶🔥"); -}; - -const updateUpToV6 = (json: Record): SQLiteSchema => { - const schema = sqliteSchemaV5.parse(json); - - const tables = mapEntries(schema.tables, (tableKey, table) => { - const columns = mapEntries(table.columns, (key, value) => { - if ( - value.default - && (typeof value.default === 'object' || Array.isArray(value.default)) - ) { - value.default = `'${JSON.stringify(value.default)}'`; - } - return [key, value]; - }); - table.columns = columns; - return [tableKey, table]; - }); - - return { - ...schema, - version: '6', - dialect: 'sqlite', - tables: tables, - views: {}, - }; -}; diff --git a/drizzle-kit/src/serializer/studio.ts b/drizzle-kit/src/cli/commands/studio.ts similarity index 73% rename from drizzle-kit/src/serializer/studio.ts rename to drizzle-kit/src/cli/commands/studio.ts index ee4518be14..e6dbb66511 100644 --- a/drizzle-kit/src/serializer/studio.ts +++ b/drizzle-kit/src/cli/commands/studio.ts @@ -2,7 +2,9 @@ import type { PGlite } from '@electric-sql/pglite'; import { serve } from '@hono/node-server'; import { zValidator } from '@hono/zod-validator'; import { createHash } from 'crypto'; -import { AnyColumn, AnyTable, is } from 'drizzle-orm'; +import type { AnyColumn, AnyTable } from 'drizzle-orm'; +import { is } from 'drizzle-orm'; +import type { TablesRelationalConfig } from 'drizzle-orm/_relations'; import { createTableRelationsHelpers, extractTablesRelationalConfig, @@ -10,34 +12,35 @@ import { normalizeRelation, One, Relations, - TablesRelationalConfig, } from 'drizzle-orm/_relations'; -import { AnyMySqlTable, getTableConfig as mysqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core'; -import { AnyPgTable, getTableConfig as pgTableConfig, PgTable } from 'drizzle-orm/pg-core'; -import { - AnySingleStoreTable, - getTableConfig as singlestoreTableConfig, - SingleStoreTable, -} from 'drizzle-orm/singlestore-core'; -import { AnySQLiteTable, getTableConfig as sqliteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core'; +import type { AnyMsSqlTable } from 'drizzle-orm/mssql-core'; +import { getTableConfig as mssqlTableConfig, MsSqlTable } from 'drizzle-orm/mssql-core'; +import type { AnyMySqlTable } from 'drizzle-orm/mysql-core'; +import { getTableConfig as mysqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core'; +import type { AnyPgTable } from 'drizzle-orm/pg-core'; +import { getTableConfig as pgTableConfig, PgTable } from 'drizzle-orm/pg-core'; +import type { AnySingleStoreTable } from 'drizzle-orm/singlestore-core'; +import { getTableConfig as singlestoreTableConfig, SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import type { AnySQLiteTable } from 'drizzle-orm/sqlite-core'; +import { getTableConfig as sqliteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core'; import fs from 'fs'; import { Hono } from 'hono'; -// TODO: replace with '@hono/compress' when Bun supports CompressionStream -import { compress } from '@hono/bun-compress'; +import { compress } from 'hono/compress'; import { cors } from 'hono/cors'; import { createServer } from 'node:https'; -import { CasingType } from 'src/cli/validations/common'; -import { LibSQLCredentials } from 'src/cli/validations/libsql'; -import { assertUnreachable } from 'src/global'; +import type { CasingType } from 'src/cli/validations/common'; +import type { LibSQLCredentials } from 'src/cli/validations/libsql'; import { z } from 'zod'; -import { safeRegister } from '../cli/commands/utils'; -import type { MysqlCredentials } from '../cli/validations/mysql'; -import type { PostgresCredentials } from '../cli/validations/postgres'; -import type { SingleStoreCredentials } from '../cli/validations/singlestore'; -import type { SqliteCredentials } from '../cli/validations/sqlite'; -import type { Proxy, TransactionProxy } from '../utils'; -import { prepareFilenames } from '.'; -import { getColumnCasing } from './utils'; +import { getColumnCasing } from '../../dialects/drizzle'; +import type { Proxy, TransactionProxy } from '../../utils'; +import { assertUnreachable } from '../../utils'; +import { safeRegister } from '../../utils/utils-node'; +import { prepareFilenames } from '../../utils/utils-node'; +import { JSONB } from '../../utils/when-json-met-bigint'; +import type { MysqlCredentials } from '../validations/mysql'; +import type { PostgresCredentials } from '../validations/postgres'; +import type { SingleStoreCredentials } from '../validations/singlestore'; +import type { SqliteCredentials } from '../validations/sqlite'; type CustomDefault = { schema: string; @@ -101,26 +104,26 @@ export const preparePgSchema = async (path: string | string[]) => { content: fs.readFileSync(it, 'utf-8'), })); - const { unregister } = await safeRegister(); - for (let i = 0; i < imports.length; i++) { - const it = imports[i]; + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; - const i0: Record = require(`${it}`); - const i0values = Object.entries(i0); + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); - i0values.forEach(([k, t]) => { - if (is(t, PgTable)) { - const schema = pgTableConfig(t).schema || 'public'; - pgSchema[schema] = pgSchema[schema] || {}; - pgSchema[schema][k] = t; - } + i0values.forEach(([k, t]) => { + if (is(t, PgTable)) { + const schema = pgTableConfig(t).schema || 'public'; + pgSchema[schema] = pgSchema[schema] || {}; + pgSchema[schema][k] = t; + } - if (is(t, Relations)) { - relations[k] = t; - } - }); - } - unregister(); + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + }); return { schema: pgSchema, relations, files }; }; @@ -139,29 +142,66 @@ export const prepareMySqlSchema = async (path: string | string[]) => { content: fs.readFileSync(it, 'utf-8'), })); - const { unregister } = await safeRegister(); - for (let i = 0; i < imports.length; i++) { - const it = imports[i]; + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; - const i0: Record = require(`${it}`); - const i0values = Object.entries(i0); + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); - i0values.forEach(([k, t]) => { - if (is(t, MySqlTable)) { - const schema = mysqlTableConfig(t).schema || 'public'; - mysqlSchema[schema][k] = t; - } + i0values.forEach(([k, t]) => { + if (is(t, MySqlTable)) { + const schema = mysqlTableConfig(t).schema || 'public'; + mysqlSchema[schema][k] = t; + } - if (is(t, Relations)) { - relations[k] = t; - } - }); - } - unregister(); + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + }); return { schema: mysqlSchema, relations, files }; }; +export const prepareMsSqlSchema = async (path: string | string[]) => { + const imports = prepareFilenames(path); + const mssqlSchema: Record> = { + public: {}, + }; + const relations: Record = {}; + + // files content as string + const files = imports.map((it, index) => ({ + // get the file name from the path + name: it.split('/').pop() || `schema${index}.ts`, + content: fs.readFileSync(it, 'utf-8'), + })); + + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); + + i0values.forEach(([k, t]) => { + if (is(t, MsSqlTable)) { + const schema = mssqlTableConfig(t).schema || 'public'; + mssqlSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + }); + + return { schema: mssqlSchema, relations, files }; +}; + export const prepareSQLiteSchema = async (path: string | string[]) => { const imports = prepareFilenames(path); const sqliteSchema: Record> = { @@ -176,25 +216,25 @@ export const prepareSQLiteSchema = async (path: string | string[]) => { content: fs.readFileSync(it, 'utf-8'), })); - const { unregister } = await safeRegister(); - for (let i = 0; i < imports.length; i++) { - const it = imports[i]; + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; - const i0: Record = require(`${it}`); - const i0values = Object.entries(i0); + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); - i0values.forEach(([k, t]) => { - if (is(t, SQLiteTable)) { - const schema = 'public'; // sqlite does not have schemas - sqliteSchema[schema][k] = t; - } + i0values.forEach(([k, t]) => { + if (is(t, SQLiteTable)) { + const schema = 'public'; // sqlite does not have schemas + sqliteSchema[schema][k] = t; + } - if (is(t, Relations)) { - relations[k] = t; - } - }); - } - unregister(); + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + }); return { schema: sqliteSchema, relations, files }; }; @@ -216,25 +256,25 @@ export const prepareSingleStoreSchema = async (path: string | string[]) => { content: fs.readFileSync(it, 'utf-8'), })); - const { unregister } = await safeRegister(); - for (let i = 0; i < imports.length; i++) { - const it = imports[i]; + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; - const i0: Record = require(`${it}`); - const i0values = Object.entries(i0); + const i0: Record = require(`${it}`); + const i0values = Object.entries(i0); - i0values.forEach(([k, t]) => { - if (is(t, SingleStoreTable)) { - const schema = singlestoreTableConfig(t).schema || 'public'; - singlestoreSchema[schema][k] = t; - } + i0values.forEach(([k, t]) => { + if (is(t, SingleStoreTable)) { + const schema = singlestoreTableConfig(t).schema || 'public'; + singlestoreSchema[schema][k] = t; + } - if (is(t, Relations)) { - relations[k] = t; - } - }); - } - unregister(); + if (is(t, Relations)) { + relations[k] = t; + } + }); + } + }); return { schema: singlestoreSchema, relations, files }; }; @@ -287,7 +327,7 @@ export const drizzleForPostgres = async ( schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { - const { preparePostgresDB } = await import('../cli/connections'); + const { preparePostgresDB } = await import('../connections'); const db = await preparePostgresDB(credentials); const customDefaults = getCustomDefaults(pgSchema, casing); @@ -333,7 +373,7 @@ export const drizzleForMySQL = async ( schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { - const { connectToMySQL } = await import('../cli/connections'); + const { connectToMySQL } = await import('../connections'); const { proxy, transactionProxy, database, packageName } = await connectToMySQL(credentials); const customDefaults = getCustomDefaults(mysqlSchema, casing); @@ -364,6 +404,40 @@ export const drizzleForMySQL = async ( }; }; +// export const drizzleForMsSQL = async ( +// credentials: MssqlCredentials, +// mssqlSchema: Record>, +// relations: Record, +// schemaFiles?: SchemaFile[], +// ): Promise => { +// const { connectToMsSQL } = await import('../cli/connections'); +// const { proxy } = await connectToMsSQL(credentials); + +// const customDefaults = getCustomDefaults(mssqlSchema); + +// let dbUrl: string; + +// if ('url' in credentials) { +// dbUrl = credentials.url; +// } else { +// // TODO() change it! +// dbUrl = +// `mysql://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`; +// } + +// const dbHash = createHash('sha256').update(dbUrl).digest('hex'); + +// return { +// dbHash, +// dialect: 'mysql', +// proxy, +// customDefaults, +// schema: mssqlSchema, +// relations, +// schemaFiles, +// }; +// }; + export const drizzleForSQLite = async ( credentials: SqliteCredentials, sqliteSchema: Record>, @@ -371,7 +445,7 @@ export const drizzleForSQLite = async ( schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { - const { connectToSQLite } = await import('../cli/connections'); + const { connectToSQLite } = await import('../connections'); const sqliteDB = await connectToSQLite(credentials); const customDefaults = getCustomDefaults(sqliteSchema, casing); @@ -414,7 +488,7 @@ export const drizzleForLibSQL = async ( schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { - const { connectToLibSQL } = await import('../cli/connections'); + const { connectToLibSQL } = await import('../connections'); const sqliteDB = await connectToLibSQL(credentials); const customDefaults = getCustomDefaults(sqliteSchema, casing); @@ -445,7 +519,7 @@ export const drizzleForSingleStore = async ( schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { - const { connectToSingleStore } = await import('../cli/connections'); + const { connectToSingleStore } = await import('../connections'); const { proxy, transactionProxy, database, packageName } = await connectToSingleStore(credentials); const customDefaults = getCustomDefaults(singlestoreSchema, casing); @@ -545,7 +619,7 @@ export const extractRelations = ( refSchema: refSchema || 'public', refColumns: refColumns, }; - } catch (error) { + } catch { throw new Error( `Invalid relation "${relation.fieldName}" for table "${ it.schema ? `${it.schema}.${it.dbName}` : it.dbName @@ -618,7 +692,7 @@ const schema = z.union([ ]); const jsonStringify = (data: any) => { - return JSON.stringify(data, (_key, value) => { + return JSONB.stringify(data, (_key, value) => { // Convert Error to object if (value instanceof Error) { return { @@ -626,11 +700,6 @@ const jsonStringify = (data: any) => { }; } - // Convert BigInt to string - if (typeof value === 'bigint') { - return value.toString(); - } - // Convert Buffer and ArrayBuffer to base64 if ( (value @@ -763,12 +832,28 @@ export const prepareServer = async ( ...body.data, params: body.data.params || [], }); - return c.json(JSON.parse(jsonStringify(result))); + const res = jsonStringify(result)!; + return c.body( + res, + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ); } if (type === 'tproxy') { const result = await transactionProxy(body.data); - return c.json(JSON.parse(jsonStringify(result))); + const res = jsonStringify(result)!; + return c.body( + res, + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ); } if (type === 'defaults') { @@ -796,8 +881,15 @@ export const prepareServer = async ( value, }; }); - - return c.json(JSON.parse(jsonStringify(result))); + const res = jsonStringify(result)!; + return c.body( + res, + { + headers: { + 'Content-Type': 'application/json', + }, + }, + ); } throw new Error(`Unknown type: ${type}`); diff --git a/drizzle-kit/src/cli/commands/up-cockroach.ts b/drizzle-kit/src/cli/commands/up-cockroach.ts new file mode 100644 index 0000000000..2d83e50183 --- /dev/null +++ b/drizzle-kit/src/cli/commands/up-cockroach.ts @@ -0,0 +1,6 @@ +export const upCockroachHandler = (_out: string) => { + // const { snapshots } = prepareOutFolder(out, "cockroach"); + // const report = validateWithReport(snapshots, "cockroach"); + + console.log("Everything's fine 🐶🔥"); +}; diff --git a/drizzle-kit/src/cli/commands/up-mysql.ts b/drizzle-kit/src/cli/commands/up-mysql.ts new file mode 100644 index 0000000000..437f08b9a0 --- /dev/null +++ b/drizzle-kit/src/cli/commands/up-mysql.ts @@ -0,0 +1,191 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import { prepareOutFolder, validateWithReport } from 'src/utils/utils-node'; +import { createDDL } from '../../dialects/mysql/ddl'; +import { Binary, Varbinary } from '../../dialects/mysql/grammar'; +import type { MysqlSchemaV6, MysqlSnapshot } from '../../dialects/mysql/snapshot'; +import { trimChar } from '../../utils'; +import { migrateToFoldersV3 } from './utils'; + +export const upMysqlHandler = (out: string) => { + migrateToFoldersV3(out); + + const { snapshots } = prepareOutFolder(out); + const report = validateWithReport(snapshots, 'mysql'); + + report.nonLatest + .map((it) => ({ + path: it, + raw: report.rawMap[it] as Record, + })) + .forEach((it) => { + const path = it.path; + + const snapshot = upToV6(it.raw); + + console.log(`[${chalk.green('✓')}] ${path}`); + + writeFileSync(path, JSON.stringify(snapshot, null, 2)); + }); + + console.log("Everything's fine 🐶🔥"); +}; + +export const upToV6 = (it: Record): MysqlSnapshot => { + const json = it as MysqlSchemaV6; + + const ddl = createDDL(); + + for (const table of Object.values(json.tables)) { + ddl.tables.push({ name: table.name }); + + for (const column of Object.values(table.columns)) { + let def = typeof column.default === 'undefined' ? null : String(column.default); + if (def !== null) { + if (column.type.startsWith('decimal')) def = `(${trimChar(def, "'")})`; + if (column.type.startsWith('binary')) { + const trimmed = trimChar(def, "'"); + if (trimmed !== def) def = Binary.defaultFromDrizzle(trimmed)!; + } + if (column.type.startsWith('varbinary')) { + const trimmed = trimChar(def, "'"); + // check if it's not an expression + if (trimmed !== def) def = Varbinary.defaultFromDrizzle(trimmed); + } + } + + ddl.columns.push({ + table: table.name, + name: column.name, + type: column.type, + notNull: column.notNull, + default: def, + autoIncrement: column.autoincrement ?? false, + onUpdateNow: column.onUpdate ?? false, + generated: column.generated, + // TODO: @AleksandrSherman check + charSet: null, + collation: null, + onUpdateNowFsp: null, + }); + } + } + for (const table of Object.values(json.tables)) { + for (const index of Object.values(table.indexes)) { + /* legacy columns mapper + const uniqueString = unsquashedUnique.columns + .map((it) => { + return internals?.indexes + ? internals?.indexes[unsquashedUnique.name]?.columns[it] + ?.isExpression + ? it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + */ + + const columns = index.columns.map((x) => { + const nameToCheck = trimChar(x, '`'); + const isColumn = !!ddl.columns.one({ table: table.name, name: nameToCheck }); + return { value: x, isExpression: !isColumn }; + }); + + ddl.indexes.push({ + table: table.name, + name: index.name, + columns, + algorithm: index.algorithm ?? null, + isUnique: index.isUnique, + lock: index.lock ?? null, + using: index.using ?? null, + nameExplicit: true, + }); + } + + for (const unique of Object.values(table.uniqueConstraints)) { + /* legacy columns mapper + const uniqueString = unsquashedUnique.columns + .map((it) => { + return internals?.indexes + ? internals?.indexes[unsquashedUnique.name]?.columns[it] + ?.isExpression + ? it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + */ + const columns = unique.columns.map((x) => { + const nameToCheck = trimChar(x, '`'); + const isColumn = !!ddl.columns.one({ table: table.name, name: nameToCheck }); + return { value: x, isExpression: !isColumn }; + }); + + let nameImplicit = `${table.name}_${unique.columns.join('_')}_unique` === unique.name + || `${table.name}_${unique.columns.join('_')}` === unique.name; + + ddl.indexes.push({ + table: table.name, + name: unique.name, + columns, + algorithm: null, + isUnique: true, + lock: null, + using: null, + nameExplicit: !nameImplicit, + }); + } + + for (const fk of Object.values(table.foreignKeys)) { + const isNameImplicit = + `${fk.tableFrom}_${fk.columnsFrom.join('_')}_${fk.tableTo}_${fk.columnsTo.join('_')}_fk` === fk.name; + + ddl.fks.push({ + table: table.name, + name: fk.name, + columns: fk.columnsFrom, + columnsTo: fk.columnsTo, + tableTo: fk.tableTo, + onUpdate: fk.onUpdate?.toUpperCase() as any ?? null, + onDelete: fk.onDelete?.toUpperCase() as any ?? null, + nameExplicit: !isNameImplicit, + }); + } + + for (const check of Object.values(table.checkConstraint)) { + ddl.checks.push({ + table: table.name, + name: check.name, + value: check.value, + }); + } + + for (const pk of Object.values(table.compositePrimaryKeys)) { + ddl.pks.push({ + table: table.name, + name: 'PRIMARY', + columns: pk.columns, + }); + } + } + + for (const view of Object.values(json.views)) { + ddl.views.push({ + name: view.name, + algorithm: view.algorithm ?? null, + sqlSecurity: view.sqlSecurity ?? null, + withCheckOption: view.withCheckOption ?? null, + definition: view.definition!, + }); + } + + return { + version: '6', + id: json.id, + prevIds: [json.prevId], + dialect: 'mysql', + ddl: ddl.entities.list(), + renames: [], + }; +}; diff --git a/drizzle-kit/src/cli/commands/up-postgres.ts b/drizzle-kit/src/cli/commands/up-postgres.ts new file mode 100644 index 0000000000..c5c8a74127 --- /dev/null +++ b/drizzle-kit/src/cli/commands/up-postgres.ts @@ -0,0 +1,29 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import { upToV8 } from 'src/dialects/postgres/versions'; +import { prepareOutFolder, validateWithReport } from '../../utils/utils-node'; +import { migrateToFoldersV3 } from './utils'; + +export const upPgHandler = (out: string) => { + migrateToFoldersV3(out); + + const { snapshots } = prepareOutFolder(out); + const report = validateWithReport(snapshots, 'postgresql'); + + report.nonLatest + .map((it) => ({ + path: it, + raw: report.rawMap[it]! as Record, + })) + .forEach((it) => { + const path = it.path; + + const { snapshot } = upToV8(it.raw); + + console.log(`[${chalk.green('✓')}] ${path}`); + + writeFileSync(path, JSON.stringify(snapshot, null, 2)); + }); + + console.log("Everything's fine 🐶🔥"); +}; diff --git a/drizzle-kit/src/cli/commands/up-singlestore.ts b/drizzle-kit/src/cli/commands/up-singlestore.ts new file mode 100644 index 0000000000..4060791fd8 --- /dev/null +++ b/drizzle-kit/src/cli/commands/up-singlestore.ts @@ -0,0 +1,191 @@ +import chalk from 'chalk'; +import { writeFileSync } from 'fs'; +import type { SchemaV1, SingleStoreSnapshot } from 'src/dialects/singlestore/snapshot'; +import { prepareOutFolder, validateWithReport } from 'src/utils/utils-node'; +import { createDDL } from '../../dialects/mysql/ddl'; +import { Binary, Varbinary } from '../../dialects/mysql/grammar'; +import { trimChar } from '../../utils'; +import { migrateToFoldersV3 } from './utils'; + +export const upSinglestoreHandler = (out: string) => { + migrateToFoldersV3(out); + + const { snapshots } = prepareOutFolder(out); + const report = validateWithReport(snapshots, 'singlestore'); + + report.nonLatest + .map((it) => ({ + path: it, + raw: report.rawMap[it] as Record, + })) + .forEach((it) => { + const path = it.path; + + const snapshot = upToV2(it.raw); + + console.log(`[${chalk.green('✓')}] ${path}`); + + writeFileSync(path, JSON.stringify(snapshot, null, 2)); + }); + + console.log("Everything's fine 🐶🔥"); +}; + +export const upToV2 = (it: Record): SingleStoreSnapshot => { + const json = it as SchemaV1; + + const ddl = createDDL(); + + for (const table of Object.values(json.tables)) { + ddl.tables.push({ name: table.name }); + + for (const column of Object.values(table.columns)) { + let def = typeof column.default === 'undefined' ? null : String(column.default); + if (def !== null) { + if (column.type.startsWith('decimal')) def = `(${trimChar(def, "'")})`; + if (column.type.startsWith('binary')) { + const trimmed = trimChar(def, "'"); + if (trimmed !== def) def = Binary.defaultFromDrizzle(trimmed)!; + } + if (column.type.startsWith('varbinary')) { + const trimmed = trimChar(def, "'"); + // check if it's not an expression + if (trimmed !== def) def = Varbinary.defaultFromDrizzle(trimmed); + } + } + + ddl.columns.push({ + table: table.name, + name: column.name, + type: column.type, + notNull: column.notNull, + default: def, + autoIncrement: column.autoincrement ?? false, + onUpdateNow: column.onUpdate ?? false, + generated: column.generated, + // TODO: @AleksandrSherman check + charSet: null, + collation: null, + onUpdateNowFsp: null, + }); + } + } + for (const table of Object.values(json.tables)) { + for (const index of Object.values(table.indexes)) { + /* legacy columns mapper + const uniqueString = unsquashedUnique.columns + .map((it) => { + return internals?.indexes + ? internals?.indexes[unsquashedUnique.name]?.columns[it] + ?.isExpression + ? it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + */ + + const columns = index.columns.map((x) => { + const nameToCheck = trimChar(x, '`'); + const isColumn = !!ddl.columns.one({ table: table.name, name: nameToCheck }); + return { value: x, isExpression: !isColumn }; + }); + + ddl.indexes.push({ + table: table.name, + name: index.name, + columns, + algorithm: index.algorithm ?? null, + isUnique: index.isUnique, + lock: index.lock ?? null, + using: index.using ?? null, + nameExplicit: true, + }); + } + + for (const unique of Object.values(table.uniqueConstraints)) { + /* legacy columns mapper + const uniqueString = unsquashedUnique.columns + .map((it) => { + return internals?.indexes + ? internals?.indexes[unsquashedUnique.name]?.columns[it] + ?.isExpression + ? it + : `\`${it}\`` + : `\`${it}\``; + }) + .join(','); + */ + const columns = unique.columns.map((x) => { + const nameToCheck = trimChar(x, '`'); + const isColumn = !!ddl.columns.one({ table: table.name, name: nameToCheck }); + return { value: x, isExpression: !isColumn }; + }); + + let nameImplicit = `${table.name}_${unique.columns.join('_')}_unique` === unique.name + || `${table.name}_${unique.columns.join('_')}` === unique.name; + + ddl.indexes.push({ + table: table.name, + name: unique.name, + columns, + algorithm: null, + isUnique: true, + lock: null, + using: null, + nameExplicit: !nameImplicit, + }); + } + + // for (const fk of Object.values(table.foreignKeys)) { + // const isNameImplicit = + // `${fk.tableFrom}_${fk.columnsFrom.join('_')}_${fk.tableTo}_${fk.columnsTo.join('_')}_fk` === fk.name; + + // ddl.fks.push({ + // table: table.name, + // name: fk.name, + // columns: fk.columnsFrom, + // columnsTo: fk.columnsTo, + // tableTo: fk.tableTo, + // onUpdate: fk.onUpdate?.toUpperCase() as any ?? null, + // onDelete: fk.onDelete?.toUpperCase() as any ?? null, + // nameExplicit: !isNameImplicit, + // }); + // } + + // for (const check of Object.values(table.checkConstraint)) { + // ddl.checks.push({ + // table: table.name, + // name: check.name, + // value: check.value, + // }); + // } + + for (const pk of Object.values(table.compositePrimaryKeys)) { + ddl.pks.push({ + table: table.name, + name: 'PRIMARY', + columns: pk.columns, + }); + } + } + + // for (const view of Object.values(json.views)) { + // ddl.views.push({ + // name: view.name, + // algorithm: view.algorithm ?? null, + // sqlSecurity: view.sqlSecurity ?? null, + // withCheckOption: view.withCheckOption ?? null, + // definition: view.definition!, + // }); + // } + + return { + version: '2', + id: json.id, + prevIds: [json.prevId], + dialect: 'singlestore', + ddl: ddl.entities.list(), + renames: [], + }; +}; diff --git a/drizzle-kit/src/cli/commands/up-sqlite.ts b/drizzle-kit/src/cli/commands/up-sqlite.ts new file mode 100644 index 0000000000..b588c57561 --- /dev/null +++ b/drizzle-kit/src/cli/commands/up-sqlite.ts @@ -0,0 +1,181 @@ +import chalk from 'chalk'; +import { existsSync, writeFileSync } from 'fs'; +import { join } from 'path'; +import { nameForPk } from 'src/dialects/sqlite/grammar'; +import { prepareOutFolder, validateWithReport } from 'src/utils/utils-node'; +import { createDDL } from '../../dialects/sqlite/ddl'; +import type { SqliteSnapshot } from '../../dialects/sqlite/snapshot'; +import { sqliteSchemaV5, type SQLiteSchemaV6, sqliteSchemaV6 } from '../../dialects/sqlite/snapshot'; +import { mapEntries } from '../../utils'; +import { embeddedMigrations } from './generate-common'; +import { migrateToFoldersV3 } from './utils'; + +export const upSqliteHandler = (out: string) => { + migrateToFoldersV3(out); + + const { snapshots } = prepareOutFolder(out); + const report = validateWithReport(snapshots, 'sqlite'); + + report.nonLatest + .map((it) => ({ + path: it, + raw: report.rawMap[it]! as Record, + })) + .forEach((it) => { + const path = it.path; + + let result: SqliteSnapshot; + if (it.raw['version'] === '5') { + result = updateToV7(updateUpToV6(it.raw)); + } else if (it.raw['version'] === '6') { + result = updateToV7(sqliteSchemaV6.parse(it.raw)); + } else { + throw new Error(`unexpected version of SQLite snapshot: ${it.raw['version']}`); + } + + console.log(`[${chalk.green('✓')}] ${path}`); + writeFileSync(path, JSON.stringify(result, null, 2)); + }); + + if (existsSync(join(out, 'migrations.js'))) { + const js = embeddedMigrations(snapshots); + writeFileSync(`${out}/migrations.js`, js); + } + + console.log("Everything's fine 🐶🔥"); +}; + +const updateToV7 = (snapshot: SQLiteSchemaV6): SqliteSnapshot => { + const ddl = createDDL(); + for (const table of Object.values(snapshot.tables)) { + ddl.tables.push({ + name: table.name, + }); + + for (const column of Object.values(table.columns)) { + ddl.columns.push({ + table: table.name, + name: column.name, + type: column.type, + notNull: column.notNull, + default: column.default ?? null, + autoincrement: column.autoincrement, + generated: column.generated ?? null, + }); + + if (column.primaryKey) { + ddl.pks.push({ + table: table.name, + columns: [column.name], + name: nameForPk(table.name), + nameExplicit: false, + }); + } + } + + for (const pk of Object.values(table.compositePrimaryKeys)) { + const implicit = pk.name === `${table.name}_${pk.columns.join('_')}_pk`; + + ddl.pks.push({ + table: table.name, + name: pk.name, + columns: pk.columns, + nameExplicit: !implicit, + }); + } + + for (const index of Object.values(table.indexes)) { + ddl.indexes.push({ + table: table.name, + name: index.name, + columns: index.columns.map((it) => ({ value: it, isExpression: false })), + isUnique: index.isUnique, + where: index.where, + origin: 'manual', + }); + } + + for (const unique of Object.values(table.uniqueConstraints)) { + const implicit = unique.name === `${table.name}_${unique.columns.join('_')}_unique`; + ddl.uniques.push({ + table: table.name, + name: unique.name, + columns: unique.columns, + nameExplicit: !implicit, + }); + } + + for (const check of Object.values(table.checkConstraints)) { + ddl.checks.push({ + table: table.name, + name: check.name, + value: check.value, + }); + } + + for (const fk of Object.values(table.foreignKeys)) { + const implicit = + fk.name === `${table.name}_${fk.columnsFrom.join('_')}_${fk.tableTo}_${fk.columnsTo.join('_')}_fk`; + ddl.fks.push({ + table: table.name, + name: fk.name, + columns: fk.columnsFrom, + tableTo: fk.tableTo, + columnsTo: fk.columnsTo, + onDelete: fk.onDelete ?? 'NO ACTION', + onUpdate: fk.onUpdate ?? 'NO ACTION', + nameExplicit: !implicit, + }); + } + } + + for (const view of Object.values(snapshot.views)) { + ddl.views.push({ + name: view.name, + definition: view.definition, + isExisting: view.isExisting, + error: null, + }); + } + + const renames = [...Object.entries(snapshot._meta.tables), ...Object.entries(snapshot._meta.columns)].map( + ([key, value]) => { + return `${key}->${value}`; + }, + ); + + return { + dialect: 'sqlite', + id: snapshot.id, + prevIds: [snapshot.prevId], + version: '7', + ddl: ddl.entities.list(), + renames: renames, + }; +}; + +const updateUpToV6 = (json: object): SQLiteSchemaV6 => { + const schema = sqliteSchemaV5.parse(json); + + const tables = mapEntries(schema.tables, (tableKey, table) => { + const columns = mapEntries(table.columns, (key, value) => { + if ( + value.default + && (typeof value.default === 'object' || Array.isArray(value.default)) + ) { + value.default = `'${JSON.stringify(value.default)}'`; + } + return [key, value]; + }); + table.columns = columns; + return [tableKey, table]; + }); + + return { + ...schema, + version: '6', + dialect: 'sqlite', + tables: tables, + views: {}, + }; +}; diff --git a/drizzle-kit/src/cli/commands/utils.ts b/drizzle-kit/src/cli/commands/utils.ts index 5618dc18a2..6556134878 100644 --- a/drizzle-kit/src/cli/commands/utils.ts +++ b/drizzle-kit/src/cli/commands/utils.ts @@ -1,93 +1,41 @@ import chalk from 'chalk'; -import { existsSync } from 'fs'; +import { existsSync, mkdirSync, readdirSync, readFileSync, rm, unlinkSync, writeFileSync } from 'fs'; import { render } from 'hanji'; import { join, resolve } from 'path'; import { object, string } from 'zod'; -import { getTablesFilterByExtensions } from '../../extensions/getTablesFilterByExtensions'; -import { assertUnreachable } from '../../global'; -import { type Dialect, dialect } from '../../schemaValidator'; -import { prepareFilenames } from '../../serializer'; -import { Entities, pullParams, pushParams } from '../validations/cli'; -import { - Casing, - CasingType, - CliConfig, - configCommonSchema, - configMigrations, - Driver, - Prefix, - wrapParam, -} from '../validations/common'; -import { GelCredentials, gelCredentials, printConfigConnectionIssues as printIssuesGel } from '../validations/gel'; -import { - LibSQLCredentials, - libSQLCredentials, - printConfigConnectionIssues as printIssuesLibSQL, -} from '../validations/libsql'; -import { - MysqlCredentials, - mysqlCredentials, - printConfigConnectionIssues as printIssuesMysql, -} from '../validations/mysql'; +import { assertUnreachable, type Journal } from '../../utils'; +import { type Dialect, dialect } from '../../utils/schemaValidator'; +import { prepareFilenames } from '../../utils/utils-node'; +import { safeRegister } from '../../utils/utils-node'; +import type { EntitiesFilterConfig } from '../validations/cli'; +import { pullParams, pushParams } from '../validations/cli'; +import type { CockroachCredentials } from '../validations/cockroach'; +import { cockroachCredentials } from '../validations/cockroach'; +import { printConfigConnectionIssues as printCockroachIssues } from '../validations/cockroach'; +import type { Casing, CasingType, CliConfig, Driver, Prefix } from '../validations/common'; +import { configCommonSchema, configMigrations, wrapParam } from '../validations/common'; +import type { GelCredentials } from '../validations/gel'; +import { gelCredentials, printConfigConnectionIssues as printIssuesGel } from '../validations/gel'; +import type { LibSQLCredentials } from '../validations/libsql'; +import { libSQLCredentials, printConfigConnectionIssues as printIssuesLibSQL } from '../validations/libsql'; +import { printConfigConnectionIssues as printMssqlIssues } from '../validations/mssql'; +import type { MssqlCredentials } from '../validations/mssql'; +import { mssqlCredentials } from '../validations/mssql'; +import type { MysqlCredentials } from '../validations/mysql'; +import { mysqlCredentials, printConfigConnectionIssues as printIssuesMysql } from '../validations/mysql'; import { outputs } from '../validations/outputs'; -import { - PostgresCredentials, - postgresCredentials, - printConfigConnectionIssues as printIssuesPg, -} from '../validations/postgres'; +import type { PostgresCredentials } from '../validations/postgres'; +import { postgresCredentials, printConfigConnectionIssues as printIssuesPg } from '../validations/postgres'; +import type { SingleStoreCredentials } from '../validations/singlestore'; import { printConfigConnectionIssues as printIssuesSingleStore, - SingleStoreCredentials, singlestoreCredentials, } from '../validations/singlestore'; -import { - printConfigConnectionIssues as printIssuesSqlite, - SqliteCredentials, - sqliteCredentials, -} from '../validations/sqlite'; +import type { SqliteCredentials } from '../validations/sqlite'; +import { printConfigConnectionIssues as printIssuesSqlite, sqliteCredentials } from '../validations/sqlite'; import { studioCliParams, studioConfig } from '../validations/studio'; import { error } from '../views'; - -// NextJs default config is target: es5, which esbuild-register can't consume -const assertES5 = async (unregister: () => void) => { - try { - require('./_es5.ts'); - } catch (e: any) { - if ('errors' in e && Array.isArray(e.errors) && e.errors.length > 0) { - const es5Error = (e.errors as any[]).filter((it) => it.text?.includes(`("es5") is not supported yet`)).length > 0; - if (es5Error) { - console.log( - error( - `Please change compilerOptions.target from 'es5' to 'es6' or above in your tsconfig.json`, - ), - ); - process.exit(1); - } - } - console.error(e); - process.exit(1); - } -}; - -export const safeRegister = async () => { - const { register } = await import('esbuild-register/dist/node'); - let res: { unregister: () => void }; - try { - res = register({ - format: 'cjs', - loader: 'ts', - }); - } catch { - // tsx fallback - res = { - unregister: () => {}, - }; - } - - // has to be outside try catch to be able to run with tsx - await assertES5(res.unregister); - return res; -}; +import { prepareSnapshotFolderName } from './generate-common'; export const prepareCheckParams = async ( options: { @@ -153,6 +101,7 @@ export type ExportConfig = { dialect: Dialect; schema: string | string[]; sql: boolean; + casing?: CasingType; }; export const prepareGenerateConfig = async ( @@ -211,6 +160,7 @@ export const prepareExportConfig = async ( schema?: string; dialect?: Dialect; sql: boolean; + casing?: CasingType; }, from: 'config' | 'cli', ): Promise => { @@ -231,6 +181,7 @@ export const prepareExportConfig = async ( process.exit(0); } return { + casing: config.casing, dialect: dialect, schema: schema, sql: sql, @@ -264,7 +215,7 @@ export const preparePushConfig = async ( options: Record, from: 'cli' | 'config', ): Promise< - ( + & ( | { dialect: 'mysql'; credentials: MysqlCredentials; @@ -285,15 +236,22 @@ export const preparePushConfig = async ( dialect: 'singlestore'; credentials: SingleStoreCredentials; } - ) & { + | { + dialect: 'mssql'; + credentials: MssqlCredentials; + } + | { + dialect: 'cockroach'; + credentials: CockroachCredentials; + } + ) + & { schemaPath: string | string[]; verbose: boolean; - strict: boolean; force: boolean; - tablesFilter: string[]; - schemasFilter: string[]; + explain: boolean; casing?: CasingType; - entities?: Entities; + filters: EntitiesFilterConfig; } > => { const raw = flattenDatabaseCredentials( @@ -322,22 +280,12 @@ export const preparePushConfig = async ( process.exit(0); } - const tablesFilterConfig = config.tablesFilter; - const tablesFilter = tablesFilterConfig - ? typeof tablesFilterConfig === 'string' - ? [tablesFilterConfig] - : tablesFilterConfig - : []; - - const schemasFilterConfig = config.schemaFilter; - - const schemasFilter = schemasFilterConfig - ? typeof schemasFilterConfig === 'string' - ? [schemasFilterConfig] - : schemasFilterConfig - : []; - - tablesFilter.push(...getTablesFilterByExtensions(config)); + const filters = { + tables: config.tablesFilter, + schemas: config.schemaFilter, + entities: config.entities, + extensions: config.extensionsFilters, + } as const; if (config.dialect === 'postgresql') { const parsed = postgresCredentials.safeParse(config); @@ -349,14 +297,12 @@ export const preparePushConfig = async ( return { dialect: 'postgresql', schemaPath: config.schema, - strict: config.strict ?? false, + explain: (options.explain as boolean) ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, casing: config.casing, - tablesFilter, - schemasFilter, - entities: config.entities, + filters, }; } @@ -369,13 +315,12 @@ export const preparePushConfig = async ( return { dialect: 'mysql', schemaPath: config.schema, - strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, casing: config.casing, - tablesFilter, - schemasFilter, + filters, + explain: false, }; } @@ -389,12 +334,11 @@ export const preparePushConfig = async ( return { dialect: 'singlestore', schemaPath: config.schema, - strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, - tablesFilter, - schemasFilter, + filters, + explain: false, }; } @@ -407,13 +351,12 @@ export const preparePushConfig = async ( return { dialect: 'sqlite', schemaPath: config.schema, - strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, casing: config.casing, - tablesFilter, - schemasFilter, + filters, + explain: false, }; } @@ -426,13 +369,12 @@ export const preparePushConfig = async ( return { dialect: 'turso', schemaPath: config.schema, - strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, casing: config.casing, - tablesFilter, - schemasFilter, + filters, + explain: false, }; } @@ -445,6 +387,43 @@ export const preparePushConfig = async ( process.exit(1); } + if (config.dialect === 'mssql') { + const parsed = mssqlCredentials.safeParse(config); + if (!parsed.success) { + // printIssuesSqlite(config, 'push'); // TODO print issues + process.exit(1); + } + return { + dialect: 'mssql', + schemaPath: config.schema, + verbose: config.verbose ?? false, + force: (options.force as boolean) ?? false, + credentials: parsed.data, + casing: config.casing, + filters, + explain: false, + }; + } + + if (config.dialect === 'cockroach') { + const parsed = cockroachCredentials.safeParse(config); + if (!parsed.success) { + printCockroachIssues(config); + process.exit(1); + } + + return { + dialect: 'cockroach', + schemaPath: config.schema, + verbose: config.verbose ?? false, + force: (options.force as boolean) ?? false, + credentials: parsed.data, + casing: config.casing, + filters, + explain: false, + }; + } + assertUnreachable(config.dialect); }; @@ -477,14 +456,20 @@ export const preparePullConfig = async ( dialect: 'gel'; credentials?: GelCredentials; } + | { + dialect: 'mssql'; + credentials: MssqlCredentials; + } + | { + dialect: 'cockroach'; + credentials: CockroachCredentials; + } ) & { out: string; breakpoints: boolean; casing: Casing; - tablesFilter: string[]; - schemasFilter: string[]; prefix: Prefix; - entities: Entities; + filters: EntitiesFilterConfig; init: boolean; migrationsSchema: string | undefined; migrationsTable: string | undefined; @@ -507,30 +492,12 @@ export const preparePullConfig = async ( const dialect = config.dialect; const { schema, table } = parsed.data.migrations || {}; - const tablesFilterConfig = config.tablesFilter; - const tablesFilter = tablesFilterConfig - ? typeof tablesFilterConfig === 'string' - ? [tablesFilterConfig] - : tablesFilterConfig - : []; - - if (config.extensionsFilters) { - if ( - config.extensionsFilters.includes('postgis') - && dialect === 'postgresql' - ) { - tablesFilter.push( - ...['!geography_columns', '!geometry_columns', '!spatial_ref_sys'], - ); - } - } - - const schemasFilterConfig = config.schemaFilter; // TODO: consistent naming - const schemasFilter = schemasFilterConfig - ? typeof schemasFilterConfig === 'string' - ? [schemasFilterConfig] - : schemasFilterConfig - : []; + const filters = { + tables: config.tablesFilter, + schemas: config.schemaFilter, + entities: config.entities, + extensions: config.extensionsFilters, + } as const; if (dialect === 'postgresql') { const parsed = postgresCredentials.safeParse(config); @@ -545,10 +512,8 @@ export const preparePullConfig = async ( breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, - tablesFilter, - schemasFilter, prefix: config.migrations?.prefix || 'index', - entities: config.entities, + filters, init: !!options.init, migrationsSchema: schema, migrationsTable: table, @@ -567,10 +532,8 @@ export const preparePullConfig = async ( breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, - tablesFilter, - schemasFilter, prefix: config.migrations?.prefix || 'index', - entities: config.entities, + filters, init: !!options.init, migrationsSchema: schema, migrationsTable: table, @@ -590,10 +553,8 @@ export const preparePullConfig = async ( breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, - tablesFilter, - schemasFilter, prefix: config.migrations?.prefix || 'index', - entities: config.entities, + filters, init: !!options.init, migrationsSchema: schema, migrationsTable: table, @@ -612,10 +573,8 @@ export const preparePullConfig = async ( breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, - tablesFilter, - schemasFilter, prefix: config.migrations?.prefix || 'index', - entities: config.entities, + filters, init: !!options.init, migrationsSchema: schema, migrationsTable: table, @@ -634,10 +593,8 @@ export const preparePullConfig = async ( breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, - tablesFilter, - schemasFilter, prefix: config.migrations?.prefix || 'index', - entities: config.entities, + filters, init: !!options.init, migrationsSchema: schema, migrationsTable: table, @@ -656,10 +613,50 @@ export const preparePullConfig = async ( breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, - tablesFilter, - schemasFilter, prefix: config.migrations?.prefix || 'index', - entities: config.entities, + init: !!options.init, + migrationsSchema: schema, + migrationsTable: table, + filters, + }; + } + + if (dialect === 'mssql') { + const parsed = mssqlCredentials.safeParse(config); + if (!parsed.success) { + // printIssuesPg(config); // TODO add issues printing + process.exit(1); + } + + return { + dialect, + out: config.out, + breakpoints: config.breakpoints, + casing: config.casing, + credentials: parsed.data, + prefix: config.migrations?.prefix || 'index', + filters, + init: !!options.init, + migrationsSchema: schema, + migrationsTable: table, + }; + } + + if (dialect === 'cockroach') { + const parsed = cockroachCredentials.safeParse(config); + if (!parsed.success) { + printCockroachIssues(config); + process.exit(1); + } + + return { + dialect, + out: config.out, + breakpoints: config.breakpoints, + casing: config.casing, + credentials: parsed.data, + prefix: config.migrations?.prefix || 'index', + filters, init: !!options.init, migrationsSchema: schema, migrationsTable: table, @@ -773,13 +770,28 @@ export const prepareStudioConfig = async (options: Record) => { }; } + if (dialect === 'cockroach') { + const parsed = cockroachCredentials.safeParse(flattened); + if (!parsed.success) { + printCockroachIssues(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + schema, + host, + port, + credentials, + }; + } + if (dialect === 'gel') { - console.log( - error( - `You can't use 'studio' command with Gel dialect`, - ), - ); - process.exit(1); + throw new Error(`You can't use 'studio' command with Gel dialect`); + } + + if (dialect === 'mssql') { + throw new Error(`You can't use 'studio' command with MsSql dialect yet`); } assertUnreachable(dialect); @@ -892,6 +904,38 @@ export const prepareMigrateConfig = async (configPath: string | undefined) => { process.exit(1); } + if (dialect === 'mssql') { + const parsed = mssqlCredentials.safeParse(flattened); + if (!parsed.success) { + printMssqlIssues(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + out, + credentials, + schema, + table, + }; + } + + if (dialect === 'cockroach') { + const parsed = cockroachCredentials.safeParse(flattened); + if (!parsed.success) { + printCockroachIssues(flattened as Record); + process.exit(1); + } + const credentials = parsed.data; + return { + dialect, + out, + credentials, + schema, + table, + }; + } + assertUnreachable(dialect); }; @@ -903,9 +947,9 @@ export const drizzleConfigFromFile = async ( const defaultTsConfigExists = existsSync(resolve(join(prefix, 'drizzle.config.ts'))); const defaultJsConfigExists = existsSync(resolve(join(prefix, 'drizzle.config.js'))); - const defaultJsonConfigExists = existsSync( - join(resolve('drizzle.config.json')), - ); + // const defaultJsonConfigExists = existsSync( + // join(resolve('drizzle.config.json')), + // ); const defaultConfigPath = defaultTsConfigExists ? 'drizzle.config.ts' @@ -930,10 +974,11 @@ export const drizzleConfigFromFile = async ( if (!isExport) console.log(chalk.grey(`Reading config file '${path}'`)); - const { unregister } = await safeRegister(); - const required = require(`${path}`); - const content = required.default ?? required; - unregister(); + const content = await safeRegister(async () => { + const required = require(`${path}`); + const content = required.default ?? required; + return content; + }); // --- get response and then check by each dialect independently const res = configCommonSchema.safeParse(content); @@ -947,3 +992,54 @@ export const drizzleConfigFromFile = async ( return res.data; }; + +export const migrateToFoldersV3 = (out: string) => { + // if there is meta folder - and there is a journal - it's version 8 + const metaPath = join(out, 'meta'); + const journalPath = join(metaPath, '_journal.json'); + if (existsSync(metaPath) && existsSync(journalPath)) { + const journal: Journal = JSON.parse(readFileSync(journalPath).toString()); + const sqlFiles = readdirSync(out); + for (const entry of journal.entries) { + const folderName = prepareSnapshotFolderName(entry.when); + // Reading Snapshots files + const [snapshotPrefix, ...rest] = entry.tag.split('_'); + const migrationName = rest.join('_'); + const oldSnapshotPath = join(metaPath, `${snapshotPrefix}_snapshot.json`); + + if (!existsSync(oldSnapshotPath)) { + // If for some reason this happens we need to throw an error + // This can't happen unless there were wrong drizzle-kit migrations usage + console.error('No snapshot was found'); + process.exit(1); + } + + const oldSnapshot = readFileSync(oldSnapshotPath); + + // Reading SQL files + let oldSqlPath = join(out, `${entry.tag}.sql`); + const sqlFileFromJournal = join(out, `${entry.tag}.sql`); + if (!existsSync(sqlFileFromJournal)) { + // We will try to find it by prefix, but this is a sign that something went wrong + // with properly using drizzle-kit migrations + const sqlFileName = sqlFiles.find((file) => file.startsWith(snapshotPrefix)); + if (!sqlFileName) continue; + if (sqlFileName?.length > 1) { + console.error('Several sql files were found'); + process.exit(1); + } + } + const oldSql = readFileSync(oldSqlPath); + + mkdirSync(join(out, `${folderName}_${migrationName}`)); + writeFileSync(join(out, `${folderName}_${migrationName}/snapshot.json`), oldSnapshot); + writeFileSync(join(out, `${folderName}_${migrationName}/migration.sql`), oldSql); + + unlinkSync(oldSqlPath); + } + + rm(metaPath, { recursive: true, force: true }, () => {}); + return true; + } + return false; +}; diff --git a/drizzle-kit/src/cli/connections.ts b/drizzle-kit/src/cli/connections.ts index e924a1349f..1a7b1e3c40 100644 --- a/drizzle-kit/src/cli/connections.ts +++ b/drizzle-kit/src/cli/connections.ts @@ -3,28 +3,34 @@ import type { SQLiteCloudRowset } from '@sqlitecloud/drivers'; import type { AwsDataApiPgQueryResult, AwsDataApiSessionOptions } from 'drizzle-orm/aws-data-api/pg'; import type { MigrationConfig, MigratorInitFailResponse } from 'drizzle-orm/migrator'; import type { PreparedQueryConfig } from 'drizzle-orm/pg-core'; +import type { config } from 'mssql'; import fetch from 'node-fetch'; import ws from 'ws'; -import { assertUnreachable } from '../global'; -import type { ProxyParams } from '../serializer/studio'; -import { - type DB, - LibSQLDB, - normalisePGliteUrl, - normaliseSQLiteUrl, - type Proxy, - type SQLiteDB, - type TransactionProxy, -} from '../utils'; -import { assertPackages, checkPackage } from './utils'; -import { GelCredentials } from './validations/gel'; -import { LibSQLCredentials } from './validations/libsql'; +import type { TransactionProxy } from '../utils'; +import { assertUnreachable } from '../utils'; +import type { LibSQLDB } from '../utils'; +import type { DB, Proxy, SQLiteDB } from '../utils'; +import { normaliseSQLiteUrl } from '../utils/utils-node'; +import { JSONB } from '../utils/when-json-met-bigint'; +import type { ProxyParams } from './commands/studio'; +import { assertPackages, checkPackage, QueryError } from './utils'; +import type { GelCredentials } from './validations/gel'; +import type { LibSQLCredentials } from './validations/libsql'; +import type { MssqlCredentials } from './validations/mssql'; import type { MysqlCredentials } from './validations/mysql'; import { withStyle } from './validations/outputs'; import type { PostgresCredentials } from './validations/postgres'; -import { SingleStoreCredentials } from './validations/singlestore'; +import type { SingleStoreCredentials } from './validations/singlestore'; import type { SqliteCredentials } from './validations/sqlite'; +const normalisePGliteUrl = (it: string) => { + if (it.startsWith('file:')) { + return it.substring(5); + } + + return it; +}; + export const preparePostgresDB = async ( credentials: PostgresCredentials | { driver: 'pglite'; @@ -49,7 +55,7 @@ export const preparePostgresDB = async ( const { driver } = credentials; if (driver === 'aws-data-api') { assertPackages('@aws-sdk/client-rds-data'); - const { RDSDataClient, ExecuteStatementCommand, TypeHint } = await import( + const { RDSDataClient } = await import( '@aws-sdk/client-rds-data' ); const { AwsDataApiSession, drizzle } = await import( @@ -73,7 +79,7 @@ export const preparePostgresDB = async ( undefined, ); - const db = drizzle(rdsClient, config); + const db = drizzle({ client: rdsClient, ...config }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; @@ -111,7 +117,7 @@ export const preparePostgresDB = async ( const result = await prepared.execute(); return result.rows; }; - const transactionProxy: TransactionProxy = async (queries) => { + const transactionProxy: TransactionProxy = async (_queries) => { throw new Error('Transaction not supported'); }; @@ -132,7 +138,7 @@ export const preparePostgresDB = async ( const pglite = 'client' in credentials ? credentials.client : new PGlite(normalisePGliteUrl(credentials.url)); await pglite.waitReady; - const drzl = drizzle(pglite); + const drzl = drizzle({ client: pglite }); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; @@ -147,6 +153,8 @@ export const preparePostgresDB = async ( const query = async (sql: string, params: any[] = []) => { const result = await pglite.query(sql, params, { parsers, + }).catch((e) => { + throw new QueryError(e, sql, params); }); return result.rows as T[]; }; @@ -156,6 +164,8 @@ export const preparePostgresDB = async ( const result = await pglite.query(params.sql, preparedParams, { rowMode: params.mode, parsers, + }).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); }); return result.rows; }; @@ -215,6 +225,9 @@ export const preparePostgresDB = async ( if (typeId === pg.types.builtins.INTERVAL) { return (val: any) => val; } + if (typeId === pg.types.builtins.JSON || typeId === pg.types.builtins.JSONB) { + return (val: any) => JSONB.parse(val); + } // @ts-ignore return pg.types.getTypeParser(typeId, format); }, @@ -224,7 +237,7 @@ export const preparePostgresDB = async ( ? new pg.Pool({ connectionString: credentials.url, max: 1 }) : new pg.Pool({ ...credentials, ssl, max: 1 }); - const db = drizzle(client); + const db = drizzle({ client }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; @@ -234,6 +247,8 @@ export const preparePostgresDB = async ( text: sql, values: params ?? [], types, + }).catch((e) => { + throw new QueryError(e, sql, params || []); }); return result.rows; }; @@ -244,6 +259,8 @@ export const preparePostgresDB = async ( values: params.params, ...(params.mode === 'array' && { rowMode: 'array' }), types, + }).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); }); return result.rows; }; @@ -296,21 +313,27 @@ export const preparePostgresDB = async ( client.options.serializers['114'] = transparentParser; client.options.serializers['3802'] = transparentParser; - const db = drizzle(client); + const db = drizzle({ client }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]) => { - const result = await client.unsafe(sql, params); + const result = await client.unsafe(sql, params ?? []).catch((e) => { + throw new QueryError(e, sql, params || []); + }); return result as any[]; }; const proxy: Proxy = async (params) => { if (params.mode === 'array') { - return await client.unsafe(params.sql, params.params).values(); + return client.unsafe(params.sql, params.params).values().catch((e) => { + throw new QueryError(e, params.sql, params.params || []); + }); } - return await client.unsafe(params.sql, params.params); + return client.unsafe(params.sql, params.params).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); + }); }; const transactionProxy: TransactionProxy = async (queries) => { @@ -380,7 +403,7 @@ export const preparePostgresDB = async ( await client.connect(); - const db = drizzle(client); + const db = drizzle({ client }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; @@ -390,6 +413,8 @@ export const preparePostgresDB = async ( text: sql, values: params ?? [], types, + }).catch((e) => { + throw new QueryError(e, sql, params || []); }); return result.rows; }; @@ -400,6 +425,8 @@ export const preparePostgresDB = async ( values: params.params, ...(params.mode === 'array' && { rowMode: 'array' }), types, + }).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); }); return result.rows; }; @@ -480,7 +507,7 @@ export const preparePostgresDB = async ( : new Pool({ ...credentials, max: 1, ssl }); neonConfig.webSocketConstructor = ws; - const db = drizzle(client); + const db = drizzle({ client: client as any }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; @@ -490,6 +517,8 @@ export const preparePostgresDB = async ( text: sql, values: params ?? [], types, + }).catch((e) => { + throw new QueryError(e, sql, params || []); }); return result.rows; }; @@ -500,6 +529,8 @@ export const preparePostgresDB = async ( values: params.params, ...(params.mode === 'array' && { rowMode: 'array' }), types, + }).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); }); return result.rows; }; @@ -549,13 +580,13 @@ export const preparePostgresDB = async ( ssl, max: 1, }); - const db = drizzle(client); + const db = drizzle({ client }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]) => { - const result = await client.unsafe(sql, params); + const result = await client.unsafe(sql, params ?? []); return result; }; @@ -588,7 +619,91 @@ export const preparePostgresDB = async ( console.error( "To connect to Postgres database - please install either of 'pg', 'postgres', 'bun', '@neondatabase/serverless' or '@vercel/postgres' drivers", ); - console.warn("For the 'bun' driver, run your script using: bun --bun"); + process.exit(1); +}; + +export const prepareCockroach = async ( + credentials: PostgresCredentials, +): Promise< + DB & { + proxy: Proxy; + migrate: (config: string | MigrationConfig) => Promise; + } +> => { + if (await checkPackage('pg')) { + const { default: pg } = await import('pg'); + const { drizzle } = await import('drizzle-orm/cockroach'); + const { migrate } = await import('drizzle-orm/cockroach/migrator'); + + const ssl = 'ssl' in credentials + ? credentials.ssl === 'prefer' + || credentials.ssl === 'require' + || credentials.ssl === 'allow' + ? { rejectUnauthorized: false } + : credentials.ssl === 'verify-full' + ? {} + : credentials.ssl + : {}; + + // Override pg default date parsers + const types: { getTypeParser: typeof pg.types.getTypeParser } = { + // @ts-ignore + getTypeParser: (typeId, format) => { + if (typeId === pg.types.builtins.TIMESTAMPTZ) { + return (val: any) => val; + } + if (typeId === pg.types.builtins.TIMESTAMP) { + return (val: any) => val; + } + if (typeId === pg.types.builtins.DATE) { + return (val: any) => val; + } + if (typeId === pg.types.builtins.INTERVAL) { + return (val: any) => val; + } + // @ts-ignore + return pg.types.getTypeParser(typeId, format); + }, + }; + + const client = 'url' in credentials + ? new pg.Pool({ connectionString: credentials.url, max: 1 }) + : new pg.Pool({ ...credentials, ssl, max: 1 }); + + const db = drizzle({ client }); + const migrateFn = async (config: MigrationConfig) => { + return migrate(db, config); + }; + + const query = async (sql: string, params?: any[]) => { + const result = await client.query({ + text: sql, + values: params ?? [], + types, + }).catch((e) => { + throw new QueryError(e, sql, params || []); + }); + return result.rows; + }; + + const proxy: Proxy = async (params: ProxyParams) => { + const result = await client.query({ + text: params.sql, + values: params.params, + ...(params.mode === 'array' && { rowMode: 'array' }), + types, + }).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); + }); + return result.rows; + }; + + return { query, proxy, migrate: migrateFn }; + } + + console.error( + "To connect to Cockroach - please install 'pg' package", + ); process.exit(1); }; @@ -621,9 +736,9 @@ To link your project, please refer https://docs.geldata.com/reference/cli/gel_in throw error; } } else if ('url' in credentials) { - 'tlsSecurity' in credentials - ? client = gel.createClient({ dsn: credentials.url, tlsSecurity: credentials.tlsSecurity, concurrency: 1 }) - : client = gel.createClient({ dsn: credentials.url, concurrency: 1 }); + client = 'tlsSecurity' in credentials + ? gel.createClient({ dsn: credentials.url, tlsSecurity: credentials.tlsSecurity, concurrency: 1 }) + : gel.createClient({ dsn: credentials.url, concurrency: 1 }); } else { gel.createClient({ ...credentials, concurrency: 1 }); } @@ -634,7 +749,7 @@ To link your project, please refer https://docs.geldata.com/reference/cli/gel_in }; const proxy: Proxy = async (params: ProxyParams) => { - const { method, mode, params: sqlParams, sql, typings } = params; + const { mode, params: sqlParams, sql } = params; let result: any[]; switch (mode) { @@ -719,7 +834,7 @@ export const connectToSingleStore = async ( ? await createConnection(result.url) : await createConnection(result.credentials!); // needed for some reason! - const db = drizzle(connection); + const db = drizzle({ client: connection }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; @@ -819,7 +934,7 @@ export const connectToMySQL = async ( ? await createConnection(result.url) : await createConnection(result.credentials!); // needed for some reason! - const db = drizzle(connection); + const db = drizzle({ client: connection }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; @@ -840,6 +955,8 @@ export const connectToMySQL = async ( sql, values: params, typeCast, + }).catch((e) => { + throw new QueryError(e, sql, params || []); }); return res[0] as any; }; @@ -850,6 +967,8 @@ export const connectToMySQL = async ( values: params.params, rowsAsArray: params.mode === 'array', typeCast, + }).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); }); return result[0] as any[]; }; @@ -859,7 +978,10 @@ export const connectToMySQL = async ( try { await connection.beginTransaction(); for (const query of queries) { - const res = await connection.query(query.sql); + const res = await connection.query({ + sql: query.sql, + typeCast, + }); results.push(res[0]); } await connection.commit(); @@ -890,13 +1012,15 @@ export const connectToMySQL = async ( const connection = new Client(result); - const db = drizzle(connection); + const db = drizzle({ client: connection }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]): Promise => { - const res = await connection.execute(sql, params); + const res = await connection.execute(sql, params).catch((e) => { + throw new QueryError(e, sql, params || []); + }); return res.rows as T[]; }; const proxy: Proxy = async (params: ProxyParams) => { @@ -904,7 +1028,9 @@ export const connectToMySQL = async ( params.sql, params.params, params.mode === 'array' ? { as: 'array' } : undefined, - ); + ).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); + }); return result.rows; }; @@ -955,13 +1081,13 @@ export const connectToMySQL = async ( ssl, }); - const db = drizzle(client); + const db = drizzle({ client }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]) => { - const result = await client.unsafe(sql, params); + const result = await client.unsafe(sql, params ?? []); return result; }; @@ -1001,7 +1127,82 @@ export const connectToMySQL = async ( console.error( "To connect to MySQL database - please install either of 'mysql2', 'bun' or '@planetscale/database' drivers", ); - console.warn("For the 'bun' driver, run your script using: bun --bun"); + process.exit(1); +}; + +function parseMssqlUrl(url: URL): config { + return { + user: url.username, + password: url.password, + server: url.hostname, + port: Number.parseInt(url.port, 10), + database: url.pathname.replace(/^\//, ''), + options: { + encrypt: url.searchParams.get('encrypt') === 'true', + trustServerCertificate: url.searchParams.get('trustServerCertificate') === 'true', + }, + }; +} + +const parseMssqlCredentials = (credentials: MssqlCredentials) => { + if ('url' in credentials) { + try { + const url = new URL(credentials.url); + const parsedCredentials = parseMssqlUrl(url); + return { + database: parsedCredentials.database, + credentials: parsedCredentials, + }; + } catch { + return { url: credentials.url }; + } + } else { + return { + database: credentials.database, + credentials, + }; + } +}; + +export const connectToMsSQL = async ( + it: MssqlCredentials, +): Promise<{ + db: DB; + migrate: (config: MigrationConfig) => Promise; +}> => { + const result = parseMssqlCredentials(it); + + if (await checkPackage('mssql')) { + const mssql = await import('mssql'); + const { drizzle } = await import('drizzle-orm/node-mssql'); + const { migrate } = await import('drizzle-orm/node-mssql/migrator'); + const connection = result.url + ? await mssql.default.connect(result.url) + : await mssql.default.connect(result.credentials!); + + const db = drizzle({ client: connection }); + const migrateFn = async (config: MigrationConfig) => { + return migrate(db, config); + }; + + const query: DB['query'] = async ( + sql: string, + ): Promise => { + const res = await connection.query(sql).catch((e) => { + throw new QueryError(e, sql, []); + }); + return res.recordset as any; + }; + + return { + db: { query }, + migrate: migrateFn, + }; + } + + console.error( + "To connect to MsSQL database - please install 'mssql' driver", + ); process.exit(1); }; @@ -1050,7 +1251,8 @@ const preparePGliteParams = (params: any[]) => { export const connectToSQLite = async ( credentials: SqliteCredentials, ): Promise< - SQLiteDB & { + & SQLiteDB + & { packageName: | 'd1-http' | '@libsql/client' @@ -1103,13 +1305,17 @@ export const connectToSQLite = async ( Authorization: `Bearer ${credentials.token}`, }, }, - ); + ).catch((e) => { + throw new QueryError(e, sql, params || []); + }); const data = (await res.json()) as D1Response; if (!data.success) { - throw new Error( - data.errors.map((it) => `${it.code}: ${it.message}`).join('\n'), + throw new QueryError( + new Error(data.errors.map((it) => `${it.code}: ${it.message}`).join('\n')), + sql, + params || [], ); } @@ -1200,7 +1406,7 @@ export const connectToSQLite = async ( const { migrate } = await import('drizzle-orm/sqlite-cloud/migrator'); const client = new Database(credentials.url); - const drzl = drizzle(client); + const drzl = drizzle({ client }); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; @@ -1291,7 +1497,7 @@ export const connectToSQLite = async ( const client = createClient({ url: normaliseSQLiteUrl(credentials.url, 'libsql'), }); - const drzl = drizzle(client); + const drzl = drizzle({ client }); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; @@ -1311,6 +1517,8 @@ export const connectToSQLite = async ( const result = await client.execute({ sql: params.sql, args: preparedParams, + }).catch((e) => { + throw new QueryError(e, params.sql, params.params || []); }); if (params.mode === 'array') { @@ -1342,49 +1550,60 @@ export const connectToSQLite = async ( return { query, run, packageName: '@libsql/client', proxy, transactionProxy, migrate: migrateFn }; } - // if (await checkPackage('@tursodatabase/database')) { - // console.log(withStyle.info(`Using '@tursodatabase/database' driver for database querying`)); - // const { Database } = await import('@tursodatabase/database'); - // const { drizzle } = await import('drizzle-orm/tursodatabase/database'); - // const { migrate } = await import('drizzle-orm/tursodatabase/migrator'); - - // const client = new Database(normaliseSQLiteUrl(credentials.url, '@tursodatabase/database')); - // const drzl = drizzle(client); - // const migrateFn = async (config: MigrationConfig) => { - // return migrate(drzl, config); - // }; - - // const query = async (sql: string, params?: any[]) => { - // const stmt = client.prepare(sql).bind(preparePGliteParams(params || [])); - // const res = await stmt.all(); - // return res as T[]; - // }; - - // const proxy = async (params: ProxyParams) => { - // const preparedParams = prepareSqliteParams(params.params || []); - // const stmt = client.prepare(params.sql).bind(preparedParams); - - // return stmt.raw(params.mode === 'array').all(); - // }; - - // const transactionProxy: TransactionProxy = async (queries) => { - // const results: (any[] | Error)[] = []; - // try { - // const tx = client.transaction(async () => { - // for (const query of queries) { - // const result = await client.prepare(query.sql).all(); - // results.push(result); - // } - // }); - // await tx(); - // } catch (error) { - // results.push(error as Error); - // } - // return results; - // }; - - // return { query, packageName: '@tursodatabase/database', proxy, transactionProxy, migrate: migrateFn }; - // } + if (await checkPackage('@tursodatabase/database')) { + console.log(withStyle.info(`Using '@tursodatabase/database' driver for database querying`)); + const { Database } = await import('@tursodatabase/database'); + const { drizzle } = await import('drizzle-orm/tursodatabase/database'); + const { migrate } = await import('drizzle-orm/tursodatabase/migrator'); + + const client = new Database(normaliseSQLiteUrl(credentials.url, '@tursodatabase/database')); + const drzl = drizzle({ client }); + const migrateFn = async (config: MigrationConfig) => { + return migrate(drzl, config); + }; + + const query = async (sql: string, params?: any[]) => { + const stmt = client.prepare(sql).bind(preparePGliteParams(params || [])); + const res = await stmt.all(); + return res as T[]; + }; + + const proxy = async (params: ProxyParams) => { + const preparedParams = prepareSqliteParams(params.params || []); + const stmt = client.prepare(params.sql).bind(preparedParams); + + return stmt.raw(params.mode === 'array').all(); + }; + + const transactionProxy: TransactionProxy = async (queries) => { + const results: (any[] | Error)[] = []; + try { + const tx = client.transaction(async () => { + for (const query of queries) { + const result = await client.prepare(query.sql).all(); + results.push(result); + } + }); + await tx(); + } catch (error) { + results.push(error as Error); + } + return results; + }; + + return { + query, + packageName: '@tursodatabase/database', + proxy, + transactionProxy, + migrate: migrateFn, + run: async (query: string) => { + await client.exec(query).catch((e) => { + throw new QueryError(e, query, []); + }); + }, + }; + } if (await checkPackage('better-sqlite3')) { console.log(withStyle.info(`Using 'better-sqlite3' driver for database querying`)); @@ -1395,16 +1614,18 @@ export const connectToSQLite = async ( const sqlite = new Database( normaliseSQLiteUrl(credentials.url, 'better-sqlite'), ); - const drzl = drizzle(sqlite); + const drzl = drizzle({ client: sqlite }); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; - const query = async (sql: string, params: any[] = []) => { - return sqlite.prepare(sql).bind(params).all() as T[]; - }; - const run = async (query: string) => { - sqlite.prepare(query).run(); + const db: SQLiteDB = { + query: async (sql: string, params: any[] = []) => { + return sqlite.prepare(sql).bind(params).all() as T[]; + }, + run: async (query: string) => { + sqlite.prepare(query).run(); + }, }; const proxy: Proxy = async (params) => { @@ -1451,7 +1672,7 @@ export const connectToSQLite = async ( return results; }; - return { query, run, packageName: 'better-sqlite3', proxy, transactionProxy, migrate: migrateFn }; + return { ...db, packageName: 'better-sqlite3', proxy, transactionProxy, migrate: migrateFn }; } if (await checkPackage('bun')) { @@ -1465,13 +1686,13 @@ export const connectToSQLite = async ( filename: normaliseSQLiteUrl(credentials.url, 'bun'), }); - const db = drizzle(client); + const db = drizzle({ client }); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]) => { - const result = await client.unsafe(sql, params); + const result = await client.unsafe(sql, params ?? []); return result; }; const run = async (sql: string) => { @@ -1512,9 +1733,8 @@ export const connectToSQLite = async ( } console.log( - "Please install either 'better-sqlite3', '@libsql/client' or 'bun' for Drizzle Kit to connect to SQLite databases", + "Please install either 'better-sqlite3', 'bun', '@libsql/client' or '@tursodatabase/database' for Drizzle Kit to connect to SQLite databases", ); - console.warn("For the 'bun' driver, run your script using: bun --bun"); process.exit(1); }; @@ -1527,73 +1747,77 @@ export const connectToLibSQL = async (credentials: LibSQLCredentials): Promise< transactionProxy: TransactionProxy; } > => { - if (await checkPackage('@libsql/client')) { - const { createClient } = await import('@libsql/client'); - const { drizzle } = await import('drizzle-orm/libsql'); - const { migrate } = await import('drizzle-orm/libsql/migrator'); - - const client = createClient({ - url: normaliseSQLiteUrl(credentials.url, 'libsql'), - authToken: credentials.authToken, - }); - const drzl = drizzle(client); - const migrateFn = async (config: MigrationConfig) => { - return migrate(drzl, config); - }; - - const db: LibSQLDB = { - query: async (sql: string, params?: any[]) => { - const res = await client.execute({ sql, args: params || [] }); - return res.rows as T[]; - }, - run: async (query: string) => { - await client.execute(query); - }, - batchWithPragma: async (queries: string[]) => { - await client.migrate(queries); - }, - }; + if (!(await checkPackage('@libsql/client'))) { + console.log( + "Please install '@libsql/client' for Drizzle Kit to connect to LibSQL databases", + ); + process.exit(1); + } - type Transaction = Awaited>; + const { createClient } = await import('@libsql/client'); + const { drizzle } = await import('drizzle-orm/libsql'); + const { migrate } = await import('drizzle-orm/libsql/migrator'); - const proxy = async (params: ProxyParams) => { - const preparedParams = prepareSqliteParams(params.params || []); - const result = await client.execute({ - sql: params.sql, - args: preparedParams, + const client = createClient({ + url: normaliseSQLiteUrl(credentials.url, 'libsql'), + authToken: credentials.authToken, + }); + const drzl = drizzle({ client }); + const migrateFn = async (config: MigrationConfig) => { + return migrate(drzl, config); + }; + + const db: LibSQLDB = { + query: async (sql: string, params?: any[]) => { + const res = await client.execute({ sql, args: params || [] }).catch((e) => { + throw new QueryError(e, sql, params || []); }); + return res.rows as T[]; + }, + run: async (query: string) => { + await client.execute(query).catch((e) => { + throw new QueryError(e, query, []); + }); + }, + batchWithPragma: async (queries: string[]) => { + await client.migrate(queries); + }, + }; + + type Transaction = Awaited>; + + const proxy = async (params: ProxyParams) => { + const preparedParams = prepareSqliteParams(params.params || []); + const result = await client.execute({ + sql: params.sql, + args: preparedParams, + }); - if (params.mode === 'array') { - return result.rows.map((row) => Object.values(row)); - } else { - return result.rows; - } - }; - - const transactionProxy: TransactionProxy = async (queries) => { - const results: (any[] | Error)[] = []; - let transaction: Transaction | null = null; - try { - transaction = await client.transaction(); - for (const query of queries) { - const result = await transaction.execute(query.sql); - results.push(result.rows); - } - await transaction.commit(); - } catch (error) { - results.push(error as Error); - await transaction?.rollback(); - } finally { - transaction?.close(); + if (params.mode === 'array') { + return result.rows.map((row) => Object.values(row)); + } else { + return result.rows; + } + }; + + const transactionProxy: TransactionProxy = async (queries) => { + const results: (any[] | Error)[] = []; + let transaction: Transaction | null = null; + try { + transaction = await client.transaction(); + for (const query of queries) { + const result = await transaction.execute(query.sql); + results.push(result.rows); } - return results; - }; - - return { ...db, packageName: '@libsql/client', proxy, transactionProxy, migrate: migrateFn }; - } + await transaction.commit(); + } catch (error) { + results.push(error as Error); + await transaction?.rollback(); + } finally { + transaction?.close(); + } + return results; + }; - console.log( - "Please install '@libsql/client' for Drizzle Kit to connect to LibSQL databases", - ); - process.exit(1); + return { ...db, packageName: '@libsql/client', proxy, transactionProxy, migrate: migrateFn }; }; diff --git a/drizzle-kit/src/cli/highlighter.ts b/drizzle-kit/src/cli/highlighter.ts new file mode 100644 index 0000000000..89738f7e02 --- /dev/null +++ b/drizzle-kit/src/cli/highlighter.ts @@ -0,0 +1,304 @@ +import chalk from 'chalk'; + +type TokenType = + | 'keyword' + | 'string' + | 'variable' + | 'operator' + | 'type' + | 'number' + | 'comment' + | 'built_in' + | 'literal' + | 'whitespace' + | 'punctuation' + | 'identifier'; + +interface Token { + type: TokenType; + value: string; +} + +const KEYWORDS = new Set([ + 'WITH', + 'AS', + 'SELECT', + 'FROM', + 'JOIN', + 'ON', + 'WHERE', + 'BETWEEN', + 'AND', + 'GROUP', + 'BY', + 'ORDER', + 'LIMIT', + 'DESC', + 'ASC', + 'IS', + 'NOT', + 'NULL', + 'OVER', + 'PARTITION', + 'RANK', + 'HAVING', + 'INSERT', + 'INTO', + 'VALUES', + 'UPDATE', + 'CASCADE', + 'SET', + 'DELETE', + 'CREATE', + 'SCHEMA', + 'TABLE', + 'COLUMN', + 'ALTER', + 'DROP', + 'UNION', + 'ALL', + 'DISTINCT', + 'CASE', + 'WHEN', + 'THEN', + 'ELSE', + 'END', + 'LEFT', + 'RIGHT', + 'INNER', + 'OUTER', + 'DEFAULT', + 'UNIQUE', + 'TYPE', + 'ADD', + 'CONSTRAINT', + 'REFERENCES', + 'FOREIGN', + 'KEY', +]); + +const BUILT_INS = new Set([ + 'SUM', + 'COUNT', + 'ROUND', + 'AVG', + 'MIN', + 'MAX', + 'COALESCE', + 'NOW', + 'DATE', + 'CAST', + 'CONVERT', + 'SUBSTRING', + 'TRIM', + 'LOWER', + 'UPPER', + 'CURRENT_TIMESTAMP', +]); + +const TYPES = new Set([ + 'int', + 'integer', + 'varchar', + 'char', + 'text', + 'date', + 'timestamp', + 'numeric', + 'decimal', + 'float', + 'double', + 'boolean', + 'json', + 'jsonb', +]); + +const LITERALS = new Set(['true', 'false']); + +function getTokenType(value: string): TokenType { + const upper = value.toUpperCase(); + if (KEYWORDS.has(upper)) return 'keyword'; + if (BUILT_INS.has(upper)) return 'built_in'; + if (TYPES.has(value.toLowerCase())) return 'type'; + if (LITERALS.has(value.toLowerCase())) return 'literal'; + return 'identifier'; +} + +export function tokenize(code: string): Token[] { + const tokens: Token[] = []; + let current = 0; + + while (current < code.length) { + const char = code[current]; + if (!char) break; // Safety check + + // Whitespace + if (/\s/.test(char)) { + let value = ''; + while (current < code.length && /\s/.test(code[current] || '')) { + value += code[current]; + current++; + } + tokens.push({ type: 'whitespace', value }); + continue; + } + + // Strings (single quotes) + if (char === "'") { + let value = "'"; + current++; + while (current < code.length) { + const c = code[current]; + const next = code[current + 1]; + if (c === "'" && next === "'") { + value += "''"; + current += 2; + } else if (c === "'") { + value += "'"; + current++; + break; + } else { + value += c || ''; + current++; + } + } + tokens.push({ type: 'string', value }); + continue; + } + + // Numbers + if (/[0-9]/.test(char)) { + let value = ''; + while (current < code.length && /[0-9.]/.test(code[current] || '')) { + value += code[current]; + current++; + } + tokens.push({ type: 'number', value }); + continue; + } + + // Comments (-- style) + if (char === '-' && code[current + 1] === '-') { + let value = ''; + while (current < code.length && code[current] !== '\n') { + value += code[current] || ''; + current++; + } + tokens.push({ type: 'comment', value }); + continue; + } + + // Operators and Punctuation + if (/[(),;.]/.test(char)) { + tokens.push({ type: 'punctuation', value: char }); + current++; + continue; + } + + if (/[=<>!+\-*/|:]/.test(char)) { + let value = ''; + while (current < code.length && /[=<>!+\-*/|:]/.test(code[current] || '')) { + value += code[current]; + current++; + } + tokens.push({ type: 'operator', value }); + continue; + } + + // Quoted Identifiers ("" or ``) + if (char === '"' || char === '`') { + const quote = char; + let value = quote; + current++; + while (current < code.length) { + const c = code[current]; + const next = code[current + 1]; + if (c === quote && next === quote) { + value += quote + quote; + current += 2; + } else if (c === quote) { + value += quote; + current++; + break; + } else { + value += c || ''; + current++; + } + } + tokens.push({ type: 'identifier', value }); + continue; + } + + // Bracket Identifiers ([]) + if (char === '[') { + let value = '['; + current++; + while (current < code.length) { + const c = code[current]; + const next = code[current + 1]; + if (c === ']' && next === ']') { + value += ']]'; + current += 2; + } else if (c === ']') { + value += ']'; + current++; + break; + } else { + value += c || ''; + current++; + } + } + tokens.push({ type: 'identifier', value }); + continue; + } + + // Identifiers and Keywords + if (/[a-zA-Z_]/.test(char)) { + let value = ''; + while (current < code.length && /[a-zA-Z0-9_]/.test(code[current] || '')) { + value += code[current]; + current++; + } + tokens.push({ type: getTokenType(value), value }); + continue; + } + + // Fallback for unknown characters + tokens.push({ type: 'identifier', value: char }); + current++; + } + + return tokens; +} + +export function highlightSQL(code: string): string { + const tokens = tokenize(code); + return tokens.map((token) => { + switch (token.type) { + case 'keyword': + return chalk.redBright.bold(token.value); + case 'string': + return chalk.green(token.value); + case 'variable': + return chalk.blue(token.value); // Not explicitly detected in simple lexer, usually identifiers + case 'operator': + return chalk.gray(token.value); + case 'type': + return chalk.magenta(token.value); + case 'number': + return chalk.yellow(token.value); + case 'comment': + return chalk.gray.italic(token.value); + case 'built_in': + return chalk.redBright(token.value); + case 'literal': + return chalk.yellow(token.value); + case 'identifier': + return chalk.italic(token.value); // Default color for identifiers + case 'punctuation': + return chalk.gray(token.value); + default: + return token.value; + } + }).join(''); +} diff --git a/drizzle-kit/src/cli/index.ts b/drizzle-kit/src/cli/index.ts index de8de5b0c1..93fc0e5809 100644 --- a/drizzle-kit/src/cli/index.ts +++ b/drizzle-kit/src/cli/index.ts @@ -1,7 +1,8 @@ import { command, getCommandNameWithParents, run } from '@drizzle-team/brocli'; import chalk from 'chalk'; -import { check, drop, exportRaw, generate, migrate, pull, push, studio, up } from './schema'; -import { ormCoreVersions } from './utils'; +import { highlightSQL } from './highlighter'; +import { check, exportRaw, generate, migrate, pull, push, studio, up } from './schema'; +import { ormCoreVersions, QueryError } from './utils'; import { error } from './views'; const version = async () => { @@ -13,11 +14,19 @@ const version = async () => { console.log(chalk.gray(versions), '\n'); }; -const legacyCommand = (name: string, newName: string) => { +const legacyCommand = ( + { name, newName, customMessage }: { name: string; newName?: string; customMessage?: string }, +) => { return command({ name, hidden: true, handler: () => { + // in this case command was deleted and there is no new command + if (!newName) { + console.log( + `This command is deprecated. ${customMessage}`, + ); + } console.log( `This command is deprecated, please use updated '${newName}' command (see https://orm.drizzle.team/kit-docs/upgrade-21#how-to-migrate-to-0210)`, ); @@ -26,26 +35,30 @@ const legacyCommand = (name: string, newName: string) => { }; const legacy = [ - legacyCommand('generate:pg', 'generate'), - legacyCommand('generate:mysql', 'generate'), - legacyCommand('generate:sqlite', 'generate'), - legacyCommand('push:pg', 'push'), - legacyCommand('push:mysql', 'push'), - legacyCommand('push:sqlite', 'push'), - legacyCommand('introspect:pg', 'introspect'), - legacyCommand('introspect:mysql', 'introspect'), - legacyCommand('introspect:sqlite', 'introspect'), - legacyCommand('up:pg', 'up'), - legacyCommand('up:mysql', 'up'), - legacyCommand('up:sqlite', 'up'), - legacyCommand('check:pg', 'check'), - legacyCommand('check:mysql', 'check'), - legacyCommand('check:sqlite', 'check'), + legacyCommand({ name: 'generate:pg', newName: 'generate' }), + legacyCommand({ name: 'generate:mysql', newName: 'generate' }), + legacyCommand({ name: 'generate:sqlite', newName: 'generate' }), + legacyCommand({ name: 'push:pg', newName: 'push' }), + legacyCommand({ name: 'push:mysql', newName: 'push' }), + legacyCommand({ name: 'push:sqlite', newName: 'push' }), + legacyCommand({ name: 'introspect:pg', newName: 'introspect' }), + legacyCommand({ name: 'introspect:mysql', newName: 'introspect' }), + legacyCommand({ name: 'introspect:sqlite', newName: 'introspect' }), + legacyCommand({ name: 'up:pg', newName: 'up' }), + legacyCommand({ name: 'up:mysql', newName: 'up' }), + legacyCommand({ name: 'up:sqlite', newName: 'up' }), + legacyCommand({ name: 'check:pg', newName: 'check' }), + legacyCommand({ name: 'check:mysql', newName: 'check' }), + legacyCommand({ name: 'check:sqlite', newName: 'check' }), + + // after folders v3 update + legacyCommand({ name: 'drop', customMessage: 'To drop a migration you can remove a migration folder manually' }), ]; -run([generate, migrate, pull, push, studio, up, check, drop, exportRaw, ...legacy], { +run([generate, migrate, pull, push, studio, up, check, exportRaw, ...legacy], { name: 'drizzle-kit', version: version, + hook: (event, command) => { if (event === 'after' && getCommandNameWithParents(command) !== 'studio') process.exit(0); }, @@ -53,12 +66,22 @@ run([generate, migrate, pull, push, studio, up, check, drop, exportRaw, ...legac if (event.type === 'error') { if (event.violation !== 'unknown_error') return false; - const reason = event.error; + const e = event.error; + if (e instanceof QueryError) { + let msg = `┌── ${chalk.bgRed.bold('query error:')} ${chalk.red(e.message)}\n\n`; + msg += `${highlightSQL(e.sql)}\n`; + if (e.params.length > 0) msg += '| ' + chalk.gray(`--- params: ${e.params || '[]'}\n\n`); + msg += '└──'; + console.log(); + console.log(msg); + return true; + } + if ( - !(typeof reason === 'object' && reason !== null && 'message' in reason && typeof reason.message === 'string') + !(typeof e === 'object' && e !== null && 'message' in e && typeof e.message === 'string') ) return false; - console.log(error(reason.message)); + console.log(error(e.message)); return true; } diff --git a/drizzle-kit/src/cli/prompts.ts b/drizzle-kit/src/cli/prompts.ts new file mode 100644 index 0000000000..35c742364b --- /dev/null +++ b/drizzle-kit/src/cli/prompts.ts @@ -0,0 +1,95 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import type { Resolver } from 'src/dialects/common'; +import type { RenamePropmtItem } from './views'; +import { isRenamePromptItem, ResolveSelect } from './views'; + +export const resolver = ( + entity: + | 'schema' + | 'enum' + | 'table' + | 'column' + | 'sequence' + | 'view' + | 'privilege' + | 'policy' + | 'role' + | 'check' + | 'index' + | 'unique' + | 'primary key' + | 'foreign key' + | 'default', + defaultSchema: 'public' | 'dbo' = 'public', +): Resolver => { + return async (it: { created: T[]; deleted: T[] }) => { + const { created, deleted } = it; + + if (created.length === 0 || deleted.length === 0) { + return { created, deleted, renamedOrMoved: [] }; + } + + const result: { + created: T[]; + deleted: T[]; + renamedOrMoved: { from: T; to: T }[]; + } = { created: [], deleted: [], renamedOrMoved: [] }; + let index = 0; + let leftMissing = [...deleted]; + do { + const newItem = created[index]; + const renames: RenamePropmtItem[] = leftMissing.map((it) => { + return { from: it, to: newItem }; + }); + + const promptData: (RenamePropmtItem | T)[] = [newItem, ...renames]; + const { status, data } = await render(new ResolveSelect(newItem, promptData, entity, defaultSchema)); + + if (status === 'aborted') { + console.error('ERROR'); + process.exit(1); + } + + if (isRenamePromptItem(data)) { + const to = data.to; + + const schemaFromPrefix = newItem.schema ? newItem.schema !== defaultSchema ? `${newItem.schema}.` : '' : ''; + + const tableFromPrefix = newItem.table ? `${newItem.table}.` : ''; + + const fromEntity = `${schemaFromPrefix}${tableFromPrefix}${newItem.name}`; + + const schemaToPrefix = to.schema ? to.schema !== defaultSchema ? `${to.schema}.` : '' : ''; + const tableToPrefix = to.table ? `${to.table}.` : ''; + const toEntity = `${schemaToPrefix}${tableToPrefix}${to.name}`; + + console.log( + `${chalk.yellow('~')} ${fromEntity} › ${toEntity} ${ + chalk.gray( + `${entity} will be renamed/moved`, + ) + }`, + ); + + result.renamedOrMoved.push(data); + + delete leftMissing[leftMissing.indexOf(data.from)]; + leftMissing = leftMissing.filter(Boolean); + } else { + console.log( + `${chalk.green('+')} ${newItem.name} ${ + chalk.gray( + `${entity} will be created`, + ) + }`, + ); + result.created.push(newItem); + } + index += 1; + } while (index < created.length); + console.log(chalk.gray(`--- all ${entity} conflicts resolved ---\n`)); + result.deleted.push(...leftMissing); + return result; + }; +}; diff --git a/drizzle-kit/src/cli/schema.ts b/drizzle-kit/src/cli/schema.ts index 8ad610caa2..b5865bcf6c 100644 --- a/drizzle-kit/src/cli/schema.ts +++ b/drizzle-kit/src/cli/schema.ts @@ -3,22 +3,20 @@ import chalk from 'chalk'; import 'dotenv/config'; import { mkdirSync } from 'fs'; import { renderWithTask } from 'hanji'; -import { dialects } from 'src/schemaValidator'; +import { dialects } from 'src/utils/schemaValidator'; import '../@types/utils'; import type { MigrationConfig, MigratorInitFailResponse } from 'drizzle-orm/migrator'; -import { assertUnreachable } from '../global'; -import { type Setup } from '../serializer/studio'; -import { assertV1OutFolder } from '../utils'; -import { certs } from '../utils/certs'; +import { assertUnreachable } from '../utils'; +import { assertV3OutFolder } from '../utils/utils-node'; import { checkHandler } from './commands/check'; -import { dropMigration } from './commands/drop'; -import { upMysqlHandler } from './commands/mysqlUp'; -import { upPgHandler } from './commands/pgUp'; -import { upSinglestoreHandler } from './commands/singlestoreUp'; -import { upSqliteHandler } from './commands/sqliteUp'; +import type { Setup } from './commands/studio'; +import { upCockroachHandler } from './commands/up-cockroach'; +import { upMysqlHandler } from './commands/up-mysql'; +import { upPgHandler } from './commands/up-postgres'; +import { upSinglestoreHandler } from './commands/up-singlestore'; +import { upSqliteHandler } from './commands/up-sqlite'; import { prepareCheckParams, - prepareDropParams, prepareExportConfig, prepareGenerateConfig, prepareMigrateConfig, @@ -34,7 +32,7 @@ import { error, grey, MigrateProgress } from './views'; const optionDialect = string('dialect') .enum(...dialects) .desc( - `Database dialect: 'gel', 'postgresql', 'mysql', 'sqlite', 'turso' or 'singlestore'`, + `Database dialect: 'gel', 'postgresql', 'mysql', 'sqlite', 'turso', 'singlestore' or 'mssql'`, ); const optionOut = string().desc("Output folder, 'drizzle' by default"); const optionConfig = string().desc('Path to drizzle config file'); @@ -79,29 +77,34 @@ export const generate = command({ await assertOrmCoreVersion(); await assertPackages('drizzle-orm'); - // const parsed = cliConfigGenerate.parse(opts); - - const { - prepareAndMigratePg, - prepareAndMigrateMysql, - prepareAndMigrateSqlite, - prepareAndMigrateLibSQL, - prepareAndMigrateSingleStore, - } = await import('./commands/migrate'); + assertV3OutFolder(opts.out); const dialect = opts.dialect; + await checkHandler(opts.out, dialect); + if (dialect === 'postgresql') { - await prepareAndMigratePg(opts); + const { handle } = await import('./commands/generate-postgres'); + await handle(opts); } else if (dialect === 'mysql') { - await prepareAndMigrateMysql(opts); + const { handle } = await import('./commands/generate-mysql'); + await handle(opts); } else if (dialect === 'sqlite') { - await prepareAndMigrateSqlite(opts); + const { handle } = await import('./commands/generate-sqlite'); + await handle(opts); } else if (dialect === 'turso') { - await prepareAndMigrateLibSQL(opts); + const { handle } = await import('./commands/generate-libsql'); + await handle(opts); } else if (dialect === 'singlestore') { - await prepareAndMigrateSingleStore(opts); + const { handle } = await import('./commands/generate-singlestore'); + await handle(opts); } else if (dialect === 'gel') { throw new Error(`You can't use 'generate' command with Gel dialect`); + } else if (dialect === 'mssql') { + const { handle } = await import('./commands/generate-mssql'); + await handle(opts); + } else if (dialect === 'cockroach') { + const { handle } = await import('./commands/generate-cockroach'); + await handle(opts); } else { assertUnreachable(dialect); } @@ -120,8 +123,12 @@ export const migrate = command({ await assertOrmCoreVersion(); await assertPackages('drizzle-orm'); + assertV3OutFolder(opts.out); + const { dialect, schema, table, out, credentials } = opts; + await checkHandler(out, dialect); + if (dialect === 'postgresql') { if ('driver' in credentials) { const { driver } = credentials; @@ -197,6 +204,28 @@ export const migrate = command({ migrationsSchema: schema, }), ); + } else if (dialect === 'cockroach') { + const { prepareCockroach } = await import('./connections'); + const { migrate } = await prepareCockroach(credentials); + await renderWithTask( + new MigrateProgress(), + migrate({ + migrationsFolder: out, + migrationsTable: table, + migrationsSchema: schema, + }), + ); + } else if (dialect === 'mssql') { + const { connectToMsSQL } = await import('./connections'); + const { migrate } = await connectToMsSQL(credentials); + await renderWithTask( + new MigrateProgress(), + migrate({ + migrationsFolder: out, + migrationsTable: table, + migrationsSchema: schema, + }), + ); } else if (dialect === 'gel') { throw new Error(`You can't use 'migrate' command with Gel dialect`); } else { @@ -247,12 +276,15 @@ export const push = command({ 'Auto-approve all data loss statements. Note: Data loss statements may truncate your tables and data', ) .default(false), + explain: boolean() + .desc('Print the planned SQL changes (dry run)') + .default(false), }, transform: async (opts) => { const from = assertCollisions( 'push', opts, - ['force', 'verbose', 'strict'], + ['force', 'verbose', 'strict', 'explain'], [ 'schema', 'dialect', @@ -279,99 +311,43 @@ export const push = command({ await assertPackages('drizzle-orm'); await assertOrmCoreVersion(); - const { - dialect, - schemaPath, - strict, - verbose, - credentials, - tablesFilter, - schemasFilter, - force, - casing, - entities, - } = config; + const { dialect, schemaPath, verbose, credentials, force, casing, filters, explain } = config; if (dialect === 'mysql') { - const { mysqlPush } = await import('./commands/push'); - await mysqlPush( - schemaPath, - credentials, - tablesFilter, - strict, - verbose, - force, - casing, - ); + const { handle } = await import('./commands/push-mysql'); + await handle(schemaPath, credentials, verbose, force, casing, filters, explain); } else if (dialect === 'postgresql') { if ('driver' in credentials) { const { driver } = credentials; - if (driver === 'aws-data-api') { - if (!(await ormVersionGt('0.30.10'))) { - console.log( - "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", - ); - process.exit(1); - } - } else if (driver === 'pglite') { - if (!(await ormVersionGt('0.30.6'))) { - console.log( - "To use 'pglite' driver - please update drizzle-orm to the latest version", - ); - process.exit(1); - } - } else { - assertUnreachable(driver); + if (driver === 'aws-data-api' && !(await ormVersionGt('0.30.10'))) { + console.log("To use 'aws-data-api' driver - please update drizzle-orm to the latest version"); + process.exit(1); + } + if (driver === 'pglite' && !(await ormVersionGt('0.30.6'))) { + console.log("To use 'pglite' driver - please update drizzle-orm to the latest version"); + process.exit(1); } } - const { pgPush } = await import('./commands/push'); - await pgPush( - schemaPath, - verbose, - strict, - credentials, - tablesFilter, - schemasFilter, - entities, - force, - casing, - ); + const { handle } = await import('./commands/push-postgres'); + await handle(schemaPath, verbose, credentials, filters, force, casing, explain); } else if (dialect === 'sqlite') { - const { sqlitePush } = await import('./commands/push'); - await sqlitePush( - schemaPath, - verbose, - strict, - credentials, - tablesFilter, - force, - casing, - ); + const { handle: sqlitePush } = await import('./commands/push-sqlite'); + await sqlitePush(schemaPath, verbose, credentials, filters, force, casing, explain); } else if (dialect === 'turso') { - const { libSQLPush } = await import('./commands/push'); - await libSQLPush( - schemaPath, - verbose, - strict, - credentials, - tablesFilter, - force, - casing, - ); + const { handle: libSQLPush } = await import('./commands/push-libsql'); + await libSQLPush(schemaPath, verbose, credentials, filters, force, casing, explain); } else if (dialect === 'singlestore') { - const { singlestorePush } = await import('./commands/push'); - await singlestorePush( - schemaPath, - credentials, - tablesFilter, - strict, - verbose, - force, - casing, - ); + const { handle } = await import('./commands/push-singlestore'); + await handle(schemaPath, credentials, filters, verbose, force, casing); + } else if (dialect === 'cockroach') { + const { handle } = await import('./commands/push-cockroach'); + await handle(schemaPath, verbose, credentials, filters, force, casing); + } else if (dialect === 'mssql') { + const { handle } = await import('./commands/push-mssql'); + await handle(schemaPath, verbose, credentials, filters, force, casing); } else if (dialect === 'gel') { - throw new Error(`You can't use 'push' command with Gel dialect`); + console.log(error(`You can't use 'push' command with Gel dialect`)); } else { assertUnreachable(dialect); } @@ -392,8 +368,10 @@ export const check = command({ handler: async (config) => { await assertOrmCoreVersion(); + assertV3OutFolder(config.out); + const { out, dialect } = config; - checkHandler(out, dialect); + await checkHandler(out, dialect); console.log("Everything's fine 🐶🔥"); }, }); @@ -431,6 +409,10 @@ export const up = command({ upSinglestoreHandler(out); } + if (dialect === 'cockroach') { + upCockroachHandler(out); + } + if (dialect === 'gel') { throw new Error(`You can't use 'up' command with Gel dialect`); } @@ -438,8 +420,8 @@ export const up = command({ }); export const pull = command({ - name: 'introspect', - aliases: ['pull'], + name: 'pull', + aliases: ['introspect'], options: { config: optionConfig, dialect: optionDialect, @@ -454,7 +436,7 @@ export const pull = command({ const from = assertCollisions( 'introspect', opts, - [], + ['init'], [ 'dialect', 'driver', @@ -473,7 +455,6 @@ export const pull = command({ 'schemaFilters', 'extensionsFilters', 'tlsSecurity', - 'init', ], ); return preparePullConfig(opts, from); @@ -488,27 +469,14 @@ export const pull = command({ out, casing, breakpoints, - tablesFilter, - schemasFilter, prefix, - entities, + filters, init, migrationsSchema, migrationsTable, } = config; mkdirSync(out, { recursive: true }); - console.log( - grey( - `Pulling from [${ - schemasFilter - .map((it) => `'${it}'`) - .join(', ') - }] list of schemas`, - ), - ); - console.log(); - let migrate: ((config: MigrationConfig) => Promise) | undefined; if (dialect === 'postgresql') { if ('driver' in credentials) { @@ -536,102 +504,63 @@ export const pull = command({ const db = await preparePostgresDB(credentials); migrate = db.migrate; - const { introspectPostgres } = await import('./commands/introspect'); - await introspectPostgres( - casing, - out, - breakpoints, - credentials, - tablesFilter, - schemasFilter, - prefix, - entities, - db, - ); + const { handle: introspectPostgres } = await import('./commands/pull-postgres'); + await introspectPostgres(casing, out, breakpoints, credentials, filters, prefix, db); } else if (dialect === 'mysql') { const { connectToMySQL } = await import('./connections'); const db = await connectToMySQL(credentials); migrate = db.migrate; - const { introspectMysql } = await import('./commands/introspect'); - await introspectMysql( - casing, - out, - breakpoints, - credentials, - tablesFilter, - prefix, - db, - ); + const { handle: introspectMysql } = await import('./commands/pull-mysql'); + await introspectMysql(casing, out, breakpoints, credentials, filters, prefix, db); } else if (dialect === 'sqlite') { const { connectToSQLite } = await import('./connections'); const db = await connectToSQLite(credentials); migrate = db.migrate; - const { introspectSqlite } = await import('./commands/introspect'); - await introspectSqlite( - casing, - out, - breakpoints, - credentials, - tablesFilter, - prefix, - db, - ); + const { handle } = await import('./commands/pull-sqlite'); + await handle(casing, out, breakpoints, credentials, filters, prefix, 'sqlite', db); } else if (dialect === 'turso') { const { connectToLibSQL } = await import('./connections'); const db = await connectToLibSQL(credentials); migrate = db.migrate; - const { introspectLibSQL } = await import('./commands/introspect'); - await introspectLibSQL( - casing, - out, - breakpoints, - credentials, - tablesFilter, - prefix, - db, - ); + const { handle } = await import('./commands/pull-libsql'); + await handle(casing, out, breakpoints, credentials, filters, prefix, 'libsql', db); } else if (dialect === 'singlestore') { const { connectToSingleStore } = await import('./connections'); const db = await connectToSingleStore(credentials); migrate = db.migrate; - const { introspectSingleStore } = await import('./commands/introspect'); - await introspectSingleStore( - casing, - out, - breakpoints, - credentials, - tablesFilter, - prefix, - db, - ); + const { handle } = await import('./commands/pull-singlestore'); + await handle(casing, out, breakpoints, credentials, filters, prefix, db); } else if (dialect === 'gel') { const { prepareGelDB } = await import('./connections'); const db = await prepareGelDB(credentials); - if (init) throw new Error(`You can't use "--init" flag with Gel`); - // migrate = db.migrate; - not supported for Gel + // migrate = db.migrate; - const { introspectGel } = await import('./commands/introspect'); - await introspectGel( - casing, - out, - breakpoints, - credentials, - tablesFilter, - schemasFilter, - prefix, - entities, - db, - ); + const { handle } = await import('./commands/pull-gel'); + await handle(casing, out, breakpoints, credentials, filters, prefix, db); + } else if (dialect === 'mssql') { + const { connectToMsSQL } = await import('./connections'); + const db = await connectToMsSQL(credentials); + migrate = db.migrate; + + const { handle } = await import('./commands/pull-mssql'); + await handle(casing, out, breakpoints, credentials, filters, prefix, db); + } else if (dialect === 'cockroach') { + const { prepareCockroach } = await import('./connections'); + const db = await prepareCockroach(credentials); + migrate = db.migrate; + + const { handle } = await import('./commands/pull-cockroach'); + await handle(casing, out, breakpoints, credentials, filters, prefix, db); } else { assertUnreachable(dialect); } if (init) { - if (!migrate) throw new Error(`--init can't be used with ${dialect}`); + if (!migrate) throw new Error(`--init can't be used with '${dialect}' dialect`); console.log(); console.log(grey('Applying migration metadata to the database')); @@ -641,7 +570,7 @@ export const pull = command({ migrationsTable, migrationsSchema, // Internal param - won't be displayed in types. Do not remove. - init, + init: true, }; const error = await migrate(migrateInput); @@ -658,25 +587,6 @@ export const pull = command({ }, }); -export const drop = command({ - name: 'drop', - options: { - config: optionConfig, - out: optionOut, - driver: optionDriver, - }, - transform: async (opts) => { - const from = assertCollisions('check', opts, [], ['driver', 'out']); - return prepareDropParams(opts, from); - }, - handler: async (config) => { - await assertOrmCoreVersion(); - - assertV1OutFolder(config.out); - await dropMigration(config); - }, -}); - export const studio = command({ name: 'studio', options: { @@ -712,7 +622,8 @@ export const studio = command({ prepareSingleStoreSchema, drizzleForSingleStore, drizzleForLibSQL, - } = await import('../serializer/studio'); + // drizzleForMsSQL, + } = await import('./commands/studio'); let setup: Setup; @@ -768,14 +679,13 @@ export const studio = command({ files, casing, ); - } else if (dialect === 'gel') { - throw new Error(`You can't use 'studio' command with Gel dialect`); + } else if (dialect === 'cockroach') { + throw new Error(`You can't use 'studio' command with 'cockroach' dialect`); } else { assertUnreachable(dialect); } - const { prepareServer } = await import('../serializer/studio'); - + const { prepareServer } = await import('./commands/studio'); const server = await prepareServer(setup); console.log(); @@ -785,13 +695,14 @@ export const studio = command({ ), ); + const { certs } = await import('../utils/certs'); const { key, cert } = (await certs()) || {}; server.start({ host, port, key, cert, - cb: (err, address) => { + cb: (err, _address) => { if (err) { console.error(err); } else { @@ -840,29 +751,30 @@ export const exportRaw = command({ await assertOrmCoreVersion(); await assertPackages('drizzle-orm'); - const { - prepareAndExportPg, - prepareAndExportMysql, - prepareAndExportSqlite, - prepareAndExportLibSQL, - prepareAndExportSinglestore, - } = await import( - './commands/migrate' - ); - const dialect = opts.dialect; if (dialect === 'postgresql') { - await prepareAndExportPg(opts); + const { handleExport } = await import('./commands/generate-postgres'); + await handleExport(opts); } else if (dialect === 'mysql') { - await prepareAndExportMysql(opts); + const { handleExport } = await import('./commands/generate-mysql'); + await handleExport(opts); } else if (dialect === 'sqlite') { - await prepareAndExportSqlite(opts); + const { handleExport } = await import('./commands/generate-sqlite'); + await handleExport(opts); } else if (dialect === 'turso') { - await prepareAndExportLibSQL(opts); + const { handleExport } = await import('./commands/generate-libsql'); + await handleExport(opts); } else if (dialect === 'singlestore') { - await prepareAndExportSinglestore(opts); + const { handleExport } = await import('./commands/generate-singlestore'); + await handleExport(opts); } else if (dialect === 'gel') { throw new Error(`You can't use 'export' command with Gel dialect`); + } else if (dialect === 'mssql') { + const { handleExport } = await import('./commands/generate-mssql'); + await handleExport(opts); + } else if (dialect === 'cockroach') { + const { handleExport } = await import('./commands/generate-cockroach'); + await handleExport(opts); } else { assertUnreachable(dialect); } diff --git a/drizzle-kit/src/cli/selector-ui.ts b/drizzle-kit/src/cli/selector-ui.ts index f384831d0e..815bc6ec97 100644 --- a/drizzle-kit/src/cli/selector-ui.ts +++ b/drizzle-kit/src/cli/selector-ui.ts @@ -23,7 +23,7 @@ export class Select extends Prompt<{ index: number; value: string }> { text += idx === this.data.selectedIdx ? `${chalk.green('❯ ' + it.label)}` : ` ${it.label}`; - text += idx != this.data.items.length - 1 ? '\n' : ''; + text += idx !== this.data.items.length - 1 ? '\n' : ''; }); return text; diff --git a/drizzle-kit/src/cli/utils.ts b/drizzle-kit/src/cli/utils.ts index cadaf79416..2ce67d76f9 100644 --- a/drizzle-kit/src/cli/utils.ts +++ b/drizzle-kit/src/cli/utils.ts @@ -1,5 +1,5 @@ import semver from 'semver'; -import { err, warning } from './views'; +import { err } from './views'; export const assertExists = (it?: any) => { if (!it) throw new Error(); @@ -24,7 +24,7 @@ export const checkPackage = async (it: string) => { try { await import(it); return true; - } catch (e) { + } catch { return false; } }; @@ -35,7 +35,7 @@ export const assertPackages = async (...pkgs: string[]) => { const it = pkgs[i]; await import(it); } - } catch (e) { + } catch { err( `please install required packages: ${ pkgs @@ -57,7 +57,7 @@ export const assertEitherPackage = async ( const it = pkgs[i]; await import(it); availables.push(it); - } catch (e) {} + } catch {} } if (availables.length > 0) { @@ -74,7 +74,7 @@ export const assertEitherPackage = async ( process.exit(1); }; -const requiredApiVersion = 11; +const requiredApiVersion = 12; export const assertOrmCoreVersion = async () => { try { const { compatibilityVersion } = await import('drizzle-orm/version'); @@ -94,7 +94,7 @@ export const assertOrmCoreVersion = async () => { 'This version of drizzle-kit is outdated\nPlease update drizzle-kit package to the latest version 👍', ); } - } catch (e) { + } catch { console.log('Please install latest version of drizzle-orm'); } process.exit(1); @@ -106,7 +106,13 @@ export const ormCoreVersions = async () => { 'drizzle-orm/version' ); return { compatibilityVersion, npmVersion }; - } catch (e) { + } catch { return {}; } }; + +export class QueryError extends Error { + constructor(wrapped: Error, public readonly sql: string, public readonly params: any[]) { + super(wrapped.message, { cause: wrapped }); + } +} diff --git a/drizzle-kit/src/cli/validations/cli.ts b/drizzle-kit/src/cli/validations/cli.ts index d1e49c9ab3..4602c86975 100644 --- a/drizzle-kit/src/cli/validations/cli.ts +++ b/drizzle-kit/src/cli/validations/cli.ts @@ -1,31 +1,13 @@ -import { array, boolean, intersection, literal, object, string, TypeOf, union } from 'zod'; -import { dialect } from '../../schemaValidator'; -import { casing, casingType, configMigrations, prefix } from './common'; +import type { TypeOf } from 'zod'; +import { boolean, intersection, literal, object, string, union } from 'zod'; +import { dialect } from '../../utils/schemaValidator'; +import { casing, casingType, configMigrations } from './common'; -export const cliConfigGenerate = object({ - dialect: dialect.optional(), - schema: union([string(), string().array()]).optional(), - out: string().optional().default('./drizzle'), - config: string().optional(), - name: string().optional(), - prefix: prefix.optional(), - breakpoints: boolean().optional().default(true), - custom: boolean().optional().default(false), -}).strict(); - -export type CliConfigGenerate = TypeOf; - -export const pushParams = object({ - dialect: dialect, - casing: casingType.optional(), - schema: union([string(), string().array()]), +export const entitiesParams = { tablesFilter: union([string(), string().array()]).optional(), schemaFilter: union([string(), string().array()]) - .optional() - .default(['public']), + .optional(), extensionsFilters: literal('postgis').array().optional(), - verbose: boolean().optional(), - strict: boolean().optional(), entities: object({ roles: boolean().or(object({ provider: string().optional(), @@ -33,6 +15,16 @@ export const pushParams = object({ exclude: string().array().optional(), })).optional().default(false), }).optional(), +}; + +export const pushParams = object({ + dialect: dialect, + casing: casingType.optional(), + schema: union([string(), string().array()]), + verbose: boolean().optional(), + strict: boolean().optional(), + explain: boolean().optional(), + ...entitiesParams, }).passthrough(); export type PushParams = TypeOf; @@ -41,26 +33,23 @@ export const pullParams = object({ config: string().optional(), dialect: dialect, out: string().optional().default('drizzle'), - tablesFilter: union([string(), string().array()]).optional(), - schemaFilter: union([string(), string().array()]) - .optional() - .default(['public']), - extensionsFilters: literal('postgis').array().optional(), casing, breakpoints: boolean().optional().default(true), migrations: configMigrations, - entities: object({ - roles: boolean().or(object({ - provider: string().optional(), - include: string().array().optional(), - exclude: string().array().optional(), - })).optional().default(false), - }).optional(), + ...entitiesParams, }).passthrough(); -export type Entities = TypeOf['entities']; +export type EntitiesFilter = TypeOf; +export type TablesFilter = TypeOf; +export type SchemasFilter = TypeOf; +export type ExtensionsFilter = TypeOf; -export type PullParams = TypeOf; +export type EntitiesFilterConfig = { + schemas: SchemasFilter; + tables: TablesFilter; + entities: EntitiesFilter; + extensions: ExtensionsFilter; +}; export const configCheck = object({ dialect: dialect.optional(), diff --git a/drizzle-kit/src/cli/validations/cockroach.ts b/drizzle-kit/src/cli/validations/cockroach.ts new file mode 100644 index 0000000000..52840e423d --- /dev/null +++ b/drizzle-kit/src/cli/validations/cockroach.ts @@ -0,0 +1,58 @@ +import type { TypeOf } from 'zod'; +import { boolean, coerce, literal, object, string, union } from 'zod'; +import { error } from '../views'; +import { wrapParam } from './common'; + +export const cockroachCredentials = union([ + object({ + host: string().min(1), + port: coerce.number().min(1).optional(), + user: string().min(1).optional(), + password: string().min(1).optional(), + database: string().min(1), + // TODO update ssl params + ssl: union([ + literal('require'), + literal('allow'), + literal('prefer'), + literal('verify-full'), + boolean(), + object({}).passthrough(), + ]).optional(), + }), + object({ + url: string().min(1), + }), +]); + +export type CockroachCredentials = TypeOf; + +export const printConfigConnectionIssues = ( + options: Record, +) => { + if ('url' in options) { + let text = `Please provide required params for Cockroach dialect:\n`; + console.log(error(text)); + console.log(wrapParam('url', options.url, false, 'url')); + process.exit(1); + } + + if ('host' in options || 'database' in options) { + let text = `Please provide required params for Cockroach dialect:\n`; + console.log(error(text)); + console.log(wrapParam('host', options.host)); + console.log(wrapParam('port', options.port, true)); + console.log(wrapParam('user', options.user, true)); + console.log(wrapParam('password', options.password, true, 'secret')); + console.log(wrapParam('database', options.database)); + console.log(wrapParam('ssl', options.ssl, true)); + process.exit(1); + } + + console.log( + error( + `Either connection "url" or "host", "database", "user", "server" are required for Cockroach connection`, + ), + ); + process.exit(1); +}; diff --git a/drizzle-kit/src/cli/validations/common.ts b/drizzle-kit/src/cli/validations/common.ts index 14d4fc6b34..5f3aac40fe 100644 --- a/drizzle-kit/src/cli/validations/common.ts +++ b/drizzle-kit/src/cli/validations/common.ts @@ -1,7 +1,8 @@ import chalk from 'chalk'; -import { UnionToIntersection } from 'hono/utils/types'; -import { any, boolean, enum as enum_, literal, object, string, TypeOf, union } from 'zod'; -import { dialect } from '../../schemaValidator'; +import type { UnionToIntersection } from 'hono/utils/types'; +import type { TypeOf } from 'zod'; +import { any, boolean, enum as enum_, literal, object, string, union } from 'zod'; +import { dialect } from '../../utils/schemaValidator'; import { outputs } from './outputs'; export type Commands = @@ -13,7 +14,7 @@ export type Commands = | 'push' | 'export'; -type Expand = T extends infer O ? { [K in keyof O]: O[K] } : never; +// type Expand = T extends infer O ? { [K in keyof O]: O[K] } : never; type IsUnion = [T] extends [UnionToIntersection] ? false : true; type LastTupleElement = TArr extends [ ...start: infer _, @@ -37,9 +38,9 @@ export const assertCollisions = < command: Commands, options: T, whitelist: Exclude, - remainingKeys: UniqueArrayOfUnion, + _remainingKeys: UniqueArrayOfUnion, ): IsUnion> extends false ? 'cli' | 'config' : TKeys => { - const { config, init, ...rest } = options; + const { config, ...rest } = options; let atLeastOneParam = false; for (const key of Object.keys(rest)) { @@ -109,7 +110,7 @@ export const configCommonSchema = object({ verbose: boolean().optional().default(false), driver: driver.optional(), tablesFilter: union([string(), string().array()]).optional(), - schemaFilter: union([string(), string().array()]).default(['public']), + schemaFilter: union([string(), string().array()]).optional(), migrations: configMigrations, dbCredentials: any().optional(), casing: casingType.optional(), diff --git a/drizzle-kit/src/cli/validations/gel.ts b/drizzle-kit/src/cli/validations/gel.ts index cf6d38614d..626dd8a214 100644 --- a/drizzle-kit/src/cli/validations/gel.ts +++ b/drizzle-kit/src/cli/validations/gel.ts @@ -1,4 +1,5 @@ -import { coerce, literal, object, string, TypeOf, undefined as undefinedType, union } from 'zod'; +import type { TypeOf } from 'zod'; +import { coerce, literal, object, string, undefined as undefinedType, union } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; @@ -42,9 +43,7 @@ export const gelCredentials = union([ }), object({ driver: undefinedType(), - }).transform((o) => { - return undefined; - }), + }).transform((): undefined => {}), ]); export type GelCredentials = TypeOf; diff --git a/drizzle-kit/src/cli/validations/libsql.ts b/drizzle-kit/src/cli/validations/libsql.ts index a9b03c1687..9171934d82 100644 --- a/drizzle-kit/src/cli/validations/libsql.ts +++ b/drizzle-kit/src/cli/validations/libsql.ts @@ -1,5 +1,5 @@ -import { softAssertUnreachable } from 'src/global'; -import { object, string, TypeOf } from 'zod'; +import type { TypeOf } from 'zod'; +import { object, string } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; @@ -17,7 +17,7 @@ const _: LibSQLCredentials = {} as TypeOf; export const printConfigConnectionIssues = ( options: Record, - command: 'generate' | 'migrate' | 'push' | 'pull' | 'studio', + _command: 'generate' | 'migrate' | 'push' | 'pull' | 'studio', ) => { let text = `Please provide required params for 'turso' dialect:\n`; console.log(error(text)); diff --git a/drizzle-kit/src/cli/validations/mssql.ts b/drizzle-kit/src/cli/validations/mssql.ts new file mode 100644 index 0000000000..371069f163 --- /dev/null +++ b/drizzle-kit/src/cli/validations/mssql.ts @@ -0,0 +1,53 @@ +import type { TypeOf } from 'zod'; +import { boolean, coerce, object, string, union } from 'zod'; +import { error } from '../views'; +import { wrapParam } from './common'; +import { outputs } from './outputs'; + +export const mssqlCredentials = union([ + object({ + port: coerce.number().min(1), + user: string().min(1), + password: string().min(1), + database: string().min(1), + server: string().min(1), + + options: object({ + encrypt: boolean().optional(), + trustServerCertificate: boolean().optional(), + }).optional(), + }), + object({ + url: string().min(1), + }), +]); + +export type MssqlCredentials = TypeOf; + +export const printCliConnectionIssues = (options: any) => { + const { uri, host, database } = options || {}; + + if (!uri && (!host || !database)) { + console.log(outputs.mssql.connection.required()); + } +}; + +export const printConfigConnectionIssues = ( + options: Record, +) => { + if ('url' in options) { + let text = `Please provide required params for MsSQL driver:\n`; + console.log(error(text)); + console.log(wrapParam('url', options.url, false, 'url')); + process.exit(1); + } + + let text = `Please provide required params for MySQL driver:\n`; + console.log(error(text)); + console.log(wrapParam('server', options.server)); + console.log(wrapParam('port', options.port)); + console.log(wrapParam('user', options.user)); + console.log(wrapParam('password', options.password, false, 'secret')); + console.log(wrapParam('database', options.database)); + process.exit(1); +}; diff --git a/drizzle-kit/src/cli/validations/mysql.ts b/drizzle-kit/src/cli/validations/mysql.ts index 1841dbdd6f..53c30ce5aa 100644 --- a/drizzle-kit/src/cli/validations/mysql.ts +++ b/drizzle-kit/src/cli/validations/mysql.ts @@ -1,4 +1,5 @@ -import { boolean, coerce, object, string, TypeOf, union } from 'zod'; +import type { TypeOf } from 'zod'; +import { boolean, coerce, object, string, union } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; import { outputs } from './outputs'; diff --git a/drizzle-kit/src/cli/validations/outputs.ts b/drizzle-kit/src/cli/validations/outputs.ts index 6e9d520dd6..896ed4bf88 100644 --- a/drizzle-kit/src/cli/validations/outputs.ts +++ b/drizzle-kit/src/cli/validations/outputs.ts @@ -57,6 +57,15 @@ export const outputs = { ), }, }, + mssql: { + connection: { + driver: () => withStyle.error(`Only "mssql" is available options for "--driver"`), + required: () => + withStyle.error( + `Either "url" or "host", "database" are required for database connection`, + ), + }, + }, sqlite: { connection: { driver: () => { diff --git a/drizzle-kit/src/cli/validations/postgres.ts b/drizzle-kit/src/cli/validations/postgres.ts index 658760c614..567bd64847 100644 --- a/drizzle-kit/src/cli/validations/postgres.ts +++ b/drizzle-kit/src/cli/validations/postgres.ts @@ -1,10 +1,11 @@ -import { boolean, coerce, literal, object, string, TypeOf, undefined, union } from 'zod'; +import type { TypeOf } from 'zod'; +import { boolean, coerce, literal, object, string, undefined as zUndefined, union } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; export const postgresCredentials = union([ object({ - driver: undefined(), + driver: zUndefined(), host: string().min(1), port: coerce.number().min(1).optional(), user: string().min(1).optional(), @@ -23,7 +24,7 @@ export const postgresCredentials = union([ return o as Omit; }), object({ - driver: undefined(), + driver: zUndefined(), url: string().min(1), }).transform<{ url: string }>((o) => { delete o.driver; diff --git a/drizzle-kit/src/cli/validations/singlestore.ts b/drizzle-kit/src/cli/validations/singlestore.ts index ebe0cc5f01..c486d6187e 100644 --- a/drizzle-kit/src/cli/validations/singlestore.ts +++ b/drizzle-kit/src/cli/validations/singlestore.ts @@ -1,4 +1,5 @@ -import { boolean, coerce, object, string, TypeOf, union } from 'zod'; +import type { TypeOf } from 'zod'; +import { boolean, coerce, object, string, union } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; import { outputs } from './outputs'; diff --git a/drizzle-kit/src/cli/validations/sqlite.ts b/drizzle-kit/src/cli/validations/sqlite.ts index a2114b05a5..25a78f8b7d 100644 --- a/drizzle-kit/src/cli/validations/sqlite.ts +++ b/drizzle-kit/src/cli/validations/sqlite.ts @@ -1,5 +1,6 @@ -import { softAssertUnreachable } from 'src/global'; -import { literal, object, string, TypeOf, undefined, union } from 'zod'; +import type { TypeOf } from 'zod'; +import { literal, object, string, undefined as zUndefined, union } from 'zod'; +import { softAssertUnreachable } from '../../utils'; import { error } from '../views'; import { sqliteDriver, wrapParam } from './common'; @@ -20,7 +21,7 @@ export const sqliteCredentials = union([ url: string().min(1), }), object({ - driver: undefined(), + driver: zUndefined(), url: string().min(1), }).transform<{ url: string }>((o) => { delete o.driver; diff --git a/drizzle-kit/src/cli/validations/studio.ts b/drizzle-kit/src/cli/validations/studio.ts index 1c8d170b85..ca96485908 100644 --- a/drizzle-kit/src/cli/validations/studio.ts +++ b/drizzle-kit/src/cli/validations/studio.ts @@ -1,5 +1,6 @@ -import { coerce, intersection, object, string, TypeOf, union } from 'zod'; -import { dialect } from '../../schemaValidator'; +import type { TypeOf } from 'zod'; +import { coerce, intersection, object, string, union } from 'zod'; +import { dialect } from '../../utils/schemaValidator'; import { casingType } from './common'; import { mysqlCredentials } from './mysql'; import { postgresCredentials } from './postgres'; diff --git a/drizzle-kit/src/cli/views.ts b/drizzle-kit/src/cli/views.ts index 9106d31cd8..a4e12f1e04 100644 --- a/drizzle-kit/src/cli/views.ts +++ b/drizzle-kit/src/cli/views.ts @@ -1,12 +1,28 @@ import chalk from 'chalk'; import { Prompt, render, SelectState, TaskView } from 'hanji'; -import type { CommonSchema } from '../schemaValidator'; -import { objectValues } from '../utils'; -import type { Named, NamedWithSchema } from './commands/migrate'; +import type { SchemaError as MssqlSchemaError } from 'src/dialects/mssql/ddl'; +import type { SchemaError as MysqlSchemaError } from 'src/dialects/mysql/ddl'; +import type { + SchemaError as PostgresSchemaError, + SchemaWarning as PostgresSchemaWarning, + View, +} from 'src/dialects/postgres/ddl'; +import type { JsonStatement as StatementCockraoch } from '../dialects/cockroach/statements'; +import type { JsonStatement as StatementMssql } from '../dialects/mssql/statements'; +import type { JsonStatement as StatementMysql } from '../dialects/mysql/statements'; +import { vectorOps } from '../dialects/postgres/grammar'; +import type { JsonStatement as StatementPostgres } from '../dialects/postgres/statements'; +import type { SchemaError as SqliteSchemaError } from '../dialects/sqlite/ddl'; +import type { JsonStatement as StatementSqlite } from '../dialects/sqlite/statements'; +import type { Named, NamedWithSchema } from '../dialects/utils'; +import { assertUnreachable } from '../utils'; +import { highlightSQL } from './highlighter'; +import { withStyle } from './validations/outputs'; export const warning = (msg: string) => { render(`[${chalk.yellow('Warning')}] ${msg}`); }; + export const err = (msg: string) => { render(`${chalk.bold.red('Error')} ${msg}`); }; @@ -22,57 +38,974 @@ export const error = (error: string, greyMsg: string = ''): string => { return `${chalk.bgRed.bold(' Error ')} ${error} ${greyMsg ? chalk.grey(greyMsg) : ''}`.trim(); }; -export const schema = (schema: CommonSchema): string => { - type TableEntry = (typeof schema)['tables'][keyof (typeof schema)['tables']]; - const tables = Object.values(schema.tables) as unknown as TableEntry[]; - - let msg = chalk.bold(`${tables.length} tables\n`); - - msg += tables - .map((t) => { - const columnsCount = Object.values(t.columns).length; - const indexesCount = Object.values(t.indexes).length; - let foreignKeys: number = 0; - // Singlestore doesn't have foreign keys - if (schema.dialect !== 'singlestore') { - // @ts-expect-error - foreignKeys = Object.values(t.foreignKeys).length; +export const postgresSchemaWarning = (warning: PostgresSchemaWarning): string => { + if (warning.type === 'policy_not_linked') { + return withStyle.errorWarning( + `"Policy ${warning.policy} was skipped because it was not linked to any table. You should either include the policy in a table or use .link() on the policy to link it to any table you have. For more information, please check:`, + ); + } + + assertUnreachable(warning.type); +}; + +export const sqliteSchemaError = (error: SqliteSchemaError): string => { + if (error.type === 'conflict_table') { + return `'${error.table}' table name is a duplicate`; + } + + if (error.type === 'conflict_check') { + return `'${error.name}' check constraint name is a duplicate`; + } + + if (error.type === 'conflict_unique') { + return `'${error.name}' unique constraint name is a duplicate`; + } + + if (error.type === 'conflict_view') { + return `'${error.view}' view name is a duplicate`; + } + + // assertUnreachable(error.type) + return ''; +}; + +function formatOptionChanges( + oldState: Record | null, + newState: Record | null, +): string { + if (oldState === null && newState) { + const keys = Object.keys(newState) as Array; + return keys + .map((key) => `${key}: null -> ${key}: ${String(newState[key])}`) + .join('\n'); + } + + if (newState === null && oldState) { + const keys = Object.keys(oldState) as Array; + return keys + .map((key) => `${key}: ${String(oldState[key])} -> ${key}: null`) + .join('\n'); + } + + if (oldState && newState) { + const keys = Object.keys(newState) as Array; + return keys + .filter((key) => oldState[key] !== newState[key]) + .map((key) => `${key}: ${String(oldState[key])} -> ${key}: ${String(newState[key])}`) + .join('\n'); + } + + return ''; +} + +export const explain = ( + dialect: 'postgres' | 'mysql' | 'sqlite' | 'singlestore' | 'mssql' | 'common' | 'gel' | 'cockroach', + grouped: { jsonStatement: StatementPostgres | StatementSqlite | StatementMysql; sqlStatements: string[] }[], + explain: boolean, + hints: { hint: string; statement?: string }[], +) => { + const res = []; + const explains = []; + for (const { jsonStatement, sqlStatements } of grouped) { + const res = dialect === 'postgres' + ? psqlExplain(jsonStatement as StatementPostgres) + : dialect === 'sqlite' + ? sqliteExplain(jsonStatement as StatementSqlite) + : dialect === 'mysql' + ? mysqlExplain(jsonStatement as StatementMysql) + : null; + + if (res) { + let msg = `┌─── ${res.title}\n`; + msg += res.cause; + msg += `├───\n`; + for (const sql of sqlStatements) { + msg += `│ ${highlightSQL(sql)}\n`; } + msg += `└───\n`; + explains.push(msg); + } else if (explain) { + explains.push(...sqlStatements.map((x) => highlightSQL(x))); + } + } - return `${chalk.bold.blue(t.name)} ${ - chalk.gray( - `${columnsCount} columns ${indexesCount} indexes ${foreignKeys} fks`, - ) - }`; - }) - .join('\n'); - - msg += '\n'; - - const enums = objectValues( - 'enums' in schema - ? 'values' in schema['enums'] - ? schema['enums'] - : {} - : {}, - ); - - if (enums.length > 0) { - msg += '\n'; - msg += chalk.bold(`${enums.length} enums\n`); - - msg += enums - .map((it) => { - return `${chalk.bold.blue(it.name)} ${ - chalk.gray( - `[${Object.values(it.values).join(', ')}]`, + if (explains.length > 0) { + res.push('\n'); + if (explain) res.push(chalk.gray(`--- Generated migration statements ---\n`)); + res.push(explains.join('\n')); + } + + if (hints.length > 0) { + res.push('\n\n'); + res.push(withStyle.warning(`There're potential data loss statements:\n`)); + + for (const h of hints) { + res.push(h.hint); + res.push('\n'); + if (h.statement) res.push(highlightSQL(h.statement), '\n'); + } + } + return res.join(''); +}; + +export const psqlExplain = (st: StatementPostgres) => { + let title = ''; + let cause = ''; + + if (st.type === 'alter_column') { + const r = st.to; + const d = st.diff; + + const key = `${r.schema}.${r.table}.${r.name}`; + title += `${key} column changed:`; + if (d.default) cause += `│ default: ${d.default.from} -> ${d.default.to}\n`; + if (d.type) cause += `│ type: ${d.type.from} -> ${d.type.to}\n`; + if (d.notNull) cause += `│ notNull: ${d.notNull.from} -> ${d.notNull.to}\n`; + if (d.dimensions) cause += `│ dimensions: ${d.dimensions.from} -> ${d.dimensions.to}\n`; + + // TODO check manually + if (d.identity) cause += `│ identity: ${formatOptionChanges(d.identity.from, d.identity.to)}\n`; + } + + if (st.type === 'recreate_column') { + const { diff: d } = st; + + const key = `${d.$right.schema}.${d.$right.table}.${d.$right.name}`; + title += `${key} column recreated:`; + if (d.generated) { + const from = d.generated.from ? `${d.generated.from.as} ${d.generated.from.type}` : 'null'; + const to = d.generated.to ? `${d.generated.to.as} ${d.generated.to.type}` : 'null'; + cause += `│ generated: ${from} -> ${to}\n`; + } + } + + if (st.type === 'recreate_index') { + const diff = st.diff; + const idx = diff.$right; + const key = `${idx.schema}.${idx.table}.${idx.name}`; + title += `${key} index changed:`; + if (diff.isUnique) cause += `│ unique: ${diff.isUnique.from} -> ${diff.isUnique.to}\n`; + if (diff.where) cause += `│ where: ${diff.where.from} -> ${diff.where.to}\n`; + if (diff.method) cause += `│ where: ${diff.method.from} -> ${diff.method.to}\n`; + } + + if (st.type === 'recreate_fk') { + const { fk, diff } = st; + const key = `${fk.schema}.${fk.table}.${fk.name}`; + title += `${key} index changed:`; + if (diff.onUpdate) cause += `│ where: ${diff.onUpdate.from} -> ${diff.onUpdate.to}\n`; + if (diff.onDelete) cause += `│ onDelete: ${diff.onDelete.from} -> ${diff.onDelete.to}\n`; + } + + if (st.type === 'recreate_enum') { + const { to, from } = st; + title = `${to.schema}.${to.name} enum changed:`; + cause += `│ values shuffled/removed: [${from.values.join(',')}] -> [${to.values.join(',')}]\n`; + } + + if (st.type === 'alter_enum') { + const r = st.to; + const l = st.from; + const d = st.diff; + + title = `${r.schema}.${r.name} enum changed:`; + cause += `│ changes: [${r.values.join(',')}] -> [${l.values.join(',')}]\n`; + cause += `│ values added: ${d.filter((it) => it.type === 'added').map((it) => it.value).join(',')}\n`; + } + + if (st.type === 'alter_role') { + const d = st.diff; + const to = st.role; + + const key = `${to.name}`; + title = `${key} role changed:`; + if (d.bypassRls) cause += `│ bypassRls: ${d.bypassRls.from} -> ${d.bypassRls.to}\n`; + if (d.canLogin) cause += `│ canLogin: ${d.canLogin.from} -> ${d.canLogin.to}\n`; + if (d.connLimit) cause += `│ connLimit: ${d.connLimit.from} -> ${d.connLimit.to}\n`; + if (d.createDb) cause += `│ createDb: ${d.createDb.from} -> ${d.createDb.to}\n`; + if (d.createRole) cause += `│ createRole: ${d.createRole.from} -> ${d.createRole.to}\n`; + if (d.inherit) cause += `│ inherit: ${d.inherit.from} -> ${d.inherit.to}\n`; + if (d.password) cause += `│ password: ${d.password.from} -> ${d.password.to}\n`; + if (d.replication) cause += `│ replication: ${d.replication.from} -> ${d.replication.to}\n`; + if (d.superuser) cause += `│ superuser: ${d.superuser.from} -> ${d.superuser.to}\n`; + if (d.validUntil) cause += `│ validUntil: ${d.validUntil.from} -> ${d.validUntil.to}\n`; + } + + if (st.type === 'alter_sequence') { + const d = st.diff; + const to = st.sequence; + + const key = `${to.schema}.${to.name}`; + title = `${key} sequence changed:`; + if (d.cacheSize) cause += `│ cacheSize: ${d.cacheSize.from} -> ${d.cacheSize.to}\n`; + if (d.cycle) cause += `│ cycle: ${d.cycle.from} -> ${d.cycle.to}\n`; + if (d.incrementBy) cause += `│ incrementBy: ${d.incrementBy.from} -> ${d.incrementBy.to}\n`; + if (d.maxValue) cause += `│ maxValue: ${d.maxValue.from} -> ${d.maxValue.to}\n`; + if (d.minValue) cause += `│ minValue: ${d.minValue.from} -> ${d.minValue.to}\n`; + if (d.startWith) cause += `│ startWith: ${d.startWith.from} -> ${d.startWith.to}\n`; + } + + if (st.type === 'alter_rls') { + const key = `${st.schema}.${st.name}`; + title = `${key} rls changed:\n`; + cause += `│ rlsEnabled: ${!st.isRlsEnabled} -> ${st.isRlsEnabled}\n`; + } + + if (st.type === 'alter_policy' || st.type === 'recreate_policy') { + const d = st.diff; + const to = st.policy; + + const key = `${to.schema}.${to.table}.${to.name}`; + title = `${key} policy changed:`; + if (d.as) cause += `│ as: ${d.as.from} -> ${d.as.to}\n`; + if (d.for) cause += `│ for: ${d.for.from} -> ${d.for.to}\n`; + if (d.roles) cause += `│ roles: [${d.roles.from.join(',')}] -> [${d.roles.to.join(',')}]\n`; + if (d.using) cause += `│ using: ${d.using.from} -> ${d.using.to}\n`; + if (d.withCheck) cause += `│ withCheck: ${d.withCheck.from} -> ${d.withCheck.to}\n`; + } + + if (st.type === 'alter_unique') { + const d = st.diff; + const to = d.$right; + + const key = `${to.schema}.${to.table}.${to.name}`; + title = `${key} unique changed:`; + if (d.nullsNotDistinct) cause += `│ nullsNotDistinct: ${d.nullsNotDistinct.from} -> ${d.nullsNotDistinct.to}\n`; + if (d.columns) cause += `│ columns: [${d.columns.from.join(',')}] -> [${d.columns.to.join(',')}]\n`; + } + + if (st.type === 'alter_check') { + const d = st.diff; + + const key = `${d.schema}.${d.table}.${d.name}`; + title = `${key} check changed:`; + if (d.value) cause += `│ definition: ${d.value.from} -> ${d.value.to}\n`; + } + + if (st.type === 'alter_pk') { + const d = st.diff; + + const key = `${d.schema}.${d.table}.${d.name}`; + title += `${key} pk changed:`; + if (d.columns) cause += `│ columns: [${d.columns.from.join(',')}] -> [${d.columns.to.join(',')}]\n`; + } + + if (st.type === 'alter_view') { + const d = st.diff; + + const key = `${d.schema}.${d.name}`; + title += `${key} view changed:`; + // This should trigger recreate_view + // if (d.definition) msg += `│ definition: ${d.definition.from} -> ${d.definition.to}\n`; + + // TODO alter materialized? Should't it be recreate? + if (d.materialized) cause += `│ materialized: ${d.materialized.from} -> ${d.materialized.to}\n`; + + if (d.tablespace) cause += `│ tablespace: ${d.tablespace.from} -> ${d.tablespace.to}\n`; + if (d.using) cause += `│ using: ${d.using.from} -> ${d.using.to}\n`; + if (d.withNoData) cause += `│ withNoData: ${d.withNoData.from} -> ${d.withNoData.to}\n`; + if (d.with) cause += `| with: ${formatOptionChanges(d.with.from, d.with.to)}`; + } + + if (st.type === 'recreate_view') { + const { from, to } = st; + + const key = `${to.schema}.${to.name}`; + title += `${key} view changed:`; + cause += `│ definition: [${from.definition}] -> [${to.definition}]\n`; + } + + if (st.type === 'regrant_privilege') { + const { privilege, diff } = st; + + const key = `${privilege.name}`; + title += `${key} privilege changed:`; + if (diff.grantee) cause += `│ grantee: [${diff.grantee.from}] -> [${diff.grantee.to}]\n`; + if (diff.grantor) cause += `│ grantor: [${diff.grantor.from}] -> [${diff.grantor.to}]\n`; + if (diff.isGrantable) cause += `│ isGrantable: [${diff.isGrantable.from}] -> [${diff.isGrantable.to}]\n`; + if (diff.type) cause += `│ type: [${diff.type.from}] -> [${diff.type.to}]\n`; + } + + if (title) return { title, cause }; + + return null; +}; + +export const cockroachExplain = (st: StatementCockraoch) => { + let title = ''; + let cause = ''; + + if (st.type === 'alter_column') { + const r = st.to; + const d = st.diff; + + const key = `${r.schema}.${r.table}.${r.name}`; + title += `${key} column changed:`; + if (d.default) cause += `│ default: ${d.default.from} -> ${d.default.to}\n`; + if (d.type) cause += `│ type: ${d.type.from} -> ${d.type.to}\n`; + if (d.notNull) cause += `│ notNull: ${d.notNull.from} -> ${d.notNull.to}\n`; + if (d.dimensions) cause += `│ dimensions: ${d.dimensions.from} -> ${d.dimensions.to}\n`; + + // TODO check manually + if (d.identity) cause += `│ identity: ${formatOptionChanges(d.identity.from, d.identity.to)}\n`; + } + + if (st.type === 'recreate_column') { + const { diff: d } = st; + + const key = `${d.$right.schema}.${d.$right.table}.${d.$right.name}`; + title += `${key} column recreated:`; + if (d.generated) { + const from = d.generated.from ? `${d.generated.from.as} ${d.generated.from.type}` : 'null'; + const to = d.generated.to ? `${d.generated.to.as} ${d.generated.to.type}` : 'null'; + cause += `│ generated: ${from} -> ${to}\n`; + } + } + + if (st.type === 'recreate_index') { + const diff = st.diff; + const idx = diff.$right; + const key = `${idx.schema}.${idx.table}.${idx.name}`; + title += `${key} index changed:`; + if (diff.isUnique) cause += `│ unique: ${diff.isUnique.from} -> ${diff.isUnique.to}\n`; + if (diff.where) cause += `│ where: ${diff.where.from} -> ${diff.where.to}\n`; + if (diff.method) cause += `│ where: ${diff.method.from} -> ${diff.method.to}\n`; + } + + if (st.type === 'recreate_fk') { + const { fk, diff } = st; + const key = `${fk.schema}.${fk.table}.${fk.name}`; + title += `${key} index changed:`; + if (diff.onUpdate) cause += `│ where: ${diff.onUpdate.from} -> ${diff.onUpdate.to}\n`; + if (diff.onDelete) cause += `│ onDelete: ${diff.onDelete.from} -> ${diff.onDelete.to}\n`; + } + + if (st.type === 'recreate_enum') { + const { to, from } = st; + title = `${to.schema}.${to.name} enum changed:`; + cause += `│ values shuffled/removed: [${from.values.join(',')}] -> [${to.values.join(',')}]\n`; + } + + if (st.type === 'alter_enum') { + const r = st.to; + const l = st.from; + const d = st.diff; + + title = `${r.schema}.${r.name} enum changed:`; + cause += `│ changes: [${r.values.join(',')}] -> [${l.values.join(',')}]\n`; + cause += `│ values added: ${d.filter((it) => it.type === 'added').map((it) => it.value).join(',')}\n`; + } + + if (st.type === 'alter_role') { + const d = st.diff; + const to = st.role; + + const key = `${to.name}`; + title = `${key} role changed:`; + if (d.createDb) cause += `│ createDb: ${d.createDb.from} -> ${d.createDb.to}\n`; + if (d.createRole) cause += `│ createRole: ${d.createRole.from} -> ${d.createRole.to}\n`; + } + + if (st.type === 'alter_sequence') { + const d = st.diff; + const to = st.sequence; + + const key = `${to.schema}.${to.name}`; + title = `${key} sequence changed:`; + if (d.cacheSize) cause += `│ cacheSize: ${d.cacheSize.from} -> ${d.cacheSize.to}\n`; + if (d.incrementBy) cause += `│ incrementBy: ${d.incrementBy.from} -> ${d.incrementBy.to}\n`; + if (d.maxValue) cause += `│ maxValue: ${d.maxValue.from} -> ${d.maxValue.to}\n`; + if (d.minValue) cause += `│ minValue: ${d.minValue.from} -> ${d.minValue.to}\n`; + if (d.startWith) cause += `│ startWith: ${d.startWith.from} -> ${d.startWith.to}\n`; + } + + if (st.type === 'alter_rls') { + const key = `${st.schema}.${st.name}`; + title = `${key} rls changed:\n`; + cause += `│ rlsEnabled: ${!st.isRlsEnabled} -> ${st.isRlsEnabled}\n`; + } + + if (st.type === 'alter_policy' || st.type === 'recreate_policy') { + const d = st.diff; + const to = st.policy; + + const key = `${to.schema}.${to.table}.${to.name}`; + title = `${key} policy changed:`; + if (d.as) cause += `│ as: ${d.as.from} -> ${d.as.to}\n`; + if (d.for) cause += `│ for: ${d.for.from} -> ${d.for.to}\n`; + if (d.roles) cause += `│ roles: [${d.roles.from.join(',')}] -> [${d.roles.to.join(',')}]\n`; + if (d.using) cause += `│ using: ${d.using.from} -> ${d.using.to}\n`; + if (d.withCheck) cause += `│ withCheck: ${d.withCheck.from} -> ${d.withCheck.to}\n`; + } + + if (st.type === 'alter_check') { + const d = st.diff; + + const key = `${d.schema}.${d.table}.${d.name}`; + title = `${key} check changed:`; + if (d.value) cause += `│ definition: ${d.value.from} -> ${d.value.to}\n`; + } + + if (st.type === 'alter_pk') { + const d = st.diff; + + const key = `${d.schema}.${d.table}.${d.name}`; + title += `${key} pk changed:`; + if (d.columns) cause += `│ columns: [${d.columns.from.join(',')}] -> [${d.columns.to.join(',')}]\n`; + } + + if (st.type === 'recreate_view') { + const { from, to } = st; + + const key = `${to.schema}.${to.name}`; + title += `${key} view changed:`; + cause += `│ definition: [${from.definition}] -> [${to.definition}]\n`; + } + + if (title) return { title, cause }; + + return null; +}; + +export const mysqlExplain = ( + st: StatementMysql, +) => { + let title = ''; + let cause = ''; + + if (st.type === 'alter_column') { + const r = st.diff.$right; + const d = st.diff; + + const key = `${r.table}.${r.name}`; + title += `${key} column changed:\n`; + if (d.default) cause += `│ default: ${d.default.from} -> ${d.default.to}\n`; + if (d.type) cause += `│ type: ${d.type.from} -> ${d.type.to}\n`; + if (d.notNull) cause += `│ notNull: ${d.notNull.from} -> ${d.notNull.to}\n`; + if (d.autoIncrement) cause += `│ autoIncrement: ${d.autoIncrement.from} -> ${d.autoIncrement.to}\n`; + if (d.charSet) cause += `│ charSet: ${d.charSet.from} -> ${d.charSet.to}\n`; + if (d.collation) cause += `│ collation: ${d.collation.from} -> ${d.collation.to}\n`; + if (d.onUpdateNow) cause += `│ onUpdateNow: ${d.onUpdateNow.from} -> ${d.onUpdateNow.to}\n`; + if (d.onUpdateNowFsp) cause += `│ onUpdateNowFsp: ${d.onUpdateNowFsp.from} -> ${d.onUpdateNowFsp.to}\n`; + } + + if (st.type === 'recreate_column') { + const { column, diff } = st; + + const key = `${column.table}.${column.name}`; + title += `${key} column recreated:\n`; + if (diff.generated) { + const from = diff.generated.from ? `${diff.generated.from.as} ${diff.generated.from.type}` : 'null'; + const to = diff.generated.to ? `${diff.generated.to.as} ${diff.generated.to.type}` : 'null'; + cause += `│ generated: ${from} -> ${to}\n`; + } + } + + if (st.type === 'alter_view') { + const { diff, view } = st; + + const key = `${view.name}`; + title += `${key} view changed:\n`; + if (diff.algorithm) cause += `│ algorithm: ${diff.algorithm.from} -> ${diff.algorithm.to}\n`; + if (diff.definition) cause += `│ definition: ${diff.definition.from} -> ${diff.definition.to}\n`; + if (diff.sqlSecurity) cause += `│ sqlSecurity: ${diff.sqlSecurity.from} -> ${diff.sqlSecurity.to}\n`; + if (diff.withCheckOption) { + cause += `│ withCheckOption: ${diff.withCheckOption.from} -> ${diff.withCheckOption.to}\n`; + } + } + + if (title) return { title, cause }; + + return null; +}; + +export const mssqlExplain = ( + st: StatementMssql, +) => { + let title = ''; + let cause = ''; + + if (st.type === 'alter_column') { + const r = st.diff.$right; + const d = st.diff; + + const key = `${r.schema}.${r.table}.${r.name}`; + title += `${key} column changed:\n`; + if (d.type) cause += `│ type: ${d.type.from} -> ${d.type.to}\n`; + if (d.notNull) cause += `│ notNull: ${d.notNull.from} -> ${d.notNull.to}\n`; + } + + if (st.type === 'recreate_column') { + const { diff } = st; + + const key = `${diff.$right.schema}.${diff.$right.table}.${diff.$right.name}`; + title += `${key} column recreated:\n`; + if (diff.generated) { + const from = diff.generated.from ? `${diff.generated.from.as} ${diff.generated.from.type}` : 'null'; + const to = diff.generated.to ? `${diff.generated.to.as} ${diff.generated.to.type}` : 'null'; + cause += `│ generated: ${from} -> ${to}\n`; + } + } + if (st.type === 'recreate_identity_column') { + const { column } = st; + + const key = `${column.$right.schema}.${column.$right.table}.${column.$right.name}`; + title += `${key} column recreated:\n`; + if (column.identity) { + const from = column.identity.from ? `${column.identity.from.increment} ${column.identity.from.seed}` : 'null'; + const to = column.identity.to ? `${column.identity.to.increment} ${column.identity.to.seed}` : 'null'; + cause += `│ identity: ${from} -> ${to}\n`; + } + } + + if (st.type === 'alter_view') { + const { diff, view } = st; + + const key = `${view.schema}.${view.name}`; + title += `${key} view changed:\n`; + if (diff.checkOption) cause += `│ checkOption: ${diff.checkOption.from} -> ${diff.checkOption.to}\n`; + if (diff.definition) cause += `│ definition: ${diff.definition.from} -> ${diff.definition.to}\n`; + if (diff.encryption) cause += `│ encryption: ${diff.encryption.from} -> ${diff.encryption.to}\n`; + if (diff.schemaBinding) { + cause += `│ schemaBinding: ${diff.schemaBinding.from} -> ${diff.schemaBinding.to}\n`; + } + if (diff.viewMetadata) { + cause += `│ viewMetadata: ${diff.viewMetadata.from} -> ${diff.viewMetadata.to}\n`; + } + } + + if (st.type === 'recreate_default') { + const { from, to } = st; + + const key = `${to.schema}.${to.name}`; + title += `${key} default changed:\n`; + cause += `│ default: ${from.default} -> ${to.default}\n`; + } + + if (title) return { title, cause }; + + return null; +}; + +export const sqliteExplain = ( + st: StatementSqlite, +) => { + let title = ''; + let cause = ''; + + if (st.type === 'recreate_table') { + const { + to, + alteredColumnsBecameGenerated, + checkDiffs, + checksAlters, + columnAlters, + fksAlters, + fksDiff, + indexesDiff, + newStoredColumns, + pksAlters, + pksDiff, + uniquesAlters, + uniquesDiff, + } = st; + + const key = `${to.name}`; + + title += `${key} table recreated:\n`; + + const blocks: string[][] = []; + + if (alteredColumnsBecameGenerated.length) { + blocks.push([ + `│ Columns become generated stored: ${alteredColumnsBecameGenerated.map((it) => `${it.name}`).join(', ')}\n`, + `│ It is not possible to make existing column as generated STORED\n`, + ]); + } + + if (checkDiffs.length) { + const createdChecks = checkDiffs.filter((it) => it.$diffType === 'create'); + const droppedChecks = checkDiffs.filter((it) => it.$diffType === 'drop'); + + if (createdChecks.length) { + blocks.push([`| Check constraints added: ${createdChecks.map((it) => `${it.name}`).join(', ')}\n`]); + } + + if (droppedChecks.length) { + blocks.push([`| Check constraints dropped: ${droppedChecks.map((it) => `${it.name}`).join(', ')}\n`]); + } + } + + if (checksAlters.length) { + blocks.push([ + `│ Check constraints altered definition:\n`, + `│ ${checksAlters.map((it) => `${it.name}: ${it.$left.value} -> ${it.$right.value}`).join(',\n')}\n`, + ]); + } + + if (columnAlters.filter((it) => it.type || it.default || it.autoincrement || it.notNull).length) { + let res: string = ''; + const alteredNotNull = columnAlters.filter((it) => it.notNull); + const alteredType = columnAlters.filter((it) => it.type); + const alteredDefault = columnAlters.filter((it) => it.default); + const alteredAutoincrement = columnAlters.filter((it) => it.autoincrement); + + res += `│ Columns altered:\n`; + if (alteredNotNull.length) { + res += `${ + alteredNotNull.map((it) => `│ ${it.name} => notNull: ${it.notNull?.from} -> ${it.notNull?.to}`).join( + '\n', ) - }`; - }) - .join('\n'); - msg += '\n'; + }\n`; + } + if (alteredType.length) { + res += `${alteredType.map((it) => `│ ${it.name} => type: ${it.type?.from} -> ${it.type?.to}`).join('\n')}\n`; + } + if (alteredDefault.length) { + res += `${ + alteredDefault.map((it) => `│ ${it.name} => default: ${it.default?.from} -> ${it.default?.to}`).join( + '\n', + ) + }\n`; + } + if (alteredAutoincrement.length) { + res += `${ + alteredAutoincrement.map((it) => + `│ ${it.name} => autoincrement: ${it.autoincrement?.from} -> ${it.autoincrement?.to}` + ).join('\n') + }\n`; + } + + blocks.push([res]); + } + + if (uniquesDiff.length) { + const uniquesCreated = uniquesDiff.filter((it) => it.$diffType === 'create'); + const uniquesDropped = uniquesDiff.filter((it) => it.$diffType === 'drop'); + if (uniquesCreated.length) { + blocks.push([`│ Unique constraints added: ${uniquesCreated.map((it) => `${it.name}`).join(', ')}\n`]); + } + if (uniquesDropped.length) { + blocks.push([`│ Unique constraints dropped: ${uniquesDropped.map((it) => `${it.name}`).join(', ')}\n`]); + } + } + + if (pksDiff.length) { + const pksCreated = pksDiff.filter((it) => it.$diffType === 'create'); + const pksDropped = pksDiff.filter((it) => it.$diffType === 'drop'); + + if (pksCreated.length) { + blocks.push([`│ Primary key constraints added: ${pksCreated.map((it) => `${it.name}`).join(', ')}\n`]); + } + if (pksDropped.length) { + blocks.push([`│ Primary key constraints dropped: ${pksDropped.map((it) => `${it.name}`).join(', ')}\n`]); + } + } + + if (newStoredColumns.length) { + blocks.push([ + `| Stored columns added: ${newStoredColumns.map((it) => `${it.name}`).join(', ')}\n`, + ]); + } + + if (pksAlters.length) { + blocks.push([ + `│ Primary key was altered:\n`, + `${ + pksAlters.filter((it) => it.columns).map((it) => + `[${it.columns?.from.join(',')}] -> [${it.columns?.to.join(',')}]\n` + ) + }\n`, + ]); + } + + if (uniquesAlters.length) { + blocks.push([ + `│ Unique constraint was altered:\n`, + `${ + uniquesAlters.filter((it) => it.columns).map((it) => + `│ name: ${it.name} => columns: [${it.columns?.from.join(',')}] -> [${it.columns?.to.join(',')}]\n` + ) + }\n`, + ]); + } + + if (fksAlters.length) { + let res: string = ''; + + const columnsAltered = fksAlters.filter((it) => it.columns); + const columnsToAltered = fksAlters.filter((it) => it.columnsTo); + const tablesToAltered = fksAlters.filter((it) => it.tableTo); + + res += `│ Foreign key constraint was altered:\n`; + if (columnsAltered) { + res += `${ + columnsAltered.map((it) => + `│ name: ${it.name} => columns: [${it.columns?.from.join(',')}] -> [${it.columns?.to.join(',')}]` + ) + }\n`; + } + if (columnsToAltered) { + res += ` ${ + columnsToAltered.map((it) => + `│ name: ${it.name} => columnsTo: [${it.columnsTo?.from.join(',')}] -> [${it.columnsTo?.to.join(',')}]` + ) + }\n`; + } + if (tablesToAltered) { + res += `${ + tablesToAltered.map((it) => `│ name: ${it.name} => tableTo: [${it.tableTo?.from}] -> [${it.tableTo?.to}]`) + }\n`; + } + + blocks.push([res]); + } + + if (fksDiff.length) { + const fksCreated = fksDiff.filter((it) => it.$diffType === 'create'); + const fksDropped = fksDiff.filter((it) => it.$diffType === 'drop'); + if (fksCreated.length) { + blocks.push([`| Foreign key constraints added: ${fksCreated.map((it) => `${it.name}`).join(', ')}\n`]); + } + if (fksDropped.length) { + blocks.push([`| Foreign key constraints dropped: ${fksDropped.map((it) => `${it.name}`).join(', ')}\n`]); + } + } + + if (indexesDiff.filter((it) => it.isUnique && it.origin === 'auto').length) { + const indexCreated = indexesDiff.filter((it) => it.$diffType === 'create'); + const indexDropped = indexesDiff.filter((it) => it.$diffType === 'drop'); + if (indexCreated.length) { + blocks.push([`| System generated index added: ${indexCreated.map((it) => `${it.name}`).join(', ')}\n`]); + } + if (indexDropped.length) { + blocks.push([`| System generated index dropped: ${indexDropped.map((it) => `${it.name}`).join(', ')}\n`]); + } + } + + cause += blocks.map((it) => it.join('')).join('├─\n'); + } + + if (st.type === 'recreate_column') { + const { + column, + diffGenerated, + } = st; + + const key = `${column.name}`; + + title += `${key} column recreated:\n`; + + cause += `| generated: ${ + diffGenerated && diffGenerated.from ? diffGenerated.from.as + ' ' + diffGenerated.from.type : 'null' + } -> ${diffGenerated && diffGenerated.to ? diffGenerated.to.as + ' ' + diffGenerated.to.type : 'null'}`; + } + + if (title) return { title, cause }; + + return null; +}; + +export const postgresSchemaError = (error: PostgresSchemaError): string => { + if (error.type === 'constraint_name_duplicate') { + const { name, schema, table } = error; + const tableName = chalk.underline.blue(`"${schema}"."${table}"`); + const constraintName = chalk.underline.blue(`'${name}'`); + return withStyle.errorWarning( + `There's a duplicate constraint name ${constraintName} in ${tableName} table`, + ); + } + + if (error.type === 'index_duplicate') { + // check for index names duplicates + const { schema, table, name } = error; + const sch = chalk.underline.blue(`"${schema}"`); + const idx = chalk.underline.blue(`'${name}'`); + const tableName = chalk.underline.blue(`"${schema}"."${table}"`); + return withStyle.errorWarning( + `There's a duplicate index name ${idx} in ${sch} schema in ${tableName}`, + ); + } + + if (error.type === 'index_no_name') { + const { schema, table, sql } = error; + const tableName = chalk.underline.blue(`"${schema}"."${table}"`); + return withStyle.errorWarning( + `Please specify an index name in ${tableName} table that has "${sql}" expression.\n\nWe can generate index names for indexes on columns only; for expressions in indexes, you need to specify index name yourself.`, + ); + } + + if (error.type === 'pgvector_index_noop') { + const { table, indexName, column, method } = error; + return withStyle.errorWarning( + `You are specifying an index on the ${ + chalk.blueBright( + column, + ) + } column inside the ${ + chalk.blueBright( + table, + ) + } table with the ${ + chalk.blueBright( + 'vector', + ) + } type without specifying an operator class. Vector extension doesn't have a default operator class, so you need to specify one of the available options. Here is a list of available op classes for the vector extension: [${ + vectorOps + .map((it) => `${chalk.underline(`${it}`)}`) + .join(', ') + }].\n\nYou can specify it using current syntax: ${ + chalk.underline( + `index("${indexName}").using("${method}", table.${column}.op("${vectorOps[0]}"))`, + ) + }\n\nYou can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing\n`, + ); + } + + if (error.type === 'policy_duplicate') { + const { schema, table, policy } = error; + const tableName = chalk.underline.blue(`"${schema}"."${table}"`); + + return withStyle.errorWarning( + `We've found duplicated policy name across ${tableName} table. Please rename one of the policies with ${ + chalk.underline.blue( + policy, + ) + } name`, + ); + } + + if (error.type === 'view_name_duplicate') { + const schema = chalk.underline.blue(error.schema ?? 'public'); + const name = chalk.underline.blue(error.name); + return withStyle.errorWarning( + `There's a view duplicate name ${name} in ${schema} schema`, + ); + } + + if (error.type === 'sequence_name_duplicate') { + return withStyle.errorWarning(`There's a sequence name duplicate '${error.name}' in '${error.schema}' schema`); + } + + // assertUnreachable(error); + return ''; +}; + +export const mysqlSchemaError = (error: MysqlSchemaError): string => { + if (error.type === 'column_name_conflict') { + const { name, table } = error; + const tableName = chalk.underline.blue(`\`${table}\``); + const columnName = chalk.underline.blue(`\`${name}\``); + return withStyle.errorWarning( + `There's a duplicate column name ${columnName} in ${tableName} table`, + ); + } + + if (error.type === 'table_name_conflict') { + const { name: table } = error; + const tableName = chalk.underline.blue(`\`${table}\``); + return withStyle.errorWarning( + `There's a duplicate table name ${tableName}`, + ); + } + + if (error.type === 'column_unsupported_unique') { + const { table, columns } = error; + const tableName = chalk.underline.blue(`\`${table}\``); + const columnsName = chalk.underline.blue(`\`${columns.join('`, `')}\``); + + const warningText = `You tried to add${columns.length > 1 ? ` COMPOSITE` : ''} UNIQUE on ${columnsName} ${ + columns.length > 1 ? 'columns' : 'column' + } in ${tableName} table +It's not currently possible to create a UNIQUE constraint on BLOB/TEXT column type. +To enforce uniqueness, create a UNIQUE INDEX instead, specifying a prefix length with sql\`\` +Ex. +const users = mysqlTable('users', { + username: text() +}, (t) => [${chalk.underline.green('uniqueIndex("name").on(sql`username(10)`)')}]`; + + return withStyle.errorWarning(warningText); + } + + if (error.type === 'column_unsupported_default_on_autoincrement') { + const { table, column } = error; + const tableName = chalk.underline.blue(`\`${table}\``); + const columnName = chalk.underline.blue(`\`${column}\``); + + const warningText = + `You tried to add DEFAULT value to ${columnName} in ${tableName}. AUTO_INCREMENT or SERIAL automatically generate their values. You can not set a default for it`; + + return withStyle.errorWarning(warningText); } - return msg; + + assertUnreachable(error); + return ''; +}; + +export const mssqlSchemaError = (error: MssqlSchemaError): string => { + if (error.type === 'constraint_duplicate') { + const { name, schema } = error; + const constraintName = chalk.underline.blue(`'${name}'`); + const schemaName = chalk.underline.blue(`'${schema}'`); + + return withStyle.errorWarning( + `There's a duplicate constraint name ${constraintName} across ${schemaName} schema`, + ); + } + + if (error.type === 'index_duplicate') { + // check for index names duplicates + const { schema, table, name } = error; + const sch = chalk.underline.blue(`"${schema}"`); + const idx = chalk.underline.blue(`'${name}'`); + const tableName = chalk.underline.blue(`"${schema}"."${table}"`); + return withStyle.errorWarning( + `There's a duplicate index name ${idx} in ${sch} schema in ${tableName}`, + ); + } + + if (error.type === 'index_no_name') { + const { schema, table, sql } = error; + const tableName = chalk.underline.blue(`"${schema}"."${table}"`); + return withStyle.errorWarning( + `Please specify an index name in ${tableName} table that has "${sql}" expression.\n\nWe can generate index names for indexes on columns only; for expressions in indexes, you need to specify index name yourself.`, + ); + } + + if (error.type === 'view_name_duplicate') { + const schema = chalk.underline.blue(error.schema); + const name = chalk.underline.blue(error.name); + return withStyle.errorWarning( + `There's a view duplicate name ${name} across ${schema} schema`, + ); + } + + if (error.type === 'column_duplicate') { + const schema = chalk.underline.blue(error.schema); + const name = chalk.underline.blue(error.name); + const tableName = chalk.underline.blue(`"${schema}"."${error.table}"`); + return withStyle.errorWarning( + `There's a column duplicate name ${name} in ${tableName} table`, + ); + } + + if (error.type === 'schema_duplicate') { + const schemaName = chalk.underline.blue(error.name); + return withStyle.errorWarning( + `There's a schema duplicate name ${schemaName}`, + ); + } + + if (error.type === 'table_duplicate') { + const schema = chalk.underline.blue(error.schema); + const tableName = chalk.underline.blue(`"${schema}"."${error.name}"`); + + return withStyle.errorWarning( + `There's a table duplicate name ${tableName} across ${schema} schema`, + ); + } + + assertUnreachable(error); }; export interface RenamePropmtItem { @@ -80,7 +1013,7 @@ export interface RenamePropmtItem { to: T; } -export const isRenamePromptItem = ( +export const isRenamePromptItem = ( item: RenamePropmtItem | T, ): item is RenamePropmtItem => { return 'from' in item && 'to' in item; @@ -148,7 +1081,7 @@ export class ResolveColumnSelect extends Prompt< : `${chalk.green('+')} ${title} ${chalk.gray('create column')}`; text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; - text += idx != this.data.items.length - 1 ? '\n' : ''; + text += idx !== this.data.items.length - 1 ? '\n' : ''; }); return text; } @@ -178,7 +1111,6 @@ export class ResolveSelectNamed extends Prompt< this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.state = new SelectState(data); this.state.bind(this); - this.base = base; } render(status: 'idle' | 'submitted' | 'aborted'): string { @@ -225,7 +1157,7 @@ export class ResolveSelectNamed extends Prompt< : `${chalk.green('+')} ${title} ${chalk.gray(`create ${entityType}`)}`; text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; - text += idx != this.state.items.length - 1 ? '\n' : ''; + text += idx !== this.state.items.length - 1 ? '\n' : ''; }); return text; } @@ -235,7 +1167,15 @@ export class ResolveSelectNamed extends Prompt< } } -export class ResolveSelect extends Prompt< +type EntityBase = { schema?: string; table?: string; name: string }; + +const keyFor = (it: EntityBase, defaultSchema: 'dbo' | 'public' = 'public') => { + const schemaPrefix = it.schema && it.schema !== defaultSchema ? `${it.schema}.` : ''; + const tablePrefix = it.table ? `${it.schema}.` : ''; + return `${schemaPrefix}${tablePrefix}${it.name}`; +}; + +export class ResolveSelect extends Prompt< RenamePropmtItem | T > { private readonly state: SelectState | T>; @@ -243,21 +1183,36 @@ export class ResolveSelect extends Prompt< constructor( private readonly base: T, data: (RenamePropmtItem | T)[], - private readonly entityType: 'table' | 'enum' | 'sequence' | 'view' | 'role', + private readonly entityType: + | 'schema' + | 'enum' + | 'table' + | 'column' + | 'sequence' + | 'view' + | 'privilege' + | 'policy' + | 'role' + | 'check' + | 'index' + | 'unique' + | 'primary key' + | 'foreign key' + | 'default', + private defaultSchema: 'dbo' | 'public' = 'public', ) { super(); this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.state = new SelectState(data); this.state.bind(this); - this.base = base; } render(status: 'idle' | 'submitted' | 'aborted'): string { if (status === 'submitted' || status === 'aborted') { return ''; } - const key = tableKey(this.base); + const key = keyFor(this.base, this.defaultSchema); let text = `\nIs ${chalk.bold.blue(key)} ${this.entityType} created or renamed from another ${this.entityType}?\n`; const isSelectedRenamed = isRenamePromptItem( @@ -272,7 +1227,7 @@ export class ResolveSelect extends Prompt< .filter((it) => isRenamePromptItem(it)) .map((_) => { const it = _ as RenamePropmtItem; - const keyFrom = tableKey(it.from); + const keyFrom = keyFor(it.from); return key.length + 3 + keyFrom.length; }) .reduce((a, b) => { @@ -288,15 +1243,15 @@ export class ResolveSelect extends Prompt< const isRenamed = isRenamePromptItem(it); const title = isRenamed - ? `${tableKey(it.from)} › ${tableKey(it.to)}`.padEnd(labelLength, ' ') - : tableKey(it).padEnd(labelLength, ' '); + ? `${keyFor(it.from, this.defaultSchema)} › ${keyFor(it.to, this.defaultSchema)}`.padEnd(labelLength, ' ') + : keyFor(it, this.defaultSchema).padEnd(labelLength, ' '); const label = isRenamed ? `${chalk.yellow('~')} ${title} ${chalk.gray(`rename ${entityType}`)}` : `${chalk.green('+')} ${title} ${chalk.gray(`create ${entityType}`)}`; text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; - text += idx != this.state.items.length - 1 ? '\n' : ''; + text += idx !== this.state.items.length - 1 ? '\n' : ''; }); return text; } @@ -316,7 +1271,6 @@ export class ResolveSchemasSelect extends Prompt< this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.state = new SelectState(data); this.state.bind(this); - this.base = base; } render(status: 'idle' | 'submitted' | 'aborted'): string { @@ -359,7 +1313,7 @@ export class ResolveSchemasSelect extends Prompt< : `${chalk.green('+')} ${title} ${chalk.gray('create schema')}`; text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; - text += idx != this.state.items.length - 1 ? '\n' : ''; + text += idx !== this.state.items.length - 1 ? '\n' : ''; }); return text; } @@ -389,16 +1343,16 @@ class Spinner { }; } -const frames = function(values: string[]): () => string { - let index = 0; - const iterator = () => { - const frame = values[index]; - index += 1; - index %= values.length; - return frame!; - }; - return iterator; -}; +// const frames = function(values: string[]): () => string { +// let index = 0; +// const iterator = () => { +// const frame = values[index]; +// index += 1; +// index %= values.length; +// return frame!; +// }; +// return iterator; +// }; type ValueOf = T[keyof T]; export type IntrospectStatus = 'fetching' | 'done'; @@ -549,6 +1503,12 @@ export class MigrateProgress extends TaskView { } } +export class EmptyProgressView extends TaskView { + override render(): string { + return ''; + } +} + export class ProgressView extends TaskView { private readonly spinner: Spinner = new Spinner('⣷⣯⣟⡿⢿⣻⣽⣾'.split('')); private timeout: NodeJS.Timeout | undefined; @@ -612,7 +1572,7 @@ export class DropMigrationView extends Prompt { title = isSelected ? chalk.yellow(title) : title; text += isSelected ? `${selectedPrefix}${title}` : ` ${title}`; - text += idx != this.data.items.length - 1 ? '\n' : ''; + text += idx !== this.data.items.length - 1 ? '\n' : ''; }); text += data.endTrimmed ? ' ...\n' : ''; diff --git a/drizzle-kit/src/dialects/cockroach/convertor.ts b/drizzle-kit/src/dialects/cockroach/convertor.ts new file mode 100644 index 0000000000..cf2aa16b40 --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/convertor.ts @@ -0,0 +1,783 @@ +import { escapeSingleQuotes, type Simplify } from '../../utils'; +import { defaultNameForPK, defaults, defaultToSQL, isDefaultAction } from './grammar'; +import type { JsonStatement } from './statements'; + +export const convertor = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + convertor: (statement: Simplify>) => string | string[], +) => { + return { + type, + can: (st: JsonStatement) => { + return st.type === type; + }, + convert: convertor, + }; +}; + +const createSchemaConvertor = convertor('create_schema', (st) => { + return `CREATE SCHEMA "${st.name}";\n`; +}); + +const dropSchemaConvertor = convertor('drop_schema', (st) => { + return `DROP SCHEMA "${st.name}";\n`; +}); + +const renameSchemaConvertor = convertor('rename_schema', (st) => { + return `ALTER SCHEMA "${st.from.name}" RENAME TO "${st.to.name}";\n`; +}); + +const createViewConvertor = convertor('create_view', (st) => { + const { definition, name: viewName, schema, materialized, withNoData } = st.view; + + const name = schema !== 'public' ? `"${schema}"."${viewName}"` : `"${viewName}"`; + let statement = materialized ? `CREATE MATERIALIZED VIEW ${name}` : `CREATE VIEW ${name}`; + + statement += ` AS (${definition})`; + if (withNoData) statement += ` WITH NO DATA`; + statement += `;`; + + return statement; +}); + +const dropViewConvertor = convertor('drop_view', (st) => { + const { name: viewName, schema, materialized } = st.view; + const name = schema !== 'public' ? `"${schema}"."${viewName}"` : `"${viewName}"`; + return `DROP${materialized ? ' MATERIALIZED' : ''} VIEW ${name};`; +}); + +const renameViewConvertor = convertor('rename_view', (st) => { + const materialized = st.from.materialized; + const nameFrom = st.from.schema !== 'public' ? `"${st.from.schema}"."${st.from.name}"` : `"${st.from.name}"`; + + return `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW ${nameFrom} RENAME TO "${st.to.name}";`; +}); + +const moveViewConvertor = convertor('move_view', (st) => { + const { fromSchema, toSchema, view } = st; + const from = fromSchema === 'public' ? `"${view.name}"` : `"${fromSchema}"."${view.name}"`; + return `ALTER${view.materialized ? ' MATERIALIZED' : ''} VIEW ${from} SET SCHEMA "${toSchema}";`; +}); + +const recreateViewConvertor = convertor('recreate_view', (st) => { + const drop = dropViewConvertor.convert({ view: st.from }) as string; + const create = createViewConvertor.convert({ view: st.to }) as string; + return [drop, create]; +}); + +const createTableConvertor = convertor('create_table', (st) => { + const { schema, name, columns, pk, checks, policies, isRlsEnabled, indexes } = st.table; + + const uniqueIndexes = indexes.filter((it) => + it.isUnique && (!it.method || it.method === defaults.index.method) && !it.where + ); + + const statements = [] as string[]; + let statement = ''; + const key = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + + statement += `CREATE TABLE ${key} (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const isPK = pk && pk.columns.length === 1 && pk.columns[0] === column.name + && pk.name === defaultNameForPK(column.table); + + const primaryKeyStatement = isPK ? ' PRIMARY KEY' : ''; + const notNullStatement = pk?.columns.includes(column.name) + ? '' + : column.notNull && !column.identity + ? ' NOT NULL' + : ''; + + const defaultStatement = column.default ? ` DEFAULT ${defaultToSQL(column)}` : ''; + + const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' + ? `"${column.typeSchema}".` + : ''; + let type = column.typeSchema + ? `"${column.type}"` + : column.type; + type = `${schemaPrefix}${type}${'[]'.repeat(column.dimensions)}`; + + const generated = column.generated; + + const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) STORED` : ''; + + const identity = column.identity + ? ` GENERATED ${column.identity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT'} AS IDENTITY (${ + column.identity.increment ? `INCREMENT BY ${column.identity.increment}` : '' + }${column.identity.minValue ? ` MINVALUE ${column.identity.minValue}` : ''}${ + column.identity.maxValue ? ` MAXVALUE ${column.identity.maxValue}` : '' + }${column.identity.startWith ? ` START WITH ${column.identity.startWith}` : ''}${ + column.identity.cache ? ` CACHE ${column.identity.cache}` : '' + })` + : ''; + + statement += '\t' + + `"${column.name}" ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${identity}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + for (const unique of uniqueIndexes) { + statement += ',\n'; + const uniqueString = unique.columns.map((it) => (it.isExpression ? `${it.value}` : `"${it.value}"`)).join(','); + + statement += `\tCONSTRAINT "${unique.name}" UNIQUE(${uniqueString})`; + } + + if (pk && (pk.columns.length > 1 || pk.name !== defaultNameForPK(st.table.name))) { + statement += ',\n'; + statement += `\tCONSTRAINT "${pk.name}" PRIMARY KEY("${pk.columns.join(`","`)}")`; + } + + for (const check of checks) { + statement += ',\n'; + statement += `\tCONSTRAINT "${check.name}" CHECK (${check.value})`; + } + + statement += `\n);`; + statement += `\n`; + statements.push(statement); + + if ((policies && policies.length > 0) || isRlsEnabled) { + statements.push( + toggleRlsConvertor.convert({ + isRlsEnabled: true, + name: st.table.name, + schema: st.table.schema, + }) as string, + ); + } + + return statements; +}); + +const dropTableConvertor = convertor('drop_table', (st) => { + const { name, schema, policies } = st.table; + + const tableNameWithSchema = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + + const droppedPolicies = policies.map((policy) => dropPolicyConvertor.convert({ policy }) as string); + + return [...droppedPolicies, `DROP TABLE ${tableNameWithSchema};`]; +}); + +const renameTableConvertor = convertor('rename_table', (st) => { + const schemaPrefix = st.schema !== 'public' ? `"${st.schema}".` : ''; + + return `ALTER TABLE ${schemaPrefix}"${st.from}" RENAME TO "${st.to}";`; +}); + +const moveTableConvertor = convertor('move_table', (st) => { + const from = st.from !== 'public' ? `"${st.from}"."${st.name}"` : `"${st.name}"`; + + return `ALTER TABLE ${from} SET SCHEMA "${st.to}";\n`; +}); + +const addColumnConvertor = convertor('add_column', (st) => { + const { schema, table, name, identity, generated } = st.column; + const column = st.column; + + const tableNameWithSchema = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + const defaultStatement = column.default ? ` DEFAULT ${defaultToSQL(column)}` : ''; + + const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' + ? `"${column.typeSchema}".` + : ''; + const type = column.typeSchema + ? `"${column.type}"` + : column.type; + let fixedType = `${schemaPrefix}${type}${'[]'.repeat(column.dimensions)}`; + + // unlike postgres cockroach requires explicit not null columns for pk + const notNullStatement = column.notNull && !identity && !generated ? ' NOT NULL' : ''; + + const identityStatement = identity + ? ` GENERATED ${identity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT'} AS IDENTITY (${ + identity.increment ? `INCREMENT BY ${identity.increment}` : '' + }${identity.minValue ? ` MINVALUE ${identity.minValue}` : ''}${ + identity.maxValue ? ` MAXVALUE ${identity.maxValue}` : '' + }${identity.startWith ? ` START WITH ${identity.startWith}` : ''}${ + identity.cache ? ` CACHE ${identity.cache}` : '' + })` + : ''; + + const generatedStatement = column.generated ? ` GENERATED ALWAYS AS (${column.generated.as}) STORED` : ''; + + return `ALTER TABLE ${tableNameWithSchema} ADD COLUMN "${name}" ${fixedType}${defaultStatement}${generatedStatement}${notNullStatement}${identityStatement};`; +}); + +const dropColumnConvertor = convertor('drop_column', (st) => { + const { schema, table, name } = st.column; + + const tableNameWithSchema = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP COLUMN "${name}";`; +}); + +const renameColumnConvertor = convertor('rename_column', (st) => { + const { table, schema } = st.from; + const tableNameWithSchema = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + return `ALTER TABLE ${tableNameWithSchema} RENAME COLUMN "${st.from.name}" TO "${st.to.name}";`; +}); + +const recreateColumnConvertor = convertor('recreate_column', (st) => { + // AlterTableAlterColumnSetExpressionConvertor + // AlterTableAlterColumnAlterGeneratedConvertor + + const drop = dropColumnConvertor.convert({ column: st.diff.$right }) as string; + const add = addColumnConvertor.convert({ + column: st.diff.$right, + }) as string; + + return [drop, add]; +}); + +const alterColumnConvertor = convertor('alter_column', (st) => { + const { diff, to: column, isEnum, wasEnum } = st; + const statements = [] as string[]; + + const key = column.schema !== 'public' ? `"${column.schema}"."${column.table}"` : `"${column.table}"`; + + // TODO need to recheck this + const recreateDefault = diff.type && (isEnum || wasEnum) && (column.default || (diff.default && diff.default.from)); + if (recreateDefault) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP DEFAULT;`); + } + + if (diff.type) { + const typeSchema = column.typeSchema && column.typeSchema !== 'public' ? `"${column.typeSchema}".` : ''; + const textProxy = wasEnum && isEnum ? 'text::' : ''; // using enum1::text::enum2 + const suffix = isEnum + ? ` USING "${column.name}"::${textProxy}${typeSchema}"${column.type}"${'[]'.repeat(column.dimensions)}` + : ''; + + const type = diff.typeSchema?.to && diff.typeSchema.to !== 'public' + ? `"${diff.typeSchema.to}"."${diff.type.to}"` + : isEnum + ? `"${diff.type.to}"` + : diff.type.to; + + statements.push( + `ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DATA TYPE ${type}${ + '[]'.repeat(column.dimensions) + }${suffix};`, + ); + + if (recreateDefault) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DEFAULT ${defaultToSQL(column)};`); + } + } + + if (diff.default && !recreateDefault) { + if (diff.default.to) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DEFAULT ${defaultToSQL(diff.$right)};`); + } else { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP DEFAULT;`); + } + } + + if (diff.identity) { + if (diff.identity.from === null) { + const identity = column.identity!; + const typeClause = identity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT'; + const incrementClause = identity.increment ? `INCREMENT BY ${identity.increment}` : ''; + const minClause = identity.minValue ? ` MINVALUE ${identity.minValue}` : ''; + const maxClause = identity.maxValue ? ` MAXVALUE ${identity.maxValue}` : ''; + const startWith = identity.startWith ? ` START WITH ${identity.startWith}` : ''; + const cache = identity.cache ? ` CACHE ${identity.cache}` : ''; + const identityStatement = + `GENERATED ${typeClause} AS IDENTITY (${incrementClause}${minClause}${maxClause}${startWith}${cache})`; + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" ADD ${identityStatement};`); + } else if (diff.identity.to === null) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP IDENTITY;`); + } else { + const { from, to } = diff.identity; + + // TODO: when to.prop === null? + if (from.type !== to.type) { + const typeClause = to.type === 'always' ? 'ALWAYS' : 'BY DEFAULT'; + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET GENERATED ${typeClause};`); + } + if (from.minValue !== to.minValue) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET MINVALUE ${to.minValue};`); + } + + if (from.maxValue !== to.maxValue) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET MAXVALUE ${to.maxValue};`); + } + + if (from.increment !== to.increment) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET INCREMENT BY ${to.increment};`); + } + + if (from.startWith !== to.startWith) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET START WITH ${to.startWith};`); + } + + if (from.cache !== to.cache) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET CACHE ${to.cache};`); + } + } + } + + return statements; +}); + +const alterColumnAddNotNullConvertor = convertor('alter_add_column_not_null', (st) => { + const { table, schema, column } = st; + const statements = [] as string[]; + + const key = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column}" SET NOT NULL;`); + return statements; +}); +const alterColumnDropNotNullConvertor = convertor('alter_drop_column_not_null', (st) => { + const { table, schema, column } = st; + const statements = [] as string[]; + + const key = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column}" DROP NOT NULL;`); + return statements; +}); + +const createIndexConvertor = convertor('create_index', (st) => { + const { schema, table, name, columns, isUnique, method, where } = st.index; + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + const value = columns + .map((it) => { + const expr = it.isExpression ? it.value : `"${it.value}"`; + + // ASC - default + const ord = it.asc ? '' : ' DESC'; + + return `${expr}${ord}`; + }) + .join(','); + + const key = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + const whereClause = where ? ` WHERE ${where}` : ''; + const using = method !== defaults.index.method ? method : null; + + let statement = `CREATE ${indexPart} "${name}" ON ${key}`; + if (using === 'hash') { + statement += ` (${value}) USING ${using}`; + } else { + statement += (using ? ` USING ${using}` : '') + ` (${value})`; + } + statement += `${whereClause};`; + + return statement; +}); + +const dropIndexConvertor = convertor('drop_index', (st) => { + const { index } = st; + + const cascade = index.isUnique ? ' CASCADE' : ''; + return `DROP INDEX "${st.index.name}"${cascade};`; +}); + +const recreateIndexConvertor = convertor('recreate_index', (st) => { + const { diff } = st; + const drop = dropIndexConvertor.convert({ index: diff.$right }) as string; + const create = createIndexConvertor.convert({ index: diff.$right, newTable: false }) as string; + return [drop, create]; +}); + +const renameIndexConvertor = convertor('rename_index', (st) => { + const key = st.schema !== 'public' ? `"${st.schema}"."${st.from}"` : `"${st.from}"`; + + return `ALTER INDEX ${key} RENAME TO "${st.to}";`; +}); + +const addPrimaryKeyConvertor = convertor('add_pk', (st) => { + const { pk } = st; + const key = pk.schema !== 'public' ? `"${pk.schema}"."${pk.table}"` : `"${pk.table}"`; + + if (!pk.nameExplicit) { + return `ALTER TABLE ${key} ADD PRIMARY KEY ("${pk.columns.join('","')}");`; + } + return `ALTER TABLE ${key} ADD CONSTRAINT "${pk.name}" PRIMARY KEY("${pk.columns.join('","')}");`; +}); + +const dropPrimaryKeyConvertor = convertor('drop_pk', (st) => { + const pk = st.pk; + const key = pk.schema !== 'public' ? `"${pk.schema}"."${pk.table}"` : `"${pk.table}"`; + + return `ALTER TABLE ${key} DROP CONSTRAINT "${pk.name}";`; +}); + +const alterPrimaryKeyConvertor = convertor('alter_pk', (it) => { + const key = it.pk.schema !== 'public' ? `"${it.pk.schema}"."${it.pk.table}"` : `"${it.pk.table}"`; + + return `ALTER TABLE ${key} DROP CONSTRAINT "${it.pk.name}", ADD CONSTRAINT "${it.pk.name}" PRIMARY KEY("${ + it.pk.columns.join('","') + }");`; +}); + +const recreatePrimaryKeyConvertor = convertor('recreate_pk', (it) => { + const { left, right } = it; + + const key = it.right.schema !== 'public' ? `"${right.schema}"."${right.table}"` : `"${right.table}"`; + + return `ALTER TABLE ${key} DROP CONSTRAINT "${left.name}", ADD CONSTRAINT "${right.name}" PRIMARY KEY("${ + right.columns.join('","') + }");`; +}); + +const renameConstraintConvertor = convertor('rename_constraint', (st) => { + const key = st.schema !== 'public' ? `"${st.schema}"."${st.table}"` : `"${st.table}"`; + + return `ALTER TABLE ${key} RENAME CONSTRAINT "${st.from}" TO "${st.to}";`; +}); + +const createForeignKeyConvertor = convertor('create_fk', (st) => { + const { schema, table, name, tableTo, columns, columnsTo, onDelete, onUpdate, schemaTo } = st.fk; + + const onDeleteStatement = onDelete && !isDefaultAction(onDelete) ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate && !isDefaultAction(onUpdate) ? ` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columns.map((it) => `"${it}"`).join(','); + const toColumnsString = columnsTo.map((it) => `"${it}"`).join(','); + + const tableNameWithSchema = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + const tableToNameWithSchema = schemaTo !== 'public' ? `"${schemaTo}"."${tableTo}"` : `"${tableTo}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${name}" FOREIGN KEY (${fromColumnsString}) REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; +}); + +const recreateFKConvertor = convertor('recreate_fk', (st) => { + const { fk } = st; + + const key = fk.schema !== 'public' ? `"${fk.schema}"."${fk.table}"` : `"${fk.table}"`; + + const onDeleteStatement = fk.onDelete !== 'NO ACTION' ? ` ON DELETE ${fk.onDelete}` : ''; + const onUpdateStatement = fk.onUpdate !== 'NO ACTION' ? ` ON UPDATE ${fk.onUpdate}` : ''; + + const fromColumnsString = fk.columns.map((it) => `"${it}"`).join(','); + const toColumnsString = fk.columnsTo.map((it) => `"${it}"`).join(','); + + const tableToNameWithSchema = fk.schemaTo !== 'public' ? `"${fk.schemaTo}"."${fk.tableTo}"` : `"${fk.tableTo}"`; + + let sql = `ALTER TABLE ${key} DROP CONSTRAINT "${fk.name}", `; + sql += `ADD CONSTRAINT "${fk.name}" FOREIGN KEY (${fromColumnsString}) `; + sql += `REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; + + return sql; +}); + +const dropForeignKeyConvertor = convertor('drop_fk', (st) => { + const { schema, table, name } = st.fk; + + const tableNameWithSchema = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${name}";`; +}); + +const addCheckConvertor = convertor('add_check', (st) => { + const { check } = st; + const tableNameWithSchema = check.schema !== 'public' ? `"${check.schema}"."${check.table}"` : `"${check.table}"`; + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${check.name}" CHECK (${check.value});`; +}); + +const dropCheckConvertor = convertor('drop_check', (st) => { + const { check } = st; + const tableNameWithSchema = check.schema !== 'public' ? `"${check.schema}"."${check.table}"` : `"${check.table}"`; + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${check.name}";`; +}); + +const recreateCheckConvertor = convertor('alter_check', (st) => { + const { check } = st; + + const key = check.schema !== 'public' ? `"${check.schema}"."${check.table}"` : `"${check.table}"`; + + let sql = [`ALTER TABLE ${key} DROP CONSTRAINT "${check.name}";`]; + sql.push(`ALTER TABLE ${key} ADD CONSTRAINT "${check.name}" CHECK (${check.value});`); + + return sql; +}); + +const createEnumConvertor = convertor('create_enum', (st) => { + const { name, schema, values } = st.enum; + const enumNameWithSchema = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + + let valuesStatement = '('; + valuesStatement += values.map((it) => `'${escapeSingleQuotes(it)}'`).join(', '); + valuesStatement += ')'; + + return `CREATE TYPE ${enumNameWithSchema} AS ENUM${valuesStatement};`; +}); + +const dropEnumConvertor = convertor('drop_enum', (st) => { + const { name, schema } = st.enum; + const enumNameWithSchema = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + return `DROP TYPE ${enumNameWithSchema};`; +}); + +const renameEnumConvertor = convertor('rename_enum', (st) => { + const from = st.schema !== 'public' ? `"${st.schema}"."${st.from}"` : `"${st.from}"`; + return `ALTER TYPE ${from} RENAME TO "${st.to}";`; +}); + +const moveEnumConvertor = convertor('move_enum', (st) => { + const { from, to } = st; + + const enumNameWithSchema = from.schema !== 'public' ? `"${from.schema}"."${from.name}"` : `"${from.name}"`; + return `ALTER TYPE ${enumNameWithSchema} SET SCHEMA "${to.schema || 'public'}";`; +}); + +const alterEnumConvertor = convertor('alter_enum', (st) => { + const { diff, to } = st; + const key = to.schema !== 'public' ? `"${to.schema}"."${to.name}"` : `"${to.name}"`; + + const statements = [] as string[]; + for (const d of diff.filter((it) => it.type === 'added')) { + if (d.beforeValue) { + statements.push(`ALTER TYPE ${key} ADD VALUE '${d.value}' BEFORE '${d.beforeValue}';`); + } else { + statements.push(`ALTER TYPE ${key} ADD VALUE '${d.value}';`); + } + } + return statements; +}); + +const recreateEnumConvertor = convertor('recreate_enum', (st) => { + const { to, columns } = st; + const statements: string[] = []; + for (const column of columns) { + const key = column.schema !== 'public' ? `"${column.schema}"."${column.table}"` : `"${column.table}"`; + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DATA TYPE text;`); + if (column.default) statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP DEFAULT;`); + } + statements.push(dropEnumConvertor.convert({ enum: to }) as string); + statements.push(createEnumConvertor.convert({ enum: to }) as string); + + for (const column of columns) { + const key = column.schema !== 'public' ? `"${column.schema}"."${column.table}"` : `"${column.table}"`; + const arr = column.dimensions > 0 ? '[]' : ''; + const enumType = to.schema !== 'public' ? `"${to.schema}"."${to.name}"${arr}` : `"${to.name}"${arr}`; + statements.push( + `ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DATA TYPE ${enumType} USING "${column.name}"::${enumType};`, + ); + if (column.default) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DEFAULT ${defaultToSQL(column)};`); + } + } + + return statements; +}); + +const createSequenceConvertor = convertor('create_sequence', (st) => { + const { name, schema, minValue, maxValue, incrementBy, startWith, cacheSize } = st.sequence; + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `CREATE SEQUENCE ${sequenceWithSchema}${incrementBy ? ` INCREMENT BY ${incrementBy}` : ''}${ + minValue ? ` MINVALUE ${minValue}` : '' + }${maxValue ? ` MAXVALUE ${maxValue}` : ''}${startWith ? ` START WITH ${startWith}` : ''}${ + cacheSize ? ` CACHE ${cacheSize}` : '' + };`; +}); + +const dropSequenceConvertor = convertor('drop_sequence', (st) => { + const { name, schema } = st.sequence; + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + return `DROP SEQUENCE ${sequenceWithSchema};`; +}); + +const renameSequenceConvertor = convertor('rename_sequence', (st) => { + const sequenceWithSchemaFrom = st.from.schema !== 'public' + ? `"${st.from.schema}"."${st.from.name}"` + : `"${st.from.name}"`; + return `ALTER SEQUENCE ${sequenceWithSchemaFrom} RENAME TO "${st.to.name}";`; +}); + +const moveSequenceConvertor = convertor('move_sequence', (st) => { + const { from, to } = st; + const sequenceWithSchema = from.schema !== 'public' ? `"${from.schema}"."${from.name}"` : `"${from.name}"`; + const seqSchemaTo = `"${to.schema}"`; + return `ALTER SEQUENCE ${sequenceWithSchema} SET SCHEMA ${seqSchemaTo};`; +}); + +const alterSequenceConvertor = convertor('alter_sequence', (st) => { + const { schema, name, incrementBy, minValue, maxValue, startWith, cacheSize } = st.sequence; + + const sequenceWithSchema = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + + return `ALTER SEQUENCE ${sequenceWithSchema}${incrementBy ? ` INCREMENT BY ${incrementBy}` : ''}${ + minValue ? ` MINVALUE ${minValue}` : '' + }${maxValue ? ` MAXVALUE ${maxValue}` : ''}${startWith ? ` START WITH ${startWith}` : ''}${ + cacheSize ? ` CACHE ${cacheSize}` : '' + };`; +}); + +const createRoleConvertor = convertor('create_role', (st) => { + const { name, createDb, createRole } = st.role; + const withClause = createDb || createRole + ? ` WITH${createDb ? ' CREATEDB' : ''}${createRole ? ' CREATEROLE' : ''}` + : ''; + + return `CREATE ROLE "${name}"${withClause};`; +}); + +const dropRoleConvertor = convertor('drop_role', (st) => { + return `DROP ROLE "${st.role.name}";`; +}); + +const alterRoleConvertor = convertor('alter_role', (st) => { + const { name, createDb, createRole } = st.role; + return `ALTER ROLE "${name}"${` WITH${createDb ? ' CREATEDB' : ' NOCREATEDB'}${ + createRole ? ' CREATEROLE' : ' NOCREATEROLE' + }`};`; +}); + +const createPolicyConvertor = convertor('create_policy', (st) => { + const { schema, table } = st.policy; + const policy = st.policy; + + const tableNameWithSchema = schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"`; + + const usingPart = policy.using ? ` USING (${policy.using})` : ''; + + const withCheckPart = policy.withCheck ? ` WITH CHECK (${policy.withCheck})` : ''; + + const policyToPart = policy.roles?.map((v) => (['current_user', 'session_user', 'public'].includes(v) ? v : `"${v}"`)) + .join(', '); + + return `CREATE POLICY "${policy.name}" ON ${tableNameWithSchema} AS ${policy.as?.toUpperCase()} FOR ${policy.for?.toUpperCase()} TO ${policyToPart}${usingPart}${withCheckPart};`; +}); + +const dropPolicyConvertor = convertor('drop_policy', (st) => { + const policy = st.policy; + + const tableNameWithSchema = policy.schema !== 'public' ? `"${policy.schema}"."${policy.table}"` : `"${policy.table}"`; + + return `DROP POLICY "${policy.name}" ON ${tableNameWithSchema};`; +}); + +const renamePolicyConvertor = convertor('rename_policy', (st) => { + const { from, to } = st; + + const tableNameWithSchema = to.schema !== 'public' ? `"${to.schema}"."${to.table}"` : `"${to.table}"`; + + return `ALTER POLICY "${from.name}" ON ${tableNameWithSchema} RENAME TO "${to.name}";`; +}); + +const alterPolicyConvertor = convertor('alter_policy', (st) => { + const { policy } = st; + + const tableNameWithSchema = policy.schema !== 'public' ? `"${policy.schema}"."${policy.table}"` : `"${policy.table}"`; + + const usingPart = policy.using ? ` USING (${policy.using})` : ''; + + const withCheckPart = policy.withCheck ? ` WITH CHECK (${policy.withCheck})` : ''; + + const toClause = policy.roles?.map(( + v, + ) => (['current_user', 'current_role', 'session_user', 'public'].includes(v) ? v : `"${v}"`)).join(', '); + + return `ALTER POLICY "${policy.name}" ON ${tableNameWithSchema} TO ${toClause}${usingPart}${withCheckPart};`; +}); + +const recreatePolicy = convertor('recreate_policy', (st) => { + return [ + dropPolicyConvertor.convert({ policy: st.policy }) as string, + createPolicyConvertor.convert({ policy: st.policy }) as string, + ]; +}); + +const toggleRlsConvertor = convertor('alter_rls', (st) => { + const { schema, name, isRlsEnabled } = st; + + const tableNameWithSchema = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + + return `ALTER TABLE ${tableNameWithSchema} ${isRlsEnabled ? 'ENABLE' : 'DISABLE'} ROW LEVEL SECURITY;`; +}); + +const convertors = [ + createSchemaConvertor, + dropSchemaConvertor, + renameSchemaConvertor, + createViewConvertor, + dropViewConvertor, + renameViewConvertor, + moveViewConvertor, + recreateViewConvertor, + createTableConvertor, + dropTableConvertor, + renameTableConvertor, + moveTableConvertor, + addColumnConvertor, + dropColumnConvertor, + renameColumnConvertor, + recreateColumnConvertor, + alterColumnConvertor, + createIndexConvertor, + dropIndexConvertor, + recreateIndexConvertor, + renameIndexConvertor, + addPrimaryKeyConvertor, + dropPrimaryKeyConvertor, + recreatePrimaryKeyConvertor, + createForeignKeyConvertor, + recreateFKConvertor, + dropForeignKeyConvertor, + addCheckConvertor, + dropCheckConvertor, + recreateCheckConvertor, + renameConstraintConvertor, + createEnumConvertor, + dropEnumConvertor, + renameEnumConvertor, + moveEnumConvertor, + alterEnumConvertor, + recreateEnumConvertor, + createSequenceConvertor, + dropSequenceConvertor, + renameSequenceConvertor, + moveSequenceConvertor, + alterSequenceConvertor, + createRoleConvertor, + dropRoleConvertor, + alterRoleConvertor, + createPolicyConvertor, + dropPolicyConvertor, + renamePolicyConvertor, + alterPolicyConvertor, + recreatePolicy, + toggleRlsConvertor, + alterPrimaryKeyConvertor, + alterColumnAddNotNullConvertor, + alterColumnDropNotNullConvertor, +]; + +export function fromJson(statements: JsonStatement[]) { + const grouped = statements + .map((statement) => { + const filtered = convertors.filter((it) => { + return it.can(statement); + }); + + const convertor = filtered.length === 1 ? filtered[0] : undefined; + if (!convertor) throw new Error(`No convertor for: ${statement.type} statement`); + + const sqlStatements = convertor.convert(statement as any); + const statements = typeof sqlStatements === 'string' ? [sqlStatements] : sqlStatements; + return { jsonStatement: statement, sqlStatements: statements }; + }) + .filter((it) => it !== null); + + const result = { + sqlStatements: grouped.map((it) => it.sqlStatements).flat(), + groupedStatements: grouped, + }; + return result; +} diff --git a/drizzle-kit/src/dialects/cockroach/ddl.ts b/drizzle-kit/src/dialects/cockroach/ddl.ts new file mode 100644 index 0000000000..67fcda7041 --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/ddl.ts @@ -0,0 +1,501 @@ +import type { SchemaForPull } from 'src/cli/commands/pull-common'; +import { create } from '../dialect'; +import { defaultNameForPK, defaultNameForUnique } from './grammar'; +import { defaults } from './grammar'; + +export const createDDL = () => { + return create({ + schemas: {}, + tables: { schema: 'required', isRlsEnabled: 'boolean' }, + enums: { + schema: 'required', + values: 'string[]', + }, + columns: { + schema: 'required', + table: 'required', + type: 'string', + typeSchema: 'string?', + notNull: 'boolean', + dimensions: 'number', + default: 'string?', + generated: { + type: ['stored', 'virtual'], + as: 'string', + }, + identity: { + type: ['always', 'byDefault'], + increment: 'string?', + minValue: 'string?', + maxValue: 'string?', + startWith: 'string?', + cache: 'number?', + }, + }, + indexes: { + schema: 'required', + table: 'required', + nameExplicit: 'boolean', + columns: [ + { + value: 'string', + isExpression: 'boolean', + asc: 'boolean', + }, + ], + isUnique: 'boolean', + where: 'string?', + method: 'string?', + }, + fks: { + schema: 'required', + table: 'required', + nameExplicit: 'boolean', + columns: 'string[]', + schemaTo: 'string', + tableTo: 'string', + columnsTo: 'string[]', + onUpdate: ['NO ACTION', 'RESTRICT', 'SET NULL', 'CASCADE', 'SET DEFAULT', null], + onDelete: ['NO ACTION', 'RESTRICT', 'SET NULL', 'CASCADE', 'SET DEFAULT', null], + }, + pks: { + schema: 'required', + table: 'required', + columns: 'string[]', + nameExplicit: 'boolean', + }, + checks: { + schema: 'required', + table: 'required', + value: 'string', + }, + sequences: { + schema: 'required', + incrementBy: 'string?', + minValue: 'string?', + maxValue: 'string?', + startWith: 'string?', + cacheSize: 'number?', + }, + roles: { + createDb: 'boolean?', + createRole: 'boolean?', + }, + policies: { + schema: 'required', + table: 'required', + as: ['PERMISSIVE', 'RESTRICTIVE'], + for: ['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE'], + roles: 'string[]', // TO { role_name | PUBLIC | CURRENT_ROLE | SESSION_USER } + using: 'string?', + withCheck: 'string?', + }, + views: { + schema: 'required', + definition: 'string?', + withNoData: 'boolean?', + materialized: 'boolean', + }, + }); +}; + +export type CockroachDDL = ReturnType; + +export type CockroachEntities = CockroachDDL['_']['types']; +export type CockroachEntity = CockroachEntities[keyof CockroachEntities]; + +export type DiffEntities = CockroachDDL['_']['diffs']['alter']; + +export type Schema = CockroachEntities['schemas']; +export type Enum = CockroachEntities['enums']; +export type Sequence = CockroachEntities['sequences']; +export type Column = CockroachEntities['columns']; +export type Identity = Column['identity']; +export type Role = CockroachEntities['roles']; +export type Index = CockroachEntities['indexes']; +export type ForeignKey = CockroachEntities['fks']; +export type PrimaryKey = CockroachEntities['pks']; +export type CheckConstraint = CockroachEntities['checks']; +export type Policy = CockroachEntities['policies']; +export type View = CockroachEntities['views']; +export type ViewColumn = { + schema: string; + view: string; + type: string; + typeSchema: string | null; + notNull: boolean; + dimensions: number; + name: string; +}; + +export type Table = { + schema: string; + name: string; + columns: Column[]; + indexes: Index[]; + pk: PrimaryKey | null; + fks: ForeignKey[]; + checks: CheckConstraint[]; + policies: Policy[]; + isRlsEnabled: boolean; +}; + +export type InterimColumn = Omit & { + pk: boolean; + pkName: string | null; +} & { + unique: boolean; + uniqueName: string | null; +}; + +export type InterimIndex = Index & { + forPK: boolean; +}; + +export interface InterimSchema { + schemas: Schema[]; + enums: Enum[]; + tables: CockroachEntities['tables'][]; + columns: InterimColumn[]; + indexes: InterimIndex[]; + pks: PrimaryKey[]; + fks: ForeignKey[]; + checks: CheckConstraint[]; + sequences: Sequence[]; + roles: Role[]; + policies: Policy[]; + views: View[]; + viewColumns: ViewColumn[]; +} + +export const tableFromDDL = ( + table: CockroachEntities['tables'], + ddl: CockroachDDL, +): Table => { + const filter = { schema: table.schema, table: table.name } as const; + const columns = ddl.columns.list(filter); + const pk = ddl.pks.one(filter); + const fks = ddl.fks.list(filter); + const checks = ddl.checks.list(filter); + const indexes = ddl.indexes.list(filter); + const policies = ddl.policies.list(filter); + return { + ...table, + columns, + pk, + fks, + checks, + indexes, + policies, + }; +}; + +interface SchemaDuplicate { + type: 'schema_name_duplicate'; + name: string; +} + +interface EnumDuplicate { + type: 'enum_name_duplicate'; + name: string; + schema: string; +} + +interface TableDuplicate { + type: 'table_name_duplicate'; + name: string; + schema: string; +} +interface ColumnDuplicate { + type: 'column_name_duplicate'; + schema: string; + table: string; + name: string; +} + +interface ConstraintDuplicate { + type: 'constraint_name_duplicate'; + schema: string; + table: string; + name: string; +} +interface SequenceDuplicate { + type: 'sequence_name_duplicate'; + schema: string; + name: string; +} + +interface ViewDuplicate { + type: 'view_name_duplicate'; + schema: string; + name: string; +} + +interface IndexWithoutName { + type: 'index_no_name'; + schema: string; + table: string; + sql: string; +} + +interface IndexDuplicate { + type: 'index_duplicate'; + schema: string; + table: string; + name: string; +} + +interface PgVectorIndexNoOp { + type: 'pgvector_index_noop'; + table: string; + column: string; + indexName: string; + method: string; +} + +interface PolicyDuplicate { + type: 'policy_duplicate'; + schema: string; + table: string; + policy: string; +} + +interface RoleDuplicate { + type: 'role_duplicate'; + name: string; +} + +export type SchemaError = + | SchemaDuplicate + | EnumDuplicate + | TableDuplicate + | ColumnDuplicate + | ViewDuplicate + | ConstraintDuplicate + | SequenceDuplicate + | IndexWithoutName + | IndexDuplicate + | PgVectorIndexNoOp + | RoleDuplicate + | PolicyDuplicate; + +interface PolicyNotLinked { + type: 'policy_not_linked'; + policy: string; +} +export type SchemaWarning = PolicyNotLinked; + +export const fromEntities = (entities: CockroachEntity[]) => { + const ddl = createDDL(); + for (const it of entities) { + ddl.entities.push(it); + } + + return ddl; +}; +export const interimToDDL = ( + schema: InterimSchema, +): { ddl: CockroachDDL; errors: SchemaError[] } => { + const ddl = createDDL(); + const errors: SchemaError[] = []; + + for (const it of schema.schemas) { + const res = ddl.schemas.push(it); + if (res.status === 'CONFLICT') { + errors.push({ type: 'schema_name_duplicate', name: it.name }); + } + } + + for (const it of schema.enums) { + const res = ddl.enums.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'enum_name_duplicate', + schema: it.schema, + name: it.name, + }); + } + } + + for (const it of schema.tables) { + const res = ddl.tables.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'table_name_duplicate', + schema: it.schema, + name: it.name, + }); + } + } + + for (const column of schema.columns) { + const { pk: _1, pkName: _2, unique: _3, uniqueName: _4, ...rest } = column; + const res = ddl.columns.push(rest); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'column_name_duplicate', + schema: column.schema, + table: column.table, + name: column.name, + }); + } + } + + for (const it of schema.indexes) { + const { forPK: _, ...rest } = it; + const isConflictNamePerSchema = ddl.indexes.one({ schema: it.schema, name: it.name }); + + if (isConflictNamePerSchema) { + errors.push({ + type: 'index_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + + ddl.indexes.push(rest); + } + + for (const it of schema.fks) { + const isConflictNamePerSchema = ddl.fks.one({ schema: it.schema, name: it.name }); + + if (isConflictNamePerSchema) { + errors.push({ + type: 'constraint_name_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + ddl.fks.push(it); + } + + for (const it of schema.pks) { + const isConflictNamePerSchema = ddl.pks.one({ schema: it.schema, name: it.name }); + if (isConflictNamePerSchema) { + errors.push({ + type: 'constraint_name_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + ddl.pks.push(it); + } + + for (const column of schema.columns.filter((it) => it.pk)) { + const name = column.pkName !== null ? column.pkName : defaultNameForPK(column.table); + const exists = ddl.pks.one({ schema: column.schema, table: column.table, name: name }) !== null; + if (exists) continue; + + ddl.pks.push({ + schema: column.schema, + table: column.table, + name, + nameExplicit: column.pkName !== null, + columns: [column.name], + }); + } + + for (const column of schema.columns.filter((it) => it.unique)) { + const name = column.uniqueName !== null ? column.uniqueName : defaultNameForUnique(column.table, column.name); + const exists = ddl.indexes.one({ schema: column.schema, table: column.table, name: name }) !== null; + if (exists) continue; + + ddl.indexes.push({ + table: column.table, + name, + isUnique: true, + method: defaults.index.method, + nameExplicit: !!column.uniqueName, + where: null, + schema: column.schema, + columns: [{ asc: true, isExpression: false, value: column.name }], + }); + } + + for (const it of schema.checks) { + const isConflictNamePerSchema = ddl.checks.one({ schema: it.schema, name: it.name }); + + if (isConflictNamePerSchema) { + errors.push({ + type: 'constraint_name_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + ddl.checks.push(it); + } + + for (const it of schema.sequences) { + const isConflictNamePerSchema = ddl.sequences.one({ schema: it.schema, name: it.name }); + + if (isConflictNamePerSchema) { + errors.push({ + type: 'sequence_name_duplicate', + schema: it.schema, + name: it.name, + }); + } + ddl.sequences.push(it); + } + + for (const it of schema.roles) { + const res = ddl.roles.push(it); + if (res.status === 'CONFLICT') { + errors.push({ type: 'role_duplicate', name: it.name }); + } + } + for (const it of schema.policies) { + const res = ddl.policies.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'policy_duplicate', + schema: it.schema, + table: it.table, + policy: it.name, + }); + } + } + for (const it of schema.views) { + const res = ddl.views.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'view_name_duplicate', + schema: it.schema, + name: it.name, + }); + } + } + + for (const it of ddl.entities.list()) { + let err = false; + + if (!ddl.entities.validate(it)) { + console.log('invalid entity:', it); + err = true; + } + if (err) throw new Error(); + } + + return { ddl, errors }; +}; + +export function cockroachToRelationsPull(schema: CockroachDDL): SchemaForPull { + return Object.values(schema.tables.list()).map((table) => { + const rawTable = tableFromDDL(table, schema); + return { + schema: rawTable.schema, + foreignKeys: rawTable.fks, + uniques: Object.values(rawTable.indexes).map((idx) => ({ + columns: idx.columns.map((idxc) => { + if (!idxc.isExpression && idx.isUnique) { + return idxc.value; + } + }).filter((item) => item !== undefined), + })), + }; + }); +} diff --git a/drizzle-kit/src/dialects/cockroach/diff.ts b/drizzle-kit/src/dialects/cockroach/diff.ts new file mode 100644 index 0000000000..7ac8e61491 --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/diff.ts @@ -0,0 +1,1130 @@ +import { prepareMigrationRenames } from '../../utils'; +import { mockResolver } from '../../utils/mocks'; +import { diffStringArrays } from '../../utils/sequence-matcher'; +import type { Resolver } from '../common'; +import { diff } from '../dialect'; +import { groupDiffs, preserveEntityNames } from '../utils'; +import { fromJson } from './convertor'; +import type { + CheckConstraint, + CockroachDDL, + CockroachEntities, + Column, + DiffEntities, + Enum, + ForeignKey, + Index, + Policy, + PrimaryKey, + Schema, + Sequence, + View, +} from './ddl'; +import { createDDL, tableFromDDL } from './ddl'; +import { defaults, defaultsCommutative, typesCommutative } from './grammar'; +import type { + JsonAlterColumn, + JsonAlterColumnAddNotNull, + JsonAlterColumnDropNotNull, + JsonStatement, +} from './statements'; +import { prepareStatement } from './statements'; + +export const ddlDiffDry = async (ddlFrom: CockroachDDL, ddlTo: CockroachDDL, mode: 'default' | 'push') => { + const mocks = new Set(); + return ddlDiff( + ddlFrom, + ddlTo, + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mode, + ); +}; + +export const ddlDiff = async ( + ddl1: CockroachDDL, + ddl2: CockroachDDL, + schemasResolver: Resolver, + enumsResolver: Resolver, + sequencesResolver: Resolver, + policyResolver: Resolver, + tablesResolver: Resolver, + columnsResolver: Resolver, + viewsResolver: Resolver, + indexesResolver: Resolver, + checksResolver: Resolver, + pksResolver: Resolver, + fksResolver: Resolver, + mode: 'default' | 'push', +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + groupedStatements: { jsonStatement: JsonStatement; sqlStatements: string[] }[]; + renames: string[]; +}> => { + const ddl1Copy = createDDL(); + for (const entity of ddl1.entities.list()) { + ddl1Copy.entities.push(entity); + } + + const schemasDiff = diff(ddl1, ddl2, 'schemas'); + const { + created: createdSchemas, + deleted: deletedSchemas, + renamedOrMoved: renamedSchemas, + } = await schemasResolver({ + created: schemasDiff.filter((it) => it.$diffType === 'create'), + deleted: schemasDiff.filter((it) => it.$diffType === 'drop'), + }); + + for (const rename of renamedSchemas) { + ddl1.entities.update({ + set: { + schema: rename.to.name, + }, + where: { + schema: rename.from.name, + }, + }); + + ddl1.fks.update({ + set: { + schemaTo: rename.to.name, + }, + where: { + schemaTo: rename.from.name, + }, + }); + } + + const enumsDiff = diff(ddl1, ddl2, 'enums'); + const { + created: createdEnums, + deleted: deletedEnums, + renamedOrMoved: renamedOrMovedEnums, + } = await enumsResolver({ + created: enumsDiff.filter((it) => it.$diffType === 'create'), + deleted: enumsDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedEnums = renamedOrMovedEnums.filter((it) => it.from.name !== it.to.name); + const movedEnums = renamedOrMovedEnums.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedEnums) { + ddl1.enums.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + ddl1.columns.update({ + set: { + type: rename.to.name, + typeSchema: rename.to.schema, + }, + where: { + type: rename.from.name, + typeSchema: rename.from.schema, + }, + }); + } + for (const move of movedEnums) { + ddl1.enums.update({ + set: { + schema: move.to.schema, + }, + where: { + name: move.from.name, + schema: move.from.schema, + }, + }); + ddl1.columns.update({ + set: { + typeSchema: move.to.schema, + }, + where: { + type: move.from.name, + typeSchema: move.from.schema, + }, + }); + } + + const sequencesDiff = diff(ddl1, ddl2, 'sequences'); + const { + created: createdSequences, + deleted: deletedSequences, + renamedOrMoved: renamedOrMovedSequences, + } = await sequencesResolver({ + created: sequencesDiff.filter((it) => it.$diffType === 'create'), + deleted: sequencesDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedSequences = renamedOrMovedSequences.filter((it) => it.from.schema === it.to.schema); + const movedSequences = renamedOrMovedSequences.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedSequences) { + ddl1.sequences.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + for (const move of movedSequences) { + ddl1.sequences.update({ + set: { + schema: move.to.schema, + }, + where: { + name: move.from.name, + schema: move.from.schema, + }, + }); + } + + const rolesDiff = diff(ddl1, ddl2, 'roles'); + // Cockroach does not allow to rename roles + const createdRoles = rolesDiff.filter((it) => it.$diffType === 'create'); + const deletedRoles = rolesDiff.filter((it) => it.$diffType === 'drop'); + + const tablesDiff = diff(ddl1, ddl2, 'tables'); + + const { + created: createdTables, + deleted: deletedTables, + renamedOrMoved: renamedOrMovedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.filter((it) => it.$diffType === 'create'), + deleted: tablesDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedTables = renamedOrMovedTables.filter((it) => it.from.name !== it.to.name); + const movedTables = renamedOrMovedTables.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedOrMovedTables) { + ddl1.tables.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + + ddl1.fks.update({ + set: { + schemaTo: rename.to.schema, + tableTo: rename.to.name, + }, + where: { + schemaTo: rename.from.schema, + tableTo: rename.from.name, + }, + }); + + ddl1.fks.update({ + set: { + schema: rename.to.schema, + table: rename.to.name, + }, + where: { + schema: rename.from.schema, + table: rename.from.name, + }, + }); + + ddl1.entities.update({ + set: { + table: rename.to.name, + schema: rename.to.schema, + }, + where: { + table: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const columnsDiff = diff(ddl1, ddl2, 'columns'); + const columnRenames = [] as { from: Column; to: Column }[]; + const columnsToCreate = [] as Column[]; + const columnsToDelete = [] as Column[]; + + const groupedByTable = groupDiffs(columnsDiff); + + for (let it of groupedByTable) { + const { created, deleted, renamedOrMoved } = await columnsResolver({ + created: it.inserted, + deleted: it.deleted, + }); + + columnsToCreate.push(...created); + columnsToDelete.push(...deleted); + columnRenames.push(...renamedOrMoved); + } + + for (const rename of columnRenames) { + ddl1.columns.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + + ddl1.indexes.update({ + set: { + columns: (it) => { + if (!it.isExpression && it.value === rename.from.name) { + return { ...it, value: rename.to.name }; + } + return it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + }); + + ddl1.pks.update({ + set: { + columns: (it) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + }); + + ddl1.fks.update({ + set: { + columns: (it) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + }); + + ddl1.fks.update({ + set: { + columnsTo: (it) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schemaTo: rename.from.schema, + tableTo: rename.from.table, + }, + }); + + ddl1.checks.update({ + set: { + value: rename.to.name, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + value: rename.from.name, + }, + }); + } + + preserveEntityNames(ddl1.fks, ddl2.fks, mode); + preserveEntityNames(ddl1.pks, ddl2.pks, mode); + preserveEntityNames(ddl1.indexes, ddl2.indexes, mode); + + const diffChecks = diff(ddl1, ddl2, 'checks'); + const groupedChecksDiff = groupDiffs(diffChecks); + const checkRenames = [] as { from: CheckConstraint; to: CheckConstraint }[]; + const checkCreates = [] as CheckConstraint[]; + const checkDeletes = [] as CheckConstraint[]; + + for (const entry of groupedChecksDiff) { + const { renamedOrMoved, created, deleted } = await checksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + checkCreates.push(...created); + checkDeletes.push(...deleted); + checkRenames.push(...renamedOrMoved); + } + + for (const rename of checkRenames) { + ddl1.checks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffIndexes = diff(ddl1, ddl2, 'indexes'); + const groupedIndexesDiff = groupDiffs(diffIndexes); + const indexesRenames = [] as { from: Index; to: Index }[]; + const indexesCreates = [] as Index[]; + const indexesDeletes = [] as Index[]; + + for (const entry of groupedIndexesDiff) { + const { renamedOrMoved, created, deleted } = await indexesResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + indexesCreates.push(...created); + indexesDeletes.push(...deleted); + indexesRenames.push(...renamedOrMoved); + } + + for (const rename of indexesRenames) { + ddl1.indexes.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffPKs = diff(ddl1, ddl2, 'pks'); + const groupedPKsDiff = groupDiffs(diffPKs); + const pksRenames = [] as { from: PrimaryKey; to: PrimaryKey }[]; + const pksCreates = [] as PrimaryKey[]; + const pksDeletes = [] as PrimaryKey[]; + + for (const entry of groupedPKsDiff) { + const { renamedOrMoved, created, deleted } = await pksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + pksCreates.push(...created); + pksDeletes.push(...deleted); + pksRenames.push(...renamedOrMoved); + } + + for (const rename of pksRenames) { + ddl1.pks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffFKs = diff(ddl1, ddl2, 'fks'); + const groupedFKsDiff = groupDiffs(diffFKs); + const fksRenames = [] as { from: ForeignKey; to: ForeignKey }[]; + const fksCreates = [] as ForeignKey[]; + const fksDeletes = [] as ForeignKey[]; + + for (const entry of groupedFKsDiff) { + const { renamedOrMoved, created, deleted } = await fksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + fksCreates.push(...created); + fksDeletes.push(...deleted); + fksRenames.push(...renamedOrMoved); + } + + for (const rename of fksRenames) { + ddl1.fks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const policiesDiff = diff(ddl1, ddl2, 'policies'); + const policiesDiffGrouped = groupDiffs(policiesDiff); + + const policyRenames = [] as { from: Policy; to: Policy }[]; + const policyCreates = [] as Policy[]; + const policyDeletes = [] as Policy[]; + + for (const entry of policiesDiffGrouped) { + const { renamedOrMoved, created, deleted } = await policyResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + policyCreates.push(...created); + policyDeletes.push(...deleted); + policyRenames.push(...renamedOrMoved); + } + + for (const rename of policyRenames) { + ddl1.policies.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const viewsDiff = diff(ddl1, ddl2, 'views'); + + const { + created: createdViews, + deleted: deletedViews, + renamedOrMoved: renamedOrMovedViews, + } = await viewsResolver({ + created: viewsDiff.filter((it) => it.$diffType === 'create'), + deleted: viewsDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedViews = renamedOrMovedViews.filter((it) => it.from.schema === it.to.schema); + const movedViews = renamedOrMovedViews.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedViews) { + ddl1.views.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + for (const move of movedViews) { + ddl1.views.update({ + set: { + schema: move.to.schema, + }, + where: { + name: move.from.name, + schema: move.from.schema, + }, + }); + } + + const alters = diff.alters(ddl1, ddl2); + + const jsonStatements: JsonStatement[] = []; + + /* + with new DDL when table gets created with constraints, etc. + or existing table with constraints and indexes gets deleted, + those entites are treated by diff as newly created or deleted + + we filter them out, because we either create them on table creation + or they get automatically deleted when table is deleted + */ + const tablesFilter = (type: 'deleted' | 'created') => { + return (it: { schema: string; table: string }) => { + if (type === 'created') { + return !createdTables.some((t) => t.schema === it.schema && t.name === it.table); + } else { + return !deletedTables.some((t) => t.schema === it.schema && t.name === it.table); + } + }; + }; + + const jsonCreateIndexes = indexesCreates + .map((index) => { + const tableCreated = !tablesFilter('created')({ + schema: index.schema, + table: index.table, + }); + return prepareStatement('create_index', { index, newTable: tableCreated }); + }).filter((st) => { + const { index, newTable } = st; + const forCreateTable = index.isUnique && (!index.method || index.method === defaults.index.method) + && !index.where; + return !(newTable && forCreateTable); + }); + const jsonDropIndexes = indexesDeletes.filter(tablesFilter('deleted')).map((index) => + prepareStatement('drop_index', { index }) + ); + + const jsonRenameIndexes = indexesRenames.map((r) => { + return prepareStatement('rename_index', { schema: r.to.schema, from: r.from.name, to: r.to.name }); + }); + + const indexesAlters = alters.filter((it): it is DiffEntities['indexes'] => { + if (it.entityType !== 'indexes') return false; + + return ddl2.indexes.hasDiff(it); + }); + + const jsonRecreateIndexes = indexesAlters.filter((idx) => { + const forWhere = !!idx.where && (idx.where.from !== null && idx.where.to !== null ? mode !== 'push' : true); + const forColumns = !!idx.columns && (idx.columns.from.length === idx.columns.to.length ? mode !== 'push' : true); + + return idx.isUnique || idx.method || forColumns || forWhere; + }).map((x) => { + return prepareStatement('recreate_index', { diff: x }); + }); + + const jsonDropTables = deletedTables.map((it) => { + const oldSchema = renamedSchemas.find((x) => x.to.name === it.schema); + const key = oldSchema ? `"${oldSchema.from.name}"."${it.name}"` : `"${it.schema}"."${it.name}"`; + return prepareStatement('drop_table', { table: tableFromDDL(it, ddl2), key }); + }); + const jsonRenameTables = renamedTables.map((it) => + prepareStatement('rename_table', { + schema: it.from.schema, + from: it.from.name, + to: it.to.name, + }) + ); + + const jsonRenameColumnsStatements = columnRenames.map((it) => prepareStatement('rename_column', it)); + const jsonDropColumnsStatemets = columnsToDelete.filter(tablesFilter('deleted')).map((it) => + prepareStatement('drop_column', { column: it }) + ); + const jsonAddColumnsStatemets = columnsToCreate.filter(tablesFilter('created')).map((it) => + prepareStatement('add_column', { + column: it, + }) + ); + + // defaults + const columnAlters = alters + .filter((it) => it.entityType === 'columns') + .filter((it) => { + // decimal(19) === decimal(19,0) + if (it.type && it.type.from.replace(',0)', ')') === it.type.to) { + delete it.type; + } + + if ( + !it.type && it.default + && defaultsCommutative(it.default, it.$right.type, it.$right.dimensions, Boolean(it.$right.typeSchema)) + && mode === 'push' // TODO check on push only?? + ) { + delete it.default; + } + + return ddl2.columns.hasDiff(it); + }); + + const columnsToRecreate = columnAlters + .filter((it) => it.generated) + .filter((it) => { + // if push and definition changed + return !(it.generated?.to && it.generated.from && mode === 'push'); + }); + + const jsonRecreateColumns = columnsToRecreate.map((it) => + prepareStatement('recreate_column', { + diff: it, + }) + ); + + const jsonRenamePrimaryKey = pksRenames.map((it) => { + return prepareStatement('rename_constraint', { + schema: it.to.schema, + table: it.to.table, + from: it.from.name, + to: it.to.name, + }); + }); + + const jsonSetTableSchemas = movedTables.map((it) => + prepareStatement('move_table', { + name: it.to.name, // raname of table comes first + from: it.from.schema, + to: it.to.schema, + }) + ); + + const jsonCreatedCheckConstraints = checkCreates.filter(tablesFilter('created')).map((it) => + prepareStatement('add_check', { check: it }) + ); + const jsonDropCheckConstraints = checkDeletes.filter(tablesFilter('deleted')).map((it) => + prepareStatement('drop_check', { check: it }) + ); + + const alteredChecks = alters.filter((it) => it.entityType === 'checks'); + + // group by tables? + const alteredPKs = alters + .filter((it) => it.entityType === 'pks') + .filter((it) => { + return !!it.columns; // ignore explicit name change + }); + const jsonAlteredPKs = alteredPKs.map((it) => prepareStatement('alter_pk', { diff: it, pk: it.$right })); + + const jsonRecreatePk = pksCreates.flatMap((created) => { + const matchingDeleted = pksDeletes.find((deleted) => + created.schema === deleted.schema && created.table === deleted.table + ); + + return matchingDeleted ? [prepareStatement('recreate_pk', { left: matchingDeleted, right: created })] : []; + }); + + const pksRecreatedFilter = () => { + return (it: { schema: string; table: string }) => { + return !jsonRecreatePk.some((t) => + (t.left.schema === it.schema && t.left.table === it.table) + || (t.right.schema === it.schema && t.right.table === it.table) + ); + }; + }; + const jsonAddPrimaryKeys = pksCreates + .filter(tablesFilter('created')) + .filter(pksRecreatedFilter()) + .map((it) => prepareStatement('add_pk', { pk: it })); + const jsonDropPrimaryKeys = pksDeletes + .filter(tablesFilter('deleted')) + .filter(pksRecreatedFilter()) + .map((it) => prepareStatement('drop_pk', { pk: it })); + + const jsonRecreateFKs = alters + .filter((it) => it.entityType === 'fks') + .filter((x) => { + if ( + x.nameExplicit + && ((mode === 'push' && x.nameExplicit.from && !x.nameExplicit.to) + || (x.nameExplicit.to && !x.nameExplicit.from)) + ) { + delete x.nameExplicit; + } + + return ddl2.fks.hasDiff(x); + }) + .map((it) => prepareStatement('recreate_fk', { fk: it.$right, diff: it })); + + const jsonCreateFKs = fksCreates.map((it) => prepareStatement('create_fk', { fk: it })); + + const jsonDropFks = fksDeletes.filter((fk) => { + const fromDeletedTable = deletedTables.some((x) => x.schema === fk.schema && x.name === fk.table); + const sameTable = fk.schema === fk.schemaTo && fk.table === fk.tableTo; + const toDeletedTable = !sameTable && deletedTables.some((x) => x.schema === fk.schemaTo && x.name === fk.tableTo); + + if (fromDeletedTable && !toDeletedTable) return false; + return true; + }) + .map((it) => prepareStatement('drop_fk', { fk: it })); + + const jsonRenameFks = fksRenames.map((it) => + prepareStatement('rename_constraint', { + schema: it.to.schema, + table: it.to.table, + from: it.from.name, + to: it.to.name, + }) + ); + + const jsonAlterCheckConstraints = alteredChecks.filter((it) => it.value && mode !== 'push').map((it) => + prepareStatement('alter_check', { check: it.$right, diff: it }) + ); + const jsonCreatePoliciesStatements = policyCreates.map((it) => prepareStatement('create_policy', { policy: it })); + const jsonDropPoliciesStatements = policyDeletes.map((it) => prepareStatement('drop_policy', { policy: it })); + const jsonRenamePoliciesStatements = policyRenames.map((it) => prepareStatement('rename_policy', it)); + + const alteredPolicies = alters.filter((it) => it.entityType === 'policies'); + + // using/withcheck in policy is a SQL expression which can be formatted by database in a different way, + // thus triggering recreations/alternations on push + const jsonAlterOrRecreatePoliciesStatements = alteredPolicies + .filter((it) => { + return it.as || it.for || it.roles || !((it.using || it.withCheck) && mode === 'push'); + }) + .map((it) => { + const to = ddl2.policies.one({ + schema: it.schema, + table: it.table, + name: it.name, + })!; + if (it.for || it.as) { + return prepareStatement('recreate_policy', { + policy: to, + diff: it, + }); + } else { + return prepareStatement('alter_policy', { + diff: it, + policy: to, + }); + } + }); + + // explicit rls alters + const rlsAlters = alters.filter((it) => it.entityType === 'tables').filter((it) => it.isRlsEnabled); + + const jsonAlterRlsStatements = rlsAlters.map((it) => + prepareStatement('alter_rls', { + schema: it.schema, + name: it.name, + isRlsEnabled: it.isRlsEnabled?.to || false, + }) + ); + + for (const it of policyDeletes) { + if (rlsAlters.some((alter) => alter.schema === it.schema && alter.name === it.table)) continue; // skip for explicit + + const had = ddl1.policies.list({ schema: it.schema, table: it.table }).length; + const has = ddl2.policies.list({ schema: it.schema, table: it.table }).length; + + const prevTable = ddl1.tables.one({ schema: it.schema, name: it.table }); + const table = ddl2.tables.one({ schema: it.schema, name: it.table }); + + // I don't want dedup here, not a valuable optimisation + if ( + table !== null // not external table + && had > 0 + && has === 0 + && prevTable + && prevTable.isRlsEnabled === false + && !jsonAlterRlsStatements.some((st) => st.schema === it.schema && st.name === it.table) + ) { + jsonAlterRlsStatements.push( + prepareStatement('alter_rls', { + schema: it.schema, + name: it.table, + isRlsEnabled: false, + }), + ); + } + } + + for (const it of policyCreates) { + if (rlsAlters.some((alter) => alter.schema === it.schema && alter.name === it.table)) continue; // skip for explicit + if (createdTables.some((t) => t.schema === it.schema && t.name === it.table)) continue; // skip for created tables + if (jsonAlterRlsStatements.some((st) => st.schema === it.schema && st.name === it.table)) continue; // skip for existing rls toggles + + const had = ddl1.policies.list({ schema: it.schema, table: it.table }).length; + const has = ddl2.policies.list({ schema: it.schema, table: it.table }).length; + + const table = ddl2.tables.one({ schema: it.schema, name: it.table }); + + if ( + table !== null // not external table + && had === 0 + && has > 0 + && !table.isRlsEnabled + ) { + jsonAlterRlsStatements.push( + prepareStatement('alter_rls', { + schema: it.schema, + name: it.table, + isRlsEnabled: true, + }), + ); + } + } + + // if I drop policy/ies, I should check if table only had this policy/ies and turn off + // for non explicit rls = + + const policiesAlters = alters.filter((it) => it.entityType === 'policies'); + // TODO: + const _jsonPloiciesAlterStatements = policiesAlters.map((it) => + prepareStatement('alter_policy', { diff: it, policy: it.$right }) + ); + + const jsonCreateEnums = createdEnums.map((it) => prepareStatement('create_enum', { enum: it })); + const jsonDropEnums = deletedEnums.map((it) => prepareStatement('drop_enum', { enum: it })); + const jsonMoveEnums = movedEnums.map((it) => prepareStatement('move_enum', it)); + const jsonRenameEnums = renamedEnums.map((it) => + prepareStatement('rename_enum', { + schema: it.to.schema, + from: it.from.name, + to: it.to.name, + }) + ); + const enumsAlters = alters.filter((it) => it.entityType === 'enums'); + + const recreateEnums = [] as Extract[]; + const jsonAlterEnums = [] as Extract[]; + + for (const alter of enumsAlters) { + const values = alter.values!; + const res = diffStringArrays(values.from, values.to); + const e = { ...alter, values: values.to }; + + if (res.some((it) => it.type === 'removed')) { + // recreate enum + const columns = ddl1.columns.list({ typeSchema: alter.schema, type: alter.name }).map((it) => { + const c2 = ddl2.columns.one({ schema: it.schema, table: it.table, name: it.name })!; + it.default = c2.default; + return it; + }); + recreateEnums.push(prepareStatement('recreate_enum', { to: e.$right, columns, from: e.$left })); + } else { + jsonAlterEnums.push(prepareStatement('alter_enum', { diff: res, to: e.$left, from: e.$right })); + } + } + + const jsonAlterAddNotNull: JsonAlterColumnAddNotNull[] = []; + const jsonAlterDropNotNull: JsonAlterColumnDropNotNull[] = []; + const jsonAlterColumns: JsonAlterColumn[] = []; + const filteredColumnAlters = columnAlters + .filter((it) => !it.generated) + .filter((it) => { + // if column is of type enum we're about to recreate - we will reset default anyway + if ( + it.default + && recreateEnums.some((x) => + x.columns.some((c) => it.schema === c.schema && it.table === c.table && it.name === c.name) + ) + ) { + delete it.default; + } + + if (it.type && typesCommutative(it.type.from, it.type.to)) { + delete it.type; + } + + if (it.notNull && it.notNull.to && (it.$right.generated || it.$right.identity)) { + delete it.notNull; + } + + if (it.notNull && (it.notNull.to && it.identity?.to)) { + delete it.notNull; + } + + return ddl2.columns.hasDiff(it); + }); + + // TODO: move to alter_column convertor + // cc: @AleksandrSherman + for (const it of filteredColumnAlters) { + if (it.notNull) { + if (it.notNull.from) { + jsonAlterDropNotNull.push( + prepareStatement('alter_drop_column_not_null', { + table: it.table, + schema: it.schema, + column: it.name, + }), + ); + } else { + jsonAlterAddNotNull.push(prepareStatement('alter_add_column_not_null', { + table: it.table, + schema: it.schema, + column: it.name, + })); + } + } + + const column = it.$right; + jsonAlterColumns.push(prepareStatement('alter_column', { + diff: it, + isEnum: ddl2.enums.one({ schema: column.typeSchema ?? 'public', name: column.type }) !== null, + wasEnum: (it.type && ddl1.enums.one({ schema: column.typeSchema ?? 'public', name: it.type.from }) !== null) + ?? false, + to: column, + })); + } + + const createSequences = createdSequences.map((it) => prepareStatement('create_sequence', { sequence: it })); + const dropSequences = deletedSequences.map((it) => prepareStatement('drop_sequence', { sequence: it })); + const moveSequences = movedSequences.map((it) => prepareStatement('move_sequence', it)); + const renameSequences = renamedSequences.map((it) => prepareStatement('rename_sequence', it)); + const sequencesAlter = alters.filter((it) => it.entityType === 'sequences'); + const jsonAlterSequences = sequencesAlter.map((it) => + prepareStatement('alter_sequence', { diff: it, sequence: it.$right }) + ); + + const jsonCreateRoles = createdRoles.map((it) => prepareStatement('create_role', { role: it })); + const jsonDropRoles = deletedRoles.map((it) => prepareStatement('drop_role', { role: it })); + const jsonAlterRoles = alters.filter((it) => it.entityType === 'roles').map((it) => + prepareStatement('alter_role', { diff: it, role: it.$right }) + ); + + const createSchemas = createdSchemas.map((it) => prepareStatement('create_schema', it)); + const dropSchemas = deletedSchemas.map((it) => prepareStatement('drop_schema', it)); + const renameSchemas = renamedSchemas.map((it) => prepareStatement('rename_schema', it)); + + const createTables = createdTables.map((it) => prepareStatement('create_table', { table: tableFromDDL(it, ddl2) })); + + const createViews = createdViews.map((it) => prepareStatement('create_view', { view: it })); + const jsonDropViews = deletedViews.map((it) => prepareStatement('drop_view', { view: it })); + + const jsonMoveViews = movedViews.map((it) => + prepareStatement('move_view', { fromSchema: it.from.schema, toSchema: it.to.schema, view: it.to }) + ); + + const filteredViewAlters = alters.filter((it): it is DiffEntities['views'] => { + if (it.entityType !== 'views') return false; + + if (mode === 'push' && it.definition) { + delete it.definition; + } + + if (mode === 'push' && it.withNoData) { + delete it.withNoData; + } + + return ddl2.views.hasDiff(it); + }); + + const viewsAlters = filteredViewAlters.map((it) => ({ diff: it, view: it.$right })); + + const jsonRecreateViews = viewsAlters.map((entry) => { + const it = entry.view; + const schemaRename = renamedSchemas.find((r) => r.to.name === it.schema); + const schema = schemaRename ? schemaRename.from.name : it.schema; + const viewRename = renamedViews.find((r) => r.to.schema === it.schema && r.to.name === it.name); + const name = viewRename ? viewRename.from.name : it.name; + const from = ddl1Copy.views.one({ schema, name }); + + if (!from) { + throw new Error(` + Missing view in original ddl: + ${it.schema}:${it.name} + ${schema}:${name} + `); + } + return prepareStatement('recreate_view', { from, to: it }); + }); + + const recreatedTargets = new Set(jsonRecreateViews.map((stmt) => `${stmt.to.schema}:${stmt.to.name}`)); + const jsonRenameViews = renamedViews.filter(({ to }) => !recreatedTargets.has(`${to.schema}:${to.name}`)).map(( + rename, + ) => prepareStatement('rename_view', rename)); + + jsonStatements.push(...createSchemas); + jsonStatements.push(...renameSchemas); + jsonStatements.push(...jsonCreateEnums); + jsonStatements.push(...jsonMoveEnums); + jsonStatements.push(...jsonRenameEnums); + jsonStatements.push(...jsonAlterEnums); + + jsonStatements.push(...createSequences); + jsonStatements.push(...moveSequences); + jsonStatements.push(...renameSequences); + jsonStatements.push(...jsonAlterSequences); + + jsonStatements.push(...jsonDropRoles); + jsonStatements.push(...jsonCreateRoles); + jsonStatements.push(...jsonAlterRoles); + + jsonStatements.push(...createTables); + + jsonStatements.push(...jsonAlterRlsStatements); + jsonStatements.push(...jsonDropViews); + jsonStatements.push(...jsonRenameViews); + jsonStatements.push(...jsonMoveViews); + jsonStatements.push(...jsonRecreateViews); + + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonDropFks); + + jsonStatements.push(...jsonDropPoliciesStatements); // before drop tables + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonSetTableSchemas); + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...jsonDropCheckConstraints); + + // TODO: ? will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonRenameIndexes); + jsonStatements.push(...jsonDropIndexes); + jsonStatements.push(...jsonRecreateIndexes); + jsonStatements.push(...jsonDropPrimaryKeys); + + jsonStatements.push(...jsonRenamePrimaryKey); + jsonStatements.push(...jsonRenameFks); + jsonStatements.push(...jsonAddColumnsStatemets); + jsonStatements.push(...jsonRecreateColumns); + + jsonStatements.push(...recreateEnums); + + jsonStatements.push(...jsonAlterAddNotNull); + jsonStatements.push(...jsonAlterColumns); + jsonStatements.push(...jsonAddPrimaryKeys); + jsonStatements.push(...jsonAlteredPKs); + jsonStatements.push(...jsonRecreatePk); + jsonStatements.push(...jsonAlterDropNotNull); + + jsonStatements.push(...jsonCreateFKs); + jsonStatements.push(...jsonRecreateFKs); + jsonStatements.push(...jsonCreateIndexes); + + jsonStatements.push(...jsonDropColumnsStatemets); + + jsonStatements.push(...jsonCreatedCheckConstraints); + + jsonStatements.push(...jsonAlterCheckConstraints); + + jsonStatements.push(...createViews); + + jsonStatements.push(...jsonRenamePoliciesStatements); + jsonStatements.push(...jsonCreatePoliciesStatements); + jsonStatements.push(...jsonAlterOrRecreatePoliciesStatements); + + jsonStatements.push(...jsonDropEnums); + jsonStatements.push(...dropSequences); + jsonStatements.push(...dropSchemas); + + const { groupedStatements, sqlStatements } = fromJson(jsonStatements); + + const renames = prepareMigrationRenames([ + ...renameSchemas, + ...renamedEnums, + ...renamedOrMovedTables, + ...columnRenames, + ...checkRenames, + ...indexesRenames, + ...pksRenames, + ...fksRenames, + ...policyRenames, + ...renamedOrMovedViews, + ...renamedOrMovedSequences, + ]); + + return { + statements: jsonStatements, + sqlStatements, + groupedStatements: groupedStatements, + renames: renames, + }; +}; diff --git a/drizzle-kit/src/dialects/cockroach/drizzle.ts b/drizzle-kit/src/dialects/cockroach/drizzle.ts new file mode 100644 index 0000000000..2052555c82 --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/drizzle.ts @@ -0,0 +1,731 @@ +import { getTableName, is, SQL } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { + AnyCockroachColumn, + AnyCockroachTable, + CockroachEnum, + CockroachMaterializedView, + CockroachSequence, + UpdateDeleteAction, +} from 'drizzle-orm/cockroach-core'; +import { + CockroachArray, + CockroachDialect, + CockroachEnumColumn, + CockroachGeometry, + CockroachGeometryObject, + CockroachPolicy, + CockroachRole, + CockroachSchema, + CockroachTable, + CockroachView, + getMaterializedViewConfig, + getTableConfig, + getViewConfig, + IndexedColumn, + isCockroachEnum, + isCockroachMaterializedView, + isCockroachSequence, + isCockroachView, +} from 'drizzle-orm/cockroach-core'; +import type { CasingType } from 'src/cli/validations/common'; +import { safeRegister } from 'src/utils/utils-node'; +import { assertUnreachable } from '../../utils'; +import { getColumnCasing } from '../drizzle'; +import type { EntityFilter } from '../pull-utils'; +import type { + CheckConstraint, + CockroachEntities, + Column, + Enum, + ForeignKey, + Index, + InterimColumn, + InterimIndex, + InterimSchema, + Policy, + PrimaryKey, + Schema, + SchemaError, + SchemaWarning, +} from './ddl'; +import { + defaultNameForFK, + defaultNameForPK, + defaultNameForUnique, + defaults, + GeometryPoint, + indexName, + maxRangeForIdentityBasedOn, + minRangeForIdentityBasedOn, + splitSqlType, + stringFromIdentityProperty, + trimDefaultValueSuffix, + typeFor, +} from './grammar'; + +export const policyFrom = (policy: CockroachPolicy, dialect: CockroachDialect) => { + const mappedTo = !policy.to + ? ['public'] + : typeof policy.to === 'string' + ? [policy.to] + : is(policy.to, CockroachRole) + ? [(policy.to as CockroachRole).name] + : Array.isArray(policy.to) + ? policy.to.map((it) => { + if (typeof it === 'string') { + return it; + } else if (is(it, CockroachRole)) { + return it.name; + } + return '' as never; // unreachable unless error in types + }) + : ('' as never); // unreachable unless error in types + + const policyAs = (policy.as?.toUpperCase() as Policy['as']) ?? 'PERMISSIVE'; + const policyFor = (policy.for?.toUpperCase() as Policy['for']) ?? 'ALL'; + const policyTo = mappedTo.sort(); // TODO: ?? + const policyUsing = is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : null; + const withCheck = is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : null; + + return { + name: policy.name, + as: policyAs, + for: policyFor, + roles: policyTo, + using: policyUsing, + withCheck, + }; +}; + +export const unwrapColumn = (column: AnyCockroachColumn) => { + const { baseColumn, dimensions } = is(column, CockroachArray) + ? unwrapArray(column) + : { baseColumn: column, dimensions: 0 }; + + const isEnum = is(baseColumn, CockroachEnumColumn); + const typeSchema = isEnum ? baseColumn.enum.schema || 'public' : null; + + /* TODO: legacy, for not to patch orm and don't up snapshot */ + let sqlBaseType = baseColumn.getSQLType(); + sqlBaseType = sqlBaseType.startsWith('timestamp (') ? sqlBaseType.replace('timestamp (', 'timestamp(') : sqlBaseType; + + const { type, options } = splitSqlType(sqlBaseType); + const sqlType = dimensions > 0 ? `${sqlBaseType}${'[]'.repeat(dimensions)}` : sqlBaseType; + + return { + baseColumn, + dimensions, + isEnum, + typeSchema, + sqlType, + baseType: type, + options, + }; +}; + +export const unwrapArray = ( + column: CockroachArray, + dimensions: number = 1, +): { baseColumn: AnyCockroachColumn; dimensions: number } => { + const baseColumn = column.baseColumn; + if (is(baseColumn, CockroachArray)) return unwrapArray(baseColumn, dimensions + 1); + + return { baseColumn, dimensions }; +}; + +export const transformOnUpdateDelete = (on: UpdateDeleteAction): ForeignKey['onUpdate'] => { + if (on === 'no action') return 'NO ACTION'; + if (on === 'cascade') return 'CASCADE'; + if (on === 'restrict') return 'RESTRICT'; + if (on === 'set default') return 'SET DEFAULT'; + if (on === 'set null') return 'SET NULL'; + + assertUnreachable(on); +}; + +export const defaultFromColumn = ( + base: AnyCockroachColumn, + def: unknown, + dimensions: number, + dialect: CockroachDialect, +): Column['default'] => { + if (typeof def === 'undefined') return null; + + if (is(def, SQL)) { + let sql = dialect.sqlToQuery(def).sql; + sql = trimDefaultValueSuffix(sql); + + // TODO: check if needed + + // const isText = /^'(?:[^']|'')*'$/.test(sql); + // sql = isText ? trimChar(sql, "'") : sql; + + return sql; + } + const { baseColumn, isEnum } = unwrapColumn(base); + const grammarType = typeFor(base.getSQLType(), isEnum); + + if (is(baseColumn, CockroachGeometry) || is(baseColumn, CockroachGeometryObject)) { + return (dimensions > 0 && Array.isArray(def)) + ? def.flat(5).length === 0 + ? "'{}'" + : GeometryPoint.defaultArrayFromDrizzle(def, baseColumn.mode, baseColumn.srid) + : GeometryPoint.defaultFromDrizzle(def, baseColumn.mode, baseColumn.srid); + } + + if (grammarType) { + if (dimensions > 0 && Array.isArray(def)) { + if (def.flat(5).length === 0) return "'{}'"; + + return grammarType.defaultArrayFromDrizzle(def); + } + + return grammarType.defaultFromDrizzle(def); + } + + throw new Error(`Unhandled type: ${base.getSQLType()}`); +}; + +/* + We map drizzle entities into interim schema entities, + so that both Drizzle Kit and Drizzle Studio are able to share + common business logic of composing and diffing InternalSchema + + By having interim schemas based on arrays instead of records - we can postpone + collissions(duplicate indexes, columns, etc.) checking/or printing via extra `errors` field upwards, + while trimming serializer.ts of Hanji & Chalk dependencies +*/ +export const fromDrizzleSchema = ( + schema: { + schemas: CockroachSchema[]; + tables: AnyCockroachTable[]; + enums: CockroachEnum[]; + sequences: CockroachSequence[]; + roles: CockroachRole[]; + policies: CockroachPolicy[]; + views: CockroachView[]; + matViews: CockroachMaterializedView[]; + }, + casing: CasingType | undefined, + filter: EntityFilter, +): { + schema: InterimSchema; + errors: SchemaError[]; + warnings: SchemaWarning[]; +} => { + const dialect = new CockroachDialect({ casing }); + const errors: SchemaError[] = []; + const warnings: SchemaWarning[] = []; + + const res: InterimSchema = { + indexes: [], + pks: [], + fks: [], + checks: [], + columns: [], + policies: [], + enums: [], + roles: [], + schemas: [], + sequences: [], + tables: [], + viewColumns: [], + views: [], + }; + + res.schemas = schema.schemas + .filter((x) => { + return !x.isExisting && x.schemaName !== 'public' && filter({ type: 'schema', name: x.schemaName }); + }) + .map((it) => ({ + entityType: 'schemas', + name: it.schemaName, + })); + + const tableConfigPairs = schema.tables.map((it) => { + return { config: getTableConfig(it), table: it }; + }).filter((it) => { + return filter({ type: 'table', schema: it.config.schema ?? 'public', name: it.config.name }); + }); + + for (const policy of schema.policies) { + if (!('_linkedTable' in policy) || typeof policy._linkedTable === 'undefined') { + warnings.push({ type: 'policy_not_linked', policy: policy.name }); + continue; + } + + // @ts-expect-error + const { schema: configSchema, name: tableName } = getTableConfig(policy._linkedTable); + + const p = policyFrom(policy, dialect); + res.policies.push({ + entityType: 'policies', + schema: configSchema ?? 'public', + table: tableName, + name: p.name, + as: p.as, + for: p.for, + roles: p.roles, + using: p.using, + withCheck: p.withCheck, + }); + } + + res.tables = tableConfigPairs.map((it) => { + const config = it.config; + const schema = config.schema ?? 'public'; + const isRlsEnabled = config.enableRLS || config.policies.length > 0 + || res.policies.some((x) => x.schema === schema && x.table === config.name); + + return { + entityType: 'tables', + schema, + name: config.name, + isRlsEnabled, + } satisfies CockroachEntities['tables']; + }); + + for (const { config } of tableConfigPairs) { + const { + name: tableName, + columns: drizzleColumns, + indexes: drizzleIndexes, + foreignKeys: drizzleFKs, + checks: drizzleChecks, + schema: drizzleSchema, + primaryKeys: drizzlePKs, + uniqueConstraints: drizzleUniques, + policies: drizzlePolicies, + } = config; + + const schema = drizzleSchema || 'public'; + + res.pks.push( + ...drizzlePKs.map((pk) => { + const columnNames = pk.columns.map((c) => getColumnCasing(c, casing)); + + const name = pk.name || defaultNameForPK(tableName); + return { + entityType: 'pks', + schema: schema, + table: tableName, + name: name, + columns: columnNames, + nameExplicit: pk.isNameExplicit, + }; + }), + ); + + res.columns.push( + ...drizzleColumns.map((column) => { + const name = getColumnCasing(column, casing); + const notNull = column.notNull; + + const generated = column.generated; + const identity = column.generatedIdentity; + + const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; + const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) + ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); + const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) + ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); + const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = Number(stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? 1); + + const generatedValue: Column['generated'] = generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : String(generated.as), + + type: 'stored', // TODO: why only stored? https://orm.drizzle.team/docs/generated-columns + } + : null; + + const identityValue = identity + ? { + type: identity.type, + increment, + startWith, + minValue, + maxValue, + cache, + } + : null; + + const { dimensions, sqlType, typeSchema, baseColumn } = unwrapColumn(column); + + const columnDefault = defaultFromColumn(baseColumn, column.default, dimensions, dialect); + + const isPk = column.primary + || config.primaryKeys.find((pk) => + pk.columns.some((col) => col.name ? col.name === column.name : col.keyAsName === column.keyAsName) + ) !== undefined; + + return { + entityType: 'columns', + schema: schema, + table: tableName, + name, + type: sqlType.replaceAll('[]', ''), + typeSchema: typeSchema ?? null, + dimensions: dimensions, + pk: column.primary, + pkName: null, + notNull: notNull || isPk, + default: columnDefault, + generated: generatedValue, + unique: column.isUnique, + uniqueName: column.uniqueName ?? null, + identity: identityValue, + } satisfies InterimColumn; + }), + ); + + res.fks.push( + ...drizzleFKs.map((fk) => { + const onDelete = fk.onDelete; + const onUpdate = fk.onUpdate; + const reference = fk.reference(); + + const tableTo = getTableName(reference.foreignTable); + + const schemaTo = getTableConfig(reference.foreignTable).schema || 'public'; + const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); + const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); + + const name = fk.getName() || defaultNameForFK(tableName, columnsFrom, tableTo, columnsTo); + + return { + entityType: 'fks', + schema: schema, + table: tableName, + name, + nameExplicit: fk.isNameExplicit(), + tableTo, + schemaTo, + columns: columnsFrom, + columnsTo, + onDelete: onDelete ? transformOnUpdateDelete(onDelete) : null, + onUpdate: onUpdate ? transformOnUpdateDelete(onUpdate) : null, + } satisfies ForeignKey; + }), + ); + + for (const index of drizzleIndexes) { + const columns = index.config.columns; + for (const column of columns) { + if (is(column, IndexedColumn) && column.type !== 'CockroachVector') continue; + + if (is(column, SQL) && !index.isNameExplicit) { + errors.push({ + type: 'index_no_name', + schema: schema, + table: getTableName(index.config.table), + sql: dialect.sqlToQuery(column).sql, + }); + continue; + } + } + } + + for (const unique of drizzleUniques) { + const columns: InterimIndex['columns'] = unique.columns.map((c) => { + if (is(c, SQL)) { + const sql = dialect.sqlToQuery(c).sql; + return { value: sql, isExpression: true, asc: true }; + } + return { value: getColumnCasing(c, casing), isExpression: false, asc: true }; + }); + + const name = unique.name + ?? defaultNameForUnique(tableName, ...unique.columns.filter((c) => !is(c, SQL)).map((c) => c.name)); + + res.indexes.push({ + entityType: 'indexes', + columns: columns, + forPK: false, + isUnique: true, + method: defaults.index.method, + nameExplicit: unique.isNameExplicit, + name: name, + schema: schema, + table: tableName, + where: null, + }); + } + + res.indexes.push( + ...drizzleIndexes.map((value) => { + const columns = value.config.columns; + + let indexColumnNames = columns.map((it) => { + const name = getColumnCasing(it as IndexedColumn, casing); + return name; + }); + + const name = value.config.name + ?? (value.config.unique + ? defaultNameForUnique(tableName, ...indexColumnNames) + : indexName(tableName, indexColumnNames)); + const nameExplicit = value.isNameExplicit; + + let indexColumns = columns.map((it) => { + if (is(it, SQL)) { + return { + value: dialect.sqlToQuery(it, 'indexes').sql, + isExpression: true, + asc: true, + } satisfies Index['columns'][number]; + } else { + it = it as IndexedColumn; + + const asc = it.indexConfig?.order ? it.indexConfig.order === 'asc' : true; + return { + value: getColumnCasing(it as IndexedColumn, casing), + isExpression: false, + asc: asc, + } satisfies Index['columns'][number]; + } + }); + + let where = value.config.where ? dialect.sqlToQuery(value.config.where).sql : ''; + where = where === 'true' ? '' : where; + + return { + entityType: 'indexes', + schema, + table: tableName, + name, + nameExplicit, + columns: indexColumns, + isUnique: value.config.unique, + where: where ? where : null, + method: value.config.method ?? defaults.index.method, + forPK: false, + } satisfies InterimIndex; + }), + ); + + res.policies.push( + ...drizzlePolicies.map((policy) => { + const p = policyFrom(policy, dialect); + return { + entityType: 'policies', + schema: schema, + table: tableName, + name: p.name, + as: p.as, + for: p.for, + roles: p.roles, + using: p.using, + withCheck: p.withCheck, + }; + }), + ); + + res.checks.push( + ...drizzleChecks.map((check) => { + const checkName = check.name; + return { + entityType: 'checks', + schema, + table: tableName, + name: checkName, + value: dialect.sqlToQuery(check.value).sql, + }; + }), + ); + } + + for (const sequence of schema.sequences) { + const name = sequence.seqName!; + const increment = stringFromIdentityProperty(sequence.seqOptions?.increment) ?? '1'; + const minValue = stringFromIdentityProperty(sequence.seqOptions?.minValue) + ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); + const maxValue = stringFromIdentityProperty(sequence.seqOptions?.maxValue) + ?? (parseFloat(increment) < 0 ? '-1' : '9223372036854775807'); + const startWith = stringFromIdentityProperty(sequence.seqOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = Number(stringFromIdentityProperty(sequence.seqOptions?.cache) ?? 1); + res.sequences.push({ + entityType: 'sequences', + schema: sequence.schema ?? 'public', + incrementBy: increment, + startWith, + name, + minValue, + maxValue, + cacheSize: cache, + }); + } + + for (const _role of schema.roles) { + const role = _role as any; + if (role._existing) continue; + + res.roles.push({ + entityType: 'roles', + name: role.name, + createDb: role.createDb ?? false, + createRole: role.createRole ?? false, + }); + } + + const combinedViews = [...schema.views, ...schema.matViews].map((it) => { + if (is(it, CockroachView)) { + return { + ...getViewConfig(it), + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: undefined, + }; + } else { + return { ...getMaterializedViewConfig(it), materialized: true }; + } + }); + + for (const view of combinedViews) { + if (view.isExisting) continue; + if (!filter({ type: 'table', schema: view.schema ?? 'public', name: view.name })) continue; + + const { name: viewName, schema, query, withNoData, materialized } = view; + + const viewSchema = schema ?? 'public'; + + res.views.push({ + entityType: 'views', + definition: dialect.sqlToQuery(query!).sql, + name: viewName, + schema: viewSchema, + withNoData: withNoData ?? null, + materialized, + }); + } + + res.enums = schema.enums.map((e) => { + return { + entityType: 'enums', + name: e.enumName, + schema: e.schema || 'public', + values: e.enumValues, + }; + }); + + return { + schema: res, + errors, + warnings, + }; +}; + +export const fromExports = (exports: Record) => { + const tables: AnyCockroachTable[] = []; + const enums: CockroachEnum[] = []; + const schemas: CockroachSchema[] = []; + const sequences: CockroachSequence[] = []; + const roles: CockroachRole[] = []; + const policies: CockroachPolicy[] = []; + const views: CockroachView[] = []; + const matViews: CockroachMaterializedView[] = []; + const relations: Relations[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (isCockroachEnum(t)) { + enums.push(t); + return; + } + if (is(t, CockroachTable)) { + tables.push(t); + } + + if (is(t, CockroachSchema)) { + schemas.push(t); + } + + if (isCockroachView(t)) { + views.push(t); + } + + if (isCockroachMaterializedView(t)) { + matViews.push(t); + } + + if (isCockroachSequence(t)) { + sequences.push(t); + } + + if (is(t, CockroachRole)) { + roles.push(t); + } + + if (is(t, CockroachPolicy)) { + policies.push(t); + } + + if (is(t, Relations)) { + relations.push(t); + } + }); + + return { + tables, + enums, + schemas, + sequences, + views, + matViews, + roles, + policies, + relations, + }; +}; + +export const prepareFromSchemaFiles = async (imports: string[]) => { + const tables: AnyCockroachTable[] = []; + const enums: CockroachEnum[] = []; + const schemas: CockroachSchema[] = []; + const sequences: CockroachSequence[] = []; + const views: CockroachView[] = []; + const roles: CockroachRole[] = []; + const policies: CockroachPolicy[] = []; + const matViews: CockroachMaterializedView[] = []; + const relations: Relations[] = []; + + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const prepared = fromExports(i0); + + tables.push(...prepared.tables); + enums.push(...prepared.enums); + schemas.push(...prepared.schemas); + sequences.push(...prepared.sequences); + views.push(...prepared.views); + matViews.push(...prepared.matViews); + roles.push(...prepared.roles); + policies.push(...prepared.policies); + relations.push(...prepared.relations); + } + }); + + return { + tables, + enums, + schemas, + sequences, + views, + matViews, + roles, + policies, + relations, + }; +}; diff --git a/drizzle-kit/src/dialects/cockroach/grammar.ts b/drizzle-kit/src/dialects/cockroach/grammar.ts new file mode 100644 index 0000000000..30b2f9feb7 --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/grammar.ts @@ -0,0 +1,1821 @@ +import { Temporal } from '@js-temporal/polyfill'; +import type { possibleIntervals } from '../../utils'; +import { + dateExtractRegex, + hasTimeZoneSuffix, + parseEWKB, + parseIntervalFields, + stringifyArray, + stringifyTuplesArray, + timeTzRegex, + timezoneSuffixRegexp, + trimChar, + wrapWith, +} from '../../utils'; +import { parseArray } from '../../utils/parse-pgarray'; +import { parse, stringify } from '../../utils/when-json-met-bigint'; +import { hash } from '../common'; +import { numberForTs, parseParams } from '../utils'; +import type { CockroachEntities, Column, DiffEntities } from './ddl'; +import type { Import } from './typescript'; + +export const splitSqlType = (sqlType: string) => { + const toMatch = sqlType.replaceAll('[]', ''); + const match = toMatch.match(/^(\w+(?:\s+\w+)*)\(([^)]*)\)?$/i); + let type = match ? match[1] : toMatch; + let options = match ? match[2].replaceAll(', ', ',') : null; + + return { type, options }; +}; + +export const vectorOps = [ + 'vector_l2_ops', + 'vector_ip_ops', + 'vector_cosine_ops', + 'vector_l1_ops', + 'bit_hamming_ops', + 'bit_jaccard_ops', + 'halfvec_l2_ops', + 'sparsevec_l2_ops', +]; + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +export function stringFromIdentityProperty(field: string | number | undefined): string | undefined { + return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : String(field); +} + +export function maxRangeForIdentityBasedOn(columnType: string) { + return columnType === 'int4' ? '2147483647' : columnType === 'int8' ? '9223372036854775807' : '32767'; +} + +export function minRangeForIdentityBasedOn(columnType: string) { + return columnType === 'int4' ? '-2147483648' : columnType === 'int8' ? '-9223372036854775808' : '-32768'; +} + +export function stringFromDatabaseIdentityProperty(field: any): string | null { + return typeof field === 'string' + ? (field as string) + : typeof field === 'undefined' || field === null + ? null + : typeof field === 'bigint' + ? field.toString() + : String(field); +} + +export type OnAction = CockroachEntities['fks']['onUpdate']; +export const parseOnType = (type: string): OnAction => { + switch (type) { + case 'a': + return 'NO ACTION'; + case 'r': + return 'RESTRICT'; + case 'n': + return 'SET NULL'; + case 'c': + return 'CASCADE'; + case 'd': + return 'SET DEFAULT'; + default: + throw new Error(`Unknown foreign key type: ${type}`); + } +}; + +export const systemNamespaceNames = ['crdb_internal', 'information_schema', 'pg_catalog', 'pg_extension']; +export const isSystemNamespace = (name: string) => { + return systemNamespaceNames.indexOf(name) >= 0; +}; + +export const systemRoles = ['admin', 'root', 'node']; +export const isSystemRole = (name: string) => { + return systemRoles.indexOf(name) >= 0; +}; + +/* + CHECK (((email)::text <> 'test@gmail.com'::text)) + Where (email) is column in table +*/ +export const parseCheckDefinition = (value: string): string => { + return value.replace(/^CHECK\s*\(\(/, '').replace(/\)\)\s*$/, ''); +}; + +export const parseViewDefinition = (value: string | null | undefined): string | null => { + if (!value) return null; + return value.replace(/\s+/g, ' ').replace(';', '').trim(); +}; + +export const defaultNameForIdentitySequence = (table: string, column: string) => { + return `${table}_${column}_seq`; +}; + +export const defaultNameForPK = (table: string) => { + return `${table}_pkey`; +}; + +export const defaultNameForFK = (table: string, columns: string[], tableTo: string, columnsTo: string[]) => { + const desired = `${table}_${columns.join('_')}_${tableTo}_${columnsTo.join('_')}_fkey`; + const res = desired.length > 63 + ? table.length < 63 - 18 // _{hash(12)}_fkey + ? `${table}_${hash(desired)}_fkey` + : `${hash(desired)}_fkey` // 1/~3e21 collision chance within single schema, it's fine + : desired; + return res; +}; + +export const defaultNameForUnique = (table: string, ...columns: string[]) => { + return `${table}_${columns.join('_')}_key`; +}; + +export const defaultNameForIndex = (table: string, columns: string[]) => { + return `${table}_${columns.join('_')}_idx`; +}; + +// ::text, ::varchar(256), ::text::varchar(256) +export function trimDefaultValueSuffix(defaultValue: string) { + let res = defaultValue.endsWith('[]') ? defaultValue.slice(0, -2) : defaultValue; + res = res.replace(/(::[a-zA-Z_][\w\s.]*?(?:\([^()]*\))?(?:\[\])?)+$/g, ''); + return res; +} + +export const defaultForColumn = ( + type: string, + def: string | boolean | number | null | undefined, + dimensions: number, + isEnum: boolean, +): Column['default'] => { + if (def === null || def === undefined) { + return null; + } + + // trim ::type and [] + let value = trimDefaultValueSuffix(String(def)); + + const grammarType = typeFor(type, isEnum); + if (grammarType) { + if (dimensions > 0) return grammarType.defaultArrayFromIntrospect(value); + return grammarType.defaultFromIntrospect(String(value)); + } + + throw Error(); +}; + +export const defaultToSQL = (it: Pick) => { + if (!it.default) return ''; + + const { type: columnType, dimensions, typeSchema } = it; + const value = it.default; + + if (typeSchema) { + const schemaPrefix = typeSchema && typeSchema !== 'public' ? `"${typeSchema}".` : ''; + return `${value}::${schemaPrefix}"${columnType}"${dimensions > 0 ? '[]' : ''}`; + } + + // const { type: rawType } = splitSqlType(columnType); + const suffix = dimensions > 0 ? `::${columnType}[]` : ''; + + const grammarType = typeFor(columnType, Boolean(typeSchema)); + + if (grammarType) { + const value = it.default ?? ''; + return `${value}${suffix}`; + } + + throw Error(); + + // assertUnreachable(defaultType); +}; + +const dateTimeRegex = + /^(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}(?::?\d{2})?)?|\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}(?::?\d{2})?)?)$/; +const dateRegex = + /^(\d{4}-\d{2}-\d{2}(?:[T ]\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}(?::?\d{2})?)?)?|\d{4}-\d{2}-\d{2})$/; +// TODO write descriptions for all functions +// why that was made, etc. + +export function formatTimestamp(date: string, modify: boolean, precision?: number) { + if (!dateTimeRegex.test(date)) return date; + + // Convert to Temporal.Instant + const instant = hasTimeZoneSuffix(date) ? Temporal.Instant.from(date) : Temporal.Instant.from(date + 'Z'); + + const iso = instant.toString(); + + const fractionalDigits = iso.replace('Z', '').split('.')[1]?.length ?? 0; + + if (!precision && fractionalDigits > 6) precision = 6; + + if (!precision) return iso; + + // decide whether to limit precision + const formattedPrecision = fractionalDigits > precision + // @ts-expect-error + ? instant.toString({ fractionalSecondDigits: precision }) + : iso; + + return modify ? formattedPrecision : iso; +} +export function formatTime(date: string, modify: boolean, precision: number = 0) { + const match = date.match(timeTzRegex); + if (!match) return date; + const time: string = match[0]; + + const timestampInstant = hasTimeZoneSuffix(time) + ? Temporal.Instant.from(`1970-01-01T${time}`) + : Temporal.Instant.from(`1970-01-01T${time}` + 'Z'); + const iso = timestampInstant.toString(); + + // 2024-05-23T14:20:33.123Z + const fractionalDigits = iso.replace('Z', '').split('.')[1]?.length ?? 0; + + if (!precision && fractionalDigits > 6) precision = 6; + + if (!precision) return iso; + // decide whether to limit precision + const formattedPrecision = fractionalDigits > precision + // @ts-expect-error + ? timestampInstant.toString({ fractionalSecondDigits: precision }) + : iso; + + return modify ? formattedPrecision : iso; +} +export function formatDate(date: string) { + if (!dateRegex.test(date)) return date; // invalid format + const match = date.match(dateExtractRegex); + if (!match) return date; + + const extractedDate: string = match[0]; + + return extractedDate; +} +// CockroachDb trims and pads defaults under the hood +export function formatDecimal(type: string, value: string) { + const { options } = splitSqlType(type); + const [integerPart, dp] = value.split('.'); + const decimalPart = dp ?? ''; + + let scale: number | undefined; + + // if precision exists and scale not -> scale = 0 + // if scale exists -> scale = scale + // if options does not exists (p,s are not present) -> scale is undefined + if (options) { + // if option exists we have 2 possible variants + // 1. p exists + // 2. p and s exists + const [_, s] = options.split(','); + + // if scale exists - use scale + // else use 0 (cause p exists) + scale = s !== undefined ? Number(s) : 0; + } + + if (typeof scale === 'undefined') return value; + if (scale === 0) return integerPart; + if (scale === decimalPart.length) return value; + + const fixedDecimal = scale > decimalPart.length ? decimalPart.padEnd(scale, '0') : decimalPart.slice(0, scale); + + return `${integerPart}.${fixedDecimal}`; +} +export function formatBit(type: string, value?: string | null, trimToOneLength: boolean = false) { + if (!value) return value; + + const { options } = splitSqlType(type); + + const length = !options ? (trimToOneLength ? 1 : Number(options)) : Number(options); + if (value.length > length) return value.substring(0, length); + return value.padEnd(length, '0'); +} +export function formatString(type: string, value: string, mode: 'default' | 'arr' = 'default') { + if (!value) return value; + + const { options } = splitSqlType(type); + + if (!options && mode === 'default') { + return value; + } + + const length = !options ? 1 : Number(options); + + if (value.length <= length) return value; + value = value.substring(0, length); + + return value; +} + +export const escapeForSqlDefault = (input: string, mode: 'default' | 'arr' | 'enum-arr' = 'default') => { + let value = input.replace(/\\/g, '\\\\'); + if (mode === 'arr') value = value.replace(/'/g, "''").replaceAll('"', '\\"'); + else if (mode === ('enum-arr')) value = value.replace(/'/g, "''").replaceAll('"', '\\"').replace(',', '\\,'); + else value = value.replace(/'/g, "\\'"); + + return value; +}; +// export const escapeJsonbForSqlDefault = (input: string) => { +// let value = input.replace(/\\/g, '\\\\'); +// if (mode === 'arr') value = value.replace(/'/g, "''").replaceAll('"', '\\"'); +// else value = value.replace(/'/g, "\\'"); + +// return value; +// }; + +export const unescapeFromSqlDefault = (input: string) => { + // starts with e' and ends with ' + input = /^e'.*'$/s.test(input) ? input.replace(/e'/g, "'") : input; + + input = trimChar(input, "'"); + + let res = input.replace(/\\"/g, '"').replace(/\\'/g, "'").replace(/\\\\/g, '\\'); + + // if (mode === 'arr') return res; + return res; +}; + +export const escapeForTsLiteral = (input: string) => { + return input.replace(/\\/g, '\\\\').replace(/"/g, '\\"'); +}; + +export const isDefaultAction = (action: string) => { + return action.toLowerCase() === 'no action'; +}; + +export const defaults = { + identity: { + startWith: '1', + increment: '1', + min: '1', + maxFor: (type: string) => { + if (type === 'int2') return '32767'; + if (type === 'int4') return '2147483647'; + if (type === 'int8') return '9223372036854775807'; + throw new Error(`Unknow identity column type: ${type}`); + }, + cache: 1, + }, + index: { + method: 'btree', + }, +} as const; + +// from -> db +// to -> code +// TODO write description +export const defaultsCommutative = ( + diffDef: DiffEntities['columns']['default'], + type: string, + dimensions: number, + isEnum: boolean, +): boolean => { + if (!diffDef) return false; + + if (diffDef.from === diffDef.to) return true; + + let from = diffDef.from; + let to = diffDef.to; + + if (from === to) return true; + + const commutativeTypes = [ + ['current_timestamp', 'now', 'now()', 'current_timestamp()'], + ]; + for (const it of commutativeTypes) { + const leftIn = it.some((x) => x === from); + const rightIn = it.some((x) => x === to); + + if (leftIn && rightIn) return true; + } + + if (dimensions > 0 && from && to) { + from = trimChar(from, "'"); + to = trimChar(to, "'"); + } + + if (isEnum && dimensions > 0 && from && to) { + try { + to = stringifyArray(parseArray(to), 'ts', (v) => `"${v}"`); + from = stringifyArray(parseArray(from), 'ts', (v) => { + v = unescapeFromSqlDefault(v); + + return `"${v}"`; + }); + + if (to === from) return true; + } catch {} + return false; + } + + if ((type.startsWith('bit') || type.startsWith('varbit')) && from && to) { + if ( + formatBit(type, diffDef.from, true) === formatBit(type, diffDef?.to, true) + ) { + return true; + } + + try { + const stringify = (v: any) => { + return `${formatBit(type, v, true)}`; + }; + const toArray = stringifyArray(parseArray(to), 'sql', stringify); + if (from === toArray) return true; + } catch {} + + return false; + } + + // only if array + if (type.startsWith('decimal') && dimensions > 0 && from && to) { + try { + const stringify = (v: any) => { + return `${formatDecimal(type, v)}`; + }; + const toArray = stringifyArray(parseArray(to), 'sql', stringify); + if (from === toArray) return true; + } catch {} + return false; + } + + if (type.startsWith('timestamp')) { + // "Z" can be inserted in mode:string + from = from?.replace('Z', '+00') ?? null; + to = to?.replace('Z', '+00') ?? null; + if (from === to) return true; + + const { options } = splitSqlType(type); + const precision = options ? Number(options) : undefined; // def precision + + if (from && to) { + from = trimChar(from, "'"); + to = trimChar(to, "'"); + + if (dimensions > 0) { + try { + const stringify = (v: any, modify: boolean) => { + v = trimChar(v, '"'); + if (!type.includes('tz')) v = v.replace(timezoneSuffixRegexp, ''); + + const formatted = formatTimestamp(v, modify, precision); + return `"${type.includes('tz') ? formatted : formatted.replace(timezoneSuffixRegexp, '')}"`; + }; + const toArray = stringifyArray(parseArray(to), 'sql', (v) => stringify(v, true)); + + const fromArrayOriginal = stringifyArray(parseArray(from), 'sql', (v) => stringify(v, false)); + + if (fromArrayOriginal === toArray) return true; + } catch { + } + + return false; + } + + const trimTz = (value: string, type: string) => { + return type.includes('tz') ? value : value.replace(timezoneSuffixRegexp, ''); + }; + + from = trimTz(from, type); + to = trimTz(to, type); + const formattedTo = trimTz(formatTimestamp(to, true, precision), type); + const formattedFromOriginal = trimTz(formatTimestamp(from, false, precision), type); + if (formattedFromOriginal === formattedTo) return true; + } + + return false; + } + + if (type.startsWith('time')) { + from = from?.replace('Z', '+00') ?? null; + to = to?.replace('Z', '+00') ?? null; + + if (from === to) return true; + + const { options } = splitSqlType(type); + const precision = options ? Number(options) : undefined; // def precision + + if (from && to) { + from = trimChar(from, "'"); + to = trimChar(to, "'"); + + if (dimensions > 0) { + try { + const stringify = (v: any, modify: boolean) => { + if (!type.includes('tz')) v = v.replace(timezoneSuffixRegexp, ''); + + const formatted = formatTime(v, modify, precision); + return `"${type.includes('tz') ? formatted : formatted.replace(timezoneSuffixRegexp, '')}"`; + }; + const toArray = stringifyArray(parseArray(to), 'sql', (v) => stringify(v, true)); + + const fromArrayOriginal = stringifyArray(parseArray(from), 'sql', (v) => stringify(v, false)); + + if (fromArrayOriginal === toArray) return true; + } catch {} + + return false; + } + + const trimTz = (value: string, type: string) => { + return type.includes('tz') ? value : value.replace(timezoneSuffixRegexp, ''); + }; + + from = trimTz(from, type); + to = trimTz(to, type); + + const formattedTo = trimTz(formatTime(to, true, precision), type); + const formattedFromOriginal = trimTz(formatTime(from, false, precision), type); + if (formattedFromOriginal === formattedTo) return true; + } + + return false; + } + + if (type.startsWith('date')) { + if (from && to) { + from = trimChar(from, "'"); + to = trimChar(to, "'"); + + if (dimensions > 0) { + try { + const toArray = stringifyArray(parseArray(to), 'sql', (v) => formatDate(v)); + if (from === toArray) return true; + } catch {} + + return false; + } + + if (from === formatDate(to)) return true; + } + + return false; + } + + if (type.startsWith('char') || type.startsWith('varchar') || type.startsWith('text') || type.startsWith('string')) { + if (from && to) { + if (dimensions > 0) { + try { + const toArray = stringifyArray(parseArray(to), 'sql', (v) => formatString(type, v, 'arr')); + + // parse to identical format + const fromArrayOriginal = stringifyArray(parseArray(from), 'sql', (v) => String(v)); + if (fromArrayOriginal === toArray) return true; + } catch {} + + return false; + } + } + return false; + } + + // const timeCommutatives = [['now', 'now()', 'current_timestamp', 'current_timestamp()']]; + // if (type.startsWith('timestamp')) { + // for (const it of timeCommutatives) { + // const leftIn = it.some((x) => x === diffDef.from); + // const rightIn = it.some((x) => x === diffDef.to); + + // if (leftIn && rightIn) return true; + // } + // } + + if (type.startsWith('vector')) { + if (from?.replaceAll('.0', '') === to) return true; + if (to?.replaceAll('.0', '') === from) return true; + } + + // real and float adds .0 to the end for the numbers + // 100 === 100.0 + const dataTypesWithExtraZero = ['real', 'float']; + if ( + dataTypesWithExtraZero.find((dataType) => type.startsWith(dataType)) + && (from?.replace('.0', '') === to || to === from?.replace('.0', '')) + ) { + return true; + } + + if (type === 'jsonb' && from && to) { + const left = stringify(parse(trimChar(from, "'"))); + const right = stringify(parse(trimChar(to, "'"))); + if (left === right) return true; + } + + return false; +}; + +const commutativeTypes = [['char(1)', 'char']]; +export const typesCommutative = (left: string, right: string) => { + for (const it of commutativeTypes) { + const leftIn = it.some((x) => x === left); + const rightIn = it.some((x) => x === right); + + if (leftIn && rightIn) return true; + } +}; + +export interface SqlType { + is(type: string): boolean; + drizzleImport(): Import; + defaultFromDrizzle(value: unknown, mode?: string, config?: unknown): Column['default']; + defaultArrayFromDrizzle(value: any[], mode?: string, config?: unknown): Column['default']; + defaultFromIntrospect(value: string): Column['default']; + defaultArrayFromIntrospect(value: string): Column['default']; // todo: remove? + toTs(type: string, value: string | null): { options?: Record; default: string; customType?: string }; + toArrayTs( + type: string, + value: string | null, + ): { options?: Record; default: string; customType?: string }; +} + +export const Int2: SqlType = { + is: (type: string) => /^\s*int2(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'int2', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + return value; // 10, but '-10' + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => ({ default: value ?? '' }), + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return `${v}`; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Int4: SqlType = { + is: (type: string) => /^\s*int4(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'int4', + defaultFromDrizzle: Int2.defaultFromDrizzle, + defaultArrayFromDrizzle: Int2.defaultArrayFromDrizzle, + defaultFromIntrospect: Int2.defaultFromIntrospect, + defaultArrayFromIntrospect: Int2.defaultArrayFromIntrospect, + toTs: Int2.toTs, + toArrayTs: Int2.toArrayTs, +}; + +export const Int8: SqlType = { + is: (type: string) => /^\s*int8(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'int8', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + return value; // 10, but '-10' + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (_, value) => { + if (!value) return { options: { mode: 'number' }, default: '' }; + const { mode, value: def } = numberForTs(value); + return { options: { mode }, default: def }; + }, + toArrayTs: (_, value) => { + if (!value) return { options: { mode: 'number' }, default: '' }; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + return { + options: { mode: 'bigint' }, + default: stringifyArray(res, 'ts', (v) => `${v}n`), + }; + } catch { + return { options: { mode: 'bigint' }, default: `sql\`${value}\`` }; + } + }, +}; + +export const Bool: SqlType = { + is: (type: string) => /^\s*bool(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'bool', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + return trimChar(value, "'"); + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => ({ default: value ?? '' }), + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return v === 'true' ? 'true' : 'false'; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Uuid: SqlType = { + is: (type: string) => /^\s*uuid(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'uuid', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + return v; + }); + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + if (!value) return { options, default: '' }; + + value = trimChar(value, "'"); + if (value === 'gen_random_uuid()') { + return { options, default: '.defaultRandom()' }; + } + return { options, default: `"${trimChar(value, "'")}"` }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + return `"${v}"`; + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Real: SqlType = { + is: (type: string) => /^\s*real(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'real', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + // 100 will be stored as 100.0 + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => ({ default: value ?? '' }), + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return `${v}`; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Float: SqlType = { + is: (type: string) => /^\s*float(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'float', + defaultFromDrizzle: Real.defaultFromDrizzle, + defaultArrayFromDrizzle: Real.defaultArrayFromDrizzle, + defaultFromIntrospect: Real.defaultFromIntrospect, + defaultArrayFromIntrospect: Real.defaultArrayFromIntrospect, + toTs: Real.toTs, + toArrayTs: Real.toArrayTs, +}; + +export const Decimal: SqlType = { + // decimal OR decimal(1)[] OR decimal(2,1)[] + is: (type: string) => /^\s*decimal(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'decimal', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value: string) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const [precision, scale] = parseParams(type); + const options = {} as any; + if (precision) options['precision'] = Number(precision); + if (scale) options['scale'] = Number(scale); + + if (!value) return { options, default: '' }; + + const { mode, value: def } = numberForTs(value); + + if (mode === 'number') return { options, default: `"${def}"` }; + + return { default: def, options: { mode, ...options } }; + }, + toArrayTs: (type, value) => { + const [precision, scale] = parseParams(type); + const options = {} as any; + if (precision) options['precision'] = Number(precision); + if (scale) options['scale'] = Number(scale); + + if (!value) return { options, default: '' }; + /* + If we'd want it to be smart - we need to check if decimal array has + any bigints recuresively, it's waaaaay easier to just do sql`` + */ + // try { + // const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + // const res = parseArray(trimmed); + + // return { + // options: { mode: 'bigint', ...options }, + // default: stringifyArray(res, 'ts', (v) => { + + // return `${v}`; + // }), + // }; + // } catch { + return { options, default: `sql\`${value}\`` }; + // } + }, +}; + +export const Bit: SqlType = { + is: (type: string) => /^\s*bit(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'bit', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + // it is stored as B'' + return value.replace(/^B'/, "'"); + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const [length] = parseParams(type); + const options = length ? { length: Number(length) } : {}; + + if (!value) return { options, default: '' }; + + if (/^'[01]+'$/.test(value)) { + return { options, default: value }; + } + + return { options, default: `sql\`${value}\`` }; + }, + toArrayTs: (type, value) => { + const [length] = parseParams(type); + const options = length ? { length: Number(length) } : {}; + + if (!value) return { options, default: '' }; + let isDrizzleSql: boolean = false; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + const def = stringifyArray(res, 'ts', (v) => { + if (!/^[01]+$/.test(v)) isDrizzleSql = true; + return `"${v}"`; + }); + + return { + options, + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; +export const VarBit: SqlType = { + is: (type: string) => /^\s*varbit(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'varbit', + defaultFromDrizzle: Bit.defaultFromDrizzle, + defaultArrayFromDrizzle: Bit.defaultArrayFromDrizzle, + defaultFromIntrospect: Bit.defaultFromIntrospect, + defaultArrayFromIntrospect: Bit.defaultArrayFromIntrospect, + toTs: Bit.toTs, + toArrayTs: Bit.toArrayTs, +}; + +export const Timestamp: SqlType = { + is: (type) => /^\s*timestamp(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'timestamp', + defaultFromDrizzle: (value: unknown) => { + if (value instanceof Date) { + return `'${value.toISOString().replace('T', ' ').replace('Z', '')}'`; + } + + return `'${String(value)}'`; + }, + defaultArrayFromDrizzle(value) { + return `'${ + stringifyArray(value, 'sql', (v) => { + if (v instanceof Date) { + return `"${v.toISOString().replace('T', ' ').replace('Z', '')}"`; + } + + return `"${String(v)}"`; + }) + }'`; + }, + defaultFromIntrospect: (value: string) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const options: { mode: string; precision?: number } = { mode: 'string' }; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + if (value === 'now()' || value === 'current_timestamp()') { + return { default: '.defaultNow()', options }; + } + + // check for valid date + if (isNaN(Date.parse(value.substring(1, value.length - 1)))) { + return { default: `sql\`${value}\``, options }; + } + + return { default: value, options }; + }, + toArrayTs: (type, value) => { + const options: { mode: string; precision?: number } = { mode: 'string' }; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => `"${v}"`), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; +export const TimestampTZ: SqlType = { + is: (type) => /^\s*timestamptz(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'timestamp', + defaultFromDrizzle: (value: unknown) => { + if (value instanceof Date) { + return `'${value.toISOString().replace('T', ' ').replace('Z', '+00')}'`; + } + + return `'${String(value)}'`; + }, + defaultArrayFromDrizzle(value) { + return `'${ + stringifyArray(value, 'sql', (v) => { + if (v instanceof Date) { + return `"${v.toISOString().replace('T', ' ').replace('Z', '+00')}"`; + } + + return `"${String(v)}"`; + }) + }'`; + }, + defaultFromIntrospect: (value: string) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const options: { mode: string; withTimezone: boolean; precision?: number } = { mode: 'string', withTimezone: true }; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + if (value === 'now()' || value === 'current_timestamp()') { + return { default: '.defaultNow()', options }; + } + + // check for valid date + if (isNaN(Date.parse(value.substring(1, value.length - 1)))) { + return { default: `sql\`${value}\``, options }; + } + + return { default: value, options }; + }, + toArrayTs: (type, value) => { + const options: { mode: string; withTimezone: boolean; precision?: number } = { withTimezone: true, mode: 'string' }; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => `"${v}"`), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Time: SqlType = { + is: (type) => /^\s*time(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'time', + defaultFromDrizzle: (value: unknown) => { + return `'${String(value)}'`; + }, + defaultArrayFromDrizzle(value) { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value: string) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const options: { precision?: number } = {}; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + if (value === 'now()' || value === 'current_timestamp()') { + return { default: '.defaultNow()', options }; + } + + // check for valid date + try { + Temporal.PlainTime.from(value.substring(1, value.length - 1)); + return { default: value, options }; + } catch { + return { default: `sql\`${value}\``, options }; + } + }, + toArrayTs: (type, value) => { + const options: { precision?: number } = {}; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => `"${v}"`), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; +export const TimeTz: SqlType = { + is: (type) => /^\s*timetz(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'time', + defaultFromDrizzle: Time.defaultFromDrizzle, + defaultArrayFromDrizzle: Time.defaultArrayFromDrizzle, + defaultFromIntrospect: Time.defaultFromIntrospect, + defaultArrayFromIntrospect: Time.defaultArrayFromIntrospect, + toTs: (type, value) => { + const options: { withTimezone: boolean; precision?: number } = { + withTimezone: true, + }; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + if (value === 'now()' || value === 'current_timestamp()') { + return { default: '.defaultNow()', options }; + } + + // check for valid date + try { + Temporal.PlainTime.from(value.substring(1, value.length - 1)); + return { default: value, options }; + } catch { + return { default: `sql\`${value}\``, options }; + } + }, + toArrayTs: (type, value) => { + const options: { withTimezone: boolean; precision?: number } = { + withTimezone: true, + }; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => `"${v}"`), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const DateType: SqlType = { + is: (type) => /^\s*date(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'date', + defaultFromDrizzle: (value: unknown) => { + if (value instanceof Date) { + return `'${value.toISOString().split('T')[0]}'`; + } + + return `'${String(value)}'`; + }, + defaultArrayFromDrizzle(value) { + return `'${ + stringifyArray(value, 'sql', (v) => { + if (v instanceof Date) { + return v.toISOString().split('T')[0]; + } + + return String(v); + }) + }'`; + }, + defaultFromIntrospect: (value: string) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (_, value) => { + const options: { mode: string } = { mode: 'string' }; + + if (!value) return { default: '', options }; + + if (value === 'now()' || value === 'current_timestamp()') { + return { default: '.defaultNow()', options }; + } + + // check for valid date + try { + Temporal.PlainDate.from(value.substring(1, value.length - 1)); + return { default: value, options }; + } catch { + return { default: `sql\`${value}\``, options }; + } + }, + toArrayTs: (type, value) => { + const options: { mode: string; precision?: number } = { mode: 'string' }; + + const [precision] = parseParams(type); + if (precision) options.precision = Number(precision); + + if (!value) return { default: '', options }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => `"${v}"`), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Char: SqlType = { + is: (type: string) => /^\s*char|character(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'char', + defaultFromDrizzle: (value) => { + const escaped = escapeForSqlDefault(String(value)); + const result = String(value).includes('\\') || String(value).includes("'") + ? `e'${escaped}'` + : `'${escaped}'`; + + return result; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + const escaped = escapeForSqlDefault(v, 'arr'); + if (v.includes('\\') || v.includes('"') || v.includes(',')) { + return `"${escaped}"`; + } + + return escaped; + }); + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + if (!value) return { options, default: '' }; + const escaped = escapeForTsLiteral(unescapeFromSqlDefault(value)); + return { options, default: `"${escaped}"` }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + const escaped = escapeForTsLiteral(unescapeFromSqlDefault(v)); + return `"${escaped}"`; + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; +export const Varchar: SqlType = { + is: (type: string) => /^\s*varchar|character varying(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'varchar', + defaultFromDrizzle: Char.defaultFromDrizzle, + defaultArrayFromDrizzle: Char.defaultArrayFromDrizzle, + defaultFromIntrospect: Char.defaultFromIntrospect, + defaultArrayFromIntrospect: Char.defaultArrayFromIntrospect, + toTs: Char.toTs, + toArrayTs: Char.toArrayTs, +}; +// export const Text: SqlType = { +// is: (type: string) => /^\s*(?:text)(?:[\s(].*)*\s*$/i.test(type), +// drizzleImport: () => 'text', +// defaultFromDrizzle: Char.defaultFromDrizzle, +// defaultArrayFromDrizzle: Char.defaultArrayFromDrizzle, +// defaultFromIntrospect: Char.defaultFromIntrospect, +// defaultArrayFromIntrospect: Char.defaultArrayFromIntrospect, +// toTs: Char.toTs, +// toArrayTs: Char.toArrayTs, +// }; +export const StringType: SqlType = { + is: (type: string) => /^\s*string(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'string', + defaultFromDrizzle: Char.defaultFromDrizzle, + defaultArrayFromDrizzle: Char.defaultArrayFromDrizzle, + defaultFromIntrospect: Char.defaultFromIntrospect, + defaultArrayFromIntrospect: Char.defaultArrayFromIntrospect, + toTs: Char.toTs, + toArrayTs: Char.toArrayTs, +}; + +export const Jsonb: SqlType = { + is: (type: string) => /^\s*jsonb\s*$/i.test(type), + drizzleImport: () => 'jsonb', + defaultFromDrizzle: (value) => { + let shouldEscape = false; + const stringified = stringify( + value, + (_, value) => { + if (typeof value !== 'string') return value; + if (value.includes("'") || value.includes('"') || value.includes('\\')) shouldEscape = true; + return value; + }, + undefined, + undefined, + ); + return shouldEscape + ? `e'${stringified.replaceAll("'", "\\'").replaceAll('\\"', '\\\\"')}'` + : `'${stringified}'`; + }, + // not supported + defaultArrayFromDrizzle: () => { + return `'[]'`; + }, + /* + TODO: make less hacky, + from: { type: 'unknown', value: `'{"key": "value"}'` }, + to: { type: 'unknown', value: `'{"key":"value"}'` } + */ + defaultFromIntrospect: (value) => value.replaceAll(`": "`, `":"`), + // not supported + defaultArrayFromIntrospect: () => { + return `'[]'`; + }, + toTs: (_, value) => { + if (!value) return { default: '' }; + + const trimmed = trimChar(unescapeFromSqlDefault(value), "'"); + + try { + const parsed = parse(trimmed); + const stringified = stringify( + parsed, + (_, value) => { + return value; + }, + undefined, + true, + )!; + return { default: stringified }; + } catch { + /*(e: any)*/ + // console.log('error: ', e); + } + return { default: `sql\`${value}\`` }; + }, + // not supported + toArrayTs: () => { + return { + default: '', + options: {}, + }; + }, +}; + +// This is not handled the way cockroach stores it +// since user can pass `1 2:3:4` and it will be stored as `1 day 02:03:04` +// so we just compare row values +export const Interval: SqlType = { + is: (type: string) => + /^interval(\s+(year|month|day|hour|minute|second)(\s+to\s+(month|day|hour|minute|second))?)?(?:\((\d+)\))?(\[\])?$/i + .test(type), + drizzleImport: () => 'interval', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + return `"${v}"`; + }); + + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: { + precision?: number; + fields?: (typeof possibleIntervals)[number]; + } = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + const fields = parseIntervalFields(type); + if (fields.fields) options['fields'] = fields.fields; + + if (!value) return { options, default: '' }; + + return { options, default: `"${trimChar(value, "'")}"` }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + const fields = parseIntervalFields(type); + if (fields.fields) options['fields'] = fields.fields; + + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + return `"${v}"`; + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Vector: SqlType = { + is: (type: string) => /^\s*vector(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'vector', + defaultFromDrizzle: (value) => { + return `'[${String(value).replaceAll(' ', '')}]'`; + }, + // not supported + defaultArrayFromDrizzle: () => { + return ''; + }, + defaultFromIntrospect: (value) => { + return value; + }, + // not supported + defaultArrayFromIntrospect: () => { + return ''; + }, + toTs: (type, value) => { + const options: any = {}; + const [dimensions] = parseParams(type); + if (dimensions) options['dimensions'] = Number(dimensions); + + if (!value) return { options, default: '' }; + + return { options, default: trimChar(value, "'") }; + }, + // not supported + toArrayTs: () => { + return { default: '', options: {} }; + }, +}; + +// Enums in cockroach are stored in strange way +// '{text\\text}' is parsed to '{"e''text\\\\text''"}' +// BUT if try to create table with default '{"e''text\\\\text''"}' query will fail +// so create in simplest way and check in diff +export const Enum: SqlType = { + is: (_type: string) => { + throw Error('Mocked'); + }, + drizzleImport: () => 'cockroachEnum', + defaultFromDrizzle: (value: string) => { + if (!value) return ''; + + if (value.includes("'") || value.includes('\\')) { + return `e'${escapeForSqlDefault(value, 'default')}'`; + } + return `'${value}'`; + }, + + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray( + value, + 'sql', + (v) => { + if (typeof v !== 'string') throw new Error(); + const escaped = escapeForSqlDefault(v, 'enum-arr'); + + if (v.includes("'") || v.includes(',') || v.includes('\\') || v.includes('"')) return `"${escaped}"`; + return escaped; + }, + ); + + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + if (!value) return { options, default: '' }; + + const escaped = escapeForTsLiteral(unescapeFromSqlDefault(trimChar(value, "'"))); + return { options, default: `"${escaped}"` }; + }, + toArrayTs: (type, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + const escaped = escapeForTsLiteral(unescapeFromSqlDefault(v)); + + return `"${escaped}"`; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Custom: SqlType = { + is: (_type: string) => { + throw Error('Mocked'); + }, + drizzleImport: () => 'customType', + defaultFromDrizzle: (value) => { + if (!value) return ''; + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return String(value); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + if (!value) return { options, default: '', customType: type }; + const escaped = escapeForTsLiteral(value); + return { default: `"${escaped}"`, customType: type }; + }, + toArrayTs: (type, value) => { + if (!value) return { default: '', customType: type }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + const escaped = escapeForTsLiteral(v); + return `"${escaped}"`; + }), + customType: type, + }; + } catch { + return { default: `sql\`${value}\``, customType: type }; + } + }, +}; + +export const GeometryPoint: SqlType = { + is: (type: string) => /^\s*geometry\(point(?:,\d+)?\)(?:\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'geometry', + defaultFromDrizzle: (value, mode, config) => { + if (!value) return ''; + + const srid: number | undefined = config ? Number(config) : undefined; + let sridPrefix = srid ? `SRID=${srid};` : ''; + if (mode === 'tuple') { + const v: number[] = value as number[]; + return v.length > 0 ? `'${sridPrefix}POINT(${v[0]} ${v[1]})'` : ''; + } + + if (mode === 'object') { + const v: { x: number; y: number } = value as { x: number; y: number }; + return Object.values(v).length > 0 + ? `'${sridPrefix}POINT(${v.x} ${v.y})'` + : ''; + } + + throw new Error('unknown geometry type'); + }, + defaultArrayFromDrizzle: function(value: any[], mode: string, config: unknown): Column['default'] { + let res: string; + const srid: number | undefined = config ? Number(config) : undefined; + let sridPrefix = srid ? `SRID=${srid};` : ''; + + if (mode === 'tuple') { + res = stringifyTuplesArray(value, 'sql', (x: number[]) => { + const res = `${sridPrefix}POINT(${x[0]} ${x[1]})`; + return res; + }); + } else if (mode === 'object') { + res = stringifyArray(value, 'sql', (x: { x: number; y: number }, _depth: number) => { + const res = `${sridPrefix}POINT(${x.x} ${x.y})`; + return res; + }); + } else throw new Error('unknown geometry type'); + + return `'${res}'`; + }, + defaultFromIntrospect: function(value: string): Column['default'] { + try { + const { point, srid } = parseEWKB(trimChar(value, "'")); + value = `'${(srid ? `SRID=${srid};` : ``) + `POINT(${point[0]} ${point[1]})`}'`; + } catch {} + + return value; + }, + defaultArrayFromIntrospect: function(value: string): Column['default'] { + try { + const parsedArray = parseArray(trimChar(value, "'")); + + value = stringifyArray(parsedArray, 'sql', (v) => { + const { srid, point } = parseEWKB(v); + return (srid ? `SRID=${srid};` : ``) + `POINT(${point[0]} ${point[1]})`; + }); + + value = wrapWith(value, "'"); + } catch {} + + return value; + }, + toTs: function(type: string, value: string | null): { options?: Record; default: string } { + const options: { srid?: number; type: 'point' } = { type: 'point' }; + + const sridOption = splitSqlType(type).options?.split(',')[1]; + if (sridOption) options.srid = Number(sridOption); + if (!value) return { default: '', options }; + + if (!value.includes('POINT(')) return { default: `sql\`${value}\``, options }; + + const sridInDef = value.startsWith("'SRID=") ? Number(value.split('SRID=')[1].split(';')[0]) : undefined; + if (!sridOption && sridInDef) { + return { default: `sql\`${value}\``, options }; + } + + const [res1, res2] = value.split('POINT(')[1].split(')')[0].split(' '); + + return { default: `[${res1},${res2}]`, options }; + }, + toArrayTs: function(type: string, value: string | null): { options?: Record; default: string } { + const options: { srid?: number; type: 'point' } = { type: 'point' }; + const sridOption = splitSqlType(type).options?.split(',')[1]; + if (sridOption) options.srid = Number(sridOption); + + if (!value) return { default: '', options }; + + let isDrizzleSql; + const srids: number[] = []; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + const def = stringifyArray(res, 'ts', (v) => { + if (v.includes('SRID=')) { + srids.push(Number(v.split('SRID=')[1].split(';')[0])); + } + const [res1, res2] = value.split('POINT(')[1].split(')')[0].split(' '); + if (!value.includes('POINT(')) isDrizzleSql = true; + + return `[${res1}, ${res2}]`; + }); + + if (!isDrizzleSql) isDrizzleSql = srids.some((it) => it !== srids[0]); + // if there is no srid in type and user defines srids in default + // we need to return point with srids + if (!isDrizzleSql && !sridOption && srids.length > 0) isDrizzleSql = true; + + return { + options, + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Inet: SqlType = { + is: (type: string) => /^\s*inet(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'inet', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + return v; + }); + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + if (!value) return { options, default: '' }; + + value = trimChar(value, "'"); + return { options, default: `"${value}"` }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + return `"${v}"`; + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const typeFor = (type: string, isEnum: boolean): SqlType => { + if (isEnum) return Enum; + if (Int2.is(type)) return Int2; + if (Int4.is(type)) return Int4; + if (Int8.is(type)) return Int8; + if (Bool.is(type)) return Bool; + if (Uuid.is(type)) return Uuid; + if (Real.is(type)) return Real; + if (Float.is(type)) return Float; + if (Decimal.is(type)) return Decimal; + if (Bit.is(type)) return Bit; + if (VarBit.is(type)) return VarBit; + if (Timestamp.is(type)) return Timestamp; + if (TimestampTZ.is(type)) return TimestampTZ; + if (Time.is(type)) return Time; + if (TimeTz.is(type)) return TimeTz; + if (DateType.is(type)) return DateType; + if (Char.is(type)) return Char; + if (Varchar.is(type)) return Varchar; + // if (Text.is(type)) return Text; + if (StringType.is(type)) return StringType; + if (Jsonb.is(type)) return Jsonb; + if (Interval.is(type)) return Interval; + if (Vector.is(type)) return Vector; + if (GeometryPoint.is(type)) return GeometryPoint; + if (Inet.is(type)) return Inet; + return Custom; +}; diff --git a/drizzle-kit/src/dialects/cockroach/introspect.ts b/drizzle-kit/src/dialects/cockroach/introspect.ts new file mode 100644 index 0000000000..55d0ae1e68 --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/introspect.ts @@ -0,0 +1,1133 @@ +import type { IntrospectStage, IntrospectStatus } from '../../cli/views'; +import { type DB, splitExpressions, trimChar } from '../../utils'; +import type { EntityFilter } from '../pull-utils'; +import type { + CheckConstraint, + CockroachEntities, + Enum, + ForeignKey, + Index, + InterimColumn, + InterimIndex, + InterimSchema, + Policy, + PrimaryKey, + Role, + Schema, + Sequence, + View, + ViewColumn, +} from './ddl'; +import { + defaultForColumn, + isSystemNamespace, + parseOnType, + parseViewDefinition, + stringFromDatabaseIdentityProperty as parseIdentityProperty, +} from './grammar'; + +// TODO: tables/schema/entities -> filter: (entity: {type: ..., metadata....})=>boolean; +// TODO: since we by default only introspect public +export const fromDatabase = async ( + db: DB, + filter: EntityFilter, + progressCallback: (stage: IntrospectStage, count: number, status: IntrospectStatus) => void = () => {}, + queryCallback: (id: string, rows: Record[], error: Error | null) => void = () => {}, +): Promise => { + const schemas: Schema[] = []; + const enums: Enum[] = []; + const tables: CockroachEntities['tables'][] = []; + const columns: InterimColumn[] = []; + const indexes: InterimIndex[] = []; + const pks: PrimaryKey[] = []; + const fks: ForeignKey[] = []; + const checks: CheckConstraint[] = []; + const sequences: Sequence[] = []; + const roles: Role[] = []; + const policies: Policy[] = []; + const views: View[] = []; + const viewColumns: ViewColumn[] = []; + + type Namespace = { + oid: number; + name: string; + }; + + // TODO: potential improvements + // --- default access method + // SHOW default_table_access_method; + // SELECT current_setting('default_table_access_method') AS default_am; + + const accessMethodsQuery = db + .query<{ oid: number; name: string }>(`SELECT oid, amname as name FROM pg_am WHERE amtype = 't' ORDER BY amname;`) + .then((rows) => { + queryCallback('accessMethods', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('accessMethods', [], err); + throw err; + }); + + const tablespacesQuery = db + .query<{ + oid: number; + name: string; + }>('SELECT oid, spcname as "name" FROM pg_tablespace ORDER BY lower(spcname);') + .then((rows) => { + queryCallback('tablespaces', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('tablespaces', [], err); + throw err; + }); + + const namespacesQuery = db + .query('select oid, nspname as name from pg_namespace ORDER BY lower(nspname);') + .then((rows) => { + queryCallback('namespaces', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('namespaces', [], err); + throw err; + }); + + const [_ams, _tablespaces, namespaces] = await Promise.all([ + accessMethodsQuery, + tablespacesQuery, + namespacesQuery, + ]); + + const { system: _, other: filteredNamespaces } = namespaces.reduce<{ system: Namespace[]; other: Namespace[] }>( + (acc, it) => { + if (isSystemNamespace(it.name)) { + acc.system.push(it); + } else { + acc.other.push(it); + } + return acc; + }, + { system: [], other: [] }, + ); + + const filteredNamespacesStringForSQL = filteredNamespaces.map((ns) => `'${ns.name}'`).join(','); + + schemas.push(...filteredNamespaces.map((it) => ({ entityType: 'schemas', name: it.name }))); + + const tablesList = await db + .query<{ + oid: number; + schema: string; + name: string; + + /* r - table, v - view, m - materialized view */ + kind: 'r' | 'v' | 'm'; + accessMethod: number; + options: string[] | null; + rlsEnabled: boolean; + tablespaceid: number; + definition: string | null; + }>( + ` + SELECT + pg_class.oid, + nspname as "schema", + relname AS "name", + relkind AS "kind", + relam as "accessMethod", + reloptions::text[] as "options", + reltablespace as "tablespaceid", + relrowsecurity AS "rlsEnabled", + CASE + WHEN relkind OPERATOR(pg_catalog.=) 'v' OR relkind OPERATOR(pg_catalog.=) 'm' + THEN pg_catalog.pg_get_viewdef(pg_class.oid, true) + ELSE null + END as "definition" + FROM + pg_catalog.pg_class + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) relnamespace + WHERE + relkind IN ('r', 'p', 'v', 'm') + AND nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_catalog.lower(nspname), pg_catalog.lower(relname);`, + ) + .then((rows) => { + queryCallback('tables', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('tables', [], err); + throw err; + }); + + const viewsList = tablesList.filter((it) => (it.kind === 'v' || it.kind === 'm')) + .map((it) => { + return { + ...it, + schema: trimChar(it.schema, '"'), + }; + }); + const filteredTables = tablesList + .filter((it) => it.kind === 'r') + .map((it) => { + return { + ...it, + schema: trimChar(it.schema, '"'), // when camel case name e.x. mySchema -> it gets wrapped to "mySchema" + }; + }); + const filteredTableIds = filteredTables.map((it) => it.oid); + const viewsIds = viewsList.map((it) => it.oid); + const filteredViewsAndTableIds = [...filteredTableIds, ...viewsIds]; + + const filterByTableIds = filteredTableIds.length > 0 ? `(${filteredTableIds.join(',')})` : ''; + const filterByTableAndViewIds = filteredViewsAndTableIds.length > 0 ? `(${filteredViewsAndTableIds.join(',')})` : ''; + + for (const table of filteredTables) { + tables.push({ + entityType: 'tables', + schema: table.schema, + name: table.name, + isRlsEnabled: table.rlsEnabled, + }); + } + + const dependQuery = db + .query<{ + oid: number; + tableId: number; + ordinality: number; + + /* + a - An “auto” dependency means the dependent object can be dropped separately, + and will be automatically removed if the referenced object is dropped—regardless of CASCADE or RESTRICT. + Example: A named constraint on a table is auto-dependent on the table, so it vanishes when the table is dropped + + i - An “internal” dependency marks objects that were created as part of building another object. + Directly dropping the dependent is disallowed—you must drop the referenced object instead. + Dropping the referenced object always cascades to the dependent + Example: A trigger enforcing a foreign-key constraint is internally dependent on its pg_constraint entry + */ + deptype: 'a' | 'i'; + }>( + ` + SELECT + -- sequence id + objid as oid, + refobjid as "tableId", + refobjsubid as "ordinality", + + -- a = auto + deptype + FROM + pg_depend + where ${filterByTableIds ? ` refobjid in ${filterByTableIds}` : 'false'}; + `, + ) + .then((rows) => { + queryCallback('dependencies', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('dependencies', [], err); + throw err; + }); + + const enumsQuery = db + .query<{ + oid: number; + name: string; + schema: string; + arrayTypeId: number; + ordinality: number; + value: string; + }>( + `SELECT + pg_type.oid as "oid", + typname as "name", + nspname as "schema", + pg_type.typarray as "arrayTypeId", + pg_enum.enumsortorder AS "ordinality", + pg_enum.enumlabel AS "value" + FROM + pg_catalog.pg_type + JOIN pg_catalog.pg_enum ON pg_enum.enumtypid OPERATOR(pg_catalog.=) pg_type.oid + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) pg_type.typnamespace + WHERE + pg_type.typtype OPERATOR(pg_catalog.=) 'e' + AND nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_type.oid, pg_enum.enumsortorder + `, + ) + .then((rows) => { + queryCallback('enums', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('enums', [], err); + throw err; + }); + + const sequencesQuery = db + .query<{ + schema: string; + oid: number; + name: string; + startWith: string; + minValue: string; + maxValue: string; + incrementBy: string; + cycle: boolean; + cacheSize: string; + }>( + `SELECT + nspname as "schema", + relname as "name", + seqrelid as "oid", + seqstart as "startWith", + seqmin as "minValue", + seqmax as "maxValue", + seqincrement as "incrementBy", + seqcycle as "cycle", + COALESCE(pgs.cache_size, pg_sequence.seqcache) as "cacheSize" + FROM pg_catalog.pg_sequence + JOIN pg_catalog.pg_class ON pg_sequence.seqrelid OPERATOR(pg_catalog.=) pg_class.oid + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) pg_class.relnamespace + LEFT JOIN pg_sequences pgs ON ( + pgs.sequencename = pg_class.relname + AND pgs.schemaname = pg_class.relnamespace::regnamespace::text + ) + WHERE nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_catalog.lower(nspname), pg_catalog.lower(relname); +;`, + ) + .then((rows) => { + queryCallback('sequences', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('sequences', [], err); + throw err; + }); + + // I'm not yet aware of how we handle policies down the pipeline for push, + // and since postgres does not have any default policies, we can safely fetch all of them for now + // and filter them out in runtime, simplifying filterings + const policiesQuery = db + .query<{ + schema: string; + table: string; + name: string; + as: Policy['as']; + to: string | string[]; // TODO: | string[] ?? + for: Policy['for']; + using: string | undefined | null; + withCheck: string | undefined | null; + }>( + `SELECT + schemaname as "schema", + tablename as "table", + policyname as "name", + UPPER(permissive) as "as", + roles as "to", + cmd as "for", + qual as "using", + with_check as "withCheck" + FROM pg_policies + ORDER BY lower(schemaname), lower(tablename), lower(policyname) + ;`, + ) + .then((rows) => { + queryCallback('policies', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('policies', [], err); + throw err; + }); + + const rolesQuery = db + .query<{ username: string; options: string; member_of: string[] }>( + `SHOW roles;`, + ) + .then((rows) => { + queryCallback('roles', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('roles', [], err); + throw err; + }); + + const constraintsQuery = db + .query<{ + oid: number; + schemaId: number; + tableId: number; + name: string; + type: 'p' | 'u' | 'f' | 'c'; // p - primary key, u - unique, f - foreign key, c - check + definition: string; + indexId: number; + columnsOrdinals: number[]; + tableToId: number; + columnsToOrdinals: number[]; + onUpdate: 'a' | 'd' | 'r' | 'c' | 'n'; + onDelete: 'a' | 'd' | 'r' | 'c' | 'n'; + }>( + ` + SELECT + oid, + connamespace AS "schemaId", + conrelid AS "tableId", + conname AS "name", + contype AS "type", + pg_get_constraintdef(oid) AS "definition", + conindid AS "indexId", + conkey AS "columnsOrdinals", + confrelid AS "tableToId", + confkey AS "columnsToOrdinals", + confupdtype AS "onUpdate", + confdeltype AS "onDelete" + FROM + pg_constraint + WHERE ${filterByTableIds ? ` conrelid in ${filterByTableIds}` : 'false'} + ORDER BY connamespace, conrelid, lower(conname) + `, + ) + .then((rows) => { + queryCallback('constraints', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('constraints', [], err); + throw err; + }); + + const defaultsQuery = db + .query<{ + tableId: number; + ordinality: number; + expression: string; + }>( + ` + SELECT + adrelid AS "tableId", + adnum AS "ordinality", + pg_get_expr(adbin, adrelid) AS "expression" + FROM + pg_attrdef + WHERE ${filterByTableAndViewIds ? `adrelid IN ${filterByTableAndViewIds}` : 'false'}; + `, + ) + .then((rows) => { + queryCallback('defaults', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('defaults', [], err); + throw err; + }); + + // for serials match with pg_attrdef via attrelid(tableid)+adnum(ordinal position), for enums with pg_enum above + const columnsQuery = db + .query<{ + tableId: number; + kind: 'r' | 'v' | 'm'; + name: string; + ordinality: number; + notNull: boolean; + type: string; + typeId: number; + /* s - stored */ + generatedType: 's' | ''; + /* + 'a' for GENERATED ALWAYS + 'd' for GENERATED BY DEFAULT + */ + identityType: 'a' | 'd' | ''; + metadata: { + seqId: string | null; + generation: string | null; + start: string | null; + increment: string | null; + max: string | null; + min: string | null; + cycle: string; + generated: 'ALWAYS' | 'BY DEFAULT'; + expression: string | null; + } | null; + isHidden: boolean; + dimensions: '0' | '1'; + }>( + `SELECT + attrelid AS "tableId", + relkind AS "kind", + attname AS "name", + attnum AS "ordinality", + attnotnull AS "notNull", + atttypid as "typeId", + attgenerated as "generatedType", + attidentity as "identityType", + format_type(atttypid, atttypmod) as "type", + CASE + WHEN typ.typcategory = 'A' THEN 1 + ELSE 0 + END AS "dimensions", + CASE + WHEN attidentity in ('a', 'd') or attgenerated = 's' THEN ( + SELECT + row_to_json(c.*) + FROM + ( + SELECT + pg_get_serial_sequence( + quote_ident("table_schema") || '.' || quote_ident("table_name"), + "attname" + )::regclass::oid as "seqId", + "identity_generation" AS generation, + "identity_start" AS "start", + "identity_increment" AS "increment", + "identity_maximum" AS "max", + "identity_minimum" AS "min", + "identity_cycle" AS "cycle", + "generation_expression" AS "expression" + FROM + information_schema.columns c + WHERE + c.column_name = attname + -- relnamespace is schemaId, regnamescape::text converts to schemaname + AND c.table_schema = cls.relnamespace::regnamespace::text + -- attrelid is tableId, regclass::text converts to table name + AND c.table_name = attrelid::regclass::text + ) c + ) + ELSE NULL + END AS "metadata", + tc.hidden AS "isHidden" + FROM + pg_attribute attr + LEFT JOIN pg_class cls ON cls.oid = attr.attrelid + LEFT JOIN crdb_internal.table_columns tc ON tc.descriptor_id = attrelid AND tc.column_id = attnum + LEFT JOIN pg_type typ ON typ.oid = attr.atttypid + WHERE + ${filterByTableAndViewIds ? ` attrelid in ${filterByTableAndViewIds}` : 'false'} + AND attnum > 0 + AND attisdropped = FALSE + ORDER BY attnum + ;`, + ) + .then((rows) => { + queryCallback('columns', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('columns', [], err); + throw err; + }); + + const extraColumnDataTypesQuery = db + .query<{ + table_schema: string; + table_name: string; + column_name: string; + data_type: string; + }>( + `SELECT + table_schema as table_schema, + table_name as table_name, + column_name as column_name, + lower(crdb_sql_type) as data_type + FROM information_schema.columns + WHERE ${tablesList.length ? `table_name in (${tablesList.map((it) => `'${it.name}'`).join(', ')})` : 'false'} + `, + ) + .then((rows) => { + queryCallback('extraColumnDataTypes', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('extraColumnDataTypes', [], err); + throw err; + }); + + const [ + dependList, + enumsList, + sequencesList, + policiesList, + rolesList, + constraintsList, + columnsList, + extraColumnDataTypesList, + defaultsList, + ] = await Promise.all([ + dependQuery, + enumsQuery, + sequencesQuery, + policiesQuery, + rolesQuery, + constraintsQuery, + columnsQuery, + extraColumnDataTypesQuery, + defaultsQuery, + ]); + + const groupedEnums = enumsList.reduce( + (acc, it) => { + if (!(it.oid in acc)) { + acc[it.oid] = { + oid: it.oid, + schema: it.schema, + name: it.name, + values: [it.value], + }; + } else { + acc[it.oid].values.push(it.value); + } + return acc; + }, + {} as Record, + ); + + const groupedArrEnums = enumsList.reduce( + (acc, it) => { + if (!(it.arrayTypeId in acc)) { + acc[it.arrayTypeId] = { + oid: it.oid, + schema: it.schema, + name: it.name, + values: [it.value], + }; + } else { + acc[it.arrayTypeId].values.push(it.value); + } + return acc; + }, + {} as Record, + ); + + for (const it of Object.values(groupedEnums)) { + enums.push({ + entityType: 'enums', + schema: it.schema, + name: it.name, + values: it.values, + }); + } + + let columnsCount = columnsList.filter((it) => !it.isHidden).length; + let indexesCount = 0; + let foreignKeysCount = constraintsList.filter((it) => it.type === 'f').length; + let tableCount = tablesList.filter((it) => it.kind === 'r').length; + let checksCount = constraintsList.filter((it) => it.type === 'c').length; + let viewsCount = tablesList.filter((it) => it.kind === 'm' || it.kind === 'v').length; + + for (const seq of sequencesList) { + const depend = dependList.find((it) => it.oid === seq.oid); + + if (depend && (depend.deptype === 'a' || depend.deptype === 'i')) { + // TODO: add type field to sequence in DDL + // skip fo sequences or identity columns + // console.log('skip for auto created', seq.name); + continue; + } + + sequences.push({ + entityType: 'sequences', + schema: seq.schema, + name: seq.name, + startWith: parseIdentityProperty(seq.startWith), + minValue: parseIdentityProperty(seq.minValue), + maxValue: parseIdentityProperty(seq.maxValue), + incrementBy: parseIdentityProperty(seq.incrementBy), + cacheSize: Number(parseIdentityProperty(seq.cacheSize) ?? 1), + }); + } + + progressCallback('enums', Object.keys(groupedEnums).length, 'done'); + + for (const dbRole of rolesList) { + const createDb = dbRole.options.includes('CREATEDB'); + const createRole = dbRole.options.includes('CREATEROLE'); + roles.push({ + entityType: 'roles', + name: dbRole.username, + createDb: createDb, + createRole: createRole, + }); + } + + for (const it of policiesList) { + policies.push({ + entityType: 'policies', + schema: it.schema, + table: it.table, + name: it.name, + as: it.as, + for: it.for, + roles: typeof it.to === 'string' ? it.to.slice(1, -1).split(',') : it.to, + using: it.using ?? null, + withCheck: it.withCheck ?? null, + }); + } + + progressCallback('policies', policiesList.length, 'done'); + + type DBColumn = (typeof columnsList)[number]; + + for (const column of columnsList.filter((x) => x.kind === 'r' && !x.isHidden)) { + const table = tablesList.find((it) => it.oid === column.tableId)!; + const extraColumnConfig = extraColumnDataTypesList.find((it) => + it.column_name === column.name && it.table_name === table.name && it.table_schema === table.schema + )!; + + // supply enums + const enumType = column.typeId in groupedEnums + ? groupedEnums[column.typeId] + : column.typeId in groupedArrEnums + ? groupedArrEnums[column.typeId] + : null; + + let columnTypeMapped; + + columnTypeMapped = enumType + ? enumType.name + : extraColumnConfig.data_type.replace('character', 'char').replace('float8', 'float').replace( + 'float4', + 'real', + ).replaceAll('[]', ''); + const columnDimensions = Number(column.dimensions); + + columnTypeMapped = trimChar(columnTypeMapped, '"'); + + const columnDefault = defaultsList.find((it) => + it.tableId === column.tableId && it.ordinality === column.ordinality + ); + + const defaultValue = defaultForColumn( + columnTypeMapped, + columnDefault?.expression, + columnDimensions, + Boolean(enumType), + ); + + const unique = constraintsList.find((it) => { + return it.type === 'u' && it.tableId === column.tableId && it.columnsOrdinals.length === 1 + && it.columnsOrdinals.includes(column.ordinality); + }) ?? null; + + const pk = constraintsList.find((it) => { + return it.type === 'p' && it.tableId === column.tableId && it.columnsOrdinals.length === 1 + && it.columnsOrdinals.includes(column.ordinality); + }) ?? null; + + const metadata = column.metadata; + if (column.generatedType === 's' && (!metadata || !metadata.expression)) { + throw new Error( + `Generated ${table.schema}.${table.name}.${column.name} columns missing expression: \n${ + JSON.stringify(column.metadata) + }`, + ); + } + + if (column.identityType !== '' && !metadata) { + throw new Error( + `Identity ${table.schema}.${table.name}.${column.name} columns missing metadata: \n${ + JSON.stringify(column.metadata) + }`, + ); + } + + const sequence = metadata?.seqId ? (sequencesList.find((it) => it.oid === Number(metadata.seqId)) ?? null) : null; + + columns.push({ + entityType: 'columns', + schema: table.schema, + table: table.name, + name: column.name, + type: columnTypeMapped, + typeSchema: enumType ? enumType.schema ?? 'public' : null, + dimensions: columnDimensions, + default: column.generatedType === 's' || column.identityType ? null : defaultValue, + unique: !!unique, + uniqueName: unique ? unique.name : null, + notNull: column.notNull, + pk: pk !== null, + pkName: pk !== null ? pk.name : null, + generated: column.generatedType === 's' ? { type: 'stored', as: metadata!.expression! } : null, + identity: column.identityType !== '' + ? { + type: column.identityType === 'a' ? 'always' : 'byDefault', + increment: parseIdentityProperty(metadata?.increment), + minValue: parseIdentityProperty(metadata?.min), + maxValue: parseIdentityProperty(metadata?.max), + startWith: parseIdentityProperty(metadata?.start), + cache: Number(sequence?.cacheSize ?? 1), + } + : null, + }); + } + + for (const pk of constraintsList.filter((it) => it.type === 'p')) { + const table = tablesList.find((it) => it.oid === pk.tableId)!; + const schema = namespaces.find((it) => it.oid === pk.schemaId)!; + + // Check if any column in the PK is hidden, skip if so + const hasHiddenColumn = pk.columnsOrdinals.some((ordinal) => { + const column = columnsList.find((column) => column.tableId === pk.tableId && column.ordinality === ordinal); + return !column || column.isHidden; // skip if not found or hidden + }); + + if (hasHiddenColumn) { + continue; + } + + const columns: typeof columnsList = []; + for (const ordinal of pk.columnsOrdinals) { + const column = columnsList.find((column) => column.tableId === pk.tableId && column.ordinality === ordinal); + + if (!column) { + continue; + } + + columns.push(column); + } + + if (columns.some((c) => c.isHidden)) continue; + + pks.push({ + entityType: 'pks', + schema: schema.name, + table: table.name, + name: pk.name, + columns: columns.map((c) => c.name), + nameExplicit: true, + }); + } + + for (const fk of constraintsList.filter((it) => it.type === 'f')) { + const table = tablesList.find((it) => it.oid === fk.tableId)!; + const schema = namespaces.find((it) => it.oid === fk.schemaId)!; + const tableTo = tablesList.find((it) => it.oid === fk.tableToId)!; + + const columns = fk.columnsOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === fk.tableId && column.ordinality === it)!; + return column.name; + }); + + const columnsTo = fk.columnsToOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === fk.tableToId && column.ordinality === it)!; + return column.name; + }); + + fks.push({ + entityType: 'fks', + schema: schema.name, + table: table.name, + name: fk.name, + nameExplicit: true, + columns, + tableTo: tableTo.name, + schemaTo: tableTo.schema, + columnsTo, + onUpdate: parseOnType(fk.onUpdate), + onDelete: parseOnType(fk.onDelete), + }); + } + + for (const check of constraintsList.filter((it) => it.type === 'c')) { + const table = tablesList.find((it) => it.oid === check.tableId)!; + const schema = namespaces.find((it) => it.oid === check.schemaId)!; + + // Check if any column in the PK is hidden, skip if so + const hasHiddenColumn = check.columnsOrdinals && check.columnsOrdinals.some((ordinal) => { + const column = columnsList.find((column) => column.tableId === check.tableId && column.ordinality === ordinal); + return !column || column.isHidden; // skip if not found or hidden + }); + + if (hasHiddenColumn) { + continue; + } + + checks.push({ + entityType: 'checks', + schema: schema.name, + table: table.name, + name: check.name, + value: check.definition, + }); + } + + const idxs = await db + .query<{ + oid: number; + schemaId: number; + name: string; + accessMethod: string; + with?: string[]; + metadata: { + tableId: number; + expression: string | null; + where: string; + columnOrdinals: number[]; + index_def: string; + opclassIds: number[]; + options: number[]; + isUnique: boolean; + isPrimary: boolean; + }; + }>( + ` + SELECT + pg_class.oid, + relnamespace AS "schemaId", + relname AS "name", + am.amname AS "accessMethod", + reloptions AS "with", + row_to_json(metadata.*) as "metadata" + FROM + pg_class + JOIN pg_am am ON am.oid = pg_class.relam + LEFT JOIN LATERAL ( + SELECT + pg_get_expr(indexprs, indrelid) AS "expression", + pg_get_expr(indpred, indrelid) AS "where", + indrelid::int AS "tableId", + pg_get_indexdef(indexrelid) AS index_def, + indkey::int[] as "columnOrdinals", + indclass::int[] as "opclassIds", + indoption::int[] as "options", + indisunique as "isUnique", + indisprimary as "isPrimary" + FROM + pg_index + WHERE + pg_index.indexrelid = pg_class.oid + ) metadata ON TRUE + WHERE + relkind = 'i' and ${filterByTableIds ? `metadata."tableId" in ${filterByTableIds}` : 'false'} + ORDER BY relnamespace, lower(relname) + `, + ) + .then((rows) => { + queryCallback('indexes', rows, null); + return rows; + }) + .catch((err) => { + queryCallback('indexes', [], err); + throw err; + }); + + for (const idx of idxs) { + const { metadata, accessMethod } = idx; + + // filter for drizzle only? + const forPK = metadata.isPrimary && constraintsList.some((x) => x.type === 'p' && x.indexId === idx.oid); + if (!forPK) indexesCount += 1; + + const expr = splitExpressions(metadata.expression); + + const schema = namespaces.find((it) => it.oid === idx.schemaId)!; + const table = tablesList.find((it) => it.oid === idx.metadata.tableId)!; + + const nonColumnsCount = metadata.columnOrdinals.reduce((acc, it) => { + if (it === 0) acc += 1; + return acc; + }, 0); + + if (expr.length !== nonColumnsCount) { + throw new Error( + `expression split doesn't match non-columns count: [${ + metadata.columnOrdinals.join(', ') + }] '${metadata.expression}':${expr.length}:${nonColumnsCount}`, + ); + } + + const opts = metadata.options.map((it) => { + return { + descending: (it & 1) === 1, + }; + }); + + const res = [] as (({ type: 'expression'; value: string } | { type: 'column'; value: DBColumn }) & { + options: (typeof opts)[number]; + })[]; + + let k = 0; + for (let i = 0; i < metadata.columnOrdinals.length; i++) { + const ordinal = metadata.columnOrdinals[i]; + if (ordinal === 0) { + res.push({ + type: 'expression', + value: expr[k], + options: opts[i], + }); + k += 1; + } else { + const column = columnsList.find((column) => { + return column.tableId === metadata.tableId && column.ordinality === ordinal; + }); + + if (column?.isHidden) continue; + if (!column) throw new Error(`missing column: ${metadata.tableId}:${ordinal}`); + + res.push({ + type: 'column', + value: column, + options: opts[i], + }); + } + } + + const columns = res.map((it) => { + return { + asc: !it.options.descending, + isExpression: it.type === 'expression', + value: it.type === 'expression' ? it.value : it.value.name, // column name + } satisfies Index['columns'][number]; + }); + + const getUsing = (def: string, accessMethod: string): Index['method'] => { + const regex = /USING\s+(HASH|CSPANN)/gi; + + let match: RegExpExecArray | null; + while ((match = regex.exec(def)) !== null) { + const beforeMatch = def.slice(0, match.index); + + // count how many double quotes before this match + const quoteCount = (beforeMatch.match(/"/g) || []).length; + + // if even number of quotes - outside quotes + if (quoteCount % 2 === 0) { + return match[1].toLowerCase(); + } + } + + if (accessMethod === 'inverted') return 'gin'; + + return 'btree'; + }; + + const indexAccessMethod = getUsing(metadata.index_def, accessMethod); + + indexes.push({ + entityType: 'indexes', + schema: schema.name, + table: table.name, + name: idx.name, + nameExplicit: true, + method: indexAccessMethod, + isUnique: metadata.isUnique, + where: idx.metadata.where, + columns: columns, + forPK, + }); + } + + progressCallback('columns', columnsCount, 'fetching'); + progressCallback('checks', checksCount, 'fetching'); + progressCallback('indexes', indexesCount, 'fetching'); + progressCallback('tables', tableCount, 'fetching'); + progressCallback('views', viewsCount, 'fetching'); + + for (const it of columnsList.filter((x) => (x.kind === 'm' || x.kind === 'v') && !x.isHidden)) { + const view = viewsList.find((x) => x.oid === it.tableId)!; + + const enumType = it.typeId in groupedEnums + ? groupedEnums[it.typeId] + : it.typeId in groupedArrEnums + ? groupedArrEnums[it.typeId] + : null; + + let columnTypeMapped = enumType ? enumType.name : it.type.replace('[]', ''); + columnTypeMapped = trimChar(columnTypeMapped, '"'); + for (let i = 0; i < Number(it.dimensions); i++) { + columnTypeMapped += '[]'; + } + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char') + .replace('integer', 'int4') + .replace('bigint', 'int8') + .replace('smallint', 'int2'); + + viewColumns.push({ + schema: view.schema, + view: view.name, + name: it.name, + type: columnTypeMapped, + notNull: it.notNull, + dimensions: Number(it.dimensions), + typeSchema: enumType ? enumType.schema : null, + }); + } + + for (const view of viewsList) { + const definition = parseViewDefinition(view.definition); + + views.push({ + entityType: 'views', + schema: view.schema, + name: view.name, + definition, + materialized: view.kind === 'm', + withNoData: null, + }); + } + + progressCallback('tables', tableCount, 'done'); + progressCallback('columns', columnsCount, 'done'); + progressCallback('indexes', indexesCount, 'done'); + progressCallback('fks', foreignKeysCount, 'done'); + progressCallback('checks', checksCount, 'done'); + progressCallback('views', viewsCount, 'done'); + + const resultSchemas = schemas.filter((x) => filter({ type: 'schema', name: x.name })); + const resultTables = tables.filter((x) => filter({ type: 'table', schema: x.schema, name: x.name })); + const resultEnums = enums.filter((x) => resultSchemas.some((s) => s.name === x.schema)); + const resultColumns = columns.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultIndexes = indexes.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultPKs = pks.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultFKs = fks.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultChecks = checks.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultSequences = sequences.filter((x) => resultSchemas.some((t) => t.name === x.schema)); + // TODO: drizzle link + const resultRoles = roles.filter((x) => filter({ type: 'role', name: x.name })); + const resultViews = views.filter((x) => filter({ type: 'table', schema: x.schema, name: x.name })); + const resultViewColumns = viewColumns.filter((x) => + resultViews.some((v) => v.schema === x.schema && v.name === x.view) + ); + + return { + schemas: resultSchemas, + tables: resultTables, + enums: resultEnums, + columns: resultColumns, + indexes: resultIndexes, + pks: resultPKs, + fks: resultFKs, + checks: resultChecks, + sequences: resultSequences, + roles: resultRoles, + policies, + views: resultViews, + viewColumns: resultViewColumns, + } satisfies InterimSchema; +}; + +export const fromDatabaseForDrizzle = async ( + db: DB, + filter: EntityFilter, + progressCallback: (stage: IntrospectStage, count: number, status: IntrospectStatus) => void = () => {}, +) => { + const res = await fromDatabase(db, filter, progressCallback); + + res.schemas = res.schemas.filter((it) => it.name !== 'public'); + res.indexes = res.indexes.filter((it) => !it.forPK); + + return res; +}; diff --git a/drizzle-kit/src/dialects/cockroach/serializer.ts b/drizzle-kit/src/dialects/cockroach/serializer.ts new file mode 100644 index 0000000000..b4465e8c07 --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/serializer.ts @@ -0,0 +1,78 @@ +import type { CasingType } from '../../cli/validations/common'; +import { postgresSchemaError, postgresSchemaWarning } from '../../cli/views'; +import { prepareFilenames } from '../../utils/utils-node'; +import type { CockroachDDL } from './ddl'; +import { createDDL, interimToDDL } from './ddl'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from './drizzle'; +import type { CockroachSnapshot } from './snapshot'; +import { drySnapshot, snapshotValidator } from './snapshot'; + +export const prepareSnapshot = async ( + snapshots: string[], + schemaPath: string | string[], + casing: CasingType | undefined, +): Promise< + { + ddlPrev: CockroachDDL; + ddlCur: CockroachDDL; + snapshot: CockroachSnapshot; + snapshotPrev: CockroachSnapshot; + custom: CockroachSnapshot; + } +> => { + const { readFileSync } = await import('fs'); + const { randomUUID } = await import('crypto'); + const prevSnapshot = snapshots.length === 0 + ? drySnapshot + : snapshotValidator.strict(JSON.parse(readFileSync(snapshots[snapshots.length - 1]).toString())); + + const ddlPrev = createDDL(); + for (const entry of prevSnapshot.ddl) { + ddlPrev.entities.push(entry); + } + const filenames = prepareFilenames(schemaPath); + + const res = await prepareFromSchemaFiles(filenames); + + // TODO: do we wan't to export everything or ignore .existing and respect entity filters in config + const { schema, errors, warnings } = fromDrizzleSchema(res, casing, () => true); + + if (warnings.length > 0) { + console.log(warnings.map((it) => postgresSchemaWarning(it)).join('\n\n')); + } + + if (errors.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + const { ddl: ddlCur, errors: errors2 } = interimToDDL(schema); + + if (errors2.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + const id = randomUUID(); + const prevIds = [prevSnapshot.id]; + + const snapshot = { + version: '1', + dialect: 'cockroach', + id, + prevIds, + ddl: ddlCur.entities.list(), + renames: [], + } satisfies CockroachSnapshot; + + const { id: _ignoredId, prevIds: _ignoredPrevIds, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: CockroachSnapshot = { + id, + prevIds, + ...prevRest, + }; + + return { ddlPrev, ddlCur, snapshot, snapshotPrev: prevSnapshot, custom }; +}; diff --git a/drizzle-kit/src/dialects/cockroach/snapshot.ts b/drizzle-kit/src/dialects/cockroach/snapshot.ts new file mode 100644 index 0000000000..2565e5b8f8 --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/snapshot.ts @@ -0,0 +1,243 @@ +import { randomUUID } from 'crypto'; +import type { TypeOf } from 'zod'; +import { any, boolean, enum as enumType, literal, number, object, record, string } from 'zod'; +import { originUUID } from '../../utils'; +import { array, validator } from '../simpleValidator'; +import type { CockroachDDL, CockroachEntity } from './ddl'; +import { createDDL } from './ddl'; +import { defaults } from './grammar'; + +const enumSchema = object({ + name: string(), + schema: string(), + values: string().array(), +}).strict(); + +const indexColumn = object({ + expression: string(), + isExpression: boolean(), + asc: boolean(), + nulls: string().optional(), + opclass: string().optional(), +}); + +export type IndexColumnType = TypeOf; + +const index = object({ + name: string(), + columns: indexColumn.array(), + isUnique: boolean(), + with: record(string(), any()).optional(), + method: string().default(defaults.index.method), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + schemaTo: string().optional(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +export const sequenceSchema = object({ + name: string(), + increment: string().optional(), + minValue: string().optional(), + maxValue: string().optional(), + startWith: string().optional(), + cache: string().optional(), + cycle: boolean().optional(), + schema: string(), +}).strict(); + +export const identitySchema = sequenceSchema.omit({ schema: true }).merge( + object({ type: enumType(['always', 'byDefault']) }), +); + +export const roleSchema = object({ + name: string(), + createDb: boolean().optional(), + createRole: boolean().optional(), + inherit: boolean().optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + generated: object({ + type: literal('stored'), + as: string(), + }).optional(), + identity: identitySchema.optional(), +}).strict(); + +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), + nullsNotDistinct: boolean(), +}).strict(); + +export const policy = object({ + name: string(), + as: enumType(['PERMISSIVE', 'RESTRICTIVE']).optional(), + for: enumType(['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE']).optional(), + to: string().array().optional(), + using: string().optional(), + withCheck: string().optional(), + on: string().optional(), + schema: string().optional(), +}).strict(); + +const viewWithOption = object({ + checkOption: enumType(['local', 'cascaded']).optional(), + securityBarrier: boolean().optional(), + securityInvoker: boolean().optional(), +}).strict(); + +const matViewWithOption = object({ + fillfactor: number().optional(), + toastTupleTarget: number().optional(), + parallelWorkers: number().optional(), + autovacuumEnabled: boolean().optional(), + vacuumIndexCleanup: enumType(['auto', 'off', 'on']).optional(), + vacuumTruncate: boolean().optional(), + autovacuumVacuumThreshold: number().optional(), + autovacuumVacuumScaleFactor: number().optional(), + autovacuumVacuumCostDelay: number().optional(), + autovacuumVacuumCostLimit: number().optional(), + autovacuumFreezeMinAge: number().optional(), + autovacuumFreezeMaxAge: number().optional(), + autovacuumFreezeTableAge: number().optional(), + autovacuumMultixactFreezeMinAge: number().optional(), + autovacuumMultixactFreezeMaxAge: number().optional(), + autovacuumMultixactFreezeTableAge: number().optional(), + logAutovacuumMinDuration: number().optional(), + userCatalogTable: boolean().optional(), +}).strict(); + +export const mergedViewWithOption = viewWithOption.merge(matViewWithOption).strict(); + +export const view = object({ + name: string(), + schema: string(), + columns: record(string(), column), + definition: string().optional(), + materialized: boolean(), + with: mergedViewWithOption.optional(), + isExisting: boolean(), + withNoData: boolean().optional(), + using: string().optional(), + tablespace: string().optional(), +}).strict(); + +const table = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), + policies: record(string(), policy).default({}), + checkConstraints: record(string(), checkConstraint).default({}), + isRLSEnabled: boolean().default(false).optional(), +}).strict(); + +const schemaHash = object({ + id: string(), + prevId: string(), +}); + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ + isArray: boolean().optional(), + dimensions: number().optional(), + rawType: string().optional(), + isDefaultAnExpression: boolean().optional(), + }).optional(), + ), + }).optional(), + ), +}).optional(); + +export const cockroachSchemaInternal = object({ + version: literal('1'), + dialect: literal('cockroach'), + tables: record(string(), table), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + views: record(string(), view).default({}), + sequences: record(string(), sequenceSchema).default({}), + roles: record(string(), roleSchema).default({}), + policies: record(string(), policy).default({}), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const cockroachSchema = cockroachSchemaInternal.merge(schemaHash); + +export type CockroachSchema = TypeOf; + +export type Index = TypeOf; +export type Column = TypeOf; + +export const toJsonSnapshot = (ddl: CockroachDDL, prevIds: string[], renames: string[]): CockroachSnapshot => { + return { dialect: 'cockroach', id: randomUUID(), prevIds, version: '1', ddl: ddl.entities.list(), renames }; +}; + +const ddl = createDDL(); +export const snapshotValidator = validator({ + version: ['1'], + dialect: ['cockroach'], + id: 'string', + prevIds: array((_) => true), + ddl: array((it) => { + const res = ddl.entities.validate(it); + if (!res) { + console.log(it); + } + return res; + }), + renames: array((_) => true), +}); + +export type CockroachSnapshot = typeof snapshotValidator.shape; + +export const drySnapshot = snapshotValidator.strict( + { + version: '1', + dialect: 'cockroach', + id: originUUID, + prevIds: [], + ddl: [], + renames: [], + } satisfies CockroachSnapshot, +); diff --git a/drizzle-kit/src/dialects/cockroach/statements.ts b/drizzle-kit/src/dialects/cockroach/statements.ts new file mode 100644 index 0000000000..4a760060ab --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/statements.ts @@ -0,0 +1,445 @@ +import type { Simplify } from '../../utils'; +import type { + CheckConstraint, + Column, + DiffEntities, + Enum, + ForeignKey, + Index, + Policy, + PrimaryKey, + Role, + Schema, + Sequence, + Table, + View, +} from './ddl'; + +export interface JsonCreateTable { + type: 'create_table'; + table: Table; +} + +export interface JsonDropTable { + type: 'drop_table'; + table: Table; + key: string; +} + +export interface JsonRenameTable { + type: 'rename_table'; + schema: string; + from: string; + to: string; +} + +export interface JsonCreateEnum { + type: 'create_enum'; + enum: Enum; +} + +export interface JsonDropEnum { + type: 'drop_enum'; + enum: Enum; +} + +export interface JsonMoveEnum { + type: 'move_enum'; + from: { name: string; schema: string | null }; + to: { name: string; schema: string | null }; +} + +export interface JsonRenameEnum { + type: 'rename_enum'; + schema: string; + from: string; + to: string; +} + +export interface JsonRecreateEnum { + type: 'recreate_enum'; + to: Enum; + columns: Column[]; + from: Enum; +} + +export interface JsonAlterEnum { + type: 'alter_enum'; + to: Enum; + from: Enum; + diff: { + type: 'same' | 'removed' | 'added'; + value: string; + beforeValue?: string; + }[]; +} + +export interface JsonAlterColumnAddNotNull { + type: 'alter_add_column_not_null'; + table: string; + schema: string; + column: string; +} + +export interface JsonAlterColumnDropNotNull { + type: 'alter_drop_column_not_null'; + table: string; + schema: string; + column: string; +} + +export interface JsonCreateRole { + type: 'create_role'; + role: Role; +} + +export interface JsonDropRole { + type: 'drop_role'; + role: Role; +} +export interface JsonRenameRole { + type: 'rename_role'; + from: Role; + to: Role; +} + +export interface JsonAlterRole { + type: 'alter_role'; + diff: DiffEntities['roles']; + role: Role; +} + +export interface JsonDropValueFromEnum { + type: 'alter_type_drop_value'; + deletedValues: string[]; + enum: Enum; + columns: Column[]; +} + +export interface JsonCreateSequence { + type: 'create_sequence'; + sequence: Sequence; +} + +export interface JsonDropSequence { + type: 'drop_sequence'; + sequence: Sequence; +} + +export interface JsonMoveSequence { + type: 'move_sequence'; + from: { name: string; schema: string | null }; + to: { name: string; schema: string | null }; +} + +export interface JsonRenameSequence { + type: 'rename_sequence'; + from: Sequence; + to: Sequence; +} + +export interface JsonAlterSequence { + type: 'alter_sequence'; + diff: DiffEntities['sequences']; + sequence: Sequence; +} + +export interface JsonDropColumn { + type: 'drop_column'; + column: Column; +} + +export interface JsonAddColumn { + type: 'add_column'; + column: Column; +} + +export interface JsonCreatePolicy { + type: 'create_policy'; + policy: Policy; +} + +export interface JsonDropPolicy { + type: 'drop_policy'; + policy: Policy; +} + +export interface JsonRenamePolicy { + type: 'rename_policy'; + from: Policy; + to: Policy; +} + +export interface JsonAlterRLS { + type: 'alter_rls'; + schema: string; + name: string; + isRlsEnabled: boolean; +} + +export interface JsonAlterPolicy { + type: 'alter_policy'; + diff: DiffEntities['policies']; + policy: Policy; +} +export interface JsonRecreatePolicy { + type: 'recreate_policy'; + diff: DiffEntities['policies']; + policy: Policy; +} + +export interface JsonCreateIndex { + type: 'create_index'; + index: Index; + newTable: boolean; +} + +export interface JsonRecreateIndex { + type: 'recreate_index'; + diff: DiffEntities['indexes']; +} + +export interface JsonCreateFK { + type: 'create_fk'; + fk: ForeignKey; +} + +export interface JsonDropFK { + type: 'drop_fk'; + fk: ForeignKey; +} + +export interface JsonRecreateFK { + type: 'recreate_fk'; + fk: ForeignKey; + diff: DiffEntities['fks']; +} + +export interface JsonAddCheck { + type: 'add_check'; + check: CheckConstraint; +} + +export interface JsonDropCheck { + type: 'drop_check'; + check: CheckConstraint; +} + +export interface JsonAlterCheck { + type: 'alter_check'; + check: CheckConstraint; + diff: DiffEntities['checks']; +} + +export interface JsonAddPrimaryKey { + type: 'add_pk'; + pk: PrimaryKey; +} + +export interface JsonDropPrimaryKey { + type: 'drop_pk'; + pk: PrimaryKey; +} + +export interface JsonRenameConstraint { + type: 'rename_constraint'; + schema: string; + table: string; + from: string; + to: string; +} + +export interface JsonAlterPrimaryKey { + type: 'alter_pk'; + pk: PrimaryKey; + diff: DiffEntities['pks']; +} + +export interface JsonRecreatePrimaryKey { + type: 'recreate_pk'; + right: PrimaryKey; + left: PrimaryKey; +} + +export interface JsonMoveTable { + type: 'move_table'; + name: string; + from: string; + to: string; +} + +export interface JsonAlterTableRemoveFromSchema { + type: 'remove_from_schema'; + table: string; + schema: string; +} + +export interface JsonAlterTableSetNewSchema { + type: 'set_new_schema'; + table: string; + from: string; + to: string; +} + +export interface JsonDropIndex { + type: 'drop_index'; + index: Index; +} + +export interface JsonRenameIndex { + type: 'rename_index'; + schema: string; + from: string; + to: string; +} + +export interface JsonRenameColumn { + type: 'rename_column'; + from: Column; + to: Column; +} + +export interface JsonAlterColumn { + type: 'alter_column'; + to: Column; + wasEnum: boolean; + isEnum: boolean; + diff: DiffEntities['columns']; +} + +export interface JsonRecreateColumn { + type: 'recreate_column'; + diff: DiffEntities['columns']; +} + +export interface JsonAlterColumnSetPrimaryKey { + type: 'alter_column_set_pk'; + table: string; + schema: string; + column: string; +} + +export interface JsonCreateSchema { + type: 'create_schema'; + name: string; +} + +export interface JsonDropSchema { + type: 'drop_schema'; + name: string; +} + +export interface JsonRenameSchema { + type: 'rename_schema'; + from: Schema; + to: Schema; +} + +export interface JsonCreateView { + type: 'create_view'; + view: View; +} + +export interface JsonDropView { + type: 'drop_view'; + view: View; +} + +export interface JsonRenameView { + type: 'rename_view'; + from: View; + to: View; +} + +export interface JsonMoveView { + type: 'move_view'; + fromSchema: string; + toSchema: string; + view: View; +} + +export interface JsonAlterView { + type: 'alter_view'; + diff: DiffEntities['views']; + view: View; +} + +export interface JsonRecreateView { + type: 'recreate_view'; + from: View; + to: View; +} + +export type JsonStatement = + | JsonCreateTable + | JsonDropTable + | JsonRenameTable + | JsonRenameColumn + | JsonAlterColumn + | JsonRecreateColumn + | JsonMoveView + | JsonAlterView + | JsonRecreateView + | JsonCreateEnum + | JsonDropEnum + | JsonMoveEnum + | JsonRenameEnum + | JsonRecreateEnum + | JsonAlterEnum + | JsonDropColumn + | JsonAddColumn + | JsonCreateIndex + | JsonDropIndex + | JsonRecreateIndex + | JsonRenameIndex + | JsonAddPrimaryKey + | JsonDropPrimaryKey + | JsonRenameConstraint + | JsonAlterPrimaryKey + | JsonCreateFK + | JsonDropFK + | JsonRecreateFK + | JsonDropCheck + | JsonAddCheck + | JsonCreateSchema + | JsonDropSchema + | JsonRenameSchema + | JsonMoveTable + | JsonAlterTableRemoveFromSchema + | JsonAlterTableSetNewSchema + | JsonAlterSequence + | JsonDropSequence + | JsonCreateSequence + | JsonMoveSequence + | JsonRenameSequence + | JsonDropPolicy + | JsonCreatePolicy + | JsonAlterPolicy + | JsonRecreatePolicy + | JsonRenamePolicy + | JsonAlterRLS + | JsonRenameRole + | JsonCreateRole + | JsonDropRole + | JsonAlterRole + | JsonCreateView + | JsonDropView + | JsonRenameView + | JsonAlterCheck + | JsonDropValueFromEnum + | JsonAlterColumnAddNotNull + | JsonAlterColumnDropNotNull + | JsonRecreatePrimaryKey; + +export const prepareStatement = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + args: Omit, +): Simplify => { + return { + type, + ...args, + } as TStatement; +}; diff --git a/drizzle-kit/src/dialects/cockroach/typescript.ts b/drizzle-kit/src/dialects/cockroach/typescript.ts new file mode 100644 index 0000000000..b9f6a1c5df --- /dev/null +++ b/drizzle-kit/src/dialects/cockroach/typescript.ts @@ -0,0 +1,706 @@ +import '../../@types/utils'; +import { toCamelCase } from 'drizzle-orm/casing'; +import type { Casing } from '../../cli/validations/common'; +import { assertUnreachable, trimChar } from '../../utils'; +import { inspect } from '../utils'; +import type { CheckConstraint, CockroachDDL, Column, ForeignKey, Index, Policy, PrimaryKey, ViewColumn } from './ddl'; +import { tableFromDDL } from './ddl'; +import { defaults, typeFor } from './grammar'; + +// TODO: omit defaults opclass... +const imports = [ + 'cockroachEnum', + 'int2', + 'int4', + 'int8', + 'bool', + 'varchar', + 'char', + 'decimal', + 'real', + 'json', + 'jsonb', + 'time', + 'timestamp', + 'date', + 'interval', + 'inet', + 'uuid', + 'vector', + 'bit', + 'geometry', + 'float', + 'string', + 'text', + 'varbit', + 'customType', +] as const; +export type Import = (typeof imports)[number]; + +const cockroachImportsList = new Set(['cockroachTable', ...imports]); + +const objToStatement2 = (json: { [s: string]: unknown }) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +// const intervalStrToObj = (str: string) => { +// if (str.startsWith('interval(')) { +// return { +// precision: Number(str.substring('interval('.length, str.length - 1)), +// }; +// } +// const splitted = str.split(' '); +// if (splitted.length === 1) { +// return {}; +// } +// const rest = splitted.slice(1, splitted.length).join(' '); +// if (possibleIntervals.includes(rest)) { +// return { fields: `"${rest}"` }; +// } + +// for (const s of possibleIntervals) { +// if (rest.startsWith(`${s}(`)) { +// return { +// fields: `"${s}"`, +// precision: Number(rest.substring(s.length + 1, rest.length - 1)), +// }; +// } +// } +// return {}; +// }; + +const relations = new Set(); + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const withCasing = (value: string, casing: Casing) => { + if (casing === 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(toCamelCase(value)); + } + + assertUnreachable(casing); +}; + +const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { + if (casing === 'preserve') { + return ''; + } + if (casing === 'camel') { + return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; + } + + assertUnreachable(casing); +}; + +// export const relationsToTypeScriptForStudio = ( +// schema: Record>>, +// relations: Record>>>>, +// ) => { +// const relationalSchema: Record = { +// ...Object.fromEntries( +// Object.entries(schema) +// .map(([key, val]) => { +// // have unique keys across schemas +// const mappedTableEntries = Object.entries(val).map((tableEntry) => { +// return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; +// }); + +// return mappedTableEntries; +// }) +// .flat(), +// ), +// ...relations, +// }; + +// const relationsConfig = extractTablesRelationalConfig(relationalSchema, createTableRelationsHelpers); + +// let result = ''; + +// function findColumnKey(table: AnyCockroachTable, columnName: string) { +// for (const tableEntry of Object.entries(table)) { +// const key = tableEntry[0]; +// const value = tableEntry[1]; + +// if (value.name === columnName) { +// return key; +// } +// } +// } + +// Object.values(relationsConfig.tables).forEach((table) => { +// const tableName = table.tsName.split('.')[1]; +// const relations = table.relations; +// let hasRelations = false; +// let relationsObjAsStr = ''; +// let hasOne = false; +// let hasMany = false; + +// Object.values(relations).forEach((relation) => { +// hasRelations = true; + +// if (is(relation, Many)) { +// hasMany = true; +// relationsObjAsStr += `\t\t${relation.fieldName}: many(${ +// relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] +// }${typeof relation.relationName !== 'undefined' ? `, { relationName: "${relation.relationName}"}` : ''}),`; +// } + +// if (is(relation, One)) { +// hasOne = true; +// relationsObjAsStr += `\t\t${relation.fieldName}: one(${ +// relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] +// }, { fields: [${ +// relation.config?.fields.map( +// (c) => +// `${relationsConfig.tableNamesMap[getTableName(relation.sourceTable)].split('.')[1]}.${ +// findColumnKey(relation.sourceTable, c.name) +// }`, +// ) +// }], references: [${ +// relation.config?.references.map( +// (c) => +// `${relationsConfig.tableNamesMap[getTableName(relation.referencedTable)].split('.')[1]}.${ +// findColumnKey(relation.referencedTable, c.name) +// }`, +// ) +// }]${typeof relation.relationName !== 'undefined' ? `, relationName: "${relation.relationName}"` : ''}}),`; +// } +// }); + +// if (hasRelations) { +// result += `export const ${tableName}Relation = relations(${tableName}, ({${hasOne ? 'one' : ''}${ +// hasOne && hasMany ? ', ' : '' +// }${hasMany ? 'many' : ''}}) => ({ +// ${relationsObjAsStr} +// }));\n`; +// } +// }); + +// return result; +// }; + +function generateIdentityParams(column: Column) { + if (column.identity === null) return ''; + const identity = column.identity; + + const tuples = []; + + if (identity.startWith && defaults.identity.startWith !== identity.startWith) { + tuples.push(['startWith', identity.startWith]); + } + if (identity.increment && defaults.identity.increment !== identity.increment) { + tuples.push(['increment', identity.increment]); + } + if (identity.minValue && defaults.identity.min !== identity.minValue) tuples.push(['minValue', identity.minValue]); + if (identity.maxValue && defaults.identity.maxFor(column.type) !== identity.maxValue) { + tuples.push(['maxValue', identity.maxValue]); + } + if (identity.cache && defaults.identity.cache !== identity.cache) tuples.push(['cache', identity.cache]); + + const params = tuples.length > 0 ? `{ ${tuples.map((x) => `${x[0]}: ${x[1]}`).join(' ,')} }` : ''; + + if (identity?.type === 'always') { + return `.generatedAlwaysAsIdentity(${params})`; + } + return `.generatedByDefaultAsIdentity(${params})`; +} + +export const paramNameFor = (name: string, schema: string | null) => { + const schemaSuffix = schema && schema !== 'public' ? `In${schema.capitalise()}` : ''; + return `${name}${schemaSuffix}`; +}; + +export const ddlToTypeScript = (ddl: CockroachDDL, columnsForViews: ViewColumn[], casing: Casing) => { + const tableFn = `cockroachTable`; + for (const fk of ddl.fks.list()) { + relations.add(`${fk.table}-${fk.tableTo}`); + } + + const schemas = Object.fromEntries( + ddl.schemas + .list() + .filter((it) => it.name !== 'public') + .map((it) => { + return [it.name, withCasing(it.name, casing)]; + }), + ); + + const enumTypes = new Set(ddl.enums.list().map((x) => `${x.schema}.${x.name}`)); + + const imports = new Set(); + const vcs = columnsForViews.map((it) => ({ entityType: 'viewColumns' as const, ...it })); + const entities = [...ddl.entities.list(), ...vcs]; + for (const x of entities) { + if (x.entityType === 'schemas' && x.name !== 'public') imports.add('cockroachSchema'); + if (x.entityType === 'enums' && x.schema === 'public') imports.add('cockroachEnum'); + if (x.entityType === 'tables') imports.add(tableFn); + + if (x.entityType === 'indexes') { + if (x.isUnique) imports.add('uniqueIndex'); + else imports.add('index'); + } + + if (x.entityType === 'fks') { + imports.add('foreignKey'); + + if (isCyclic(x) && !isSelf(x)) imports.add('type AnyCockroachColumn'); + } + if (x.entityType === 'pks') imports.add('primaryKey'); + if (x.entityType === 'checks') imports.add('check'); + if (x.entityType === 'views' && x.schema === 'public') { + if (x.materialized) imports.add('cockroachMaterializedView'); + else imports.add('cockroachView'); + } + + if (x.entityType === 'columns' || x.entityType === 'viewColumns') { + let patched = x.type.replace('[]', ''); + const isEnum = Boolean(x.typeSchema); + const grammarType = typeFor(x.type, isEnum); + if (grammarType) imports.add(grammarType.drizzleImport()); + if (cockroachImportsList.has(patched)) imports.add(patched); + } + + if (x.entityType === 'sequences' && x.schema === 'public') imports.add('cockroachSequence'); + if (x.entityType === 'enums' && x.schema === 'public') imports.add('cockroachEnum'); + if (x.entityType === 'policies') imports.add('cockroachPolicy'); + if (x.entityType === 'roles') imports.add('cockroachRole'); + } + + const enumStatements = ddl.enums + .list() + .map((it) => { + const enumSchema = schemas[it.schema]; + // const func = schema || schema === "public" ? "cockroachTable" : schema; + const paramName = paramNameFor(it.name, enumSchema); + + const func = enumSchema ? `${enumSchema}.enum` : 'cockroachEnum'; + + const values = Object.values(it.values) + .map((it) => { + return `\`${it.replaceAll('\\', '\\\\').replace('`', '\\`')}\``; + }) + .join(', '); + return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", [${values}])\n`; + }) + .join('') + .concat('\n'); + + const sequencesStatements = ddl.sequences + .list() + .map((it) => { + const seqSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, seqSchema); + + const func = seqSchema ? `${seqSchema}.sequence` : 'cockroachSequence'; + + let params = ''; + if (it.startWith) params += `, startWith: "${it.startWith}"`; + if (it.incrementBy) params += `, increment: "${it.incrementBy}"`; + if (it.minValue) params += `, minValue: "${it.minValue}"`; + if (it.maxValue) params += `, maxValue: "${it.maxValue}"`; + if (it.cacheSize) params += `, cache: "${it.cacheSize}"`; + else params += `, cycle: false`; + + params = params ? `, { ${trimChar(params, ',')} }` : ''; + + return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}"${params})\n`; + }) + .join('') + .concat(''); + + const schemaStatements = Object.entries(schemas) + .map((it) => { + return `export const ${it[1]} = cockroachSchema("${it[0]}");\n`; + }) + .join(''); + + const rolesNameToTsKey: Record = {}; + const rolesStatements = ddl.roles.list().map((it) => { + const identifier = withCasing(it.name, casing); + rolesNameToTsKey[it.name] = identifier; + const params = { + ...(it.createDb ? { createDb: true } : {}), + ...(it.createRole ? { createRole: true } : {}), + }; + const paramsString = inspect(params); + const comma = paramsString ? ', ' : ''; + + return `export const ${identifier} = cockroachRole("${it.name}"${comma}${paramsString});\n`; + }) + .join(''); + + const tableStatements = ddl.tables.list().map((it) => { + const tableSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, tableSchema); + const table = tableFromDDL(it, ddl); + const columns = ddl.columns.list({ schema: table.schema, table: table.name }); + const fks = ddl.fks.list({ schema: table.schema, table: table.name }); + + let func = tableSchema ? `${tableSchema}.table` : tableFn; + func += table.isRlsEnabled ? '.withRLS' : ''; + let statement = `export const ${withCasing(paramName, casing)} = ${func}("${table.name}", {\n`; + statement += createTableColumns(columns, table.pk, fks, enumTypes, schemas, casing); + statement += '}'; + + // copied from pg + const filteredFKs = table.fks.filter((it) => { + return it.columns.length > 1 || isSelf(it); + }); + + const hasCallback = table.indexes.length > 0 || filteredFKs.length > 0 || table.policies.length > 0 + || (table.pk && table.pk.columns.length > 1) || table.checks.length > 0; + + if (hasCallback) { + statement += ', '; + statement += '(table) => [\n'; + // TODO: or pk has non-default name + statement += table.pk && table.pk.columns.length > 1 ? createTablePK(table.pk, casing) : ''; + statement += createTableFKs(filteredFKs, schemas, casing); + statement += createTableIndexes(table.name, table.indexes, casing); + statement += createTablePolicies(table.policies, casing, rolesNameToTsKey); + statement += createTableChecks(table.checks, casing); + statement += ']'; + } + statement += ');'; + return statement; + }); + + const viewsStatements = Object.values(ddl.views.list()) + .map((it) => { + const viewSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, viewSchema); + + // TODO: casing? + const func = it.schema !== 'public' + ? (it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view`) + : it.materialized + ? 'cockroachMaterializedView' + : 'cockroachView'; + + const as = `sql\`${it.definition}\``; + + const viewColumns = columnsForViews.filter((x) => x.schema === it.schema && x.view === it.name); + + const columns = createViewColumns(viewColumns, enumTypes, casing); + + let statement = `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", {${columns}})`; + statement += `.as(${as});`; + + return statement; + }) + .join('\n\n'); + + const uniqueCockroachImports = [...imports]; + + const importsTs = `import { ${uniqueCockroachImports.join(', ')} } from "drizzle-orm/cockroach-core" +import { sql } from "drizzle-orm"\n\n`; + + let decalrations = schemaStatements; + decalrations += rolesStatements; + decalrations += enumStatements; + decalrations += sequencesStatements; + decalrations += '\n'; + decalrations += tableStatements.join('\n\n'); + decalrations += '\n'; + decalrations += viewsStatements; + + const file = importsTs + decalrations; + + // for drizzle studio query runner + const schemaEntry = ` + { + ${ + Object.values(ddl.tables) + .map((it) => withCasing(it.name, casing)) + .join(',\n') + } + } + `; + + return { file, imports: importsTs, decalrations, schemaEntry }; +}; + +const isCyclic = (fk: ForeignKey) => { + const key = `${fk.table}-${fk.tableTo}`; + const reverse = `${fk.tableTo}-${fk.table}`; + return relations.has(key) && relations.has(reverse); +}; + +const isSelf = (fk: ForeignKey) => { + return fk.table === fk.tableTo; +}; + +const column = ( + type: string, + dimensions: number, + name: string, + typeSchema: string | null, + casing: Casing, + def: Column['default'], +) => { + const isEnum = Boolean(typeSchema); + const grammarType = typeFor(type, isEnum); + + const { options, default: defaultValue, customType } = dimensions > 0 + ? grammarType.toArrayTs(type, def ?? null) + : grammarType.toTs(type, def ?? null); + + const dbName = dbColumnName({ name, casing }); + const opts = inspect(options); + const comma = (dbName && opts) ? ', ' : ''; + + let columnStatement = `${withCasing(name, casing)}: ${ + isEnum ? withCasing(paramNameFor(type, typeSchema), casing) : grammarType.drizzleImport() + }${customType ? `({ dataType: () => '${customType}' })` : ''}(${dbName}${comma}${opts})`; + columnStatement += '.array()'.repeat(dimensions); + + if (defaultValue) columnStatement += `.default(${defaultValue})`; + return columnStatement; +}; + +const createViewColumns = (columns: ViewColumn[], enumTypes: Set, casing: Casing) => { + let statement = ''; + + columns.forEach((it) => { + const columnStatement = column(it.type, it.dimensions, it.name, it.typeSchema, casing, null); + statement += '\t'; + statement += columnStatement; + // Provide just this in column function + statement += '.array()'.repeat(it.dimensions); + statement += it.notNull ? '.notNull()' : ''; + statement += ',\n'; + }); + return statement; +}; + +const createTableColumns = ( + columns: Column[], + primaryKey: PrimaryKey | null, + fks: ForeignKey[], + enumTypes: Set, + schemas: Record, + casing: Casing, +): string => { + let statement = ''; + + // no self refs and no cyclic + const oneColumnsFKs = Object.values(fks) + .filter((it) => { + return !isSelf(it); + }) + .filter((it) => it.columns.length === 1); + + const fkByColumnName = oneColumnsFKs.reduce( + (res, it) => { + const arr = res[it.columns[0]] || []; + arr.push(it); + res[it.columns[0]] = arr; + return res; + }, + {} as Record, + ); + + for (const it of columns) { + const { name, type, dimensions, default: def, identity, generated, typeSchema } = it; + const stripped = type.replaceAll('[]', ''); + const isEnum = Boolean(typeSchema); + const grammarType = typeFor(stripped, isEnum); + + const { options, default: defaultValue, customType } = dimensions > 0 + ? grammarType.toArrayTs(type, def ?? null) + : grammarType.toTs(type, def ?? null); + + const dbName = dbColumnName({ name, casing }); + const opts = inspect(options); + const comma = (dbName && opts) ? ', ' : ''; + + const pk = primaryKey && primaryKey.columns.length === 1 && primaryKey.columns[0] === it.name + ? primaryKey + : null; + + let columnStatement = `${withCasing(name, casing)}: ${ + isEnum ? withCasing(paramNameFor(type, typeSchema), casing) : grammarType.drizzleImport() + }${customType ? `({ dataType: () => '${customType}' })` : ''}(${dbName}${comma}${opts})`; + columnStatement += '.array()'.repeat(dimensions); + if (defaultValue) columnStatement += defaultValue.startsWith('.') ? defaultValue : `.default(${defaultValue})`; + if (pk) columnStatement += '.primaryKey()'; + if (it.notNull && !it.identity && !pk) columnStatement += '.notNull()'; + if (identity) columnStatement += generateIdentityParams(it); + if (generated) columnStatement += `.generatedAlwaysAs(sql\`${generated.as}\`)`; + + statement += '\t'; + statement += columnStatement; + // Provide just this in column function + + const fks = fkByColumnName[it.name]; + // Andrii: I switched it off until we will get a custom naem setting in references + if (fks) { + const fksStatement = fks + .map((it) => { + const onDelete = it.onDelete && it.onDelete !== 'NO ACTION' ? it.onDelete : null; + const onUpdate = it.onUpdate && it.onUpdate !== 'NO ACTION' ? it.onUpdate : null; + const params = { onDelete, onUpdate }; + + const typeSuffix = isCyclic(it) ? ': AnyCockroachColumn' : ''; + + const paramsStr = objToStatement2(params); + const tableSchema = schemas[it.schemaTo || '']; + const paramName = paramNameFor(it.tableTo, tableSchema); + if (paramsStr) { + return `.references(()${typeSuffix} => ${ + withCasing( + paramName, + casing, + ) + }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; + } + return `.references(()${typeSuffix} => ${ + withCasing( + paramName, + casing, + ) + }.${withCasing(it.columnsTo[0], casing)})`; + }) + .join(''); + statement += fksStatement; + } + + statement += ',\n'; + } + + return statement; +}; + +const createTableIndexes = (tableName: string, idxs: Index[], casing: Casing): string => { + let statement = ''; + + idxs.forEach((it) => { + // TODO: cc: @AndriiSherman we have issue when index is called as table called + // let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; + // idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; + // idxKey = withCasing(idxKey, casing); + // const indexGeneratedName = indexName( + // tableName, + // it.columns.map((it) => it.value), + // ); + + const name = it.nameExplicit ? it.name : ''; + // const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += it.isUnique ? '\tuniqueIndex(' : '\tindex('; + statement += name ? `"${name}")` : ')'; + + statement += `.using("${it.method}", ${ + it.columns + .map((it) => { + if (it.isExpression) { + return `sql\`${it.isExpression}\``; + } else { + return `table.${withCasing(it.value, casing)}${it.asc ? '.asc()' : '.desc()'}`; + } + }) + .join(', ') + })`; + statement += it.where ? `.where(sql\`${it.where}\`)` : ''; + + statement += `,\n`; + }); + + return statement; +}; + +const createTablePK = (it: PrimaryKey, casing: Casing): string => { + let statement = '\tprimaryKey({ columns: ['; + statement += `${ + it.columns + .map((c) => { + return `table.${withCasing(c, casing)}`; + }) + .join(', ') + }`; + statement += `]${it.nameExplicit ? `, name: "${it.name}"` : ''}}),\n`; + return statement; +}; + +// get a map of db role name to ts key +// if to by key is in this map - no quotes, otherwise - quotes + +const createTablePolicies = ( + policies: Policy[], + casing: Casing, + rolesNameToTsKey: Record = {}, +): string => { + let statement = ''; + + policies.forEach((it) => { + const mappedItTo = it.roles.map((v) => { + return rolesNameToTsKey[v] ? withCasing(rolesNameToTsKey[v], casing) : `"${v}"`; + }); + + const tuples = []; + if (it.as === 'RESTRICTIVE') tuples.push(['as', `"${it.as.toLowerCase}"`]); + if (it.for !== 'ALL') tuples.push(['for', `"${it.for.toLowerCase()}"`]); + if (!(mappedItTo.length === 1 && mappedItTo[0] === '"public"')) { + tuples.push(['to', `[${mappedItTo.map((x) => `${x}`).join(', ')}]`]); + } + if (it.using !== null) tuples.push(['using', `sql\`${it.using}\``]); + if (it.withCheck !== null) tuples.push(['withCheck', `sql\`${it.withCheck}\``]); + const opts = tuples.length > 0 ? `, { ${tuples.map((x) => `${x[0]}: ${x[1]}`).join(', ')} }` : ''; + statement += `\tcockroachPolicy("${it.name}"${opts}),\n`; + }); + + return statement; +}; + +const createTableChecks = (checkConstraints: CheckConstraint[], _casing: Casing) => { + let statement = ''; + + checkConstraints.forEach((it) => { + statement += 'check('; + statement += `"${it.name}", `; + statement += `sql\`${it.value}\`)`; + statement += `,`; + }); + + return statement; +}; + +const createTableFKs = (fks: ForeignKey[], schemas: Record, casing: Casing): string => { + let statement = ''; + + fks.forEach((it) => { + const tableSchema = it.schemaTo === 'public' ? '' : schemas[it.schemaTo]; + const paramName = paramNameFor(it.tableTo, tableSchema); + + const isSelf = it.tableTo === it.table; + const tableTo = isSelf ? 'table' : `${withCasing(paramName, casing)}`; + statement += `\tforeignKey({\n`; + statement += `\t\tcolumns: [${it.columns.map((i) => `table.${withCasing(i, casing)}`).join(', ')}],\n`; + statement += `\t\tforeignColumns: [${ + it.columnsTo.map((i) => `${tableTo}.${withCasing(i, casing)}`).join(', ') + }],\n`; + statement += it.nameExplicit ? `\t\tname: "${it.name}"\n` : ''; + statement += `\t})`; + + statement += it.onUpdate && it.onUpdate !== 'NO ACTION' ? `.onUpdate("${it.onUpdate}")` : ''; + statement += it.onDelete && it.onDelete !== 'NO ACTION' ? `.onDelete("${it.onDelete}")` : ''; + statement += `,\n`; + }); + return statement; +}; diff --git a/drizzle-kit/src/dialects/common.ts b/drizzle-kit/src/dialects/common.ts new file mode 100644 index 0000000000..c6a882cc7d --- /dev/null +++ b/drizzle-kit/src/dialects/common.ts @@ -0,0 +1,30 @@ +export type Resolver = (it: { + created: T[]; + deleted: T[]; +}) => Promise<{ created: T[]; deleted: T[]; renamedOrMoved: { from: T; to: T }[] }>; + +const dictionary = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'.split(''); + +export const hash = (input: string, len: number = 12) => { + const dictLen = BigInt(dictionary.length); + const combinationsCount = BigInt(dictionary.length) ** BigInt(len); + const p = 53n; + let power = 1n; + + let hash = 0n; + for (const ch of input) { + hash = (hash + (BigInt(ch.codePointAt(0) || 0) * power)) % combinationsCount; + power = (power * p) % combinationsCount; + } + + const result = [] as string[]; + + let index = hash; + for (let i = len - 1; i >= 0; i--) { + const element = dictionary[Number(index % dictLen)]!; + result.unshift(element); + index = index / dictLen; + } + + return result.join(''); +}; diff --git a/drizzle-kit/src/dialects/dialect.ts b/drizzle-kit/src/dialects/dialect.ts new file mode 100644 index 0000000000..bb12a491f4 --- /dev/null +++ b/drizzle-kit/src/dialects/dialect.ts @@ -0,0 +1,950 @@ +type DataType = 'string' | 'string[]' | 'number' | 'boolean'; + +type TypeMap = { + string: string; + number: number; + boolean: boolean; + 'string[]': string[]; +}; + +type Simplify = + & { + [K in keyof T]: T[K]; + } + & {}; + +type Assume = T extends U ? T : U; + +type ExtendedType = + | (`${Exclude}?` | DataType) + | 'required' + | [string, ...(string | null)[]] + | { + [K: string]: Exclude; + } + | ([{ + [K: string]: Exclude; + }]); + +type InferField = T extends (string | null)[] ? T[number] + : T extends [Record] ? { + [K in keyof T[0]]: InferField; + }[] + : T extends Record ? + | { + [K in keyof T]: InferField; + } + | null + : T extends `${infer Type extends DataType}?` ? TypeMap[Type] | null + : T extends DataType ? TypeMap[T] + : never; + +type Definition = Record; + +type InferSchema = Simplify< + { + -readonly [K in keyof TSchema]: K extends keyof Common ? Exclude + : InferField>; + } +>; + +type NullAsUndefined> = + & { + [K in keyof TData as null extends TData[K] ? K : never]: TData[K] | undefined; + } + & { + [K in keyof TData as null extends TData[K] ? never : K]: TData[K]; + }; + +type Schema = + & Record + & { + [K in keyof Common as null extends Common[K] ? K : never]?: 'required'; + } + & { + [K in keyof Common as null extends Common[K] ? never : K]?: never; + } + & { + [K in `${keyof Common}?`]?: never; + } + & { + entityType?: never; + CONTAINS?: never; + }; + +type Common = { + schema: string | null; + table: string | null; + name: string; +}; + +const commonConfig: Record = { + schema: 'string?', + table: 'string?', + name: 'string', +}; + +type InferEntities< + TDefinition extends Definition, +> = { + [K in keyof TDefinition]: Simplify< + & InferSchema + & { + [C in keyof Common as C extends keyof TDefinition[K] ? never : null extends Common[C] ? never : C]: Common[C]; + } + & { + entityType: K; + } + >; +}; + +type Filter = Record> = { + [K in keyof TInput]?: + | TInput[K] + | (TInput[K] extends (any[] | null) ? { + CONTAINS: TInput[K][number]; + } + : never); +}; + +type UpdateOperators> = { + [K in keyof TInput]?: + | TInput[K] + | (( + item: TInput[K] extends any[] | null ? Exclude[number] : TInput[K], + ) => TInput[K] extends any[] | null ? Exclude[number] : TInput[K]); +}; + +type CollectionStore = { + collection: Record[]; +}; + +function matchesFilters(item: Record, filter: Filter): boolean { + for (const [k, v] of Object.entries(filter)) { + if (v === undefined) continue; + const target = item[k]; + + if ((v && typeof v === 'object' && v.CONTAINS !== undefined)) { + if (!Array.isArray(target)) return false; + if (!target.find((e) => isEqual(e, v.CONTAINS))) return false; + } else { + if (!isEqual(target, v)) return false; + } + } + + return true; +} + +function filterCollection(collection: Record[], filter: Filter) { + return collection.filter((e) => matchesFilters(e, filter)); +} + +type CommonEntity = Common & { + entityType: string; +}; + +function getCompositeKey( + row: CommonEntity, +): string { + return `${row.schema ?? ''}:${row.table ?? ''}:${row.name}:${row.entityType}`; +} + +function findCompositeKey(dataSource: (CommonEntity)[], target: CommonEntity) { + const targetKey = getCompositeKey(target); + const match = dataSource.find((e) => getCompositeKey(e) === targetKey); + + return match; +} + +function findCompositeKeys(dataSource: (CommonEntity)[], target: CommonEntity) { + const targetKey = getCompositeKey(target); + const match = dataSource.filter((e) => getCompositeKey(e) === targetKey); + + return match; +} + +// function replaceValue(arr: Array, target: any, update: any) { +// for (var i = 0; i < arr.length; i++) { +// if (arr[i] === target) { +// arr[i] = update; +// } +// } +// return arr; +// } + +export type InferInsert, TCommon extends boolean = false> = TShape extends + infer Shape ? Simplify< + TCommon extends true ? NullAsUndefined< + { + [ + K in keyof Shape as K extends keyof Common ? (null extends Common[K] ? null extends Shape[K] ? never + : K + : K) + : K + ]: Shape[K]; + } + > + : Omit< + NullAsUndefined< + { + [ + K in keyof TShape as K extends keyof Common ? (null extends Common[K] ? null extends TShape[K] ? never + : K + : K) + : K + ]: TShape[K]; + } + >, + 'entityType' + > + > + : never; + +type PushFn< + TInput extends Record, + TCommon extends boolean = false, +> = ( + input: InferInsert, + uniques?: TInput extends infer Input ? (Exclude, 'entityType'>)[] : never, +) => { + status: 'OK' | 'CONFLICT'; + data: TInput extends [Record, Record, ...Record[]] ? TInput[] : TInput; +}; +type ListFn> = (where?: Filter) => TInput[]; +type OneFn> = (where?: Filter) => TInput | null; +type UpdateFn> = ( + config: TInput extends infer Input extends Record + ? { set: Simplify>>; where?: Filter } + : never, +) => { + status: 'OK' | 'CONFLICT'; + data: TInput[]; +}; +type DeleteFn> = ( + where?: TInput extends infer Input extends Record ? Filter : never, +) => TInput[]; +type ValidateFn> = (data: unknown) => data is TInput; +type HasDiffFn< + TSchema extends Record, + TType extends string, +> = ( + input: DiffAlter, +) => boolean; + +const generateInsert: (configs: Record, store: CollectionStore, type?: string) => PushFn = ( + configs, + store, + type, +) => { + let nulls = type + ? Object.fromEntries( + Object.keys(configs[type]).filter((e) => !commonConfig[e] || !(commonConfig[e] as string).endsWith('?')).map(( + e, + ) => [e, null]), + ) + : undefined; + + return (input, uniques) => { + const filteredElement = Object.fromEntries(Object.entries(input).filter(([_, value]) => value !== undefined)); + const localType = (type ?? filteredElement.entityType) as string; + const localNulls = nulls ?? Object.fromEntries( + Object.keys(configs[localType]).map(( + e, + ) => [e, null]), + ); + + const mapped = { + ...localNulls, + ...filteredElement, + entityType: localType, + }; + + const conflict = uniques + ? store.collection.find((e) => { + if ((e as CommonEntity).entityType !== mapped.entityType) return false; + for (const k of uniques) { + if (k in mapped && !isEqual(mapped[k as keyof typeof mapped], e[k])) return false; + } + + return true; + }) + : findCompositeKey(store.collection as CommonEntity[], mapped as CommonEntity); + if (conflict) { + return { status: 'CONFLICT', data: conflict }; + } + + store.collection.push(mapped); + + return { status: 'OK', data: mapped }; + }; +}; + +const generateList: (store: CollectionStore, type?: string) => ListFn = ( + store, + type, +) => { + return (where) => { + const from = type + ? filterCollection(store.collection, { + entityType: type, + }) + : store.collection; + + if (!where) return from; + + return (filterCollection(from, where)); + }; +}; + +const generateOne: (store: CollectionStore, type?: string) => OneFn = ( + store, + type, +) => { + return (where) => { + const from = type + ? filterCollection(store.collection, { + entityType: type, + }) + : store.collection; + + if (!where) return from[0] ?? null; + + return (filterCollection(from, where)[0] ?? null); + }; +}; + +const generateUpdate: (store: CollectionStore, type?: string) => UpdateFn = ( + store, + type, +) => { + return ({ set, where }) => { + const filter = type + ? { + ...where, + entityType: type, + } + : where; + + const targets = filter ? filterCollection(store.collection, filter) : store.collection; + const entries = Object.entries(set); + const newItems: { + index: number; + item: Record; + }[] = []; + let i = 0; + const dupes: Record[] = []; + + for (const item of targets) { + const newItem: Record = { ...item }; + + for (const [k, v] of entries) { + if (!(k in item)) continue; + const target = item[k]; + + newItem[k] = typeof v === 'function' + ? (Array.isArray(target)) + ? target.map(v) + : v(target) + : v; + } + + const dupe = findCompositeKeys(store.collection as CommonEntity[], newItem as CommonEntity).filter((e) => + e !== item + ); + + dupes.push(...dupe.filter((e) => !dupes.find((d) => d === e))); + + if (!dupe.length) { + newItems.push({ + item: newItem, + index: i++, + }); + } + } + + // Swap this + if (dupes.length) { + return { + status: 'CONFLICT', + data: dupes, + }; + } + + // ^ with this + // If you want non-conflicting changes to apply regardless of conflicts' existence + for (const { index, item } of newItems) { + Object.assign(targets[index]!, item); + } + + return { status: 'OK', data: targets }; + }; +}; + +const generateDelete: (store: CollectionStore, type?: string) => DeleteFn = ( + store, + type, +) => { + return (where) => { + const updatedCollection = [] as Record[]; + const deleted = [] as Record[]; + + const filter = type + ? { + ...where, + entityType: type, + } + : where; + + if (!filter) { + store.collection = updatedCollection; + + return deleted; + } + + store.collection.forEach((e) => { + if (matchesFilters(e, filter)) deleted.push(e); + else updatedCollection.push(e); + }); + + store.collection = updatedCollection; + + return deleted; + }; +}; + +const generateHasDiff: ( + lengths: Record, +) => HasDiffFn = ( + lengths, +) => { + return (input) => { + const type = input.entityType; + const length = lengths[type]; + + return Object.keys(input).length > length; + }; +}; + +function validate(data: any, schema: Config, deep = false): boolean { + if (typeof data !== 'object' || data === null) return false; + + for (const k of Array.from(new Set([...Object.keys(data), ...Object.keys(schema)]))) { + if (!deep && k === 'entityType') continue; + + if (!schema[k]) return false; + + if (schema[k] === 'string[]') { + if (!Array.isArray(data[k])) return false; + + if (!data[k].every((e) => typeof e === 'string')) return false; + } else if (typeof schema[k] === 'string') { + const isNullable = schema[k].endsWith('?'); + if (data[k] === null && !isNullable) return false; + if (data[k] !== null && typeof data[k] !== removeQuestionMark(schema[k])) return false; + } else if (Array.isArray(schema[k])) { + if (typeof schema[k][0] === 'string') { + if (!schema[k].some((e) => e === data[k])) return false; + } else { + if (!Array.isArray(data[k])) return false; + if ( + !data[k].every( + (e) => validate(e, (schema[k] as [Config])[0]), + true, + ) + ) return false; + } + } else { + if (data[k] !== null && !validate(data[k], schema[k], true)) return false; + } + } + + return true; +} + +const generateValidate: (configs: Record, type?: string) => ValidateFn = ( + configs, + type, +) => { + return ((data) => { + if (typeof data !== 'object' || data === null) return false; + + const localType = type ?? ( data).entityType as string; + if (typeof localType !== 'string' || ( data).entityType !== localType) return false; + + const config = configs[localType]; + if (!config) return false; + + return validate(data, config); + }) as ValidateFn; +}; + +type GenerateProcessors< + T extends AnyDbConfig, + TCommon extends boolean = false, + TTypes extends Record = T['types'], +> = { + [K in keyof TTypes]: { + push: PushFn; + list: ListFn; + one: OneFn; + update: UpdateFn; + delete: DeleteFn; + validate: ValidateFn; + hasDiff: HasDiffFn; + }; +}; + +function initSchemaProcessors, 'diffs'>, TCommon extends boolean>( + { entities }: T, + store: CollectionStore, + common: TCommon, + extraConfigs?: Record, +): GenerateProcessors { + const entries = Object.entries(entities); + + // left, right, entityType, diffType + const extraKeys = 4; + + const lengths: Record = Object.fromEntries( + Object.entries(common ? extraConfigs! : entities).map(([k, v]) => { + // name, table?, schema? + const commonCount = Object.keys(v).filter((e) => e in commonConfig).length; + + return [k, commonCount + extraKeys]; + }), + ); + + return Object.fromEntries(entries.map(([k, _v]) => { + return [k, { + push: generateInsert(common ? extraConfigs! : entities, store, common ? undefined : k), + list: generateList(store, common ? undefined : k), + one: generateOne(store, common ? undefined : k), + update: generateUpdate(store, common ? undefined : k), + delete: generateDelete(store, common ? undefined : k), + validate: generateValidate(common ? extraConfigs! : entities, common ? undefined : k), + hasDiff: generateHasDiff(lengths), + }]; + })) as GenerateProcessors; +} + +export type Config = { + [K: string]: `${Exclude}?` | DataType | [string, ...string[]] | Config | [Config]; +}; + +type DbConfig = { + /** Type-level fields only, do not attempt to access at runtime */ + types: InferEntities; + /** Type-level fields only, do not attempt to access at runtime */ + definition: TDefinition; + entities: { + [K in keyof TDefinition]: Config; + }; + diffs: { + alter: { + [K in keyof TDefinition | 'entities']: DiffAlter; + }; + create: { + [K in keyof TDefinition | 'entities']: DiffCreate; + }; + drop: { + [K in keyof TDefinition | 'entities']: DiffDrop; + }; + createdrop: { + [K in keyof TDefinition | 'entities']: DiffCreate | DiffDrop; + }; + all: { + [K in keyof TDefinition | 'entities']: DiffStatement; + }; + }; + store: CollectionStore; +}; + +type AnyDbConfig = { + /** Type-level fields only, do not attempt to access at runtime */ + types: Record>; + entities: Record; + definition: Record; +}; + +type ValueOf = T[keyof T]; + +export type DiffCreate< + TSchema extends Definition = {}, + TType extends keyof TSchema | 'entities' = string, + TShape extends Record = TType extends 'entities' ? {} : Simplify< + InferSchema & Omit & { + entityType: TType; + } + >, +> = TType extends 'entities' ? ValueOf< + { + [K in keyof TSchema]: DiffCreate; + } + > + : Simplify< + & { + $diffType: 'create'; + entityType: TType; + } + & { + [ + K in keyof Common as K extends keyof TShape ? null extends TShape[K] ? never : K : K + ]: Exclude; + } + & Omit + >; + +export type DiffDrop< + TSchema extends Definition = {}, + TType extends keyof TSchema | 'entities' = string, + TShape extends Record = TType extends 'entities' ? {} : Simplify< + InferSchema & Omit & { + entityType: TType; + } + >, +> = TType extends 'entities' ? ValueOf< + { + [K in keyof TSchema]: DiffDrop; + } + > + : Simplify< + & { + $diffType: 'drop'; + entityType: TType; + } + & { + [ + K in keyof Common as K extends keyof TShape ? null extends TShape[K] ? never : K : K + ]: Exclude; + } + & Omit + >; + +export type DiffAlter< + TSchema extends Definition = {}, + TType extends keyof TSchema | 'entities' = string, + TShape extends Record = TType extends 'entities' ? {} : Simplify< + InferSchema & Omit & { + entityType: TType; + } + >, + TFullShape extends Record = TType extends 'entities' ? {} : Simplify< + & InferSchema + & { + [C in keyof Common as C extends keyof TSchema[TType] ? never : null extends Common[C] ? never : C]: Common[C]; + } + & { + entityType: TType; + } + >, +> = TType extends 'entities' ? ValueOf< + { + [K in keyof TSchema]: DiffAlter; + } + > + : Simplify< + & { + $diffType: 'alter'; + entityType: TType; + } + & { + [ + K in keyof Common as K extends keyof TShape ? null extends TShape[K] ? never : K : K + ]: Exclude; + } + & { + [K in Exclude]?: { + from: TShape[K]; + to: TShape[K]; + }; + } + & { + $left: TFullShape; + $right: TFullShape; + } + >; + +export type DiffStatement< + TSchema extends Definition, + TType extends keyof TSchema | 'entities', +> = + | DiffCreate + | DiffDrop + | DiffAlter; + +type CollectionRow = Record & Common & { + entityType: string; + key: string; +}; + +const ignoreChanges: Record = { + entityType: true, + name: true, + schema: true, + table: true, +}; + +function isEqual(a: any, b: any): boolean { + if (typeof a !== typeof b) return false; + + if (Array.isArray(a) && Array.isArray(b)) { + if (a.length !== b.length) return false; + return a.every((v, i) => isEqual(v, b[i])); + } + + if (typeof a === 'object') { + if (a === b) return true; + if ((a === null || b === null) && a !== b) return false; + + const keys = Array.from(new Set([...Object.keys(a), ...Object.keys(b)])); + + return keys.every((k) => isEqual(a[k], b[k])); + } + + return a === b; +} + +function sanitizeRow(row: Record) { + return Object.fromEntries( + Object.entries(row).filter(([k, _v]) => !ignoreChanges[k as keyof typeof ignoreChanges]), + ); +} + +function getRowCommons(row: Record): { + [K in keyof Common]: Common[K]; +} { + const res: Record = {}; + for (const k of Object.keys(commonConfig)) { + if (row[k] === undefined || row[k] === null) continue; + + res[k] = row[k]; + } + + return res as any; +} + +function _diff< + TDefinition extends Definition, + TCollection extends keyof TDefinition | 'entities' = 'entities', + TMode extends 'all' | 'create' | 'drop' | 'createdrop' | 'alter' = 'all', + TDataBase extends SimpleDb = SimpleDb, +>( + dbOld: SimpleDb, + dbNew: SimpleDb, + collection?: TCollection, + mode?: TMode, +): Simplify[] { + collection = collection ?? 'entities' as TCollection; + mode = mode ?? 'all' as TMode; + + const leftEntities = dbOld.entities.list( + collection === 'entities' ? undefined : { + // @ts-ignore + entityType: collection, + }, + ) as CollectionRow[]; + const rightEntities = dbNew.entities.list( + collection === 'entities' ? undefined : { + // @ts-ignore + entityType: collection, + }, + ) as CollectionRow[]; + + const left: Record = {}; + const right: Record = {}; + + for (const row of leftEntities) { + left[getCompositeKey(row)] = row; + } + for (const row of rightEntities) { + right[getCompositeKey(row)] = row; + } + + const created: DiffCreate[] = []; + const dropped: DiffDrop[] = []; + const altered: DiffAlter[] = []; + + for (const [key, oldRow] of Object.entries(left)) { + const newRow = right[key]; + if (!newRow) { + if (mode === 'all' || mode === 'drop' || mode === 'createdrop') { + dropped.push({ + $diffType: 'drop', + entityType: oldRow.entityType, + ...getRowCommons(oldRow), + ...sanitizeRow(oldRow), + }); + } + } else if (mode === 'all' || mode === 'alter') { + const changes: Record = {}; + let isChanged = false; + + for (const [k, _v] of Object.entries(oldRow)) { + if (ignoreChanges[k as keyof typeof ignoreChanges]) continue; + + if (!isEqual(oldRow[k], newRow[k])) { + isChanged = true; + changes[k] = { from: oldRow[k], to: newRow[k] }; + } + } + + if (isChanged) { + altered.push({ + $diffType: 'alter', + entityType: newRow.entityType, + ...getRowCommons(newRow), + ...changes, + $left: oldRow, + $right: newRow, + }); + } + } + + delete right[key]; + } + + if (mode === 'all' || mode === 'create' || mode === 'createdrop') { + for (const newRow of Object.values(right)) { + created.push({ + $diffType: 'create', + entityType: newRow.entityType as string, + ...getRowCommons(newRow), + ...sanitizeRow(newRow), + }); + } + } + + return [...created, ...dropped, ...altered] as any; +} + +export function diff< + TDefinition extends Definition, + TCollection extends keyof TDefinition | 'entities' = 'entities', +>(dbOld: SimpleDb, dbNew: SimpleDb, collection?: TCollection) { + return _diff(dbOld, dbNew, collection, 'createdrop'); +} + +export namespace diff { + export function all< + TDefinition extends Definition, + TCollection extends keyof TDefinition | 'entities' = 'entities', + >(dbOld: SimpleDb, dbNew: SimpleDb, collection?: TCollection) { + return _diff(dbOld, dbNew, collection, 'all'); + } + + export function creates< + TDefinition extends Definition, + TCollection extends keyof TDefinition | 'entities' = 'entities', + >(dbOld: SimpleDb, dbNew: SimpleDb, collection?: TCollection) { + return _diff(dbOld, dbNew, collection, 'create'); + } + + export function drops< + TDefinition extends Definition, + TCollection extends keyof TDefinition | 'entities' = 'entities', + >(dbOld: SimpleDb, dbNew: SimpleDb, collection?: TCollection) { + return _diff(dbOld, dbNew, collection, 'drop'); + } + + export function alters< + TDefinition extends Definition, + TCollection extends keyof TDefinition | 'entities' = 'entities', + >(dbOld: SimpleDb, dbNew: SimpleDb, collection?: TCollection) { + return _diff(dbOld, dbNew, collection, 'alter'); + } +} + +function removeQuestionMark( + str: T, +): TResult { + if (!str.endsWith('?')) return str as string as TResult; + + return str.slice(0, str.length - 1) as TResult; +} + +class SimpleDb> { + public readonly _: DbConfig = { + diffs: {} as any, + store: { + collection: [] as Record[], + }, + } as any; + + public entities: GenerateProcessors<{ + types: { + entities: InferEntities extends infer TInferred ? Simplify< + ValueOf + > + : never; + }; + entities: any; + definition: TDefinition; + }, true>['entities']; + + constructor(definition: TDefinition) { + const entries = Object.entries(definition); + const configs = Object.fromEntries(entries.map(([type, def]) => { + if (type === 'entities' || type === '_') throw new Error(`Illegal entity type name: "${type}"`); + const cloneDef: Record = {}; + + Object.entries(def).forEach(([fieldName, fieldValue]) => { + cloneDef[fieldName] = fieldValue; + + if (fieldValue === 'required') { + if (!(fieldName in commonConfig)) { + throw new Error( + `Type value "required" is only applicable to common keys [ ${ + Object.keys(commonConfig).map((e) => `"${e}"`).join(', ') + } ], used on: "${fieldName}"`, + ); + } + + cloneDef[fieldName] = (removeQuestionMark(commonConfig[fieldName] as string)) as Exclude< + ExtendedType, + 'required' + >; + } else { + if (fieldName in commonConfig) { + throw new Error(`Used forbidden key "${fieldName}" in entity "${type}"`); + } + } + }); + + for (const k in commonConfig) { + if (commonConfig[k].endsWith('?')) continue; + + cloneDef[k] = commonConfig[k]; + } + + return [type, cloneDef]; + })); + + this._.entities = configs as any; + + const entConfig = { + ...this._, + entities: { + entities: commonConfig, + }, + }; + + this.entities = initSchemaProcessors(entConfig, this._.store, true, this._.entities).entities as any; + } +} + +export function create< + TDefinition extends Definition, + TResult = SimpleDb extends infer DB extends SimpleDb ? Simplify> + : never, +>( + definition: TDefinition, +): TResult { + const db = new SimpleDb(definition); + + const processors = initSchemaProcessors(db._, db._.store, false); + for (const [k, v] of Object.entries(processors)) { + (db as any)[k] = v; + } + + return db as any; +} diff --git a/drizzle-kit/src/dialects/drizzle.ts b/drizzle-kit/src/dialects/drizzle.ts new file mode 100644 index 0000000000..1359dc93d1 --- /dev/null +++ b/drizzle-kit/src/dialects/drizzle.ts @@ -0,0 +1,140 @@ +import type { SQL } from 'drizzle-orm'; +import { CasingCache, toCamelCase, toSnakeCase } from 'drizzle-orm/casing'; +import { + type CockroachMaterializedView, + type CockroachSchema, + type CockroachView, + getMaterializedViewConfig as crdbMatViewConfig, + getViewConfig as crdbViewConfig, +} from 'drizzle-orm/cockroach-core'; +import { getViewConfig as mssqlViewConfig, type MsSqlSchema, type MsSqlView } from 'drizzle-orm/mssql-core'; +import { getViewConfig as mysqlViewConfig, type MySqlView } from 'drizzle-orm/mysql-core'; +import { + getMaterializedViewConfig as pgMatViewConfig, + getViewConfig as pgViewConfig, + type PgMaterializedView, + type PgSchema, + type PgView, +} from 'drizzle-orm/pg-core'; +import { getViewConfig as sqliteViewConfig, type SQLiteView } from 'drizzle-orm/sqlite-core'; +import type { CasingType } from '../cli/validations/common'; +import type { Schema, Table } from './pull-utils'; + +export const extractPostgresExisting = ( + schemas: PgSchema[], + views: PgView[], + matViews: PgMaterializedView[], +): (Schema | Table)[] => { + const existingSchemas = schemas.filter((x) => x.isExisting).map((x) => ({ + type: 'schema', + name: x.schemaName, + })); + const existingViews = views.map((x) => pgViewConfig(x)).filter((x) => x.isExisting).map
((x) => ({ + type: 'table', + schema: x.schema ?? 'public', + name: x.name, + })); + const existingMatViews = matViews.map((x) => pgMatViewConfig(x)).filter((x) => x.isExisting).map
(( + x, + ) => ({ + type: 'table', + schema: x.schema ?? 'public', + name: x.name, + })); + + return [...existingSchemas, ...existingViews, ...existingMatViews]; +}; + +export const extractCrdbExisting = ( + schemas: CockroachSchema[], + views: CockroachView[], + matViews: CockroachMaterializedView[], +): (Schema | Table)[] => { + const existingSchemas = schemas.filter((x) => x.isExisting).map((x) => ({ + type: 'schema', + name: x.schemaName, + })); + const existingViews = views.map((x) => crdbViewConfig(x)).filter((x) => x.isExisting).map
((x) => ({ + type: 'table', + schema: x.schema ?? 'public', + name: x.name, + })); + + const existingMatViews = matViews.map((x) => crdbMatViewConfig(x)).filter((x) => x.isExisting).map
(( + x, + ) => ({ + type: 'table', + schema: x.schema ?? 'public', + name: x.name, + })); + + return [...existingSchemas, ...existingViews, ...existingMatViews]; +}; + +export const extractMssqlExisting = ( + schemas: MsSqlSchema[], + views: MsSqlView[], +): (Schema | Table)[] => { + const existingSchemas = schemas.filter((x) => x.isExisting).map((x) => ({ + type: 'schema', + name: x.schemaName, + })); + const existingViews = views.map((x) => mssqlViewConfig(x)).filter((x) => x.isExisting).map
((x) => ({ + type: 'table', + schema: x.schema ?? 'public', + name: x.name, + })); + + return [...existingSchemas, ...existingViews]; +}; + +export const extractMysqlExisting = ( + views: MySqlView[], +): Table[] => { + const existingViews = views.map((x) => mysqlViewConfig(x)).filter((x) => x.isExisting).map
((x) => ({ + type: 'table', + schema: x.schema ?? 'public', + name: x.name, + })); + + return [...existingViews]; +}; + +export const extractSqliteExisting = ( + views: SQLiteView[], +): Table[] => { + const existingViews = views.map((x) => sqliteViewConfig(x)).filter((x) => x.isExisting).map
((x) => ({ + type: 'table', + schema: x.schema ?? 'public', + name: x.name, + })); + + return [...existingViews]; +}; + +export const getColumnCasing = ( + column: { keyAsName: boolean; name: string | undefined }, + casing: CasingType | undefined, +) => { + if (!column.name) return ''; + return !column.keyAsName || casing === undefined + ? column.name + : casing === 'camelCase' + ? toCamelCase(column.name) + : toSnakeCase(column.name); +}; + +export const sqlToStr = (sql: SQL, casing: CasingType | undefined) => { + return sql.toQuery({ + escapeName: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeParam: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeString: () => { + throw new Error("we don't support params for `sql` default values"); + }, + casing: new CasingCache(casing), + }).sql; +}; diff --git a/drizzle-kit/src/dialects/gel/snapshot.ts b/drizzle-kit/src/dialects/gel/snapshot.ts new file mode 100644 index 0000000000..d3cab41051 --- /dev/null +++ b/drizzle-kit/src/dialects/gel/snapshot.ts @@ -0,0 +1,314 @@ +import type { TypeOf } from 'zod'; +import { any, array, boolean, enum as enumType, literal, number, object, record, string } from 'zod'; +import { originUUID } from '../../utils'; + +const enumSchema = object({ + name: string(), + schema: string(), + values: string().array(), +}).strict(); + +const enumSchemaV1 = object({ + name: string(), + values: record(string(), string()), +}).strict(); + +const indexColumn = object({ + expression: string(), + isExpression: boolean(), + asc: boolean(), + nulls: string().optional(), + opclass: string().optional(), +}); + +export type IndexColumnType = TypeOf; + +const index = object({ + name: string(), + columns: indexColumn.array(), + isUnique: boolean(), + with: record(string(), any()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + schemaTo: string().optional(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +export const sequenceSchema = object({ + name: string(), + increment: string().optional(), + minValue: string().optional(), + maxValue: string().optional(), + startWith: string().optional(), + cache: string().optional(), + cycle: boolean().optional(), + schema: string(), +}).strict(); + +export const roleSchema = object({ + name: string(), + createDb: boolean().optional(), + createRole: boolean().optional(), + inherit: boolean().optional(), +}).strict(); + +export const sequenceSquashed = object({ + name: string(), + schema: string(), + values: string(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), + generated: object({ + type: literal('stored'), + as: string(), + }).optional(), + identity: sequenceSchema + .merge(object({ type: enumType(['always', 'byDefault']) })) + .optional(), +}).strict(); + +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + +const columnSquashed = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), + generated: object({ + type: literal('stored'), + as: string(), + }).optional(), + identity: string().optional(), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), + nullsNotDistinct: boolean(), +}).strict(); + +export const policy = object({ + name: string(), + as: enumType(['PERMISSIVE', 'RESTRICTIVE']).optional(), + for: enumType(['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE']).optional(), + to: string().array().optional(), + using: string().optional(), + withCheck: string().optional(), + on: string().optional(), + schema: string().optional(), +}).strict(); + +export const policySquashed = object({ + name: string(), + values: string(), +}).strict(); + +const viewWithOption = object({ + checkOption: enumType(['local', 'cascaded']).optional(), + securityBarrier: boolean().optional(), + securityInvoker: boolean().optional(), +}).strict(); + +const matViewWithOption = object({ + fillfactor: number().optional(), + toastTupleTarget: number().optional(), + parallelWorkers: number().optional(), + autovacuumEnabled: boolean().optional(), + vacuumIndexCleanup: enumType(['auto', 'off', 'on']).optional(), + vacuumTruncate: boolean().optional(), + autovacuumVacuumThreshold: number().optional(), + autovacuumVacuumScaleFactor: number().optional(), + autovacuumVacuumCostDelay: number().optional(), + autovacuumVacuumCostLimit: number().optional(), + autovacuumFreezeMinAge: number().optional(), + autovacuumFreezeMaxAge: number().optional(), + autovacuumFreezeTableAge: number().optional(), + autovacuumMultixactFreezeMinAge: number().optional(), + autovacuumMultixactFreezeMaxAge: number().optional(), + autovacuumMultixactFreezeTableAge: number().optional(), + logAutovacuumMinDuration: number().optional(), + userCatalogTable: boolean().optional(), +}).strict(); + +export const mergedViewWithOption = viewWithOption.merge(matViewWithOption).strict(); + +export const view = object({ + name: string(), + schema: string(), + columns: record(string(), column), + definition: string().optional(), + materialized: boolean(), + with: mergedViewWithOption.optional(), + isExisting: boolean(), + withNoData: boolean().optional(), + using: string().optional(), + tablespace: string().optional(), +}).strict(); + +const table = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), + policies: record(string(), policy).default({}), + checkConstraints: record(string(), checkConstraint).default({}), + isRLSEnabled: boolean().default(false), +}).strict(); + +const schemaHash = object({ + id: string(), + prevIds: array(string()), +}); + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ + isArray: boolean().optional(), + dimensions: number().optional(), + rawType: string().optional(), + isDefaultAnExpression: boolean().optional(), + }).optional(), + ), + }).optional(), + ), +}).optional(); + +export const gelSchemaExternal = object({ + version: literal('1'), + dialect: literal('gel'), + tables: array(table), + enums: array(enumSchemaV1), + schemas: array(object({ name: string() })), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), +}).strict(); + +export const gelSchemaInternal = object({ + version: literal('1'), + dialect: literal('gel'), + tables: record(string(), table), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + views: record(string(), view).default({}), + sequences: record(string(), sequenceSchema).default({}), + roles: record(string(), roleSchema).default({}), + policies: record(string(), policy).default({}), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +const tableSquashed = object({ + name: string(), + schema: string(), + columns: record(string(), columnSquashed), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()), + policies: record(string(), string()), + checkConstraints: record(string(), string()), + isRLSEnabled: boolean().default(false), +}).strict(); + +export const gelSchemaSquashed = object({ + version: literal('1'), + dialect: literal('gel'), + tables: record(string(), tableSquashed), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + views: record(string(), view), + sequences: record(string(), sequenceSquashed), + roles: record(string(), roleSchema).default({}), + policies: record(string(), policySquashed).default({}), +}).strict(); + +export const gelSchema = gelSchemaInternal.merge(schemaHash); + +export type Enum = TypeOf; +export type Sequence = TypeOf; +export type Role = TypeOf; +export type Column = TypeOf; +export type Table = TypeOf; +export type GelSchema = TypeOf; +export type GelSchemaInternal = TypeOf; +export type GelSchemaExternal = TypeOf; +export type GelSchemaSquashed = TypeOf; +export type Index = TypeOf; +export type ForeignKey = TypeOf; +export type PrimaryKey = TypeOf; +export type UniqueConstraint = TypeOf; +export type Policy = TypeOf; +export type View = TypeOf; +export type MatViewWithOption = TypeOf; +export type ViewWithOption = TypeOf; + +export type GelKitInternals = TypeOf; +export type CheckConstraint = TypeOf; + +// no prev version +export const backwardCompatibleGelSchema = gelSchema; + +export const dryGel = gelSchema.parse({ + version: '1', + dialect: 'gel', + id: originUUID, + prevIds: [], + tables: {}, + enums: {}, + schemas: {}, + policies: {}, + roles: {}, + sequences: {}, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, +}); diff --git a/drizzle-kit/src/dialects/mssql/convertor.ts b/drizzle-kit/src/dialects/mssql/convertor.ts new file mode 100644 index 0000000000..a42f9934f3 --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/convertor.ts @@ -0,0 +1,557 @@ +import type { Simplify } from '../../utils'; +import type { DefaultConstraint } from './ddl'; +import type { DropColumn, JsonStatement, RenameColumn } from './statements'; + +export const convertor = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + convertor: (statement: Simplify>) => string | string[], +) => { + return { + type, + can: (st: JsonStatement) => { + return st.type === type; + }, + convert: convertor, + }; +}; + +const createTable = convertor('create_table', (st) => { + const { name, schema, columns, pk, checks, uniques, defaults } = st.table; + + let statement = ''; + + const key = schema !== 'dbo' ? `[${schema}].[${name}]` : `[${name}]`; + statement += `CREATE TABLE ${key} (\n`; + + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const isPK = pk && pk.columns.includes(column.name); + + const identity = column.identity; + const identityStatement = identity ? ` IDENTITY(${identity.seed}, ${identity.increment})` : ''; + const notNullStatement = isPK ? '' : column.notNull && !column.identity && !column.generated ? ' NOT NULL' : ''; + + const hasDefault = defaults.find((it) => + it.table === column.table && it.column === column.name && it.schema === column.schema + ); + const defaultStatement = !hasDefault + ? '' + : ` CONSTRAINT [${hasDefault.name}] DEFAULT ${hasDefault.default}`; + + const generatedType = column.generated?.type.toUpperCase() === 'VIRTUAL' + ? '' + : column.generated?.type.toUpperCase(); + const generatedStatement = column.generated + ? ` AS (${column.generated?.as})${' ' + generatedType}` + : ''; + + statement += '\t' + + `[${column.name}] ${ + generatedStatement ? '' : column.type + }${identityStatement}${generatedStatement}${notNullStatement}${defaultStatement}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (pk) { + statement += ',\n'; + statement += `\tCONSTRAINT [${pk.name}] PRIMARY KEY([${pk.columns.join(`],[`)}])`; + } + + for (const unique of uniques) { + statement += ',\n'; + const uniqueString = unique.columns.join('],['); + + statement += `\tCONSTRAINT [${unique.name}] UNIQUE([${uniqueString}])`; + } + + for (const check of checks) { + statement += ',\n'; + statement += `\tCONSTRAINT [${check.name}] CHECK (${check.value})`; + } + + statement += `\n);`; + statement += `\n`; + return statement; +}); + +const dropTable = convertor('drop_table', (st) => { + const { table } = st; + + const key = table.schema !== 'dbo' ? `[${table.schema}].[${table.name}]` : `[${table.name}]`; + + return `DROP TABLE ${key};`; +}); + +const renameTable = convertor('rename_table', (st) => { + const { from, schema, to } = st; + + const key = schema !== 'dbo' ? `${schema}.${from}` : `${from}`; + + return `EXEC sp_rename '${key}', [${to}];`; +}); + +const addColumn = convertor('add_column', (st) => { + const { column, defaults } = st; + const { + name, + notNull, + table, + generated, + identity, + schema, + } = column; + + const notNullStatement = notNull && !column.generated && !column.identity ? ' NOT NULL' : ''; + const identityStatement = identity ? ` IDENTITY(${identity.seed}, ${identity.increment})` : ''; + + const generatedType = column.generated?.type.toUpperCase() === 'VIRTUAL' + ? '' + : column.generated?.type.toUpperCase(); + const generatedStatement = generated + ? ` AS (${generated?.as})${generatedType ? ' ' + generatedType : ''}` + : ''; + + const hasDefault = defaults.find((it) => + it.table === column.table && it.column === column.name && it.schema === column.schema + ); + const defaultStatement = !hasDefault + ? '' + : ` CONSTRAINT [${hasDefault.name}] DEFAULT ${hasDefault.default}`; + + const key = schema !== 'dbo' ? `[${schema}].[${table}]` : `[${table}]`; + + let statement = `ALTER TABLE ${key} ADD [${name}]`; + if (!generated) statement += ` ${column.type}`; + statement += `${identityStatement}${generatedStatement}${notNullStatement}${defaultStatement};`; + + return statement; +}); + +const dropColumn = convertor('drop_column', (st) => { + const { column } = st; + + const key = column.schema !== 'dbo' ? `[${column.schema}].[${column.table}]` : `[${column.table}]`; + return `ALTER TABLE ${key} DROP COLUMN [${st.column.name}];`; +}); + +const renameColumn = convertor('rename_column', (st) => { + const { table: tableFrom, name: columnFrom, schema } = st.from; + + const key = schema !== 'dbo' ? `${schema}.${tableFrom}.${columnFrom}` : `${tableFrom}.${columnFrom}`; + + const { name: columnTo } = st.to; + return `EXEC sp_rename '${key}', [${columnTo}], 'COLUMN';`; +}); + +const alterColumn = convertor('alter_column', (st) => { + const { diff } = st; + + const column = diff.$right; + const notNullStatement = `${column.notNull ? ' NOT NULL' : ''}`; + + const key = column.schema !== 'dbo' ? `[${column.schema}].[${column.table}]` : `[${column.table}]`; + + return `ALTER TABLE ${key} ALTER COLUMN [${column.name}] ${column.type}${notNullStatement};`; +}); + +const recreateColumn = convertor('recreate_column', (st) => { + return [ + dropColumn.convert({ column: st.diff.$left }) as string, + addColumn.convert({ column: st.diff.$right, defaults: [], isPK: false }) as string, + ]; +}); + +const recreateIdentityColumn = convertor('recreate_identity_column', (st) => { + const { column, constraintsToCreate, constraintsToDelete } = st; + + const shouldTransferData = column.identity?.from && Boolean(!column.identity.to); + const statements = []; + + for (const toDelete of constraintsToDelete) { + if (toDelete.entityType === 'fks') statements.push(dropForeignKey.convert({ fk: toDelete }) as string); + if (toDelete.entityType === 'checks') statements.push(dropCheck.convert({ check: toDelete }) as string); + if (toDelete.entityType === 'defaults') statements.push(dropDefault.convert({ default: toDelete }) as string); + if (toDelete.entityType === 'pks') statements.push(dropPK.convert({ pk: toDelete }) as string); + if (toDelete.entityType === 'indexes') statements.push(dropIndex.convert({ index: toDelete }) as string); + if (toDelete.entityType === 'uniques') statements.push(dropUnique.convert({ unique: toDelete }) as string); + } + + const renamedColumnName = `__old_${column.name}`; + statements.push( + renameColumn.convert({ + from: { table: column.table, name: column.name, schema: column.schema }, + to: { name: renamedColumnName }, + } as RenameColumn) as string, + ); + + const defaultsToCreate: DefaultConstraint[] = constraintsToCreate.filter((it) => it.entityType === 'defaults'); + statements.push(addColumn.convert({ column: column.$right, defaults: defaultsToCreate, isPK: false }) as string); + + if (shouldTransferData) { + statements.push( + `INSERT INTO [${column.table}] ([${column.name}]) SELECT [${renamedColumnName}] FROM [${column.table}];`, + ); + } + + statements.push( + dropColumn.convert( + { column: { name: renamedColumnName, schema: column.schema, table: column.table } } as DropColumn, + ) as string, + ); + + for (const toCreate of constraintsToCreate) { + if (toCreate.entityType === 'checks') statements.push(addCheck.convert({ check: toCreate }) as string); + if (toCreate.entityType === 'fks') statements.push(createFK.convert({ fk: toCreate }) as string); + if (toCreate.entityType === 'pks') statements.push(createPK.convert({ pk: toCreate }) as string); + if (toCreate.entityType === 'indexes') statements.push(createIndex.convert({ index: toCreate }) as string); + if (toCreate.entityType === 'uniques') statements.push(addUnique.convert({ unique: toCreate }) as string); + } + + return statements; +}); + +const createIndex = convertor('create_index', (st) => { + const { name, table, columns, isUnique, where, schema } = st.index; + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + + const uniqueString = `[${columns.join('],[')}]`; + + const whereClause = where ? ` WHERE ${where}` : ''; + + const key = schema !== 'dbo' ? `[${schema}].[${table}]` : `[${table}]`; + return `CREATE ${indexPart} [${name}] ON ${key} (${uniqueString})${whereClause};`; +}); + +const dropIndex = convertor('drop_index', (st) => { + const { schema, name, table } = st.index; + + const key = schema !== 'dbo' ? `[${schema}].[${table}]` : `[${table}]`; + return `DROP INDEX [${name}] ON ${key};`; +}); + +const createFK = convertor('create_fk', (st) => { + const { + name, + table, + columns, + tableTo, + columnsTo, + onDelete, + onUpdate, + schema, + } = st.fk; + const onDeleteStatement = onDelete !== 'NO ACTION' ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate !== 'NO ACTION' ? ` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columns.map((it) => `[${it}]`).join(','); + const toColumnsString = columnsTo.map((it) => `[${it}]`).join(','); + + const key = schema !== 'dbo' ? `[${schema}].[${table}]` : `[${table}]`; + return `ALTER TABLE ${key} ADD CONSTRAINT [${name}] FOREIGN KEY (${fromColumnsString}) REFERENCES [${tableTo}](${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; +}); + +const createPK = convertor('create_pk', (st) => { + const { name, schema, table, columns } = st.pk; + + const key = schema !== 'dbo' ? `[${schema}].[${table}]` : `[${table}]`; + return `ALTER TABLE ${key} ADD CONSTRAINT [${name}] PRIMARY KEY ([${columns.join('],[')}]);`; +}); + +const renamePk = convertor('rename_pk', (st) => { + const { name: nameFrom, schema: schemaFrom } = st.from; + const { name: nameTo } = st.to; + + const key = schemaFrom !== 'dbo' ? `${schemaFrom}.${nameFrom}` : `${nameFrom}`; + return `EXEC sp_rename '${key}', [${nameTo}], 'OBJECT';`; +}); + +const renameCheck = convertor('rename_check', (st) => { + const { name: nameFrom, schema: schemaFrom } = st.from; + const { name: nameTo } = st.to; + + const key = schemaFrom !== 'dbo' ? `${schemaFrom}.${nameFrom}` : `${nameFrom}`; + return `EXEC sp_rename '${key}', [${nameTo}], 'OBJECT';`; +}); + +const renameFk = convertor('rename_fk', (st) => { + const { name: nameFrom, schema: schemaFrom } = st.from; + const { name: nameTo } = st.to; + + const key = schemaFrom !== 'dbo' ? `${schemaFrom}.${nameFrom}` : `${nameFrom}`; + return `EXEC sp_rename '${key}', [${nameTo}], 'OBJECT';`; +}); + +const renameIndex = convertor('rename_index', (st) => { + const { name: nameFrom, schema: schemaFrom, table: tableFrom } = st.from; + const { name: nameTo } = st.to; + + const key = schemaFrom !== 'dbo' ? `${schemaFrom}.${tableFrom}.${nameFrom}` : `${tableFrom}.${nameFrom}`; + return `EXEC sp_rename '${key}', [${nameTo}], 'INDEX';`; +}); + +const renameUnique = convertor('rename_unique', (st) => { + const { name: nameFrom, schema: schemaFrom } = st.from; + const { name: nameTo } = st.to; + + const key = schemaFrom !== 'dbo' ? `${schemaFrom}.${nameFrom}` : `${nameFrom}`; + return `EXEC sp_rename '${key}', [${nameTo}], 'OBJECT';`; +}); + +const createCheck = convertor('create_check', (st) => { + const { name, schema, table, value } = st.check; + + const key = schema !== 'dbo' ? `[${schema}].[${table}]` : `[${table}]`; + return `ALTER TABLE ${key} ADD CONSTRAINT [${name}] CHECK (${value});`; +}); + +const dropConstraint = convertor('drop_constraint', (st) => { + const { constraint, table, schema } = st; + + const key = schema !== 'dbo' ? `[${schema}].[${table}]` : `[${table}]`; + return `ALTER TABLE ${key} DROP CONSTRAINT [${constraint}];`; +}); + +const createView = convertor('create_view', (st) => { + const { definition, name, checkOption, encryption, schemaBinding, viewMetadata, schema } = st.view; + + let statement = `CREATE `; + + const key = schema === 'dbo' ? `[${name}]` : `[${schema}].[${name}]`; + statement += `VIEW ${key}`; + + if (encryption || schemaBinding || viewMetadata) { + const options: string[] = []; + statement += `\nWITH`; + + if (encryption) options.push(`ENCRYPTION`); + if (schemaBinding) options.push(`SCHEMABINDING`); + if (viewMetadata) options.push(`VIEW_METADATA`); + + statement += ` ${options.join(', ')}`; + } + statement += ` AS (${definition})`; + statement += checkOption ? `\nWITH CHECK OPTION` : ''; + + statement += ';'; + + return statement; +}); + +const dropView = convertor('drop_view', (st) => { + const { schema, name } = st.view; + const key = schema === 'dbo' ? `[${name}]` : `[${schema}].[${name}]`; + + return `DROP VIEW ${key};`; +}); + +const renameView = convertor('rename_view', (st) => { + const { schema, name } = st.from; + const key = schema === 'dbo' ? `${name}` : `${schema}.${name}`; + + return `EXEC sp_rename '${key}', [${st.to.name}];`; +}); + +const alterView = convertor('alter_view', (st) => { + const { definition, name, checkOption, encryption, schemaBinding, viewMetadata, schema } = st.view; + + const key = schema === 'dbo' ? `[${name}]` : `[${schema}].[${name}]`; + let statement = `ALTER VIEW ${key}`; + + if (encryption || schemaBinding || viewMetadata) { + const options: string[] = []; + statement += `\nWITH`; + + if (encryption) options.push(`ENCRYPTION`); + if (schemaBinding) options.push(`SCHEMABINDING`); + if (viewMetadata) options.push(`VIEW_METADATA`); + + statement += ` ${options.join(', ')}`; + } + statement += ` AS (${definition})`; + statement += checkOption ? `\nWITH CHECK OPTION` : ''; + + statement += ';'; + + return statement; +}); + +const createSchema = convertor('create_schema', (st) => { + return `CREATE SCHEMA [${st.name}];\n`; +}); + +const dropSchema = convertor('drop_schema', (st) => { + return `DROP SCHEMA [${st.name}];\n`; +}); + +const renameSchema = convertor('rename_schema', (_st) => { + return `/** + * ⚠️ Renaming schemas is not supported in SQL Server (MSSQL), + * and therefore is not supported in Drizzle ORM at this time + * + * SQL Server does not provide a built-in command to rename a schema directly. + * Workarounds involve creating a new schema and migrating objects manually + */`; +}); + +const moveTable = convertor('move_table', (st) => { + const { from, name, to } = st; + return `ALTER SCHEMA [${to}] TRANSFER [${from}].[${name}];\n`; +}); + +const moveView = convertor('move_view', (st) => { + const { fromSchema, toSchema, view } = st; + const from = fromSchema === 'dbo' ? `[${view.name}]` : `[${fromSchema}].[${view.name}]`; + + return `ALTER SCHEMA [${toSchema}] TRANSFER ${from};`; +}); + +const addUnique = convertor('add_unique', (st) => { + const { unique } = st; + const tableNameWithSchema = unique.schema !== 'dbo' + ? `[${unique.schema}].[${unique.table}]` + : `[${unique.table}]`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT [${unique.name}] UNIQUE([${unique.columns.join('],[')}]);`; +}); + +const dropPK = convertor('drop_pk', (st) => { + const pk = st.pk; + const key = pk.schema !== 'dbo' + ? `[${pk.schema}].[${pk.table}]` + : `[${pk.table}]`; + + return `ALTER TABLE ${key} DROP CONSTRAINT [${pk.name}];`; +}); + +const addCheck = convertor('add_check', (st) => { + const { check } = st; + const tableNameWithSchema = check.schema !== 'dbo' + ? `[${check.schema}].[${check.table}]` + : `[${check.table}]`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT [${check.name}] CHECK (${check.value});`; +}); + +const dropCheck = convertor('drop_check', (st) => { + const { check } = st; + const tableNameWithSchema = check.schema !== 'dbo' + ? `[${check.schema}].[${check.table}]` + : `[${check.table}]`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT [${check.name}];`; +}); + +const dropUnique = convertor('drop_unique', (st) => { + const { unique } = st; + + const tableNameWithSchema = unique.schema !== 'dbo' + ? `[${unique.schema}].[${unique.table}]` + : `[${unique.table}]`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT [${unique.name}];`; +}); + +const dropForeignKey = convertor('drop_fk', (st) => { + const { schema, table, name } = st.fk; + + const tableNameWithSchema = schema !== 'dbo' + ? `[${schema}].[${table}]` + : `[${table}]`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT [${name}];\n`; +}); + +const addDefault = convertor('create_default', (st) => { + const { schema, table, name, default: tableDefault, column } = st.default; + + const tableNameWithSchema = schema !== 'dbo' + ? `[${schema}].[${table}]` + : `[${table}]`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT [${name}] DEFAULT ${tableDefault} FOR [${column}];`; +}); + +const dropDefault = convertor('drop_default', (st) => { + const { schema, table, name } = st.default; + + const tableNameWithSchema = schema !== 'dbo' + ? `[${schema}].[${table}]` + : `[${table}]`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT [${name}];`; +}); + +const renameDefault = convertor('recreate_default', (st) => { + const { from, to } = st; + + return [dropDefault.convert({ default: from }) as string, addDefault.convert({ default: to }) as string]; +}); + +const convertors = [ + createTable, + dropTable, + renameTable, + addColumn, + dropColumn, + renameColumn, + alterColumn, + recreateColumn, + recreateIdentityColumn, + createIndex, + dropIndex, + createFK, + createPK, + dropPK, + createCheck, + dropConstraint, + createView, + dropView, + renameView, + alterView, + createSchema, + dropSchema, + moveTable, + moveView, + addCheck, + dropCheck, + renameSchema, + addUnique, + renamePk, + renameCheck, + renameFk, + renameIndex, + dropUnique, + dropForeignKey, + renameUnique, + addDefault, + dropDefault, + renameDefault, +]; + +export function fromJson( + statements: JsonStatement[], +) { + const grouped = statements + .map((statement) => { + const filtered = convertors.filter((it) => { + return it.can(statement); + }); + + const convertor = filtered.length === 1 ? filtered[0] : undefined; + if (!convertor) throw new Error(`No convertor for: ${statement.type} statement`); + + const sqlStatements = convertor.convert(statement as any); + const statements = typeof sqlStatements === 'string' ? [sqlStatements] : sqlStatements; + return { jsonStatement: statement, sqlStatements: statements }; + }) + .filter((it) => it !== null); + + const result = { + sqlStatements: grouped.map((it) => it.sqlStatements).flat(), + groupedStatements: grouped, + }; + return result; +} diff --git a/drizzle-kit/src/dialects/mssql/ddl.ts b/drizzle-kit/src/dialects/mssql/ddl.ts new file mode 100644 index 0000000000..0edfbc0d38 --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/ddl.ts @@ -0,0 +1,330 @@ +import { create } from '../dialect'; +import { defaultNameForPK, defaultNameForUnique } from './grammar'; + +export const createDDL = () => { + return create({ + schemas: {}, + tables: { schema: 'required' }, + columns: { + schema: 'required', + table: 'required', + type: 'string', + notNull: 'boolean', + generated: { + type: ['persisted', 'virtual'], + as: 'string', + }, + identity: { + increment: 'number', + seed: 'number', + }, + }, + pks: { + schema: 'required', + table: 'required', + nameExplicit: 'boolean', + columns: 'string[]', + }, + fks: { + schema: 'required', + table: 'required', + columns: 'string[]', + nameExplicit: 'boolean', + schemaTo: 'string', + tableTo: 'string', + columnsTo: 'string[]', + onUpdate: ['NO ACTION', 'CASCADE', 'SET NULL', 'SET DEFAULT'], + onDelete: ['NO ACTION', 'CASCADE', 'SET NULL', 'SET DEFAULT'], + }, + indexes: { + schema: 'required', + table: 'required', + columns: 'string[]', // does not supported indexing expressions + isUnique: 'boolean', + where: 'string?', + }, + uniques: { + schema: 'required', + table: 'required', + nameExplicit: 'boolean', + columns: 'string[]', + }, + checks: { + schema: 'required', + table: 'required', + value: 'string', + }, + defaults: { + schema: 'required', + table: 'required', + column: 'string', + // this field will be required for name preserving + nameExplicit: 'boolean', + default: 'string?', + }, + views: { + schema: 'required', + definition: 'string', + encryption: 'boolean?', + schemaBinding: 'boolean?', + viewMetadata: 'boolean?', + checkOption: 'boolean?', + }, + }); +}; + +export type MssqlDDL = ReturnType; + +export type MssqlEntities = MssqlDDL['_']['types']; +export type MssqlEntity = MssqlEntities[keyof MssqlEntities]; +export type DiffEntities = MssqlDDL['_']['diffs']['alter']; + +export type Schema = MssqlEntities['schemas']; +export type Table = MssqlEntities['tables']; +export type Column = MssqlEntities['columns']; +export type Index = MssqlEntities['indexes']; +export type DefaultConstraint = MssqlEntities['defaults']; +export type UniqueConstraint = MssqlEntities['uniques']; +export type ForeignKey = MssqlEntities['fks']; +export type PrimaryKey = MssqlEntities['pks']; +export type CheckConstraint = MssqlEntities['checks']; +export type View = MssqlEntities['views']; + +export type InterimColumn = Column & { + isPK: boolean; + pkName: string | null; + isUnique: boolean; + uniqueName: string | null; +}; + +export type ViewColumn = { + schema: string; + view: string; + name: string; + type: string; + notNull: boolean; +}; + +export type InterimSchema = { + schemas: Schema[]; + tables: Table[]; + columns: InterimColumn[]; + pks: PrimaryKey[]; + fks: ForeignKey[]; + indexes: Index[]; + checks: CheckConstraint[]; + views: View[]; + viewColumns: ViewColumn[]; + uniques: UniqueConstraint[]; + defaults: DefaultConstraint[]; +}; + +export type TableFull = { + schema: string; + name: string; + columns: Column[]; + uniques: UniqueConstraint[]; + pk: PrimaryKey | null; + fks: ForeignKey[]; + checks: CheckConstraint[]; + indexes: Index[]; + defaults: DefaultConstraint[]; +}; + +export const fullTableFromDDL = (table: Table, ddl: MssqlDDL): TableFull => { + const filter = { schema: table.schema, table: table.name } as const; + const columns = ddl.columns.list(filter); + const pk = ddl.pks.one(filter); + const fks = ddl.fks.list(filter); + const uniques = ddl.uniques.list(filter); + const checks = ddl.checks.list(filter); + const indexes = ddl.indexes.list(filter); + const defaults = ddl.defaults.list(filter); + + return { + ...table, + columns, + pk, + fks, + uniques, + checks, + indexes, + defaults, + }; +}; + +export type SchemaError = { + type: 'table_duplicate'; + name: string; + schema: string; +} | { + type: 'column_duplicate'; + table: string; + name: string; + schema: string; +} | { + type: 'view_name_duplicate'; + schema: string; + name: string; +} | { + type: 'schema_duplicate'; + name: string; +} | { + type: 'index_duplicate'; + schema: string; + table: string; + name: string; +} | { + type: 'index_no_name'; + schema: string; + table: string; + sql: string; +} | { + type: 'constraint_duplicate'; + schema: string; + table: string; + name: string; +}; + +export const interimToDDL = (interim: InterimSchema): { ddl: MssqlDDL; errors: SchemaError[] } => { + const errors = [] as SchemaError[]; + const ddl = createDDL(); + + for (const it of interim.schemas) { + const res = ddl.schemas.push(it); + if (res.status === 'CONFLICT') { + errors.push({ type: 'schema_duplicate', name: it.name }); + } + } + + for (const table of interim.tables) { + const res = ddl.tables.push(table); + if (res.status === 'CONFLICT') { + errors.push({ type: 'table_duplicate', name: table.name, schema: res.data.schema }); + } + } + + for (const column of interim.columns) { + const { isPK: _1, isUnique: _2, pkName: _3, uniqueName: _4, ...rest } = column; + + const res = ddl.columns.push(rest); + if (res.status === 'CONFLICT') { + errors.push({ type: 'column_duplicate', table: column.table, name: column.name, schema: res.data.schema }); + } + } + + for (const index of interim.indexes) { + const isConflictNamePerSchema = ddl.indexes.one({ schema: index.schema, name: index.name }); + + if (isConflictNamePerSchema) { + errors.push({ + type: 'index_duplicate', + schema: index.schema, + table: index.table, + name: index.name, + }); + } + ddl.indexes.push(index); + } + + for (const unique of interim.uniques) { + const isConflictNamePerSchema = ddl.uniques.one({ schema: unique.schema, name: unique.name }); + + if (isConflictNamePerSchema) { + errors.push({ + type: 'constraint_duplicate', + schema: unique.schema, + table: unique.table, + name: unique.name, + }); + } + ddl.uniques.push(unique); + } + + for (const fk of interim.fks) { + const isConflictNamePerSchema = ddl.fks.one({ schema: fk.schema, name: fk.name }); + + if (isConflictNamePerSchema) { + errors.push({ type: 'constraint_duplicate', name: fk.name, table: fk.table, schema: fk.schema }); + } + + ddl.fks.push(fk); + } + + for (const pk of interim.pks) { + const isConflictNamePerSchema = ddl.pks.one({ schema: pk.schema, name: pk.name }); + + if (isConflictNamePerSchema) { + errors.push({ type: 'constraint_duplicate', name: pk.name, table: pk.table, schema: pk.schema }); + } + ddl.pks.push(pk); + } + + for (const column of interim.columns.filter((it) => it.isPK)) { + const name = column.pkName !== null ? column.pkName : defaultNameForPK(column.table); + const exists = ddl.pks.one({ schema: column.schema, table: column.table, name: name }) !== null; + if (exists) continue; + + ddl.pks.push({ + table: column.table, + name, + nameExplicit: column.pkName !== null, + columns: [column.name], + schema: column.schema, + }); + } + + for (const column of interim.columns.filter((it) => it.isUnique)) { + const name = column.uniqueName !== null ? column.uniqueName : defaultNameForUnique(column.table, [column.name]); + const exists = ddl.uniques.one({ schema: column.schema, table: column.table, name: name }) !== null; + if (exists) continue; + + ddl.uniques.push({ + schema: column.schema, + table: column.table, + name, + nameExplicit: column.uniqueName !== null, + columns: [column.name], + }); + } + + for (const columnDefault of interim.defaults) { + const res = ddl.defaults.push(columnDefault); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'constraint_duplicate', + schema: columnDefault.schema, + table: columnDefault.table, + name: columnDefault.name, + }); + } + } + + for (const check of interim.checks) { + const isConflictNamePerSchema = ddl.checks.one({ schema: check.schema, name: check.name }); + + if (isConflictNamePerSchema) { + errors.push({ + type: 'constraint_duplicate', + schema: check.schema, + table: check.table, + name: check.name, + }); + } + + ddl.checks.push(check); + } + + for (const view of interim.views) { + const res = ddl.views.push(view); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'view_name_duplicate', + schema: view.schema, + name: view.name, + }); + } + } + + return { ddl, errors }; +}; diff --git a/drizzle-kit/src/dialects/mssql/diff.ts b/drizzle-kit/src/dialects/mssql/diff.ts new file mode 100644 index 0000000000..bbc5ba94bf --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/diff.ts @@ -0,0 +1,1070 @@ +import { prepareMigrationRenames } from '../../utils'; +import { mockResolver } from '../../utils/mocks'; +import type { Resolver } from '../common'; +import { diff } from '../dialect'; +import { groupDiffs, preserveEntityNames } from '../utils'; +import { fromJson } from './convertor'; +import type { + CheckConstraint, + Column, + DefaultConstraint, + DiffEntities, + ForeignKey, + Index, + MssqlDDL, + MssqlEntities, + PrimaryKey, + Schema, + UniqueConstraint, + View, +} from './ddl'; +import { createDDL, fullTableFromDDL } from './ddl'; +import { typesCommutative } from './grammar'; +import type { JsonStatement } from './statements'; +import { prepareStatement } from './statements'; + +export const ddlDiffDry = async (ddlFrom: MssqlDDL, ddlTo: MssqlDDL, mode: 'default' | 'push') => { + const mocks = new Set(); + return ddlDiff( + ddlFrom, + ddlTo, + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mode, + ); +}; + +export const ddlDiff = async ( + ddl1: MssqlDDL, + ddl2: MssqlDDL, + schemasResolver: Resolver, + tablesResolver: Resolver, + columnsResolver: Resolver, + viewsResolver: Resolver, + uniquesResolver: Resolver, + indexesResolver: Resolver, + checksResolver: Resolver, + pksResolver: Resolver, + fksResolver: Resolver, + defaultsResolver: Resolver, + mode: 'default' | 'push', +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + groupedStatements: { jsonStatement: JsonStatement; sqlStatements: string[] }[]; + renames: string[]; +}> => { + const ddl1Copy = createDDL(); + for (const entity of ddl1.entities.list()) { + ddl1Copy.entities.push(entity); + } + + const schemasDiff = diff(ddl1, ddl2, 'schemas'); + const { + created: createdSchemas, + deleted: deletedSchemas, + renamedOrMoved: renamedSchemas, + } = await schemasResolver({ + created: schemasDiff.filter((it) => it.$diffType === 'create'), + deleted: schemasDiff.filter((it) => it.$diffType === 'drop'), + }); + + for (const rename of renamedSchemas) { + ddl1.entities.update({ + set: { + schema: rename.to.name, + }, + where: { + schema: rename.from.name, + }, + }); + + ddl1.fks.update({ + set: { + schemaTo: rename.to.name, + }, + where: { + schemaTo: rename.from.name, + }, + }); + } + + const createSchemas = createdSchemas.map((it) => prepareStatement('create_schema', it)); + const dropSchemas = deletedSchemas.map((it) => prepareStatement('drop_schema', it)); + const renameSchemas = renamedSchemas.map((it) => prepareStatement('rename_schema', it)); + + const tablesDiff = diff(ddl1, ddl2, 'tables'); + + const { + created: createdTables, + deleted: deletedTables, + renamedOrMoved: renamedOrMovedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.filter((it) => it.$diffType === 'create'), + deleted: tablesDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedTables = renamedOrMovedTables.filter((it) => it.from.name !== it.to.name); + const movedTables = renamedOrMovedTables.filter((it) => it.from.schema !== it.to.schema); + + const pksRenames = [] as { from: PrimaryKey; to: PrimaryKey }[]; + const uniqueRenames = [] as { from: UniqueConstraint; to: UniqueConstraint }[]; + const fksRenames = [] as { from: ForeignKey; to: ForeignKey }[]; + const checkRenames = [] as { from: CheckConstraint; to: CheckConstraint }[]; + + for (const rename of renamedOrMovedTables) { + ddl1.tables.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + + ddl1.fks.update({ + set: { + schemaTo: rename.to.schema, + tableTo: rename.to.name, + }, + where: { + schemaTo: rename.from.schema, + tableTo: rename.from.name, + }, + }); + ddl1.fks.update({ + set: { + schema: rename.to.schema, + table: rename.to.name, + }, + where: { + schema: rename.from.schema, + table: rename.from.name, + }, + }); + + ddl1.entities.update({ + set: { + table: rename.to.name, + schema: rename.to.schema, + }, + where: { + table: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const columnsDiff = diff(ddl1, ddl2, 'columns'); + const columnRenames = [] as { from: Column; to: Column }[]; + const columnsToCreate = [] as Column[]; + const columnsToDelete = [] as Column[]; + + const groupedByTable = groupDiffs(columnsDiff); + + for (let it of groupedByTable) { + const { created, deleted, renamedOrMoved } = await columnsResolver({ + created: it.inserted, + deleted: it.deleted, + }); + + columnsToCreate.push(...created); + columnsToDelete.push(...deleted); + columnRenames.push(...renamedOrMoved); + } + + for (const rename of columnRenames) { + ddl1.columns.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + + ddl1.pks.update({ + set: { + columns: (it) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + }); + + ddl1.fks.update({ + set: { + columns: (it) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + }); + ddl1.fks.update({ + set: { + columnsTo: (it) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schemaTo: rename.from.schema, + tableTo: rename.from.table, + }, + }); + + ddl1.uniques.update({ + set: { + columns: (it) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + }); + + ddl1.defaults.update({ + set: { column: rename.to.name }, + where: { + schema: rename.from.schema, + table: rename.from.table, + column: rename.from.name, + }, + }); + + ddl1.checks.update({ + set: { + value: rename.to.name, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + value: rename.from.name, + }, + }); + } + + preserveEntityNames(ddl1.uniques, ddl2.uniques, mode); + preserveEntityNames(ddl1.fks, ddl2.fks, mode); + preserveEntityNames(ddl1.pks, ddl2.pks, mode); + preserveEntityNames(ddl1.defaults, ddl2.defaults, mode); + + const uniquesDiff = diff(ddl1, ddl2, 'uniques'); + const groupedUniquesDiff = groupDiffs(uniquesDiff); + + const uniqueCreates = [] as UniqueConstraint[]; + const uniqueDeletes = [] as UniqueConstraint[]; + + for (const entry of groupedUniquesDiff) { + const { renamedOrMoved: renamed, created, deleted } = await uniquesResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + uniqueCreates.push(...created); + uniqueDeletes.push(...deleted); + uniqueRenames.push(...renamed); + } + + for (const rename of uniqueRenames) { + ddl1.uniques.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffChecks = diff(ddl1, ddl2, 'checks'); + const groupedChecksDiff = groupDiffs(diffChecks); + const checkCreates = [] as CheckConstraint[]; + const checkDeletes = [] as CheckConstraint[]; + + for (const entry of groupedChecksDiff) { + const { renamedOrMoved, created, deleted } = await checksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + checkCreates.push(...created); + checkDeletes.push(...deleted); + checkRenames.push(...renamedOrMoved); + } + + for (const rename of checkRenames) { + ddl1.checks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffIndexes = diff(ddl1, ddl2, 'indexes'); + const groupedIndexesDiff = groupDiffs(diffIndexes); + const indexesRenames = [] as { from: Index; to: Index }[]; + const indexesCreates = [] as Index[]; + const indexesDeletes = [] as Index[]; + + for (const entry of groupedIndexesDiff) { + const { renamedOrMoved, created, deleted } = await indexesResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + indexesCreates.push(...created); + indexesDeletes.push(...deleted); + indexesRenames.push(...renamedOrMoved); + } + + for (const rename of indexesRenames) { + ddl1.indexes.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffPKs = diff(ddl1, ddl2, 'pks'); + const groupedPKsDiff = groupDiffs(diffPKs); + const pksCreates = [] as PrimaryKey[]; + const pksDeletes = [] as PrimaryKey[]; + + for (const entry of groupedPKsDiff) { + const { renamedOrMoved, created, deleted } = await pksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + pksCreates.push(...created); + pksDeletes.push(...deleted); + pksRenames.push(...renamedOrMoved); + } + + for (const rename of pksRenames) { + ddl1.pks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffFKs = diff(ddl1, ddl2, 'fks'); + const groupedFKsDiff = groupDiffs(diffFKs); + const fksCreates = [] as ForeignKey[]; + const fksDeletes = [] as ForeignKey[]; + + for (const entry of groupedFKsDiff) { + const { renamedOrMoved, created, deleted } = await fksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + fksCreates.push(...created); + fksDeletes.push(...deleted); + fksRenames.push(...renamedOrMoved); + } + + for (const rename of fksRenames) { + ddl1.fks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const viewsDiff = diff(ddl1, ddl2, 'views'); + + const { + created: createdViews, + deleted: deletedViews, + renamedOrMoved: renamedOrMovedViews, + } = await viewsResolver({ + created: viewsDiff.filter((it) => it.$diffType === 'create'), + deleted: viewsDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedViews = renamedOrMovedViews.filter((it) => it.from.schema === it.to.schema); + const movedViews = renamedOrMovedViews.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedViews) { + ddl1.views.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + for (const move of movedViews) { + ddl1.views.update({ + set: { + schema: move.to.schema, + }, + where: { + name: move.from.name, + schema: move.from.schema, + }, + }); + } + + const diffDefaults = diff(ddl1, ddl2, 'defaults'); + const defaultsCreates: DefaultConstraint[] = diffDefaults.filter((it) => it.$diffType === 'create').map((it) => ({ + ...it, + $diffType: undefined, + })); + const defaultsDeletes: DefaultConstraint[] = diffDefaults.filter((it) => it.$diffType === 'drop').map((it) => ({ + ...it, + $diffType: undefined, + })); + + // TODO for now drizzle-orm does not provides passing names for defaults + // for (const entry of groupedDefaultsDiff) { + // const { renamedOrMoved, created, deleted } = await defaultsResolver({ + // created: entry.inserted, + // deleted: entry.deleted, + // }); + + // defaultsCreates.push(...created); + // defaultsDeletes.push(...deleted); + // defaultsRenames.push(...renamedOrMoved); + // } + // for (const rename of defaultsRenames) { + // ddl1.defaults.update({ + // set: { + // name: rename.to.name, + // schema: rename.to.schema, + // }, + // where: { + // name: rename.from.name, + // schema: rename.from.schema, + // }, + // }); + // } + + const alters = diff.alters(ddl1, ddl2); + + const jsonStatements: JsonStatement[] = []; + + /* + with new DDL when table gets created with constraints, etc. + or existing table with constraints and indexes gets deleted, + those entites are treated by diff as newly created or deleted + + we filter them out, because we either create them on table creation + or they get automatically deleted when table is deleted + */ + const tablesFilter = (type: 'deleted' | 'created') => { + return (it: { schema: string; table: string }) => { + if (type === 'created') { + return !createdTables.some((t) => t.schema === it.schema && t.name === it.table); + } else { + return !deletedTables.some((t) => t.schema === it.schema && t.name === it.table); + } + }; + }; + + const columnsFilter = (_type: 'added') => { + return (it: { schema: string; table: string; column: string }) => { + return !columnsToCreate.some((t) => t.schema === it.schema && t.table === it.table && t.name === it.column); + }; + }; + + const createTables = createdTables.map((it) => + prepareStatement('create_table', { table: fullTableFromDDL(it, ddl2) }) + ); + + const jsonDropTables = deletedTables.map((it) => + prepareStatement('drop_table', { table: fullTableFromDDL(it, ddl2) }) + ); + const jsonRenameTables = renamedTables.map((it) => + prepareStatement('rename_table', { + schema: it.from.schema, + from: it.from.name, + to: it.to.name, + }) + ); + + const jsonRenameColumnsStatements = columnRenames.map((it) => prepareStatement('rename_column', it)); + const jsonDropColumnsStatemets = columnsToDelete.filter(tablesFilter('deleted')).map((it) => + prepareStatement('drop_column', { column: it }) + ); + const jsonAddColumnsStatemets = columnsToCreate.filter(tablesFilter('created')).map((it) => { + const isPK = ddl2.pks.one({ schema: it.schema, table: it.table, columns: { CONTAINS: it.name } }) !== null; + return prepareStatement('add_column', { + column: it, + defaults: ddl2.defaults.list(), + isPK, + }); + }); + const columnAlters = alters.filter((it) => it.entityType === 'columns').filter((it) => Object.keys(it).length > 5); // $difftype, entitytype, schema, table, name + + const columnsToRecreate = columnAlters.filter((it) => it.generated).filter((it) => { + return !(mode === 'push' && it.generated && it.generated.from && it.generated.to + && it.generated.from.as !== it.generated.to.as && it.generated.from.type === it.generated.to.type); + }); + + const jsonRecreateColumns = columnsToRecreate.map((it) => + prepareStatement('recreate_column', { + diff: it, + }) + ); + + // identity alters are not allowed, only recreate + const jsonAlterColumns = columnAlters.filter((it) => !(it.generated) && !(it.identity)).filter((it) => { + if (it.notNull && (it.$right.generated || it.$right.identity)) { + delete it.notNull; + } + + if (it.type && typesCommutative(it.type.from, it.type.to, mode)) { + delete it.type; + } + + // const pkIn2 = ddl2.pks.one({ schema: it.schema, table: it.table, columns: { CONTAINS: it.name } }); + // When adding primary key to column it is needed to add not null first + // if (it.notNull && pkIn2) { + // delete it.notNull; + // } + + // const pkIn1 = ddl1.pks.one({ schema: it.schema, table: it.table, columns: { CONTAINS: it.name } }); + // if (it.notNull && it.notNull.from && pkIn1 && !pkIn2) { + // delete it.notNull; + // } + + if ((it.$right.generated || it.$left.generated) && it.$right.type !== it.$left.type) { + delete it.type; + } + + return ddl2.columns.hasDiff(it); + }).map( + (it) => { + return prepareStatement('alter_column', { + diff: it, + }); + }, + ); + + const jsonSetTableSchemas = movedTables.map((it) => + prepareStatement('move_table', { + name: it.to.name, // raname of table comes first + from: it.from.schema, + to: it.to.schema, + }) + ); + + const jsonRecreateIdentityColumns = columnAlters.filter((it) => it.identity).map((column) => { + const checksToCreate = ddl2.checks.list({ + schema: column.schema, + table: column.table, + }); + const uniquesToCreate = ddl2.uniques.list({ + schema: column.schema, + table: column.table, + columns: { + CONTAINS: column.name, + }, + }); + const pksToCreate = ddl2.pks.list({ + schema: column.schema, + table: column.table, + columns: { + CONTAINS: column.name, + }, + }); + const defToCreate = ddl2.defaults.list({ + schema: column.schema, + table: column.table, + column: column.name, + }); + const fk1ToCreate = ddl2.fks.list({ + schema: column.schema, + table: column.table, + columns: { CONTAINS: column.name }, + }); + const fk2ToCreate = ddl2.fks.list({ + schemaTo: column.schema, + tableTo: column.table, + columnsTo: { CONTAINS: column.name }, + }); + const indexesToCreate = ddl2.indexes.list({ + schema: column.schema, + table: column.table, + columns: { + CONTAINS: column.name, + }, + }); + + const checksToDelete = ddl1.checks.list({ + schema: column.schema, + table: column.table, + }); + const uniquesToDelete = ddl1.uniques.list({ + schema: column.schema, + table: column.table, + columns: { + CONTAINS: column.name, + }, + }); + const pksToDelete = ddl1.pks.list({ + schema: column.schema, + table: column.table, + columns: { + CONTAINS: column.name, + }, + }); + const defToDelete = ddl1.defaults.list({ + schema: column.schema, + table: column.table, + column: column.name, + }); + const fk1ToDelete = ddl1.fks.list({ + schema: column.schema, + table: column.table, + columns: { CONTAINS: column.name }, + }); + const fk2ToDelete = ddl1.fks.list({ + schemaTo: column.schema, + tableTo: column.table, + columnsTo: { CONTAINS: column.name }, + }); + const indexesToDelete = ddl1.indexes.list({ + schema: column.schema, + table: column.table, + columns: { + CONTAINS: column.name, + }, + }); + + return prepareStatement('recreate_identity_column', { + column: column, + constraintsToCreate: [ + ...checksToCreate, + ...uniquesToCreate, + ...pksToCreate, + ...defToCreate, + ...fk1ToCreate, + ...fk2ToCreate, + ...indexesToCreate, + ], + constraintsToDelete: [ + ...checksToDelete, + ...fk1ToDelete, + ...fk2ToDelete, + ...uniquesToDelete, + ...pksToDelete, + ...defToDelete, + ...indexesToDelete, + ], + defaults: ddl2.defaults.list(), + }); + }); + + // filter identity + const checkIdentityFilter = (type: 'created' | 'deleted') => { + return (it: CheckConstraint | DiffEntities['checks']) => { + return !jsonRecreateIdentityColumns.some((column) => { + const constraints = type === 'created' ? column.constraintsToCreate : column.constraintsToDelete; + + return constraints.some((constraint) => + constraint.entityType === 'checks' + && constraint.name === it.name + && constraint.table === it.table + && constraint.schema === it.schema + ); + }); + }; + }; + const jsonCreatedCheckConstraints = checkCreates.filter(tablesFilter('created')).filter( + checkIdentityFilter('created'), + ).map(( + it, + ) => prepareStatement('add_check', { check: it })); + const jsonDeletedCheckConstraints = checkDeletes.filter(tablesFilter('deleted')).filter( + checkIdentityFilter('deleted'), + ).map(( + it, + ) => prepareStatement('drop_check', { check: it })); + const jsonRenamedCheckConstraints = checkRenames.map((it) => + prepareStatement('rename_check', { from: it.from, to: it.to }) + ); + + const filteredChecksAlters = alters.filter((it) => it.entityType === 'checks').filter( + (it): it is DiffEntities['checks'] => { + if (it.entityType !== 'checks') return false; + + if (it.value && mode === 'push') { + delete it.value; + } + + return ddl2.checks.hasDiff(it); + }, + ); + + const alteredChecks = filteredChecksAlters.filter(checkIdentityFilter('created')).filter( + checkIdentityFilter('deleted'), + ); + alteredChecks.forEach((it) => { + jsonCreatedCheckConstraints.push(prepareStatement('add_check', { check: it.$right })); + jsonDeletedCheckConstraints.push(prepareStatement('drop_check', { check: it.$left })); + }); + + // filter identity + const uniquesIdentityFilter = (type: 'created' | 'deleted') => { + return (it: UniqueConstraint) => { + return !jsonRecreateIdentityColumns.some((column) => { + const constraints = type === 'created' ? column.constraintsToCreate : column.constraintsToDelete; + + return constraints.some((constraint) => + constraint.entityType === 'uniques' + && constraint.name === it.name + && constraint.table === it.table + && constraint.schema === it.schema + ); + }); + }; + }; + const jsonAddedUniqueConstraints = uniqueCreates.filter(tablesFilter('created')).filter( + uniquesIdentityFilter('created'), + ).map((it) => prepareStatement('add_unique', { unique: it })); + const jsonDeletedUniqueConstraints = uniqueDeletes.filter(tablesFilter('deleted')).filter( + uniquesIdentityFilter('deleted'), + ).map((it) => { + return prepareStatement('drop_unique', { unique: it }); + }); + const jsonRenameUniqueConstraints = uniqueRenames.map((it) => + prepareStatement('rename_unique', { from: it.from, to: it.to }) + ); + + // filter identity + const primaryKeysIdentityFilter = (type: 'created' | 'deleted') => { + return (it: PrimaryKey | DiffEntities['pks']) => { + return !jsonRecreateIdentityColumns.some((column) => { + const constraints = type === 'created' ? column.constraintsToCreate : column.constraintsToDelete; + + return constraints.some((constraint) => + constraint.entityType === 'pks' + && constraint.name === it.name + && constraint.table === it.table + && constraint.schema === it.schema + ); + }); + }; + }; + const jsonAddPrimaryKeys = pksCreates.filter(tablesFilter('created')).filter(primaryKeysIdentityFilter('created')) + .map((it) => prepareStatement('create_pk', { pk: it })); + const jsonDropPrimaryKeys = pksDeletes.filter(tablesFilter('deleted')).filter(primaryKeysIdentityFilter('deleted')) + .map((it) => prepareStatement('drop_pk', { pk: it })); + const jsonRenamePrimaryKeys = pksRenames.map((it) => prepareStatement('rename_pk', { from: it.from, to: it.to })); + const alteredPKs = alters.filter((it) => it.entityType === 'pks').filter((it) => { + return !!it.columns; + }); + alteredPKs.filter(primaryKeysIdentityFilter('deleted')).filter(primaryKeysIdentityFilter('deleted')).forEach((it) => { + jsonAddPrimaryKeys.push({ pk: it.$right, type: 'create_pk' }); + jsonDropPrimaryKeys.push({ pk: it.$left, type: 'drop_pk' }); + }); + + // filter identity + const defaultsIdentityFilter = (type: 'created' | 'deleted') => { + return (it: DefaultConstraint | DiffEntities['defaults']) => { + return !jsonRecreateIdentityColumns.some((column) => { + const constraints = type === 'created' ? column.constraintsToCreate : column.constraintsToDelete; + + return constraints.some((constraint) => + constraint.entityType === 'defaults' + && constraint.name === it.name + && constraint.table === it.table + && constraint.schema === it.schema + ); + }); + }; + }; + const jsonCreateDefaults = defaultsCreates.filter(tablesFilter('created')) + .filter(columnsFilter('added')) + .filter( + defaultsIdentityFilter('created'), + ) + .map((defaultValue) => + prepareStatement('create_default', { + default: defaultValue, + }) + ); + const jsonDropDefaults = defaultsDeletes.filter(tablesFilter('deleted')) + .filter(defaultsIdentityFilter('deleted')) + .map((defaultValue) => prepareStatement('drop_default', { default: defaultValue })); + const alteredDefaults = alters.filter((it) => it.entityType === 'defaults') + .filter((it) => { + if (it.nameExplicit) { + delete it.nameExplicit; + } + + if (it.default) { + let deleteDefault = false; + deleteDefault ||= it.default.from === it.default.to; + + const column = ddl2.columns.one({ name: it.column?.to, schema: it.schema, table: it.table })!; + const numbers = ['bigint', 'decimal', 'numeric', 'real', 'float']; + + // When user defined value in drizzle sql that is bigger than `max mssql integer` it will be stored with dot + // 1. === 1 (same values in mssql) + // For commutativity replace all this + // For .default this will be handled automatically via introspection, but this is for drizzlesql cases + if (numbers.find((it) => column.type.startsWith(it)) && it.default.from && it.default.to) { + it.default.from = it.default.from.replace('.)', ')').replace(".'", "'"); + it.default.to = it.default.to.replace('.)', ')').replace(".'", "'"); + deleteDefault ||= it.default.from === it.default.to; + } + + // any literal number from drizzle sql is parsed as (), not (()) as from .default + // this will cause diff, but still (10) === ((10)) + deleteDefault ||= it.default.from === `(${it.default.to})`; // for drizzle sql numbers: () === (()) + deleteDefault ||= it.default.to === `(${it.default.from})`; // for drizzle sql numbers: () === (()) + + if (deleteDefault) { + delete it.default; + } + } + + return ddl2.defaults.hasDiff(it); + }) + .filter(defaultsIdentityFilter('created')) + .filter(defaultsIdentityFilter('deleted')); + const jsonRecreatedDefaults = alteredDefaults.map((it) => + prepareStatement('recreate_default', { + from: it.$left, + to: it.$right, + }) + ); + + // filter identity + const fksIdentityFilter = (type: 'created' | 'deleted') => { + return (it: ForeignKey | DiffEntities['fks']) => { + return !jsonRecreateIdentityColumns.some((column) => { + const constraints = type === 'created' ? column.constraintsToCreate : column.constraintsToDelete; + + return constraints.some((constraint) => + constraint.entityType === 'fks' + && constraint.name === it.name + && ((constraint.table === it.table && constraint.schema === it.schema) + || (constraint.schemaTo === it.schemaTo && it.tableTo === constraint.tableTo)) + ); + }); + }; + }; + const jsonCreateReferences = fksCreates.filter(fksIdentityFilter('created')).map(( + it, + ) => prepareStatement('create_fk', { fk: it })); + + const jsonDropReferences = fksDeletes.filter((x) => { + const fromDeletedTable = ddl2.tables.one({ schema: x.schema, name: x.table }) === null; + const toDeletedTable = (x.schema !== x.schemaTo + || x.tableTo !== x.table) && ddl2.tables.one({ schema: x.schemaTo, name: x.tableTo }) === null; + if (fromDeletedTable && !toDeletedTable) return false; + return true; + }).filter(fksIdentityFilter('deleted')).map((it) => prepareStatement('drop_fk', { fk: it })); + + const jsonRenameReferences = fksRenames.map((it) => + prepareStatement('rename_fk', { + from: it.from, + to: it.to, + }) + ); + alters.filter((it) => it.entityType === 'fks').filter((x) => { + if ( + x.nameExplicit + && ((mode === 'push' && x.nameExplicit.from && !x.nameExplicit.to) + || x.nameExplicit.to && !x.nameExplicit.from) + ) { + delete x.nameExplicit; + } + + return ddl2.fks.hasDiff(x); + }).filter(fksIdentityFilter('created')).filter( + fksIdentityFilter('deleted'), + ).forEach((it) => { + jsonDropReferences.push(prepareStatement('drop_fk', { fk: it.$left })); + jsonCreateReferences.push(prepareStatement('create_fk', { fk: it.$right })); + }); + + // filter identity + const indexesIdentityFilter = (type: 'created' | 'deleted') => { + return (it: Index | DiffEntities['indexes']) => { + return !jsonRecreateIdentityColumns.some((column) => { + const constraints = type === 'created' ? column.constraintsToCreate : column.constraintsToDelete; + + return constraints.some((constraint) => + constraint.entityType === 'indexes' + && constraint.name === it.name + && constraint.table === it.table + && constraint.schema === it.schema + ); + }); + }; + }; + const jsonCreateIndexes = indexesCreates.filter(indexesIdentityFilter('created')).map((index) => + prepareStatement('create_index', { index }) + ); + const jsonDropIndexes = indexesDeletes.filter(indexesIdentityFilter('deleted')).filter(tablesFilter('deleted')).map(( + index, + ) => prepareStatement('drop_index', { index })); + const jsonRenameIndex = indexesRenames.map((it) => prepareStatement('rename_index', { from: it.from, to: it.to })); + for ( + const idx of alters.filter((it) => it.entityType === 'indexes').filter(indexesIdentityFilter('created')).filter( + indexesIdentityFilter('deleted'), + ) + ) { + const forWhere = !!idx.where && (idx.where.from !== null && idx.where.to !== null ? mode !== 'push' : true); + const forColumns = !!idx.columns && (idx.columns.from.length === idx.columns.to.length ? mode !== 'push' : true); + + // TODO recheck this + if (idx.isUnique || forColumns || forWhere) { + const index = ddl2.indexes.one({ schema: idx.schema, table: idx.table, name: idx.name })!; + jsonDropIndexes.push(prepareStatement('drop_index', { index })); + jsonCreateIndexes.push(prepareStatement('create_index', { index })); + } + } + + const createViews = createdViews.map((it) => prepareStatement('create_view', { view: it })); + + const jsonDropViews = deletedViews.map((it) => prepareStatement('drop_view', { view: it })); + + const jsonRenameViews = renamedViews.map((it) => prepareStatement('rename_view', it)); + + const jsonMoveViews = movedViews.map((it) => + prepareStatement('move_view', { fromSchema: it.from.schema, toSchema: it.to.schema, view: it.to }) + ); + + const filteredViewAlters = alters.filter((it): it is DiffEntities['views'] => { + if (it.entityType !== 'views') return false; + + if (it.definition && mode === 'push' && !it.schemaBinding) { + delete it.definition; + } + + return ddl2.views.hasDiff(it); + }); + const jsonAlterViews = filteredViewAlters.map((it) => { + return prepareStatement('alter_view', { + diff: it, + view: ddl2.views.one({ schema: it.schema, name: it.name })!, + }); + }); + + jsonStatements.push(...createSchemas); + jsonStatements.push(...renameSchemas); + + jsonStatements.push(...createTables); + + jsonStatements.push(...jsonDropViews); + jsonStatements.push(...jsonRenameViews); + jsonStatements.push(...jsonMoveViews); + jsonStatements.push(...jsonAlterViews); + jsonStatements.push(...jsonRecreatedDefaults); + + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonDropReferences); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonSetTableSchemas); + + jsonStatements.push(...jsonDeletedCheckConstraints); // should be before renaming column + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...jsonDeletedUniqueConstraints); + jsonStatements.push(...jsonDropDefaults); + + // Will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonDropIndexes); + jsonStatements.push(...jsonDropPrimaryKeys); + + jsonStatements.push(...jsonAddColumnsStatemets); + jsonStatements.push(...jsonRecreateColumns); + jsonStatements.push(...jsonRecreateIdentityColumns); + jsonStatements.push(...jsonAlterColumns); + jsonStatements.push(...jsonAddPrimaryKeys); + jsonStatements.push(...jsonRenamePrimaryKeys); + + jsonStatements.push(...jsonCreateReferences); + jsonStatements.push(...jsonCreateDefaults); + jsonStatements.push(...jsonCreateIndexes); + jsonStatements.push(...jsonRenameIndex); + + jsonStatements.push(...jsonDropColumnsStatemets); + + jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...jsonCreatedCheckConstraints); + jsonStatements.push(...jsonRenamedCheckConstraints); + jsonStatements.push(...jsonRenameUniqueConstraints); + jsonStatements.push(...jsonRenameReferences); + // jsonStatements.push(...jsonRenameDefaults); + + jsonStatements.push(...createViews); + + jsonStatements.push(...dropSchemas); + + const { groupedStatements, sqlStatements } = fromJson(jsonStatements); + + const renames = prepareMigrationRenames([ + ...renameSchemas, + ...renamedOrMovedTables, + ...columnRenames, + ...uniqueRenames, + ...checkRenames, + ...indexesRenames, + ...pksRenames, + ...fksRenames, + ...renamedOrMovedViews, + ]); + + return { + statements: jsonStatements, + sqlStatements, + groupedStatements: groupedStatements, + renames: renames, + }; +}; diff --git a/drizzle-kit/src/dialects/mssql/drizzle.ts b/drizzle-kit/src/dialects/mssql/drizzle.ts new file mode 100644 index 0000000000..cb22bc2a1f --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/drizzle.ts @@ -0,0 +1,409 @@ +import type { Casing } from 'drizzle-orm'; +import { getTableName, is, SQL } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { AnyMsSqlColumn, AnyMsSqlTable } from 'drizzle-orm/mssql-core'; +import { + getTableConfig, + getViewConfig, + MsSqlColumn, + MsSqlDialect, + MsSqlSchema, + MsSqlTable, + MsSqlView, +} from 'drizzle-orm/mssql-core'; +import type { CasingType } from 'src/cli/validations/common'; +import { safeRegister } from 'src/utils/utils-node'; +import { getColumnCasing, sqlToStr } from '../drizzle'; +import type { EntityFilter } from '../pull-utils'; +import type { DefaultConstraint, InterimSchema, MssqlEntities, Schema, SchemaError } from './ddl'; +import { defaultNameForDefault, defaultNameForFK, defaultNameForPK, defaultNameForUnique, typeFor } from './grammar'; + +export const upper = (value: T | undefined): Uppercase | null => { + if (!value) return null; + return value.toUpperCase() as Uppercase; +}; + +export const defaultFromColumn = ( + column: AnyMsSqlColumn, + casing?: Casing, +): DefaultConstraint['default'] | null => { + if (typeof column.default === 'undefined') return null; + const def = column.default; + + const sqlTypeLowered = column.getSQLType().toLowerCase(); + + if (is(def, SQL)) { + // extra wrapping + const str = sqlToStr(def, casing); + if (!str.startsWith('(')) return `(${str})`; + + return str; + } + + const grammarType = typeFor(sqlTypeLowered); + if (grammarType) return grammarType.defaultFromDrizzle(def); + + throw new Error(`unexpected default: ${column.getSQLType().toLowerCase()} ${column.default}`); +}; + +export const fromDrizzleSchema = ( + schema: { + schemas: MsSqlSchema[]; + tables: AnyMsSqlTable[]; + views: MsSqlView[]; + }, + casing: CasingType | undefined, + filter: EntityFilter, +): { schema: InterimSchema; errors: SchemaError[] } => { + const dialect = new MsSqlDialect({ casing }); + const errors: SchemaError[] = []; + + const schemas = schema.schemas + .filter((x) => { + return !x.isExisting && x.schemaName !== 'dbo' && filter({ type: 'schema', name: x.schemaName }); + }) + .map((it) => ({ + entityType: 'schemas', + name: it.schemaName, + })); + + const tableConfigPairs = schema.tables.map((it) => { + return { config: getTableConfig(it), table: it }; + }); + + const tables = tableConfigPairs.map((it) => { + const config = it.config; + + return { + entityType: 'tables', + schema: config.schema ?? 'dbo', + name: config.name, + } satisfies MssqlEntities['tables']; + }); + + const result: InterimSchema = { + schemas: schemas, + tables: tables, + columns: [], + pks: [], + fks: [], + indexes: [], + checks: [], + views: [], + viewColumns: [], + uniques: [], + defaults: [], + }; + + for (const { config } of tableConfigPairs) { + const { + name: tableName, + columns, + indexes, + foreignKeys, + schema: drizzleSchema, + checks, + primaryKeys, + uniqueConstraints, + } = config; + + const schema = drizzleSchema || 'dbo'; + if (!filter({ type: 'table', schema, name: tableName })) { + continue; + } + + for (const pk of primaryKeys) { + const columnNames = pk.columns.map((c: any) => getColumnCasing(c, casing)); + + const name = pk.name || defaultNameForPK(tableName); + + result.pks.push({ + entityType: 'pks', + table: tableName, + schema: schema, + name: name, + nameExplicit: pk.isNameExplicit, + columns: columnNames, + }); + } + + for (const column of columns) { + const columnName = getColumnCasing(column, casing); + + const isPk = result.pks.find((it) => it.columns.includes(columnName)); + const notNull: boolean = column.notNull || Boolean(column.generated) || Boolean(isPk); + + // @ts-expect-error + // Drizzle ORM gives this value in runtime, but not in types. + // After sync with Andrew, we decided to fix this with Dan later + // That's due to architecture problems we have in columns and complex abstraction we should avoid + // for now we are sure this value is here + // If it's undefined - than users didn't provide any identity + // If it's an object with seed/increment and a) both are undefined - use default identity startegy + // b) some of them have values - use them + // Note: you can't have only one value. Either both are undefined or both are defined + const identity = column.identity as { seed: number; increment: number } | undefined; + + const generated = column.generated + ? { + as: is(column.generated.as, SQL) + ? dialect.sqlToQuery(column.generated.as as SQL).sql + : typeof column.generated.as === 'function' + ? dialect.sqlToQuery(column.generated.as() as SQL).sql + : `${column.generated.as}`, + type: column.generated.mode ?? 'virtual', + } + : null; + + result.columns.push({ + schema, + entityType: 'columns', + table: tableName, + name: columnName, + type: column.getSQLType(), + pkName: null, + notNull: notNull, + // @ts-expect-error + // TODO update description + // 'virtual' | 'stored' for all dialects + // 'virtual' | 'persisted' for mssql + // We should remove this option from common Column and store it per dialect common + // Was discussed with Andrew + // Type erorr because of common in drizzle orm for all dialects (includes virtual' | 'stored' | 'persisted') + generated, + identity: identity ?? null, + isPK: column.primary, + isUnique: column.isUnique, + uniqueName: column.uniqueName ?? null, + }); + + if (typeof column.default !== 'undefined') { + result.defaults.push({ + entityType: 'defaults', + name: defaultNameForDefault(tableName, columnName), + nameExplicit: false, + schema, + column: columnName, + table: tableName, + default: defaultFromColumn(column, casing), + }); + } + } + + for (const unique of uniqueConstraints) { + const columns = unique.columns.map((c) => { + return getColumnCasing(c, casing); + }); + + const name = unique.name ?? defaultNameForUnique(tableName, unique.columns.map((c) => c.name)); + + result.uniques.push({ + entityType: 'uniques', + table: tableName, + name: name, + schema: schema, + nameExplicit: unique.isNameExplicit, + columns: columns, + }); + } + + for (const fk of foreignKeys) { + const reference = fk.reference(); + + const referenceFT = reference.foreignTable; + + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + const tableTo = getTableName(referenceFT); + + const originalColumnsFrom = reference.columns.map((it) => it.name); + const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); + const originalColumnsTo = reference.foreignColumns.map((it) => it.name); + const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); + + let name = fk.getName() || defaultNameForFK(tableName, columnsFrom, tableTo, columnsTo); + if (casing !== undefined) { + for (let i = 0; i < originalColumnsFrom.length; i++) { + name = name.replace(originalColumnsFrom[i], columnsFrom[i]); + } + for (let i = 0; i < originalColumnsTo.length; i++) { + name = name.replace(originalColumnsTo[i], columnsTo[i]); + } + } + + result.fks.push({ + entityType: 'fks', + table: tableName, + name, + schema, + columns: columnsFrom, + tableTo, + columnsTo, + nameExplicit: fk.isNameExplicit(), + schemaTo: getTableConfig(fk.reference().foreignTable).schema || 'dbo', + onUpdate: upper(fk.onUpdate) ?? 'NO ACTION', + onDelete: upper(fk.onDelete) ?? 'NO ACTION', + }); + } + + for (const index of indexes) { + const columns = index.config.columns; + const name = index.config.name; + + for (const column of columns) { + if (is(column, SQL) && !index.config.name) { + errors.push({ + type: 'index_no_name', + schema: schema, + table: getTableName(index.config.table), + sql: dialect.sqlToQuery(column).sql, + }); + continue; + } + } + + let where = index.config.where ? dialect.sqlToQuery(index.config.where).sql : ''; + where = where === 'true' ? '' : where; + + result.indexes.push({ + entityType: 'indexes', + table: tableName, + name, + schema, + columns: columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + return sql; + } else { + return getColumnCasing(it, casing); + } + }), + isUnique: index.config.unique ?? false, + where: where ? where : null, + }); + } + + for (const check of checks) { + const name = check.name; + const value = check.value; + + result.checks.push({ + entityType: 'checks', + table: tableName, + schema, + name, + value: dialect.sqlToQuery(value, 'mssql-check').sql, + }); + } + } + + for (const view of schema.views) { + const cfg = getViewConfig(view); + const { + isExisting, + name, + query, + schema: drizzleSchema, + selectedFields, + checkOption, + encryption, + schemaBinding, + viewMetadata, + } = cfg; + + if (isExisting) continue; + if (!filter({ type: 'table', schema: drizzleSchema ?? 'dbo', name })) continue; + + const schema = drizzleSchema ?? 'dbo'; + + for (const key in selectedFields) { + if (is(selectedFields[key], MsSqlColumn)) { + const column = selectedFields[key]; + const notNull: boolean = column.notNull; + + result.viewColumns.push({ + view: name, + schema, + name: column.name, + type: column.getSQLType(), + notNull: notNull, + }); + } + } + + result.views.push({ + entityType: 'views', + name, + definition: query + ? dialect.sqlToQuery(query, schemaBinding ? 'mssql-view-with-schemabinding' : undefined).sql + : '', + checkOption: checkOption ?? false, // defaut + encryption: encryption ?? false, // default + schema, + schemaBinding: schemaBinding ?? false, // default + viewMetadata: viewMetadata ?? false, // default + }); + } + + return { schema: result, errors }; +}; + +export const prepareFromSchemaFiles = async (imports: string[]) => { + const tables: AnyMsSqlTable[] = []; + const schemas: MsSqlSchema[] = []; + const views: MsSqlView[] = []; + const relations: Relations[] = []; + + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const prepared = fromExport(i0); + + tables.push(...prepared.tables); + schemas.push(...prepared.schemas); + views.push(...prepared.views); + relations.push(...prepared.relations); + } + }); + + return { + tables, + schemas, + views, + relations, + }; +}; + +const fromExport = (exports: Record) => { + const tables: AnyMsSqlTable[] = []; + const schemas: MsSqlSchema[] = []; + const views: MsSqlView[] = []; + const relations: Relations[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (is(t, MsSqlTable)) { + tables.push(t); + } + + if (is(t, MsSqlSchema)) { + schemas.push(t); + } + + if (is(t, MsSqlView)) { + views.push(t); + } + + if (is(t, Relations)) { + relations.push(t); + } + }); + + return { + tables, + schemas, + views, + relations, + }; +}; diff --git a/drizzle-kit/src/dialects/mssql/grammar.ts b/drizzle-kit/src/dialects/mssql/grammar.ts new file mode 100644 index 0000000000..8b0f9ff0d8 --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/grammar.ts @@ -0,0 +1,778 @@ +import { assertUnreachable, trimChar } from '../../utils'; +import { parse, stringify } from '../../utils/when-json-met-bigint'; +import { hash } from '../common'; +import { escapeForSqlDefault, escapeForTsLiteral, parseParams, unescapeFromSqlDefault } from '../utils'; +import type { DefaultConstraint, MssqlEntities } from './ddl'; +import type { Import } from './typescript'; + +const getDefaultOptions = (x: keyof typeof defaults.options): string | null => { + return defaults.options[x as keyof typeof defaults.options] + ? Object.values(defaults.options[x as keyof typeof defaults.options]).join(',') + : null; +}; +const getFloatPrecisionFrom = (x: number) => { + return 1 <= x && x <= 24 ? 24 : 25 <= x && x <= 53 ? 53 : x; +}; +export const defaults = { + options: { + numeric: { + precision: 18, + scale: 0, + }, + decimal: { + precision: 18, + scale: 0, + }, + time: { + precision: 7, + }, + float: { + precision: 53, + }, + varchar: { + length: 1, + }, + char: { + length: 1, + }, + nvarchar: { + length: 1, + }, + nchar: { + length: 1, + }, + datetime2: { + precision: 7, + }, + datetimeoffset: { + precision: 7, + }, + binary: { + length: 1, + }, + varbinary: { + length: 1, + }, + }, + max_int_value: 2147483647, + min_int_value: -2147483648, +} as const; + +export const defaultNameForPK = (table: string) => { + const desired = `${table}_pkey`; + const res = desired.length > 128 + ? `${hash(desired)}_pkey` // 1/~3e21 collision chance within single schema, it's fine + : desired; + return res; +}; + +export const defaultNameForUnique = (table: string, column: string[]) => { + const desired = `${table}_${column.join('_')}_key`; + const res = desired.length > 128 + ? table.length < 128 - 18 // _{hash(12)}_key + ? `${table}_${hash(desired)}_key` + : `${hash(desired)}_key` // 1/~3e21 collision chance within single schema, it's fine + : desired; + return res; +}; + +export const defaultNameForFK = (table: string, columns: string[], tableTo: string, columnsTo: string[]) => { + const desired = `${table}_${columns.join('_')}_${tableTo}_${columnsTo.join('_')}_fk`; + const res = desired.length > 128 + ? table.length < 128 - 18 // _{hash(12)}_fkey + ? `${table}_${hash(desired)}_fk` + : `${hash(desired)}_fk` // 1/~3e21 collision chance within single schema, it's fine + : desired; + return res; +}; + +export const defaultNameForDefault = (table: string, column: string) => { + const desired = `${table}_${column}_default`; + const res = desired.length > 128 + ? table.length < 128 - 18 // _{hash(12)}_default + ? `${table}_${hash(desired)}__default` + : `${hash(desired)}__default` // 1/~3e21 collision chance within single schema, it's fine + : desired; + return res; +}; + +export type OnAction = MssqlEntities['fks']['onUpdate']; +export const parseFkAction = (type: string): OnAction => { + switch (type) { + case 'NO_ACTION': + return 'NO ACTION'; + case 'SET_NULL': + return 'SET NULL'; + case 'CASCADE': + return 'CASCADE'; + case 'SET_DEFAULT': + return 'SET DEFAULT'; + default: + throw new Error(`Unknown foreign key type: ${type}`); + } +}; + +const viewAsStatementRegex = + /\bAS\b\s*\(?\s*(WITH[\s\S]+?SELECT[\s\S]*?|SELECT[\s\S]*?)\)?(?=\s+WITH CHECK OPTION\b|\s*;?$)/i; +export const parseViewSQL = (sql: string | null): string | null => { + if (!sql) return ''; // this means that used is_encrypted + + const match = sql.match(viewAsStatementRegex); + return match ? match[1] : null; +}; + +const viewMetadataRegex = /\bwith\b\s+([^)]*\bview_metadata\b[^)]*)(\s+as\b|\s*,)/i; +export const parseViewMetadataFlag = (sql: string | null): boolean => { + if (!sql) return false; + + const match = sql.match(viewMetadataRegex); + return match ? true : false; +}; + +export const bufferToBinary = (str: Buffer) => { + return '0x' + (str.toString('hex')).toUpperCase(); +}; + +export const parseDefault = (type: string, def: string) => { + const grammarType = typeFor(type); + return grammarType.defaultFromIntrospect(def); +}; + +const commutativeTypes = [ + ['char', `char(${getDefaultOptions('char')})`], + ['nchar', `nchar(${getDefaultOptions('nchar')})`], + ['varchar', `varchar(${getDefaultOptions('varchar')})`], + ['nvarchar', `nvarchar(${getDefaultOptions('nvarchar')})`], + ['datetime2', `datetime2(${getDefaultOptions('datetime2')})`], + ['datetimeoffset', `datetimeoffset(${getDefaultOptions('datetimeoffset')})`], + ['time', `time(${getDefaultOptions('time')})`], + ['binary', `binary(${getDefaultOptions('binary')})`], + ['varbinary', `varbinary(${getDefaultOptions('varbinary')})`], + ['decimal', `decimal(${getDefaultOptions('decimal')})`], + ['numeric', `numeric(${getDefaultOptions('numeric')})`], + ['float', `float(${getDefaultOptions('float')})`], +]; +export const typesCommutative = ( + left: string, + right: string, + mode: 'push' | 'default', +) => { + for (const it of commutativeTypes) { + const leftIn = it.some((x) => x === left); + const rightIn = it.some((x) => x === right); + + if (leftIn && rightIn) return true; + } + + if (mode === 'push') { + if (left.replace(',0)', ')') === right.replace(',0)', ')')) return true; // { from: 'decimal(19,0)', to: 'decimal(19)' } + + // SQL Server treats n as one of two possible values. If 1<=n<=24, n is treated as 24. If 25<=n<=53, n is treated as 53 + // https://learn.microsoft.com/en-us/sql/t-sql/data-types/float-and-real-transact-sql?view=sql-server-ver16 + // SQL Server treats float(24) as real + if (left === 'real' && right.startsWith('float')) { + const rightOptions = parseParams(right).join(','); + + if (Number(rightOptions) <= 24) return true; + } + if (right.startsWith('float') && right === 'float') { + const leftOptions = parseParams(left).join(','); + + if (Number(leftOptions) <= 24) return true; + } + if (right.startsWith('float') && right.startsWith('float')) { + const leftOptions = parseParams(left).join(','); + const rightOptions = parseParams(right).join(','); + + if (getFloatPrecisionFrom(Number(leftOptions)) === getFloatPrecisionFrom(Number(rightOptions))) return true; + } + } + return false; +}; + +const checkNumber = (it: string) => { + const check = Number(it); + + if (Number.isNaN(check)) return 'NaN'; + if (check >= Number.MIN_SAFE_INTEGER && check <= Number.MAX_SAFE_INTEGER) return 'number'; + return 'bigint'; +}; + +export interface SqlType { + is(type: string): boolean; + drizzleImport(): Import; + defaultFromDrizzle(value: unknown): DefaultConstraint['default']; + defaultFromIntrospect(value: string): DefaultConstraint['default']; + toTs( + type: string, + value: DefaultConstraint['default'], + ): { options?: Record; default: string; customType?: string }; +} + +export const Int: SqlType = { + is: (type: string) => type === 'int', + drizzleImport: () => 'int', + defaultFromDrizzle: (value: unknown) => { + const stringified = String(value); + + // mssql wraps each number in extra () + return `((${stringified}))`; + }, + defaultFromIntrospect: (value: string) => { + // mssql stores values that are bigger than `int` with dots + const tmp = value.replace('.))', '))'); + const checked = checkNumber(trimChar(trimChar(tmp, ['(', ')']), ['(', ')'])); + if (checked === 'NaN') return value; + return tmp; + }, + toTs: (_type, value) => { + if (!value) return { default: '' }; + + // cases from introspect: + // int DEFAULT '10' --> ('10') + // int DEFAULT 10 --> ((10)) + value = value.substring(1, value.length - 1); + + const trimmed = trimChar(value, ['(', ')']); + + const numType = checkNumber(trimmed); + if (numType === 'NaN') return { default: `sql\`${value}\`` }; + return { default: trimmed }; + }, +}; +export const TinyInt: SqlType = { + is: (type: string) => type === 'tinyint', + drizzleImport: () => 'tinyint', + defaultFromDrizzle: Int.defaultFromDrizzle, + defaultFromIntrospect: Int.defaultFromIntrospect, + toTs: Int.toTs, +}; +export const SmallInt: SqlType = { + is: (type: string) => type === 'smallint', + drizzleImport: () => 'smallint', + defaultFromDrizzle: Int.defaultFromDrizzle, + defaultFromIntrospect: Int.defaultFromIntrospect, + toTs: Int.toTs, +}; +export const BigInt: SqlType = { + is: (type: string) => type === 'bigint', + drizzleImport: () => 'bigint', + defaultFromDrizzle: (value: unknown) => { + return `((${String(value)}))`; + }, + defaultFromIntrospect: Int.defaultFromIntrospect, + toTs: (_type, value) => { + if (value === null) return { options: { mode: 'number' }, default: '' }; + + // cases from introspect: + // bigintint DEFAULT '10' --> ('10') + // bigintint DEFAULT '9007199254740994' --> ('9007199254740994') + // bigintint DEFAULT '9007199254740994.' --> ('9007199254740994.') + // bigintint DEFAULT 9007199254740994 --> ((9007199254740994.)) + // bigintint DEFAULT 10 --> ((10)) + value = value.substring(1, value.length - 1); + + const tmp = value.replaceAll('.)', ')'); + const trimmed = trimChar(tmp, ['(', ')']); + + const numType = checkNumber(trimmed); + + if (numType === 'NaN') return { options: { mode: 'bigint' }, default: `sql\`${value}\`` }; + if (numType === 'number') return { options: { mode: 'number' }, default: trimmed }; + if (numType === 'bigint') return { options: { mode: 'bigint' }, default: `${trimmed}n` }; + assertUnreachable(numType); + }, +}; + +export const Bit: SqlType = { + is: (type) => type === 'bit', + drizzleImport: () => 'bit', + defaultFromDrizzle: (value: unknown) => { + return String(value) === 'true' ? '((1))' : '((0))'; + }, + defaultFromIntrospect: (value: string) => { + return value; + }, + toTs: (_type, value) => { + if (value === null) return { default: '' }; + + // cases + // bit 1 -> ((1)) + // bit 1. -> ((1.)) -> edge case + // bit '1' -> ('1') -> edge case + // bit '1.' -> ('1.') -> this is not valid syntax to insert + value = value.substring(1, value.length - 1); + if (value === '(1)') return { default: 'true' }; + if (value === '(0)') return { default: 'false' }; + + return { default: `sql\`${value}\`` }; + }, +}; + +export const Char: SqlType = { + is: (type: string) => type === 'char' || type.startsWith('char('), + drizzleImport: () => 'char', + defaultFromDrizzle: (value) => { + const val = String(value); + + return `('${escapeForSqlDefault(val)}')`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + // for text compatibility + let optionsToSet: { length: number | 'max' } | undefined; + + const param = parseParams(type)[0]; + if (param) optionsToSet = { length: param === 'max' ? 'max' : Number(param) }; + + if (!value) return { default: '', options: optionsToSet }; + + // ('text') + // remove outer ( and ) + value = value.substring(1, value.length - 1); + const isTSQLStringLiteral = (str: string) => { + // Trim and check if string starts and ends with a single quote + if (!/^'.*'$/.test(str.trim())) return false; + + // Remove the surrounding quotes + const inner = str.trim().slice(1, -1); + + // Check for valid internal quote escaping: only doubled single quotes are allowed + return !/[^']'[^']/.test(inner); // there should be no unescaped (lonely) single quotes + }; + + if (isTSQLStringLiteral(value)) { + // remove extra ' and ' + value = value.substring(1, value.length - 1); + const unescaped = unescapeFromSqlDefault(value); + const escaped = escapeForTsLiteral(unescaped); + + return { options: optionsToSet, default: escaped }; + } + + return { options: optionsToSet, default: `sql\`${value}\`` }; + }, +}; +export const NChar: SqlType = { + is: (type: string) => type === 'nchar' || type.startsWith('nchar('), + drizzleImport: () => 'nchar', + defaultFromDrizzle: Char.defaultFromDrizzle, + defaultFromIntrospect: Char.defaultFromIntrospect, + toTs: Char.toTs, +}; +export const Varchar: SqlType = { + is: (type) => type === 'varchar' || type.startsWith('varchar('), + drizzleImport: () => 'varchar', + defaultFromDrizzle: Char.defaultFromDrizzle, + defaultFromIntrospect: Char.defaultFromIntrospect, + toTs: Char.toTs, +}; +export const NVarchar: SqlType = { + is: (type: string) => type === 'nvarchar' || type.startsWith('nvarchar('), + drizzleImport: () => 'nvarchar', + defaultFromDrizzle: (value: unknown) => { + let result: string; + + if (typeof value === 'string') result = escapeForSqlDefault(value); + else if (typeof value === 'object' || Array.isArray(value)) { + result = stringify(value, (_, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("'", "''"); + }); + } else { + throw new Error(`unexpected default: ${value}`); + } + + return `('${result}')`; + }, + defaultFromIntrospect: Char.defaultFromIntrospect, + toTs: (type, value) => { + // for text compatibility + let optionsToSet: { length: number | 'max' } | undefined; + + const param = parseParams(type)[0]; + if (param) optionsToSet = { length: param === 'max' ? 'max' : Number(param) }; + + if (!value) return { default: '', options: optionsToSet }; + + // ('text') + // remove outer ( and ) + value = value.substring(1, value.length - 1); + const isTSQLStringLiteral = (str: string) => { + // Trim and check if string starts and ends with a single quote + if (!/^'.*'$/.test(str.trim())) return false; + + // Remove the surrounding quotes + const inner = str.trim().slice(1, -1); + + // Check for valid internal quote escaping: only doubled single quotes are allowed + // 'text'+'text' - not pass + // 'text''text' - pass + return !/[^']'[^']/.test(inner); // there should be no unescaped (lonely) single quotes + }; + + if (!isTSQLStringLiteral(value)) { + return { options: optionsToSet, default: `sql\`${value}\`` }; + } + + try { + const parsed = parse(trimChar(value, "'"), (_, v) => { + if (typeof v === 'string') { + return unescapeFromSqlDefault(v); + } + return v; + }); + + return { + default: stringify(parsed, undefined, undefined, true)!, + options: { mode: 'json', ...optionsToSet }, + }; + } catch {} + + // remove extra ' and ' + value = value.substring(1, value.length - 1); + const unescaped = unescapeFromSqlDefault(value); + const escaped = escapeForTsLiteral(unescaped); + + return { options: optionsToSet, default: escaped }; + }, +}; +export const Text: SqlType = { + is: (type: string) => type === 'text' || type.startsWith('text('), + drizzleImport: () => 'text', + defaultFromDrizzle: Char.defaultFromDrizzle, + defaultFromIntrospect: Char.defaultFromIntrospect, + toTs: Char.toTs, +}; +export const NText: SqlType = { + is: (type: string) => type === 'ntext' || type.startsWith('ntext('), + drizzleImport: () => 'ntext', + defaultFromDrizzle: Text.defaultFromDrizzle, + defaultFromIntrospect: Text.defaultFromIntrospect, + toTs: Text.toTs, +}; + +export const Decimal: SqlType = { + is: (type: string) => type === 'decimal' || type.startsWith('decimal('), + drizzleImport: () => 'decimal', + defaultFromDrizzle: (value) => { + return `((${String(value)}))`; + }, + defaultFromIntrospect: (value) => { + // mssql stores values that are bigger than `int` with dots + const tmp = value.replace('.))', '))'); + const checked = checkNumber(trimChar(trimChar(tmp, ['(', ')']), ['(', ')'])); + if (checked === 'NaN') return value; + return tmp; + }, + toTs: (type, value) => { + const optionsToSet: any = {}; + + const params = parseParams(type); + if (params.length) { + const [p, s] = params; + if (p) optionsToSet['precision'] = Number(p); + if (s) optionsToSet['scale'] = Number(s); + } + + if (!value) return { options: optionsToSet, default: '' }; + // cases: + // [column] decimal DEFAULT '6.32' --> ('6.32') -> edge case + // [column1] decimal DEFAULT '6.' --> ('6.') -> edge case + // [column2] decimal DEFAULT '6' --> ('6') -> edge case + // [column3] decimal DEFAULT 6.32 --> ((6.32)) + // [column5] decimal DEFAULT 6 --> ((6)) + value = value.substring(1, value.length - 1); + + const trimmed = trimChar(value, ['(', ')']); + + const numType = checkNumber(trimmed); + + if (numType === 'NaN') return { options: { ...optionsToSet, mode: 'bigint' }, default: `sql\`${value}\`` }; + if (numType === 'number') return { options: { ...optionsToSet, mode: 'number' }, default: trimmed }; + if (numType === 'bigint') return { options: { ...optionsToSet, mode: 'bigint' }, default: `${trimmed}n` }; + assertUnreachable(numType); + }, +}; +export const Numeric: SqlType = { + is: (type: string) => type === 'numeric' || type.startsWith('numeric('), + drizzleImport: () => 'numeric', + defaultFromDrizzle: Decimal.defaultFromDrizzle, + defaultFromIntrospect: Decimal.defaultFromIntrospect, + toTs: Decimal.toTs, +}; + +export const Float: SqlType = { + is: (type: string) => type === 'float' || type.startsWith('float('), + drizzleImport: () => 'float', + defaultFromDrizzle: (value) => { + return `((${String(value)}))`; + }, + defaultFromIntrospect: (value) => { + // mssql stores values that are bigger than `int` with dots + const tmp = value.replace('.))', '))'); + const checked = checkNumber(trimChar(trimChar(tmp, ['(', ')']), ['(', ')'])); + if (checked === 'NaN') return value; + return tmp; + }, + toTs: (type, value) => { + const param = parseParams(type)[0]; + const optionsToSet = { precision: Number(param) }; + + if (!value) return { default: '', options: optionsToSet }; + + // cases: + // [column] float DEFAULT '6.32' --> ('6.32') -> mapped to ((6.32)) + // [column2] float DEFAULT '6' --> ('6') -> mapped to ((6)) + // [column3] float DEFAULT 6.32 --> ((6.32)) + // [column5] float DEFAULT 6 --> ((6)) + value = value.substring(1, value.length - 1); + + const trimmed = trimChar(value, ['(', ')']); + + const numType = checkNumber(trimmed); + + if (numType === 'NaN') return { options: optionsToSet, default: `sql\`${value}\`` }; + return { options: optionsToSet, default: trimmed }; + }, +}; +export const Real: SqlType = { + is: (type: string) => type === 'real' || type.startsWith('real('), + drizzleImport: () => 'real', + defaultFromDrizzle: Float.defaultFromDrizzle, + defaultFromIntrospect: Float.defaultFromIntrospect, + toTs: (_type, value) => { + if (!value) return { default: '' }; + + // cases: + // [column] float DEFAULT '6.32' --> ('6.32') -> edge case + // [column1] float DEFAULT '6.' --> ('6.') -> edge case + // [column2] float DEFAULT '6' --> ('6') -> edge case + // [column3] float DEFAULT 6.32 --> ((6.32)) + // [column5] float DEFAULT 6 --> ((6)) + value = value.substring(1, value.length - 1); + + const trimmed = trimChar(value, ['(', ')']); + + const numType = checkNumber(trimmed); + if (numType === 'NaN') return { default: `sql\`${value}\`` }; + if (numType === 'number') return { default: trimmed }; + if (numType === 'bigint') return { default: `${trimmed}n` }; + assertUnreachable(numType); + }, +}; + +export const Datetime: SqlType = { + is: (type) => type === 'datetime' || type.startsWith('datetime('), + drizzleImport: () => 'datetime', + defaultFromDrizzle: (value: unknown) => { + if (value instanceof Date) { + return `('${value.toISOString().replace('T', ' ').replace('Z', '')}')`; + } + + return `('${String(value)}')`; + }, + defaultFromIntrospect: (value: string) => { + return value; + }, + toTs: (_type, value) => { + const options: { mode: string } = { mode: 'string' }; + + if (!value) return { default: '', options }; + + if (value === '(getdate())') return { default: '.defaultGetDate()', options }; + + // remove ( and ) + // ('2024-12-42 12:00:00') + value = value.substring(1, value.length - 1); + // check for valid date + if (isNaN(Date.parse(value.substring(1, value.length - 1)))) { + return { default: `sql\`${value}\``, options }; + } + + return { default: value, options }; + }, +}; +export const DateType: SqlType = { + is: (type) => type === 'date' || type.startsWith('date('), + drizzleImport: () => 'date', + defaultFromDrizzle: (value: unknown) => { + if (value instanceof Date) { + return `('${value.toISOString().split('T')[0]}')`; + } + + return `('${String(value)}')`; + }, + defaultFromIntrospect: Datetime.defaultFromIntrospect, + toTs: Datetime.toTs, +}; +export const Datetime2: SqlType = { + is: (type) => type === 'datetime2' || type.startsWith('datetime2('), + drizzleImport: () => 'datetime2', + defaultFromDrizzle: Datetime.defaultFromDrizzle, + defaultFromIntrospect: Datetime.defaultFromIntrospect, + toTs: (type, value) => { + const options: { mode: string; precision: number } = { + mode: 'string', + precision: defaults.options.datetime2.precision, + }; + + const param = parseParams(type)[0]; + if (param) options['precision'] = Number(param); + + if (!value) return { default: '', options }; + + // remove ( and ) + // ('2024-12-42 12:00:00') + value = value.substring(1, value.length - 1); + // check for valid date + if (isNaN(Date.parse(value.substring(1, value.length - 1)))) { + return { default: `sql\`${value}\``, options }; + } + + return { default: value, options }; + }, +}; +export const Datetimeoffset: SqlType = { + is: (type) => type === 'datetimeoffset' || type.startsWith('datetimeoffset('), + drizzleImport: () => 'datetimeoffset', + defaultFromDrizzle: (value: unknown) => { + if (value instanceof Date) { + return `('${value.toISOString()}')`; + } + + return `('${String(value)}')`; + }, + defaultFromIntrospect: Datetime.defaultFromIntrospect, + toTs: (type, value) => { + const options: { mode: string; precision: number } = { + mode: 'string', + precision: defaults.options.datetimeoffset.precision, + }; + + const param = parseParams(type)[0]; + if (param) options['precision'] = Number(param); + + if (!value) return { default: '', options }; + + if (value === '(getdate())') return { default: '.defaultGetDate()', options }; + + // remove ( and ) + // ('2024-12-42 12:00:00') + value = value.substring(1, value.length - 1); + // check for valid date + if (isNaN(Date.parse(value.substring(1, value.length - 1)))) { + return { default: `sql\`${value}\``, options }; + } + + return { default: value, options }; + }, +}; +export const Time: SqlType = { + is: (type) => type === 'time' || type.startsWith('time('), + drizzleImport: () => 'time', + defaultFromDrizzle: (value: unknown) => { + if (value instanceof Date) { + return `('${value.toISOString().split('T')[1].replace('Z', '')}')`; + } + + return `('${String(value)}')`; + }, + defaultFromIntrospect: Datetime.defaultFromIntrospect, + toTs: (type, value) => { + const options: { mode: string; precision: number } = { + mode: 'string', + precision: defaults.options.time.precision, + }; + + const param = parseParams(type)[0]; + if (param) options['precision'] = Number(param); + + if (!value) return { default: '', options }; + + // remove ( and ) + // ('2024-12-42 12:00:00') + value = value.substring(1, value.length - 1); + // check for valid date + if (isNaN(Date.parse(value.substring(1, value.length - 1)))) { + return { default: `sql\`${value}\``, options }; + } + + return { default: value, options }; + }, +}; + +export const Binary: SqlType = { + is: (type) => type === 'binary' || type.startsWith('binary('), + drizzleImport: () => 'binary', + defaultFromDrizzle: (value) => { + if (Buffer.isBuffer(value)) { + return `(${bufferToBinary(value)})`; + } + throw Error('unexpected binary default'); + }, + defaultFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const optionsToSet: { length: number | 'max' } = { length: defaults.options.binary.length }; + + const param = parseParams(type)[0]; + if (param) optionsToSet['length'] = param === 'max' ? 'max' : Number(param); + + // (0x...) + const def = value ? `sql\`${value.substring(1, value.length - 1)}\`` : ''; + return { options: optionsToSet, default: def }; + }, +}; +export const Varbinary: SqlType = { + is: (type) => type === 'varbinary' || type.startsWith('varbinary('), + drizzleImport: () => 'varbinary', + defaultFromDrizzle: Binary.defaultFromDrizzle, + defaultFromIntrospect: Binary.defaultFromIntrospect, + toTs: Binary.toTs, +}; + +export const Custom: SqlType = { + is: () => { + throw Error('Mocked'); + }, + drizzleImport: () => 'customType', + defaultFromDrizzle: (value) => { + return `('${String(value)}')`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + return { default: `sql\`${value}\``, customType: type }; + }, +}; + +export const typeFor = (sqlType: string): SqlType => { + if (Int.is(sqlType)) return Int; + if (TinyInt.is(sqlType)) return TinyInt; + if (SmallInt.is(sqlType)) return SmallInt; + if (BigInt.is(sqlType)) return BigInt; + if (Bit.is(sqlType)) return Bit; + if (Char.is(sqlType)) return Char; + if (NChar.is(sqlType)) return NChar; + if (Varchar.is(sqlType)) return Varchar; + if (NVarchar.is(sqlType)) return NVarchar; + if (Text.is(sqlType)) return Text; + if (NText.is(sqlType)) return NText; + if (Decimal.is(sqlType)) return Decimal; + if (Numeric.is(sqlType)) return Numeric; + if (Float.is(sqlType)) return Float; + if (Real.is(sqlType)) return Real; + if (DateType.is(sqlType)) return DateType; + if (Datetime.is(sqlType)) return Datetime; + if (Datetime2.is(sqlType)) return Datetime2; + if (Datetimeoffset.is(sqlType)) return Datetimeoffset; + if (Time.is(sqlType)) return Time; + if (Binary.is(sqlType)) return Binary; + if (Varbinary.is(sqlType)) return Varbinary; + return Custom; +}; diff --git a/drizzle-kit/src/dialects/mssql/introspect.ts b/drizzle-kit/src/dialects/mssql/introspect.ts new file mode 100644 index 0000000000..5bc87d3122 --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/introspect.ts @@ -0,0 +1,732 @@ +import type { IntrospectStage, IntrospectStatus } from '../../cli/views'; +import type { DB } from '../../utils'; +import type { EntityFilter } from '../pull-utils'; +import type { + CheckConstraint, + DefaultConstraint, + ForeignKey, + Index, + InterimColumn, + InterimSchema, + MssqlEntities, + PrimaryKey, + Schema, + UniqueConstraint, + View, + ViewColumn, +} from './ddl'; +import { parseDefault, parseFkAction, parseViewMetadataFlag, parseViewSQL } from './grammar'; + +export const fromDatabase = async ( + db: DB, + filter: EntityFilter, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, + queryCallback: ( + id: string, + rows: Record[], + error: Error | null, + ) => void = () => {}, +): Promise => { + const schemas: Schema[] = []; + const tables: MssqlEntities['tables'][] = []; + const columns: InterimColumn[] = []; + const indexes: Index[] = []; + const pks: PrimaryKey[] = []; + const fks: ForeignKey[] = []; + const uniques: UniqueConstraint[] = []; + const checks: CheckConstraint[] = []; + const defaults: DefaultConstraint[] = []; + const views: View[] = []; + const viewColumns: ViewColumn[] = []; + + // schema_id is needed for not joining tables by schema name but just to pass where schema_id = id + const introspectedSchemas = await db.query<{ schema_name: string; schema_id: number }>(` + SELECT s.name as schema_name, s.schema_id as schema_id + FROM sys.schemas s + JOIN sys.database_principals p ON s.principal_id = p.principal_id + WHERE p.type IN ('S', 'U') -- Only SQL users and Windows users + AND s.name NOT IN ('guest', 'INFORMATION_SCHEMA', 'sys') + ORDER BY lower(s.name); + `).then((rows) => { + queryCallback('schemas', rows, null); + return rows; + }).catch((error) => { + queryCallback('schemas', [], error); + throw error; + }); + + const filteredSchemas = introspectedSchemas.filter((it) => filter({ type: 'schema', name: it.schema_name })); + + schemas.push( + ...filteredSchemas.filter((it) => it.schema_name !== 'dbo').map((it) => ({ + entityType: 'schemas', + name: it.schema_name, + })), + ); + + const filteredSchemaIds = filteredSchemas.map((it) => it.schema_id); + + const tablesList = await db + .query<{ + object_id: number; + schema_id: number; + name: string; + }>(` + SELECT + object_id as object_id, + schema_id AS schema_id, + name AS name +FROM + sys.tables +WHERE + schema_id IN (${filteredSchemaIds.join(', ')}) + AND sys.tables.is_ms_shipped = 0 +ORDER BY lower(name); +`).then((rows) => { + queryCallback('tables', rows, null); + return rows; + }).catch((error) => { + queryCallback('tables', [], error); + throw error; + }); + + const viewsList = await db.query<{ + name: string; + object_id: number; + schema_id: number; + with_check_option: boolean; + definition: string; + schema_binding: boolean; + }>(` +SELECT +views.name as name, +views.object_id as object_id, +views.schema_id as schema_id, +views.with_check_option as with_check_option, +modules.definition as definition, +modules.is_schema_bound as schema_binding +FROM +sys.views views +LEFT JOIN sys.sql_modules modules on modules.object_id = views.object_id +WHERE views.schema_id IN (${filteredSchemaIds.join(', ')}) + AND views.is_ms_shipped = 0 +ORDER BY lower(views.name); +`).then((rows) => { + queryCallback('views', rows, null); + return rows; + }).catch((error) => { + queryCallback('views', [], error); + throw error; + }); + + const filteredTables = tablesList.filter((it) => { + const schema = filteredSchemas.find((schema) => schema.schema_id === it.schema_id)!; + + if (!filter({ type: 'table', schema: schema.schema_name, name: it.name })) return false; + return true; + }) + .map((it) => { + const schema = filteredSchemas.find((schema) => schema.schema_id === it.schema_id)!; + + return { + ...it, + schema: schema.schema_name, + }; + }); + + const filteredTableIds = filteredTables.map((it) => it.object_id); + const filteredViewIds = viewsList.map((it) => it.object_id); + const filteredViewsAndTableIds = [...filteredTableIds, ...filteredViewIds]; + + if (filteredViewIds.length === 0 && filteredTableIds.length === 0) { + return { + schemas, + tables: [], + columns: [], + pks: [], + fks: [], + indexes: [], + uniques: [], + defaults: [], + checks: [], + views: [], + viewColumns: [], + }; + } + + const filterByTableIds = filteredTableIds.length > 0 ? `(${filteredTableIds.join(',')})` : ''; + const filterByTableAndViewIds = filteredViewsAndTableIds.length > 0 ? `(${filteredViewsAndTableIds.join(',')})` : ''; + + for (const table of filteredTables) { + tables.push({ + entityType: 'tables', + schema: table.schema, + name: table.name, + }); + } + + const checkConstraintQuery = await db.query<{ + name: string; + schema_id: number; + parent_table_id: number; + definition: string; + is_system_named: boolean; + }>(` +SELECT + name as name, + schema_id as schema_id, + parent_object_id as parent_table_id, + definition as definition, + is_system_named as is_system_named +FROM sys.check_constraints +${filterByTableIds ? 'WHERE parent_object_id in ' + filterByTableIds : ''} +ORDER BY lower(name) +;`).then((rows) => { + queryCallback('checks', rows, null); + return rows; + }).catch((error) => { + queryCallback('checks', [], error); + throw error; + }); + + const defaultsConstraintQuery = await db.query<{ + name: string; + schema_id: number; + parent_table_id: number; + parent_column_id: number; + definition: string; + is_system_named: boolean; + }>(` +SELECT + name as name, + schema_id as schema_id, + parent_object_id as parent_table_id, + parent_column_id as parent_column_id, + definition as definition, + is_system_named as is_system_named +FROM sys.default_constraints +${filterByTableIds ? 'WHERE parent_object_id in ' + filterByTableIds : ''} +ORDER BY lower(name) +;`).then((rows) => { + queryCallback('defaults', rows, null); + return rows; + }).catch((error) => { + queryCallback('defaults', [], error); + throw error; + }); + + type ForeignKeyRow = { + name: string; + schema_id: number; + parent_table_id: number; + parent_column_id: number; + on_delete: string; + on_update: string; + is_system_named: boolean; + reference_table_id: number; + reference_column_id: number; + }; + + const fkCostraintQuery = await db.query(` +SELECT + fk.name as name, + fk.schema_id as schema_id, + fkc.parent_object_id as parent_table_id, + fkc.parent_column_id as parent_column_id, + fk.delete_referential_action_desc as on_delete, + fk.update_referential_action_desc as on_update, + fk.is_system_named as is_system_named, + fkc.referenced_object_id as reference_table_id, + fkc.referenced_column_id as reference_column_id + FROM +sys.foreign_keys fk +LEFT JOIN sys.foreign_key_columns fkc ON fkc.constraint_object_id = fk.object_id +WHERE fk.schema_id IN (${filteredSchemaIds.join(', ')}) +ORDER BY lower(fk.name); + `).then((rows) => { + queryCallback('fks', rows, null); + return rows; + }).catch((error) => { + queryCallback('fks', [], error); + throw error; + }); + + type RawIdxsAndConstraints = { + table_id: number; + index_id: number; + name: string; + is_unique: boolean; + is_primary_key: boolean; + is_unique_constraint: boolean; + has_filter: boolean; + filter_definition: string; + column_id: number; + }; + + const pksUniquesAndIdxsQuery = await db.query(` + SELECT + i.object_id as table_id, + i.index_id as index_id, + i.name AS name, + i.is_unique as is_unique, + i.is_primary_key as is_primary_key, + i.is_unique_constraint as is_unique_constraint, + i.has_filter as has_filter, + i.filter_definition as filter_definition, + ic.column_id as column_id + FROM sys.indexes i + INNER JOIN sys.index_columns ic + ON i.object_id = ic.object_id + AND i.index_id = ic.index_id + ${filterByTableIds ? 'WHERE i.object_id in ' + filterByTableIds : ''} + ORDER BY lower(i.name);`) + .then((rows) => { + queryCallback('indexes', rows, null); + return rows; + }).catch((error) => { + queryCallback('indexes', [], error); + throw error; + }); + + const columnsQuery = await db.query<{ + column_id: number; + table_object_id: number; + name: string; + system_type_id: number; + max_length_bytes: number; + precision: number; + scale: number; + is_nullable: boolean; + is_identity: boolean; + is_computed: boolean; + default_object_id: number; + seed_value: number; + increment_value: number; + type: string; + generated_always_definition: string | null; + generated_is_persisted: boolean; + rel_kind: 'U' | 'V'; + }>(` +SELECT + col.column_id as column_id, + col.object_id as table_object_id, + col.name as name, + col.system_type_id as system_type_id, + col.max_length as max_length_bytes, + col.precision as precision, + col.scale as scale, + col.is_nullable as is_nullable, + col.is_identity as is_identity, + col.is_computed as is_computed, + col.default_object_id as default_object_id, + col.generated_always_type as generated_always_type, + CAST(idc.seed_value AS INT) AS seed_value, + CAST(idc.increment_value AS INT) AS increment_value, + types.name as type, + computed.definition as generated_always_definition, + computed.is_persisted as generated_is_persisted, + obj.type as rel_kind +FROM sys.columns col +LEFT JOIN sys.types types + ON types.system_type_id = col.system_type_id AND types.user_type_id = col.user_type_id +LEFT JOIN sys.identity_columns idc + ON idc.object_id = col.object_id AND idc.column_id = col.column_id +LEFT JOIN sys.computed_columns computed + ON computed.object_id = col.object_id AND computed.column_id = col.column_id +LEFT JOIN sys.objects obj + ON obj.object_id = col.object_id +WHERE obj.type in ('U', 'V') + AND col.object_id IN ${filterByTableAndViewIds}; +`).then((rows) => { + queryCallback('columns', rows, null); + return rows; + }).catch((error) => { + queryCallback('columns', [], error); + throw error; + }); + + // TODO add counting + let columnsCount = 0; + let indexesCount = 0; + let foreignKeysCount = 0; + let tableCount = 0; + let checksCount = 0; + let viewsCount = 0; + + const [ + checkConstraintList, + defaultsConstraintList, + fkCostraintList, + pksUniquesAndIdxsList, + columnsList, + ] = await Promise + .all([ + checkConstraintQuery, + defaultsConstraintQuery, + fkCostraintQuery, + pksUniquesAndIdxsQuery, + columnsQuery, + ]); + + columnsCount = columnsList.length; + tableCount = filteredTables.length; + + for (const column of columnsList.filter((it) => it.rel_kind.trim() === 'U')) { + const table = filteredTables.find((it) => it.object_id === column.table_object_id); + if (!table) continue; // skip if no table found + + const schema = filteredSchemas.find((it) => it.schema_id === table.schema_id)!; + const precision = column.precision; + const scale = column.scale; + const bytesLength = column.max_length_bytes; + + const formatLength = (length: number | null, divisor: number = 1) => { + if (length === null) return ''; + if (length === -1) return 'max'; + return `${length / divisor}`; + }; + + const parseOptions = (type: string) => { + if (type === 'nchar' || type === 'nvarchar') { + return formatLength(bytesLength, 2); + } + + if (type === 'char' || type === 'varchar' || type === 'binary' || type === 'varbinary') { + return formatLength(bytesLength); + } + + if (type === 'float') { + return String(precision); + } + + if (type === 'datetimeoffset' || type === 'datetime2' || type === 'time') { + return String(scale); + } + + if (type === 'decimal' || type === 'numeric') { + return `${precision},${scale}`; + } + + return null; + }; + const options = parseOptions(column.type); + + const columnType = column.type + (options ? `(${options})` : ''); + + const unique = pksUniquesAndIdxsList.filter((it) => it.is_unique_constraint).find((it) => { + return it.table_id === table.object_id && it.column_id === column.column_id; + }) ?? null; + + const pk = pksUniquesAndIdxsList.filter((it) => it.is_primary_key).find((it) => { + return it.table_id === table.object_id && it.column_id === column.column_id; + }) ?? null; + + columns.push({ + entityType: 'columns', + schema: schema.schema_name, + table: table.name, + name: column.name, + type: columnType, + isUnique: unique ? true : false, + uniqueName: unique ? unique.name : null, + pkName: pk ? pk.name : null, + notNull: !column.is_nullable && !column.is_identity, + isPK: pk ? true : false, + generated: column.is_computed + ? { + as: column.generated_always_definition!, + type: column.generated_is_persisted ? 'persisted' : 'virtual', + } + : null, + identity: column.is_identity + ? { + increment: column.increment_value, + seed: column.seed_value, + } + : null, + }); + } + + type GroupedIdxsAndContraints = Omit & { + column_ids: number[]; + }; + const groupedIdxsAndContraints: GroupedIdxsAndContraints[] = Object.values( + pksUniquesAndIdxsList.reduce((acc: Record, row: RawIdxsAndConstraints) => { + const table = filteredTables.find((it) => it.object_id === row.table_id); + if (!table) return acc; + + const key = `${row.table_id}_${row.index_id}`; + if (!acc[key]) { + const { column_id: _, ...rest } = row; + acc[key] = { ...rest, column_ids: [] }; + } + acc[key].column_ids.push(row.column_id); + return acc; + }, {}), + ); + + const groupedPrimaryKeys: GroupedIdxsAndContraints[] = []; + const groupedUniqueConstraints: GroupedIdxsAndContraints[] = []; + const groupedIndexes: GroupedIdxsAndContraints[] = []; + + indexesCount = groupedIndexes.length; + + groupedIdxsAndContraints.forEach((it) => { + if (it.is_primary_key) groupedPrimaryKeys.push(it); + else if (it.is_unique_constraint) groupedUniqueConstraints.push(it); + else groupedIndexes.push(it); + }); + + for (const unique of groupedUniqueConstraints) { + const table = filteredTables.find((it) => it.object_id === unique.table_id); + if (!table) continue; + + const schema = filteredSchemas.find((it) => it.schema_id === table.schema_id)!; + + const columns = unique.column_ids.map((it) => { + const column = columnsList.find((column) => + column.table_object_id === unique.table_id && column.column_id === it + )!; + return column.name; + }); + + uniques.push({ + entityType: 'uniques', + schema: schema.schema_name, + table: table.name, + name: unique.name, + nameExplicit: true, + columns, + }); + } + + for (const pk of groupedPrimaryKeys) { + const table = filteredTables.find((it) => it.object_id === pk.table_id); + if (!table) continue; + + const schema = filteredSchemas.find((it) => it.schema_id === table.schema_id)!; + + const columns = pk.column_ids.map((it) => { + const column = columnsList.find((column) => column.table_object_id === pk.table_id && column.column_id === it)!; + return column.name; + }); + + pks.push({ + entityType: 'pks', + schema: schema.schema_name, + table: table.name, + name: pk.name, + nameExplicit: true, + columns, + }); + } + + for (const index of groupedIndexes) { + const table = filteredTables.find((it) => it.object_id === index.table_id); + if (!table) continue; + + const schema = filteredSchemas.find((it) => it.schema_id === table.schema_id)!; + + const columns = index.column_ids.map((it) => { + const column = columnsList.find((column) => + column.table_object_id === index.table_id && column.column_id === it + )!; + return column.name; + }); + + indexes.push({ + entityType: 'indexes', + schema: schema.schema_name, + table: table.name, + name: index.name, + columns, + where: index.has_filter ? index.filter_definition : null, + isUnique: index.is_unique, + }); + } + + type GroupedForeignKey = { + name: string; + schema_id: number; + parent_table_id: number; + on_delete: string; + on_update: string; + is_system_named: boolean; + reference_table_id: number; + columns: { parent_column_ids: number[]; reference_column_ids: number[] }; + }; + const groupedFkCostraints = Object.values( + fkCostraintList.reduce((acc: Record, row: ForeignKeyRow) => { + const key = `${row.name}_${row.schema_id}`; + + if (acc[key]) { + acc[key].columns.parent_column_ids.push(row.parent_column_id); + acc[key].columns.reference_column_ids.push(row.reference_column_id); + } else { + acc[key] = { + ...row, + columns: { parent_column_ids: [row.parent_column_id], reference_column_ids: [row.reference_column_id] }, + }; + } + + return acc; + }, {}), + ); + + foreignKeysCount = groupedFkCostraints.length; + for (const fk of groupedFkCostraints) { + const tableFrom = filteredTables.find((it) => it.object_id === fk.parent_table_id); + if (!tableFrom) continue; + const schemaFrom = filteredSchemas.find((it) => it.schema_id === fk.schema_id)!; + + const tableTo = filteredTables.find((it) => it.object_id === fk.reference_table_id)!; + const schemaTo = filteredSchemas.find((it) => it.schema_id === tableTo.schema_id)!; + + const columns = fk.columns.parent_column_ids.map((it) => { + const column = columnsList.find((column) => + column.table_object_id === fk.parent_table_id && column.column_id === it + )!; + return column.name; + }); + + const columnsTo = fk.columns.reference_column_ids.map((it) => { + const column = columnsList.find((column) => + column.table_object_id === fk.reference_table_id && column.column_id === it + )!; + return column.name; + }); + + fks.push({ + entityType: 'fks', + schema: schemaFrom.schema_name, + table: tableFrom.name, + name: fk.name, + nameExplicit: true, + columns, + tableTo: tableTo.name, + schemaTo: schemaTo.schema_name, + columnsTo, + onUpdate: parseFkAction(fk.on_update), + onDelete: parseFkAction(fk.on_delete), + }); + } + + checksCount = checkConstraintList.length; + for (const check of checkConstraintList) { + const table = filteredTables.find((it) => it.object_id === check.parent_table_id); + if (!table) continue; + + const schema = filteredSchemas.find((it) => it.schema_id === check.schema_id)!; + + checks.push({ + entityType: 'checks', + schema: schema.schema_name, + table: table.name, + name: check.name, + value: check.definition, + }); + } + + for (const defaultConstraint of defaultsConstraintList) { + const table = filteredTables.find((it) => it.object_id === defaultConstraint.parent_table_id); + if (!table) continue; + + const schema = filteredSchemas.find((it) => it.schema_id === defaultConstraint.schema_id)!; + const column = columnsList.find((it) => + it.column_id === defaultConstraint.parent_column_id && it.table_object_id === defaultConstraint.parent_table_id + )!; + + defaults.push({ + entityType: 'defaults', + schema: schema.schema_name, + table: table.name, + default: parseDefault(column.type, defaultConstraint.definition), + nameExplicit: true, + column: column.name, + name: defaultConstraint.name, + }); + } + + progressCallback('columns', columnsCount, 'fetching'); + progressCallback('checks', checksCount, 'fetching'); + progressCallback('indexes', indexesCount, 'fetching'); + progressCallback('tables', tableCount, 'done'); + + viewsCount = viewsList.length; + for (const view of viewsList) { + const viewName = view.name; + const viewSchema = filteredSchemas.find((it) => it.schema_id === view.schema_id); + if (!viewSchema) continue; + + if (!filter({ type: 'table', schema: viewSchema.schema_name, name: viewName })) continue; + tableCount += 1; + + const encryption = view.definition === null; + const definition = parseViewSQL(view.definition); + if (definition === null) { + throw new Error(`Could not process view ${view.name}:\n${view.definition}`); + } + const withMetadata = parseViewMetadataFlag(view.definition); + const checkOption = view.with_check_option; + const schemaBinding = view.schema_binding; + + views.push({ + entityType: 'views', + schema: viewSchema.schema_name, + name: view.name, + definition, + checkOption, + encryption, + schemaBinding, + viewMetadata: withMetadata, + }); + + const columns = columnsList.filter((it) => it.table_object_id === view.object_id && it.rel_kind.trim() === 'V'); + + for (const viewColumn of columns) { + viewColumns.push({ + notNull: !viewColumn.is_nullable, + name: viewColumn.name, + type: viewColumn.type, + schema: viewSchema.schema_name, + view: view.name, + }); + } + } + + progressCallback('columns', columnsCount, 'done'); + progressCallback('indexes', indexesCount, 'done'); + progressCallback('fks', foreignKeysCount, 'done'); + progressCallback('checks', checksCount, 'done'); + progressCallback('views', viewsCount, 'done'); + + return { + schemas, + tables, + columns, + defaults, + indexes, + pks, + fks, + uniques, + checks, + views, + viewColumns, + } satisfies InterimSchema; +}; + +export const fromDatabaseForDrizzle = async ( + db: DB, + filter: EntityFilter, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, +) => { + return await fromDatabase(db, filter, progressCallback); +}; diff --git a/drizzle-kit/src/dialects/mssql/serializer.ts b/drizzle-kit/src/dialects/mssql/serializer.ts new file mode 100644 index 0000000000..3272f2f80d --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/serializer.ts @@ -0,0 +1,74 @@ +import { mssqlSchemaError } from 'src/cli/views'; +import type { CasingType } from '../../cli/validations/common'; +import { prepareFilenames } from '../../utils/utils-node'; +import type { MssqlDDL } from './ddl'; +import { createDDL, interimToDDL } from './ddl'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from './drizzle'; +import type { MssqlSnapshot } from './snapshot'; +import { drySnapshot, snapshotValidator } from './snapshot'; + +export const prepareSnapshot = async ( + snapshots: string[], + schemaPath: string | string[], + casing: CasingType | undefined, +): Promise< + { + ddlPrev: MssqlDDL; + ddlCur: MssqlDDL; + snapshot: MssqlSnapshot; + snapshotPrev: MssqlSnapshot; + custom: MssqlSnapshot; + } +> => { + const { readFileSync } = await import('fs'); + const { randomUUID } = await import('crypto'); + const prevSnapshot = snapshots.length === 0 + ? drySnapshot + : snapshotValidator.strict(JSON.parse(readFileSync(snapshots[snapshots.length - 1]).toString())); + + const ddlPrev = createDDL(); + for (const entry of prevSnapshot.ddl) { + ddlPrev.entities.push(entry); + } + const filenames = prepareFilenames(schemaPath); + + const res = await prepareFromSchemaFiles(filenames); + + // DO we wanna respect entity filter here? + const { schema, errors } = fromDrizzleSchema(res, casing, () => true); + + if (errors.length > 0) { + console.log(errors.map((it) => mssqlSchemaError(it)).join('\n')); + process.exit(1); + } + + const { ddl: ddlCur, errors: errors2 } = interimToDDL(schema); + + if (errors2.length > 0) { + console.log(errors2.map((it) => mssqlSchemaError(it)).join('\n')); + process.exit(1); + } + + const id = randomUUID(); + const prevIds = [prevSnapshot.id]; + + const snapshot = { + version: '1', + dialect: 'mssql', + id, + prevIds, + ddl: ddlCur.entities.list(), + renames: [], + } satisfies MssqlSnapshot; + + const { id: _ignoredId, prevIds: _ignoredPrevIds, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: MssqlSnapshot = { + id, + prevIds, + ...prevRest, + }; + + return { ddlPrev, ddlCur, snapshot, snapshotPrev: prevSnapshot, custom }; +}; diff --git a/drizzle-kit/src/dialects/mssql/snapshot.ts b/drizzle-kit/src/dialects/mssql/snapshot.ts new file mode 100644 index 0000000000..8d12ccd2e6 --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/snapshot.ts @@ -0,0 +1,146 @@ +import { randomUUID } from 'crypto'; +import { any, array as zArray, boolean, enum as enumType, literal, object, record, string, type TypeOf } from 'zod'; +import { originUUID } from '../../utils'; +import { array, validator } from '../simpleValidator'; +import type { MssqlDDL, MssqlEntity } from './ddl'; +import { createDDL } from './ddl'; + +const index = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + generated: object({ + type: enumType(['stored', 'virtual']), // TODO persisted + as: string(), + }).optional(), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), +}).strict(); + +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + +const table = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), + checkConstraint: record(string(), checkConstraint).default({}), +}).strict(); + +const viewMeta = object({ + checkOption: boolean().optional(), + encryption: boolean().optional(), + schemaBinding: boolean().optional(), + viewMetadata: boolean().optional(), +}).strict(); + +export const view = object({ + name: string(), + columns: record(string(), column), + definition: string().optional(), + isExisting: boolean(), +}).strict().merge(viewMeta); + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ isDefaultAnExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), + indexes: record( + string(), + object({ + columns: record( + string(), + object({ isExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), +}).optional(); + +// use main dialect +const dialect = literal('mssql'); + +const schemaHash = object({ + id: string(), + prevIds: zArray(string()), +}); + +export const schemaInternal = object({ + version: literal('1'), + dialect: dialect, + tables: record(string(), table), + views: record(string(), view).default({}), + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schema = schemaInternal.merge(schemaHash); + +export type Table = TypeOf; +export type Column = TypeOf; + +const ddl = createDDL(); +export const snapshotValidator = validator({ + version: ['1'], + dialect: ['mssql'], + id: 'string', + prevIds: array((_) => true), + ddl: array((it) => ddl.entities.validate(it)), + renames: array((_) => true), +}); + +export type MssqlSnapshot = typeof snapshotValidator.shape; + +export const toJsonSnapshot = (ddl: MssqlDDL, prevIds: string[], renames: string[]): MssqlSnapshot => { + return { dialect: 'mssql', id: randomUUID(), prevIds, version: '1', ddl: ddl.entities.list(), renames }; +}; + +export const drySnapshot = snapshotValidator.strict( + { + version: '1', + dialect: 'mssql', + id: originUUID, + prevIds: [], + ddl: [], + renames: [], + } satisfies MssqlSnapshot, +); diff --git a/drizzle-kit/src/dialects/mssql/statements.ts b/drizzle-kit/src/dialects/mssql/statements.ts new file mode 100644 index 0000000000..5a4894b57f --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/statements.ts @@ -0,0 +1,277 @@ +import type { Simplify } from '../../utils'; +import type { + CheckConstraint, + Column, + DefaultConstraint, + DiffEntities, + ForeignKey, + Index, + PrimaryKey, + Schema, + TableFull, + UniqueConstraint, + View, +} from './ddl'; + +export interface CreateSchema { + type: 'create_schema'; + name: string; +} + +export interface DropSchema { + type: 'drop_schema'; + name: string; +} + +export interface RenameSchema { + type: 'rename_schema'; + from: Schema; + to: Schema; +} + +export interface CreateTable { + type: 'create_table'; + table: TableFull; +} + +export interface DropTable { + type: 'drop_table'; + table: TableFull; +} +export interface RenameTable { + type: 'rename_table'; + from: string; + to: string; + schema: string; +} + +export interface AddColumn { + type: 'add_column'; + column: Column; + defaults: DefaultConstraint[]; + isPK: boolean; +} + +export interface DropColumn { + type: 'drop_column'; + column: Column; +} + +export interface RenameColumn { + type: 'rename_column'; + from: Column; + to: Column; +} + +export interface AlterColumn { + type: 'alter_column'; + diff: DiffEntities['columns']; +} + +export interface RecreateIdentityColumn { + type: 'recreate_identity_column'; + column: DiffEntities['columns']; + defaults: DefaultConstraint[]; + constraintsToDelete: (UniqueConstraint | CheckConstraint | Index | PrimaryKey | ForeignKey | DefaultConstraint)[]; + constraintsToCreate: (UniqueConstraint | CheckConstraint | Index | PrimaryKey | ForeignKey | DefaultConstraint)[]; +} +export interface RecreateColumn { + type: 'recreate_column'; + diff: DiffEntities['columns']; +} + +export interface CreateIndex { + type: 'create_index'; + index: Index; +} + +export interface DropIndex { + type: 'drop_index'; + index: Index; +} + +export interface CreateFK { + type: 'create_fk'; + fk: ForeignKey; +} +export interface DropFK { + type: 'drop_fk'; + fk: ForeignKey; +} + +export interface CreatePK { + type: 'create_pk'; + pk: PrimaryKey; +} + +export interface DropPK { + type: 'drop_pk'; + pk: PrimaryKey; +} + +export interface DropConstraint { + type: 'drop_constraint'; + table: string; + schema: string; + constraint: string; +} + +export interface CreateView { + type: 'create_view'; + view: View; +} + +export interface DropView { + type: 'drop_view'; + view: View; +} + +export interface RenameView { + type: 'rename_view'; + from: View; + to: View; +} + +export interface AlterView { + type: 'alter_view'; + diff: DiffEntities['views']; + view: View; +} + +export interface CreateCheck { + type: 'create_check'; + check: CheckConstraint; +} + +export interface CreateUnique { + type: 'add_unique'; + unique: UniqueConstraint; +} + +export interface DeleteUnique { + type: 'drop_unique'; + unique: UniqueConstraint; +} + +export interface MoveTable { + type: 'move_table'; + name: string; + from: string; + to: string; +} + +export interface AddCheck { + type: 'add_check'; + check: CheckConstraint; +} + +export interface DropCheck { + type: 'drop_check'; + check: CheckConstraint; +} + +export interface MoveView { + type: 'move_view'; + fromSchema: string; + toSchema: string; + view: View; +} + +export interface RenamePrimaryKey { + type: 'rename_pk'; + from: PrimaryKey; + to: PrimaryKey; +} + +export interface RenameCheck { + type: 'rename_check'; + from: CheckConstraint; + to: CheckConstraint; +} + +export interface RenameIndex { + type: 'rename_index'; + from: Index; + to: Index; +} + +export interface RenameForeignKey { + type: 'rename_fk'; + from: ForeignKey; + to: ForeignKey; +} + +export interface RenameUnique { + type: 'rename_unique'; + from: UniqueConstraint; + to: UniqueConstraint; +} + +export interface CreateDefault { + type: 'create_default'; + default: DefaultConstraint; +} + +export interface DropDefault { + type: 'drop_default'; + default: DefaultConstraint; +} + +export interface RecreateDefault { + type: 'recreate_default'; + from: DefaultConstraint; + to: DefaultConstraint; +} + +export type JsonStatement = + | CreateSchema + | DropSchema + | RenameSchema + | MoveView + | AddCheck + | DropCheck + | MoveTable + | CreateUnique + | DeleteUnique + | CreateTable + | DropTable + | RenameTable + | AddColumn + | DropColumn + | RenameColumn + | AlterColumn + | RecreateColumn + | RecreateIdentityColumn + | CreateIndex + | DropIndex + | CreateFK + | DropFK + | CreatePK + | DropPK + | CreateView + | DropView + | RenameView + | AlterView + | DropConstraint + | CreateCheck + | RenamePrimaryKey + | RenameCheck + | RenameIndex + | RenameForeignKey + | RenameUnique + | CreateDefault + | DropDefault + | RecreateDefault; + +export const prepareStatement = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + args: Omit, +): Simplify => { + return { + type, + ...args, + } as TStatement; +}; diff --git a/drizzle-kit/src/dialects/mssql/typescript.ts b/drizzle-kit/src/dialects/mssql/typescript.ts new file mode 100644 index 0000000000..7da020d029 --- /dev/null +++ b/drizzle-kit/src/dialects/mssql/typescript.ts @@ -0,0 +1,526 @@ +import '../../@types/utils'; +import { toCamelCase } from 'drizzle-orm/casing'; +import { assertUnreachable } from 'src/utils'; +import type { Casing } from '../../cli/validations/common'; +import type { + CheckConstraint, + Column, + DefaultConstraint, + ForeignKey, + Index, + MssqlDDL, + PrimaryKey, + UniqueConstraint, + ViewColumn, +} from './ddl'; +import { fullTableFromDDL } from './ddl'; +import { typeFor } from './grammar'; + +const imports = [ + 'bigint', + 'binary', + 'bit', + 'char', + 'nchar', + 'varchar', + 'nvarchar', + 'date', + 'datetime', + 'datetime2', + 'datetimeoffset', + 'decimal', + 'float', + 'int', + 'numeric', + 'real', + 'smallint', + 'text', + 'ntext', + 'json', + 'time', + 'tinyint', + 'varbinary', + 'tinyint', + 'customType', +] as const; +export type Import = (typeof imports)[number]; + +const mssqlImportsList = new Set([ + 'mssqlTable', + ...imports, +]); + +function inspect(it: any): string { + if (!it) return ''; + + const keys = Object.keys(it); + if (keys.length === 0) return '{}'; + + const pairs = keys.map((key) => { + const formattedKey = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(key) + ? key + : `'${key}'`; + + const value = it[key]; + const formattedValue = typeof value === 'string' ? `'${value}'` : String(value); + + return `${formattedKey}: ${formattedValue}`; + }); + + return `{ ${pairs.join(', ')} }`; +} + +const objToStatement2 = (json: { [s: string]: unknown }, mode: 'string' | 'number' = 'string') => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: ${mode === 'string' ? `"${json[it]}"` : json[it]}`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const withCasing = (value: string, casing: Casing) => { + if (casing === 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(toCamelCase(value)); + } + + assertUnreachable(casing); +}; + +const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { + if (casing === 'preserve') { + return ''; + } + if (casing === 'camel') { + return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; + } + + assertUnreachable(casing); +}; + +function generateIdentityParams(column: Column) { + if (column.identity === null) return ''; + const identity = column.identity; + + const tuples = []; + if (identity.seed) { + tuples.push(['seed', identity.seed]); + } + if (identity.increment) { + tuples.push(['increment', identity.increment]); + } + + const params = tuples.length > 0 ? `{ ${tuples.map((x) => `${x[0]}: ${x[1]}`).join(' ,')} }` : ''; + + return `.identity(${params})`; +} + +export const paramNameFor = (name: string, schema: string | null) => { + const schemaSuffix = schema && schema !== 'dbo' ? `In${schema.capitalise()}` : ''; + return `${name}${schemaSuffix}`; +}; + +// prev: schemaToTypeScript +export const ddlToTypeScript = ( + ddl: MssqlDDL, + columnsForViews: ViewColumn[], + casing: Casing, +) => { + const tableFn = `mssqlTable`; + + const schemas = Object.fromEntries( + ddl.schemas.list().filter((it) => it.name !== 'dbo').map((it) => { + return [it.name, withCasing(it.name, casing)]; + }), + ); + + const imports = new Set(); + const vcs = columnsForViews.map((it) => ({ entityType: 'viewColumns' as const, ...it })); + const entities = [...ddl.entities.list(), ...vcs]; + for (const x of entities) { + if (x.entityType === 'schemas' && x.name !== 'dbo') imports.add('mssqlSchema'); + if (x.entityType === 'tables') imports.add(tableFn); + + if (x.entityType === 'indexes') { + if (x.isUnique) imports.add('uniqueIndex'); + else imports.add('index'); + } + + if (x.entityType === 'fks') imports.add('foreignKey'); + + if (x.entityType === 'pks') imports.add('primaryKey'); + if (x.entityType === 'uniques') imports.add('unique'); + if (x.entityType === 'checks') imports.add('check'); + if (x.entityType === 'views' && x.schema === 'dbo') imports.add('mssqlView'); + + if (x.entityType === 'columns' || x.entityType === 'viewColumns') { + const grammarType = typeFor(x.type); + imports.add(grammarType.drizzleImport()); + if (mssqlImportsList.has(x.type)) imports.add(x.type); + } + } + + const schemaStatements = Object.entries(schemas).map((it) => { + return `export const ${it[1]} = mssqlSchema("${it[0]}");\n`; + }).join(''); + + const tableStatements = ddl.tables.list().map((it) => { + const tableSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, tableSchema); + const table = fullTableFromDDL(it, ddl); + const columns = ddl.columns.list({ schema: table.schema, table: table.name }); + const fks = ddl.fks.list({ schema: table.schema, table: table.name }); + + const func = tableSchema ? `${tableSchema}.table` : tableFn; + let statement = `export const ${withCasing(paramName, casing)} = ${func}("${table.name}", {\n`; + statement += createTableColumns( + columns, + table.pk, + fks, + schemas, + ddl.defaults.list({ schema: table.schema, table: table.name }), + casing, + ); + statement += '}'; + + // more than 2 fields or self reference or cyclic + // Andrii: I switched this one off until we will get custom names in .references() + const filteredFKs = table.fks.filter((it) => { + return it.columns.length > 1 || isSelf(it); + }); + + const hasCallback = table.indexes.length > 0 + || filteredFKs.length > 0 + || table.pk + || table.uniques.length > 0 + || table.checks.length > 0; + + if (hasCallback) { + statement += ', '; + statement += '(table) => [\n'; + statement += table.pk ? createTablePK(table.pk, casing) : ''; + statement += createTableFKs(filteredFKs, schemas, casing); + statement += createTableIndexes(table.name, table.indexes, casing); + statement += createTableUniques(table.uniques, casing); + statement += createTableChecks(table.checks); + statement += ']'; + } + + statement += ');'; + return statement; + }); + + const viewsStatements = Object.values(ddl.views.list()) + .map((it) => { + const viewSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, viewSchema); + + const func = it.schema !== 'dbo' + ? `${viewSchema}.view` + : 'mssqlView'; + + const as = `sql\`${it.definition}\``; + + const viewColumns = columnsForViews.filter((x) => x.schema === it.schema && x.view === it.name); + + const columns = createViewColumns( + viewColumns, + casing, + ); + + const viewOptions = { + encryption: it.encryption, + schemaBinding: it.schemaBinding, + viewMetadata: it.viewMetadata, + checkOption: it.checkOption, + }; + + let statement = `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", {${columns}})`; + statement += Object.keys(viewOptions).length > 0 ? `.with(${JSON.stringify(viewOptions)})` : ''; + statement += `.as(${as});`; + + return statement; + }) + .join('\n\n'); + + const uniqueMssqlImports = [...imports]; + + const importsTs = `import { ${ + uniqueMssqlImports.join( + ', ', + ) + } } from "drizzle-orm/mssql-core" +import { sql } from "drizzle-orm"\n\n`; + + let decalrations = schemaStatements; + decalrations += '\n'; + decalrations += tableStatements.join('\n\n'); + decalrations += '\n'; + decalrations += viewsStatements; + + const file = importsTs + decalrations; + + // for drizzle studio query runner + const schemaEntry = ` + { + ${ + Object.values(ddl.tables) + .map((it) => withCasing(it.name, casing)) + .join(',\n') + } + } + `; + + return { file, imports: importsTs, decalrations, schemaEntry }; +}; + +// const isCyclic = (fk: ForeignKey) => { +// const key = `${fk.table}-${fk.tableTo}`; +// const reverse = `${fk.tableTo}-${fk.table}`; +// return relations.has(key) && relations.has(reverse); +// }; + +const isSelf = (fk: ForeignKey) => { + return fk.table === fk.tableTo; +}; + +const column = ( + type: string, + name: string, + casing: Casing, + def: DefaultConstraint['default'], +) => { + const lowered = type.toLowerCase(); + + const grammarType = typeFor(lowered); + const key = withCasing(name, casing); + const { default: defToSet, options: optionsToSet, customType } = grammarType.toTs(type, def); + const columnName = dbColumnName({ name, casing, withMode: Boolean(optionsToSet) }); + const drizzleType = grammarType.drizzleImport(); + + let res = `${key}: ${drizzleType}${customType ? `({ dataType: () => '${customType}' })` : ''}(${columnName}${ + inspect(optionsToSet) + })`; + res += defToSet ? defToSet.startsWith('.') ? defToSet : `.default(${defToSet})` : ''; + return res; +}; + +const createViewColumns = ( + columns: ViewColumn[], + casing: Casing, +) => { + let statement = ''; + + columns.forEach((it) => { + const columnStatement = column( + it.type, + it.name, + casing, + null, + ); + statement += '\t'; + statement += columnStatement; + // Provide just this in column function + statement += it.notNull ? '.notNull()' : ''; + statement += ',\n'; + }); + return statement; +}; + +const createTableColumns = ( + columns: Column[], + primaryKey: PrimaryKey | null, + fks: ForeignKey[], + schemas: Record, + defaults: DefaultConstraint[], + casing: Casing, +): string => { + let statement = ''; + + // no self refs and no cyclic + const oneColumnsFKs = Object.values(fks) + .filter((it) => { + return !isSelf(it); + }) + .filter((it) => it.columns.length === 1); + + const fkByColumnName = oneColumnsFKs.reduce((res, it) => { + const arr = res[it.columns[0]] || []; + arr.push(it); + res[it.columns[0]] = arr; + return res; + }, {} as Record); + + columns.forEach((it) => { + const def = defaults.find((def) => def.column === it.name && def.schema === it.schema); + + const columnStatement = column( + it.type, + it.name, + casing, + def ? def.default : null, + ); + const pk = primaryKey && primaryKey.columns.length === 1 && primaryKey.columns[0] === it.name + ? primaryKey + : null; + + statement += '\t'; + statement += columnStatement; + statement += it.notNull && !it.identity && !pk ? '.notNull()' : ''; + statement += it.identity ? generateIdentityParams(it) : ''; + statement += it.generated ? `.generatedAlwaysAs(sql\`${it.generated.as}\`)` : ''; + + const fks = fkByColumnName[it.name]; + // Andrii: I switched it off until we will get a custom naem setting in references + if (fks) { + const fksStatement = fks + .map((it) => { + const onDelete = it.onDelete && it.onDelete !== 'NO ACTION' ? it.onDelete : null; + const onUpdate = it.onUpdate && it.onUpdate !== 'NO ACTION' ? it.onUpdate : null; + const params = { onDelete: onDelete?.toLowerCase(), onUpdate: onUpdate?.toLowerCase() }; + + const paramsStr = objToStatement2(params); + const tableSchema = schemas[it.schemaTo || '']; + const paramName = paramNameFor(it.tableTo, tableSchema); + if (paramsStr) { + return `.references(() => ${ + withCasing( + paramName, + casing, + ) + }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; + } + return `.references(() => ${ + withCasing( + paramName, + casing, + ) + }.${withCasing(it.columnsTo[0], casing)})`; + }) + .join(''); + statement += fksStatement; + } + + statement += ',\n'; + }); + + return statement; +}; + +const createTableIndexes = (tableName: string, idxs: Index[], casing: Casing): string => { + let statement = ''; + + idxs.forEach((it) => { + // TODO: cc: @AndriiSherman we have issue when index is called as table called + // let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; + // idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; + // idxKey = withCasing(idxKey, casing); + // const indexGeneratedName = indexName( + // tableName, + // it.columns.map((it) => it.value), + // ); + + const name = it.name; + // const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += it.isUnique ? '\tuniqueIndex(' : '\tindex('; + statement += name ? `"${name}")` : ')'; + + statement += `.on(${ + it.columns + .map((it) => { + return `table.${withCasing(it, casing)}`; + }) + .join(', ') + })`; + statement += it.where ? `.where(sql\`${it.where}\`)` : ''; + + statement += `,\n`; + }); + + return statement; +}; + +const createTablePK = (it: PrimaryKey, casing: Casing): string => { + let statement = '\tprimaryKey({ columns: ['; + statement += `${ + it.columns + .map((c) => { + return `table.${withCasing(c, casing)}`; + }) + .join(', ') + }`; + statement += `]${it.nameExplicit ? `, name: "${it.name}"` : ''}}),\n`; + return statement; +}; + +// get a map of db role name to ts key +// if to by key is in this map - no quotes, otherwise - quotes + +const createTableUniques = ( + unqs: UniqueConstraint[], + casing: Casing, +): string => { + let statement = ''; + + unqs.forEach((it, index) => { + statement += '\tunique('; + statement += it.nameExplicit ? `"${it.name}")` : ')'; + statement += `.on(${it.columns.map((it) => `table.${withCasing(it, casing)}`).join(', ')})`; + statement += index === unqs.length - 1 ? `\n` : ',\n'; + }); + + return statement; +}; + +const createTableChecks = ( + checkConstraints: CheckConstraint[], +) => { + let statement = ''; + + checkConstraints.forEach((it) => { + statement += 'check('; + statement += `"${it.name}", `; + statement += `sql\`${it.value}\`)`; + statement += `,`; + }); + + return statement; +}; + +const createTableFKs = (fks: ForeignKey[], schemas: Record, casing: Casing): string => { + let statement = ''; + + fks.forEach((it) => { + const tableSchema = it.schemaTo === 'public' ? '' : schemas[it.schemaTo]; + const paramName = paramNameFor(it.tableTo, tableSchema); + + const isSelf = it.tableTo === it.table; + const tableTo = isSelf ? 'table' : `${withCasing(paramName, casing)}`; + statement += `\tforeignKey({\n`; + statement += `\t\tcolumns: [${it.columns.map((i) => `table.${withCasing(i, casing)}`).join(', ')}],\n`; + statement += `\t\tforeignColumns: [${ + it.columnsTo.map((i) => `${tableTo}.${withCasing(i, casing)}`).join(', ') + }],\n`; + statement += it.nameExplicit ? `\t\tname: "${it.name}"\n` : ''; + statement += `\t})`; + + statement += it.onUpdate && it.onUpdate !== 'NO ACTION' ? `.onUpdate("${it.onUpdate}")` : ''; + statement += it.onDelete && it.onDelete !== 'NO ACTION' ? `.onDelete("${it.onDelete}")` : ''; + statement += `,\n`; + }); + return statement; +}; diff --git a/drizzle-kit/src/dialects/mysql/commutativity.ts b/drizzle-kit/src/dialects/mysql/commutativity.ts new file mode 100644 index 0000000000..bb36bb613a --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/commutativity.ts @@ -0,0 +1,463 @@ +import { existsSync, readFileSync } from 'fs'; +import { dirname } from 'path'; +import { assertUnreachable } from '../../utils'; +import { createDDL, type MysqlDDL } from './ddl'; +import { ddlDiffDry } from './diff'; +import { drySnapshot, type MysqlSnapshot } from './snapshot'; +import type { JsonStatement } from './statements'; + +export type BranchConflict = { + parentId: string; + parentPath?: string; + branchA: { headId: string; path: string; statement: JsonStatement }; + branchB: { headId: string; path: string; statement: JsonStatement }; +}; + +export type MySQLNonCommutativityReport = { + conflicts: BranchConflict[]; + leafNodes: string[]; // IDs of all leaf nodes (terminal nodes with no children) +}; + +type SnapshotNode = { + id: string; + prevIds: string[]; + path: string; // full path to snapshot.json + folderPath: string; // folder containing snapshot.json + raw: TSnapshot; +}; + +const footprintMap: Record = { + // Table operations + create_table: [ + 'create_table', + 'drop_table', + 'rename_table', + ], + drop_table: [ + 'create_table', + 'drop_table', + 'rename_table', + 'add_column', + 'drop_column', + 'alter_column', + 'recreate_column', + 'rename_column', + 'create_index', + ], + rename_table: [ + 'create_table', + 'drop_table', + 'rename_table', + ], + + // Column operations + add_column: ['add_column', 'alter_column', 'drop_column', 'rename_column', 'recreate_column'], + drop_column: ['add_column', 'drop_column', 'alter_column', 'rename_column', 'recreate_column'], + alter_column: ['add_column', 'drop_column', 'alter_column', 'rename_column', 'recreate_column'], + recreate_column: ['add_column', 'drop_column', 'alter_column', 'recreate_column', 'rename_column'], + rename_column: ['add_column', 'drop_column', 'alter_column', 'recreate_column', 'rename_column'], + + // Index operations + create_index: ['create_index', 'drop_index', 'drop_table'], + drop_index: ['create_index', 'drop_index'], + + // Primary key operations + drop_pk: ['drop_pk', 'create_pk'], + create_pk: ['drop_pk', 'create_pk'], + + // Foreign key operations + create_fk: ['create_fk'], + + // TODO statements + drop_constraint: [], + create_check: [], + + // View operations + create_view: ['create_view', 'drop_view', 'rename_view', 'alter_view'], + drop_view: ['create_view', 'drop_view', 'rename_view', 'alter_view'], + rename_view: ['create_view', 'drop_view', 'rename_view', 'alter_view'], + alter_view: ['create_view', 'drop_view', 'rename_view', 'alter_view'], +}; + +function formatFootprint(action: string, objectName: string, columnName: string): string { + return `${action};${objectName};${columnName}`; +} + +function extractStatementInfo( + statement: JsonStatement, +): { action: string; schema: string; objectName: string; columnName: string } { + const action = statement.type; + let schema = ''; + let objectName = ''; + let columnName = ''; + + switch (statement.type) { + // Table operations + case 'create_table': + objectName = statement.table.name; + break; + case 'drop_table': + objectName = statement.table; + break; + case 'rename_table': + schema = statement.to; + objectName = statement.from; + break; + + // Column operations + case 'add_column': + case 'drop_column': + case 'recreate_column': + objectName = statement.column.table; + columnName = statement.column.name; + break; + case 'alter_column': + objectName = statement.diff.table; + columnName = statement.column.name; + break; + case 'rename_column': + objectName = statement.table; + columnName = statement.from; + break; + + // Index operations + case 'create_index': + case 'drop_index': + objectName = statement.index.table; + // columnName = statement.index.name; + break; + + // Primary key operations + case 'drop_pk': + objectName = statement.pk.table; + break; + case 'create_pk': + objectName = statement.pk.table; + break; + + // Foreign key operations + case 'create_fk': + objectName = statement.fk.table; + break; + + // Check constraint operations + case 'create_check': + objectName = statement.check.table; + break; + + // Constraint operations + case 'drop_constraint': + objectName = statement.table; + break; + + // View operations + case 'create_view': + objectName = statement.view.name; + break; + case 'drop_view': + objectName = statement.name; + break; + case 'alter_view': + objectName = statement.view.name; + break; + case 'rename_view': + objectName = statement.from; + break; + + default: + assertUnreachable(statement); + } + + return { action, schema, objectName, columnName }; +} + +export function footprint(statement: JsonStatement, snapshot?: MysqlSnapshot): [string[], string[]] { + const info = extractStatementInfo(statement); + const conflictingTypes = footprintMap[statement.type]; + + const statementFootprint = [formatFootprint(statement.type, info.objectName, info.columnName)]; + + let conflictFootprints = conflictingTypes.map((conflictType) => + formatFootprint(conflictType, info.objectName, info.columnName) + ); + + if (snapshot) { + const expandedFootprints = expandFootprintsFromSnapshot(statement, info, conflictingTypes, snapshot); + conflictFootprints = [...conflictFootprints, ...expandedFootprints]; + } + + return [statementFootprint, conflictFootprints]; +} + +function generateLeafFootprints(statements: JsonStatement[], snapshot?: MysqlSnapshot): { + statementHashes: Array<{ hash: string; statement: JsonStatement }>; + conflictFootprints: Array<{ hash: string; statement: JsonStatement }>; +} { + const statementHashes: Array<{ hash: string; statement: JsonStatement }> = []; + const conflictFootprints: Array<{ hash: string; statement: JsonStatement }> = []; + + for (let i = 0; i < statements.length; i++) { + const statement = statements[i]; + const [hashes, conflicts] = footprint(statement, snapshot); + + for (const hash of hashes) { + statementHashes.push({ hash, statement }); + } + + for (const conflict of conflicts) { + conflictFootprints.push({ hash: conflict, statement }); + } + } + + return { statementHashes, conflictFootprints }; +} + +function expandFootprintsFromSnapshot( + statement: JsonStatement, + info: { action: string; schema: string; objectName: string; columnName: string }, + conflictingTypes: JsonStatement['type'][], + snapshot: MysqlSnapshot, +): string[] { + const expandedFootprints: string[] = []; + + if ( + statement.type === 'drop_table' || statement.type === 'rename_table' + ) { + const childEntities = findChildEntitiesInTableFromSnapshot(info.objectName, snapshot); + for (const entity of childEntities) { + for (const conflictType of conflictingTypes) { + expandedFootprints.push(formatFootprint(conflictType, entity.objectName, entity.columnName)); + } + } + // all indexes in changed tables should make a conflict in this case + // maybe we need to make other fields optional + if (statement.type === 'drop_table') { + expandedFootprints.push(formatFootprint('create_index', statement.table, '')); + } else if (statement.type === 'rename_table') { + expandedFootprints.push(formatFootprint('create_index', statement.to, '')); + } + } + + return expandedFootprints; +} + +function findChildEntitiesInTableFromSnapshot( + tableName: string, + snapshot: MysqlSnapshot, +): Array<{ objectName: string; columnName: string }> { + const entities: Array<{ objectName: string; columnName: string }> = []; + + for (const entity of snapshot.ddl) { + if (entity.entityType === 'columns' && entity.table === tableName) { + entities.push({ objectName: entity.table, columnName: entity.name }); + } else if (entity.entityType === 'indexes' && entity.table === tableName) { + entities.push({ objectName: entity.table, columnName: entity.name }); + } else if (entity.entityType === 'pks' && entity.table === tableName) { + entities.push({ objectName: entity.table, columnName: entity.name }); + } else if (entity.entityType === 'fks' && entity.table === tableName) { + entities.push({ objectName: entity.table, columnName: entity.name }); + } else if (entity.entityType === 'checks' && entity.table === tableName) { + entities.push({ objectName: entity.table, columnName: entity.name }); + } + } + + return entities; +} + +function findFootprintIntersections( + branchAHashes: Array<{ hash: string; statement: JsonStatement }>, + branchAConflicts: Array<{ hash: string; statement: JsonStatement }>, + branchBHashes: Array<{ hash: string; statement: JsonStatement }>, + branchBConflicts: Array<{ hash: string; statement: JsonStatement }>, +) { + // const intersections: { leftStatement: string; rightStatement: string }[] = []; + + for (const hashInfoA of branchAHashes) { + for (const conflictInfoB of branchBConflicts) { + if (hashInfoA.hash === conflictInfoB.hash) { + // Decided to return a first issue. You should run check and fix them until you have 0 + // intersections.push({ leftStatement: hashInfoA.hash, rightStatement: conflictInfoB.hash }); + return { leftStatement: hashInfoA.statement, rightStatement: conflictInfoB.statement }; + } + } + } + + for (const hashInfoB of branchBHashes) { + for (const conflictInfoA of branchAConflicts) { + if (hashInfoB.hash === conflictInfoA.hash) { + // Decided to return a first issue. You should run check and fix them until you have 0 + // intersections.push({ leftStatement: hashInfoB.hash, rightStatement: conflictInfoA.hash }); + return { leftStatement: hashInfoB.statement, rightStatement: conflictInfoA.statement }; + } + } + } + + // return intersections; +} + +export const getReasonsFromStatements = async ( + aStatements: JsonStatement[], + bStatements: JsonStatement[], + snapshotLeft?: MysqlSnapshot, + snapshotRight?: MysqlSnapshot, +) => { + // const parentSnapshot = snapshot ?? drySnapshot; + const branchAFootprints = generateLeafFootprints( + aStatements, + snapshotLeft, + ); + const branchBFootprints = generateLeafFootprints( + bStatements, + snapshotRight, + ); + + return findFootprintIntersections( + branchAFootprints.statementHashes, + branchAFootprints.conflictFootprints, + branchBFootprints.statementHashes, + branchBFootprints.conflictFootprints, + ); +}; + +export const detectNonCommutative = async ( + snapshots: string[], +): Promise => { + const nodes = buildSnapshotGraph(snapshots); + + // Build parent -> children mapping (a child can have multiple parents) + const prevToChildren: Record = {}; + for (const node of Object.values(nodes)) { + for (const parentId of node.prevIds) { + const arr = prevToChildren[parentId] ?? []; + arr.push(node.id); + prevToChildren[parentId] = arr; + } + } + + const conflicts: BranchConflict[] = []; + + for (const [prevId, childIds] of Object.entries(prevToChildren)) { + if (childIds.length <= 1) continue; + + const parentNode = nodes[prevId]; + + const childToLeaves: Record = {}; + for (const childId of childIds) { + childToLeaves[childId] = collectLeaves(nodes, childId); + } + + const leafStatements: Record = {}; + for (const leaves of Object.values(childToLeaves)) { + for (const leafId of leaves) { + const leafNode = nodes[leafId]!; + const parentSnapshot = parentNode ? parentNode.raw : drySnapshot; + const { statements } = await diff(parentSnapshot, leafNode.raw); + leafStatements[leafId] = { statements, path: leafNode.folderPath }; + } + } + + for (let i = 0; i < childIds.length; i++) { + for (let j = i + 1; j < childIds.length; j++) { + const groupA = childToLeaves[childIds[i]] ?? []; + const groupB = childToLeaves[childIds[j]] ?? []; + for (const aId of groupA) { + for (const bId of groupB) { + const aStatements = leafStatements[aId]!.statements; + const bStatements = leafStatements[bId]!.statements; + + const parentSnapshot = parentNode ? parentNode.raw : drySnapshot; + + // function that accepts statements are respond with conflicts + const intersectedHashed = await getReasonsFromStatements(aStatements, bStatements, parentSnapshot); + + if (intersectedHashed) { + // parentId and parentPath is a head of a branched leaves + conflicts.push({ + parentId: prevId, + parentPath: parentNode?.folderPath, + branchA: { headId: aId, path: leafStatements[aId]!.path, statement: intersectedHashed.leftStatement }, + branchB: { headId: bId, path: leafStatements[bId]!.path, statement: intersectedHashed.rightStatement }, + }); + } + } + } + } + } + } + + // Collect all leaf nodes (nodes with no children) + const allNodeIds = new Set(Object.keys(nodes)); + const nodesWithChildren = new Set(Object.values(prevToChildren).flat()); + const leafNodes = Array.from(allNodeIds).filter((id) => !nodesWithChildren.has(id)); + + return { conflicts, leafNodes }; +}; + +function buildSnapshotGraph( + snapshotFiles: string[], +): Record> { + const byId: Record> = {}; + for (const file of snapshotFiles) { + if (!existsSync(file)) continue; + const raw = JSON.parse(readFileSync(file, 'utf8')) as TSnapshot; + const node: SnapshotNode = { + id: raw.id, + prevIds: raw.prevIds, + path: file, + folderPath: dirname(file), + raw, + }; + byId[node.id] = node; + } + return byId; +} + +function collectLeaves( + graph: Record>, + startId: string, +): string[] { + const leaves: string[] = []; + const stack: string[] = [startId]; + const prevToChildren: Record = {}; + + // Build parent -> children mapping (a child can have multiple parents) + for (const node of Object.values(graph)) { + for (const parentId of node.prevIds) { + const arr = prevToChildren[parentId] ?? []; + arr.push(node.id); + prevToChildren[parentId] = arr; + } + } + + while (stack.length) { + const id = stack.pop()!; + const children = prevToChildren[id] ?? []; + if (children.length === 0) { + leaves.push(id); + } else { + for (const c of children) stack.push(c); + } + } + return leaves; +} + +async function diff( + fromSnap: MysqlSnapshot | 'dry', + toSnap: MysqlSnapshot, +): Promise<{ statements: JsonStatement[] }>; +async function diff( + fromSnap: MysqlSnapshot, + toSnap: MysqlSnapshot, +): Promise<{ statements: JsonStatement[] }>; +async function diff(fromSnap: any, toSnap: any): Promise<{ statements: JsonStatement[] }> { + const fromDDL: MysqlDDL = createDDL(); + const toDDL: MysqlDDL = createDDL(); + + if (fromSnap !== 'dry') { + for (const e of fromSnap.ddl) fromDDL.entities.push(e); + } + for (const e of toSnap.ddl) toDDL.entities.push(e); + + const { statements } = await ddlDiffDry(fromDDL, toDDL, 'default'); + return { statements }; +} diff --git a/drizzle-kit/src/dialects/mysql/convertor.ts b/drizzle-kit/src/dialects/mysql/convertor.ts new file mode 100644 index 0000000000..0302f510da --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/convertor.ts @@ -0,0 +1,301 @@ +import type { Simplify } from '../../utils'; +import type { JsonStatement } from './statements'; + +export const convertor = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + convertor: (statement: Simplify>) => string | string[], +) => { + return { + type, + can: (st: JsonStatement) => { + return st.type === type; + }, + convert: convertor, + }; +}; + +const createTable = convertor('create_table', (st) => { + const { name, columns, pk, checks, indexes, fks } = st.table; + + const uniqueIndexes = indexes.filter((it) => it.isUnique); + + let statement = ''; + statement += `CREATE TABLE \`${name}\` (\n`; + + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const isPK = pk && pk.columns.length === 1 && pk.columns[0] === column.name; + const primaryKeyStatement = isPK ? ' PRIMARY KEY' : ''; + const notNullStatement = column.notNull && !isPK ? ' NOT NULL' : ''; + const defaultStatement = column.default !== null ? ` DEFAULT ${column.default}` : ''; + + const onUpdateStatement = column.onUpdateNow + ? ` ON UPDATE CURRENT_TIMESTAMP` + `${column.onUpdateNowFsp ? '(' + column.onUpdateNowFsp + ')' : ''}` + : ''; + + const autoincrementStatement = column.autoIncrement && column.type !== 'serial' + ? ' AUTO_INCREMENT' + : ''; + + const generatedStatement = column.generated + ? ` GENERATED ALWAYS AS (${column.generated?.as}) ${column.generated?.type.toUpperCase()}` + : ''; + + const charSetStatement = column.charSet ? ` CHARACTER SET ${column.charSet}` : ''; + const collationStatement = column.collation ? ` COLLATE ${column.collation}` : ''; + + statement += '\t' + + `\`${column.name}\` ${column.type}${charSetStatement}${collationStatement}${autoincrementStatement}${primaryKeyStatement}${generatedStatement}${notNullStatement}${defaultStatement}${onUpdateStatement}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (pk && (pk.columns.length > 1)) { + statement += ',\n'; + statement += `\tCONSTRAINT \`${pk.name}\` PRIMARY KEY(\`${pk.columns.join(`\`,\``)}\`)`; + } + + for (const unique of uniqueIndexes) { + statement += ',\n'; + const uniqueString = unique.columns + .map((it) => it.isExpression ? `${it.value}` : `\`${it.value}\``) + .join(','); + + statement += `\tCONSTRAINT \`${unique.name}\` UNIQUE INDEX(${uniqueString})`; + } + + // TODO remove from create_table + for (const fk of fks) { + statement += ',\n'; + statement += `\tCONSTRAINT \`${fk.name}\` FOREIGN KEY (\`${ + fk.columns.join('`,`') + }\`) REFERENCES \`${fk.tableTo}\`(\`${fk.columnsTo.join('`,`')}\`)`; + } + + for (const check of checks) { + statement += ',\n'; + statement += `\tCONSTRAINT \`${check.name}\` CHECK(${check.value})`; + } + + statement += `\n);`; + statement += `\n`; + return statement; +}); + +const dropTable = convertor('drop_table', (st) => { + return `DROP TABLE \`${st.table}\`;`; +}); + +const renameTable = convertor('rename_table', (st) => { + return `RENAME TABLE \`${st.from}\` TO \`${st.to}\`;`; +}); + +const addColumn = convertor('add_column', (st) => { + const { column, isPK } = st; + const { + name, + type, + notNull, + table, + onUpdateNow, + autoIncrement, + generated, + onUpdateNowFsp, + } = column; + + const defaultStatement = column.default !== null ? ` DEFAULT ${column.default}` : ''; + + const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; + const primaryKeyStatement = `${isPK ? ' PRIMARY KEY' : ''}`; + const autoincrementStatement = `${autoIncrement ? ' AUTO_INCREMENT' : ''}`; + const onUpdateStatement = `${ + onUpdateNow ? ' ON UPDATE CURRENT_TIMESTAMP' + `${onUpdateNowFsp ? '(' + onUpdateNowFsp + ')' : ''}` : '' + }`; + + const generatedStatement = generated + ? ` GENERATED ALWAYS AS (${generated?.as}) ${generated?.type.toUpperCase()}` + : ''; + + const charSetStatement = column.charSet ? ` CHARACTER SET ${column.charSet}` : ''; + const collationStatement = column.collation ? ` COLLATE ${column.collation}` : ''; + + return `ALTER TABLE \`${table}\` ADD \`${name}\` ${type}${charSetStatement}${collationStatement}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${generatedStatement}${notNullStatement}${onUpdateStatement};`; +}); + +const dropColumn = convertor('drop_column', (st) => { + return `ALTER TABLE \`${st.column.table}\` DROP COLUMN \`${st.column.name}\`;`; +}); + +const renameColumn = convertor('rename_column', (st) => { + return `ALTER TABLE \`${st.table}\` RENAME COLUMN \`${st.from}\` TO \`${st.to}\`;`; +}); + +const alterColumn = convertor('alter_column', (st) => { + const { column, isPK, wasPK } = st; + + const defaultStatement = column.default !== null ? ` DEFAULT ${column.default}` : ''; + + const notNullStatement = `${column.notNull ? ' NOT NULL' : ''}`; + const primaryKeyStatement = `${isPK && !wasPK ? ' PRIMARY KEY' : ''}`; + const autoincrementStatement = `${column.autoIncrement ? ' AUTO_INCREMENT' : ''}`; + const onUpdateStatement = `${ + column.onUpdateNow + ? ' ON UPDATE CURRENT_TIMESTAMP' + `${column.onUpdateNowFsp ? '(' + column.onUpdateNowFsp + ')' : ''}` + : '' + }`; + + const generatedStatement = column.generated + ? ` GENERATED ALWAYS AS (${column.generated.as}) ${column.generated.type.toUpperCase()}` + : ''; + + const charSetStatement = column.charSet ? ` CHARACTER SET ${column.charSet}` : ''; + const collationStatement = column.collation ? ` COLLATE ${column.collation}` : ''; + + return `ALTER TABLE \`${column.table}\` MODIFY COLUMN \`${column.name}\` ${column.type}${charSetStatement}${collationStatement}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${generatedStatement}${notNullStatement}${onUpdateStatement};`; +}); + +const recreateColumn = convertor('recreate_column', (st) => { + return [dropColumn.convert(st) as string, addColumn.convert(st) as string]; +}); + +const createIndex = convertor('create_index', (st) => { + // TODO: handle everything? + const { name, table, columns, isUnique } = st.index; + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + + const uniqueString = columns + .map((it) => it.isExpression ? `${it.value}` : `\`${it.value}\``) + .join(','); + + return `CREATE ${indexPart} \`${name}\` ON \`${table}\` (${uniqueString});`; +}); + +const dropIndex = convertor('drop_index', (st) => { + return `DROP INDEX \`${st.index.name}\` ON \`${st.index.table}\`;`; +}); + +const createFK = convertor('create_fk', (st) => { + const { + name, + table, + columns, + tableTo, + columnsTo, + onDelete, + onUpdate, + } = st.fk; + const onDeleteStatement = onDelete !== 'NO ACTION' ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate !== 'NO ACTION' ? ` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columns.map((it) => `\`${it}\``).join(','); + const toColumnsString = columnsTo.map((it) => `\`${it}\``).join(','); + + return `ALTER TABLE \`${table}\` ADD CONSTRAINT \`${name}\` FOREIGN KEY (${fromColumnsString}) REFERENCES \`${tableTo}\`(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; +}); + +{ + // alter generated for column -> recreate +} + +const createPK = convertor('create_pk', (st) => { + return `ALTER TABLE \`${st.pk.table}\` ADD PRIMARY KEY (\`${st.pk.columns.join('`,`')}\`);`; +}); + +const dropPK = convertor('drop_pk', (st) => { + return `ALTER TABLE \`${st.pk.table}\` DROP PRIMARY KEY;`; +}); + +const createCheck = convertor('create_check', (st) => { + return `ALTER TABLE \`${st.check.table}\` ADD CONSTRAINT \`${st.check.name}\` CHECK (${st.check.value});`; +}); + +const dropConstraint = convertor('drop_constraint', (st) => { + const statements = [`ALTER TABLE \`${st.table}\` DROP CONSTRAINT \`${st.constraint}\`;`]; + if (st.dropAutoIndex) statements.push(`DROP INDEX \`${st.constraint}\` ON \`${st.table}\``); + return statements; +}); + +const createView = convertor('create_view', (st) => { + const { definition, name, algorithm, sqlSecurity, withCheckOption } = st.view; + + let statement = `CREATE `; + statement += st.replace ? `OR REPLACE ` : ''; // NO replace was in the code + statement += algorithm ? `ALGORITHM = ${algorithm} ` : ''; + statement += sqlSecurity ? `SQL SECURITY ${sqlSecurity} ` : ''; + statement += `VIEW \`${name}\` AS (${definition})`; + statement += withCheckOption ? ` WITH ${withCheckOption} CHECK OPTION` : ''; + + statement += ';'; + + return statement; +}); + +const dropView = convertor('drop_view', (st) => { + return `DROP VIEW \`${st.name}\`;`; +}); + +const renameView = convertor('rename_view', (st) => { + return `RENAME TABLE \`${st.from}\` TO \`${st.to}\`;`; +}); + +const alterView = convertor('alter_view', (st) => { + const { name, definition, withCheckOption, algorithm, sqlSecurity } = st.view; + + let statement = `ALTER `; + statement += `ALGORITHM = ${algorithm} `; + statement += `SQL SECURITY ${sqlSecurity} `; + statement += `VIEW \`${name}\` AS ${definition}`; + statement += withCheckOption ? ` WITH ${withCheckOption} CHECK OPTION` : ''; + statement += ';'; + + return statement; +}); + +const convertors = [ + createTable, + dropTable, + renameTable, + addColumn, + dropColumn, + renameColumn, + alterColumn, + recreateColumn, + createIndex, + dropIndex, + createFK, + createPK, + dropPK, + createCheck, + dropConstraint, + createView, + dropView, + renameView, + alterView, +]; + +export function fromJson( + statements: JsonStatement[], +) { + const grouped = statements + .map((statement) => { + const filtered = convertors.filter((it) => { + return it.can(statement); + }); + + const convertor = filtered.length === 1 ? filtered[0] : undefined; + if (!convertor) throw new Error(`No convertor for: ${statement.type} statement`); + + const sqlStatements = convertor.convert(statement as any); + const statements = typeof sqlStatements === 'string' ? [sqlStatements] : sqlStatements; + return { jsonStatement: statement, sqlStatements: statements }; + }) + .filter((it) => it !== null); + + const result = { + sqlStatements: grouped.map((it) => it.sqlStatements).flat(), + groupedStatements: grouped, + }; + return result; +} diff --git a/drizzle-kit/src/dialects/mysql/ddl.ts b/drizzle-kit/src/dialects/mysql/ddl.ts new file mode 100644 index 0000000000..f8588355c1 --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/ddl.ts @@ -0,0 +1,309 @@ +import type { SchemaForPull } from 'src/cli/commands/pull-common'; +import { create } from '../dialect'; +import { nameForUnique } from './grammar'; + +export const createDDL = () => { + return create({ + tables: {}, + columns: { + table: 'required', + type: 'string', + notNull: 'boolean', + autoIncrement: 'boolean', + default: 'string?', + onUpdateNow: 'boolean', + onUpdateNowFsp: 'number?', + charSet: 'string?', + collation: 'string?', + generated: { + type: ['stored', 'virtual'], + as: 'string', + }, + }, + pks: { + table: 'required', + columns: 'string[]', + }, + fks: { + table: 'required', + columns: 'string[]', + tableTo: 'string', + columnsTo: 'string[]', + onUpdate: ['NO ACTION', 'RESTRICT', 'SET NULL', 'CASCADE', 'SET DEFAULT', null], + onDelete: ['NO ACTION', 'RESTRICT', 'SET NULL', 'CASCADE', 'SET DEFAULT', null], + nameExplicit: 'boolean', + }, + indexes: { + table: 'required', + columns: [{ + value: 'string', + isExpression: 'boolean', + }], + isUnique: 'boolean', + using: ['btree', 'hash', null], + algorithm: ['default', 'inplace', 'copy', null], + lock: ['default', 'none', 'shared', 'exclusive', null], + nameExplicit: 'boolean', // needed because uniques name can be not specified + }, + checks: { + table: 'required', + value: 'string', + }, + views: { + definition: 'string', + algorithm: ['undefined', 'merge', 'temptable'], + sqlSecurity: ['definer', 'invoker'], + withCheckOption: ['local', 'cascaded', null], + }, + }); +}; + +export type MysqlDDL = ReturnType; + +export type MysqlEntities = MysqlDDL['_']['types']; +export type MysqlEntity = MysqlEntities[keyof MysqlEntities]; +export type DiffEntities = MysqlDDL['_']['diffs']['alter']; + +export type Table = MysqlEntities['tables']; +export type Column = MysqlEntities['columns']; +export type Index = MysqlEntities['indexes']; +export type ForeignKey = MysqlEntities['fks']; +export type PrimaryKey = MysqlEntities['pks']; +export type CheckConstraint = MysqlEntities['checks']; +export type View = MysqlEntities['views']; + +export type InterimColumn = Column & { isPK: boolean; isUnique: boolean; uniqueName: string | null }; +export type ViewColumn = { + view: string; + name: string; + type: string; + notNull: boolean; +}; + +export type InterimSchema = { + tables: Table[]; + columns: InterimColumn[]; + pks: PrimaryKey[]; + fks: ForeignKey[]; + indexes: Index[]; + checks: CheckConstraint[]; + views: View[]; + viewColumns: ViewColumn[]; +}; + +export type TableFull = { + name: string; + columns: Column[]; + pk: PrimaryKey | null; + fks: ForeignKey[]; + checks: CheckConstraint[]; + indexes: Index[]; +}; + +export const fullTableFromDDL = (table: Table, ddl: MysqlDDL): TableFull => { + const filter = { table: table.name }; + const columns = ddl.columns.list(filter); + const pk = ddl.pks.one(filter); + const fks = ddl.fks.list(filter); + const checks = ddl.checks.list(filter); + const indexes = ddl.indexes.list(filter); + return { + name: table.name, + columns, + pk, + fks, + checks, + indexes, + }; +}; + +export type SchemaError = { + type: 'table_name_conflict'; + name: string; +} | { + type: 'column_name_conflict'; + table: string; + name: string; +} | { + type: 'column_unsupported_unique'; + table: string; + columns: string[]; +} | { + type: 'column_unsupported_default_on_autoincrement'; + table: string; + column: string; +}; + +export const interimToDDL = (interim: InterimSchema): { ddl: MysqlDDL; errors: SchemaError[] } => { + const errors = [] as SchemaError[]; + const ddl = createDDL(); + const resrtictedUniqueFor = [ + 'blob', + 'tinyblob', + 'mediumblob', + 'longblob', + 'text', + 'tinytext', + 'mediumtext', + 'longtext', + ]; + + for (const table of interim.tables) { + const res = ddl.tables.push(table); + if (res.status === 'CONFLICT') { + errors.push({ type: 'table_name_conflict', name: table.name }); + } + } + + for (const column of interim.columns) { + const { isPK: _1, isUnique: _2, uniqueName: _3, ...rest } = column; + const res = ddl.columns.push(rest); + if (res.status === 'CONFLICT') { + errors.push({ type: 'column_name_conflict', table: column.table, name: column.name }); + } + + if ((column.type.startsWith('serial') || column.autoIncrement) && column.default !== null) { + errors.push({ type: 'column_unsupported_default_on_autoincrement', table: column.table, column: column.name }); + } + } + + for (const pk of interim.pks) { + const res = ddl.pks.push({ table: pk.table, name: 'PRIMARY', columns: pk.columns }); + if (res.status === 'CONFLICT') { + throw new Error(`PK conflict: ${JSON.stringify(pk)}`); + } + } + + for (const column of interim.columns.filter((it) => it.isPK)) { + const exists = ddl.pks.one({ + table: column.table, + name: 'PRIMARY', // database default + }) !== null; + if (exists) continue; + + ddl.pks.push({ + table: column.table, + name: 'PRIMARY', // database default + columns: [column.name], + }); + } + + for (const column of interim.columns.filter((it) => it.isUnique)) { + if (resrtictedUniqueFor.some((rc) => column.type.startsWith(rc))) { + errors.push({ type: 'column_unsupported_unique', columns: [column.name], table: column.table }); + } + + const name = column.uniqueName ?? nameForUnique(column.table, [column.name]); + const res = ddl.indexes.push({ + table: column.table, + name, + columns: [{ value: column.name, isExpression: false }], + isUnique: true, + using: null, + algorithm: null, + lock: null, + nameExplicit: !!column.uniqueName, + }); + + if (res.status === 'CONFLICT') { + throw new Error(`Index unique conflict: ${name}`); + } + } + + for (const index of interim.indexes) { + const res = ddl.indexes.push(index); + if (res.status === 'CONFLICT') { + throw new Error(`Index conflict: ${JSON.stringify(index)}`); + } + } + for (const index of interim.indexes.filter((i) => i.isUnique)) { + const conflictColumns = index.columns.filter((col) => { + if (col.isExpression) return false; + + const column = ddl.columns.one({ table: index.table, name: col.value }); + + return resrtictedUniqueFor.some( + (restrictedType) => column?.type.startsWith(restrictedType), + ); + }); + + if (conflictColumns.length > 0) { + errors.push({ + type: 'column_unsupported_unique', + columns: conflictColumns.map((it) => it.value), + table: index.table, + }); + } + } + + for (const fk of interim.fks) { + const res = ddl.fks.push(fk); + if (res.status === 'CONFLICT') { + throw new Error(`FK conflict: ${JSON.stringify(fk)}`); + } + } + + for (const check of interim.checks) { + const res = ddl.checks.push(check); + if (res.status === 'CONFLICT') { + throw new Error(`Check constraint conflict: ${JSON.stringify(check)}`); + } + } + + for (const view of interim.views) { + const res = ddl.views.push(view); + if (res.status === 'CONFLICT') { + throw new Error(`View conflict: ${JSON.stringify(view)}`); + } + } + + // TODO: add to other dialects, though potentially we should check on push + for (const it of ddl.entities.list()) { + let err = false; + + if (!ddl.entities.validate(it)) { + console.log('invalid entity:', it); + err = true; + } + if (err) throw new Error(); + } + + return { ddl, errors }; +}; + +export const tableFromDDL = ( + table: MysqlEntities['tables'], + ddl: MysqlDDL, +) => { + const filter = { table: table.name } as const; + const columns = ddl.columns.list(filter); + const pk = ddl.pks.one(filter); + const fks = ddl.fks.list(filter); + const checks = ddl.checks.list(filter); + const indexes = ddl.indexes.list(filter); + + return { + ...table, + columns, + pk, + fks, + checks, + indexes, + }; +}; + +export function mysqlToRelationsPull(schema: MysqlDDL): SchemaForPull { + return Object.values(schema.tables.list()).map((table) => { + const rawTable = tableFromDDL(table, schema); + return { + foreignKeys: rawTable.fks, + uniques: Object.values(rawTable.indexes).map((idx) => ({ + columns: idx.columns.map((idxc) => { + if (!idxc.isExpression && idx.isUnique) { + return idxc.value; + } + }).filter((item) => item !== undefined), + })), + }; + }); +} diff --git a/drizzle-kit/src/dialects/mysql/diff.ts b/drizzle-kit/src/dialects/mysql/diff.ts new file mode 100644 index 0000000000..7706b37f5a --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/diff.ts @@ -0,0 +1,505 @@ +import { trimChar } from '../../utils'; +import { mockResolver } from '../../utils/mocks'; +import type { Resolver } from '../common'; +import { diff } from '../dialect'; +import { groupDiffs, preserveEntityNames } from '../utils'; +import { fromJson } from './convertor'; +import type { Column, DiffEntities, Index, MysqlDDL, Table, View } from './ddl'; +import { fullTableFromDDL } from './ddl'; +import { charSetAndCollationCommutative, commutative, defaultNameForFK } from './grammar'; +import { prepareStatement } from './statements'; +import type { JsonStatement } from './statements'; + +export const ddlDiffDry = async (from: MysqlDDL, to: MysqlDDL, mode: 'default' | 'push' = 'default') => { + const s = new Set(); + return ddlDiff(from, to, mockResolver(s), mockResolver(s), mockResolver(s), mode); +}; + +export const ddlDiff = async ( + ddl1: MysqlDDL, + ddl2: MysqlDDL, + tablesResolver: Resolver
, + columnsResolver: Resolver, + viewsResolver: Resolver, + mode: 'default' | 'push', +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + groupedStatements: { jsonStatement: JsonStatement; sqlStatements: string[] }[]; + renames: string[]; +}> => { + const tablesDiff = diff(ddl1, ddl2, 'tables'); + + const { + created: createdTables, + deleted: deletedTables, + renamedOrMoved: renamedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.filter((it) => it.$diffType === 'create'), + deleted: tablesDiff.filter((it) => it.$diffType === 'drop'), + }); + + for (const renamed of renamedTables) { + ddl1.tables.update({ + set: { + name: renamed.to.name, + }, + where: { + name: renamed.from.name, + }, + }); + + const selfRefs = ddl1.fks.update({ + set: { + table: renamed.to.name, + tableTo: renamed.to.name, + }, + where: { + table: renamed.from.name, + tableTo: renamed.from.name, + }, + }); + + const froms = ddl1.fks.update({ + set: { + table: renamed.to.name, + }, + where: { + table: renamed.from.name, + }, + }); + + const tos = ddl1.fks.update({ + set: { + tableTo: renamed.to.name, + }, + where: { + tableTo: renamed.from.name, + }, + }); + + // preserve name for foreign keys + const renamedFKs = [...selfRefs.data, ...froms.data, ...tos.data]; + for (const fk of renamedFKs) { + const name = defaultNameForFK(fk); + ddl2.fks.update({ + set: { + name: fk.name, + }, + where: { + name: name, + }, + }); + } + + ddl1.entities.update({ + set: { + table: renamed.to.name, + }, + where: { + table: renamed.from.name, + }, + }); + } + + const columnsDiff = diff(ddl1, ddl2, 'columns').filter((it) => + !createdTables.some((table) => table.name === it.table) + ); // filter out columns for newly created tables + + const groupedByTable = groupDiffs(columnsDiff); + + const columnRenames = [] as { from: Column; to: Column }[]; + const columnCreates = [] as Column[]; + const columnDeletes = [] as Column[]; + + for (let it of groupedByTable) { + const { renamedOrMoved: renamed, created, deleted } = await columnsResolver({ + deleted: it.deleted, + created: it.inserted, + }); + + columnCreates.push(...created); + columnDeletes.push(...deleted); + columnRenames.push(...renamed); + } + + for (const rename of columnRenames) { + ddl1.columns.update({ + set: { + name: rename.to.name, + }, + where: { + table: rename.from.table, + name: rename.from.name, + }, + }); + + // DDL2 updates are needed for Drizzle Studio + const update1 = { + set: { + columns: (it: Index['columns'][number]) => { + if (!it.isExpression && it.value === rename.from.name) { + it.value = rename.to.name; + } + return it; + }, + }, + where: { + table: rename.from.table, + }, + } as const; + + ddl1.indexes.update(update1); + ddl2.indexes.update(update1); + + const update2 = { + set: { + columns: (it: string) => it === rename.from.name ? rename.to.name : it, + }, + where: { + table: rename.from.table, + }, + } as const; + ddl1.fks.update(update2); + ddl2.fks.update(update2); + + const update3 = { + set: { + columnsTo: (it: string) => it === rename.from.name ? rename.to.name : it, + }, + where: { + tableTo: rename.from.table, + }, + } as const; + ddl1.fks.update(update3); + ddl2.fks.update(update3); + + const update4 = { + set: { + columns: (it: string) => it === rename.from.name ? rename.to.name : it, + }, + where: { + table: rename.from.table, + }, + }; + ddl1.pks.update(update4); + ddl2.pks.update(update4); + } + + preserveEntityNames(ddl1.fks, ddl2.fks, mode); + preserveEntityNames(ddl1.indexes, ddl2.indexes, mode); + + const viewsDiff = diff(ddl1, ddl2, 'views'); + + const { + created: createdViews, + deleted: deletedViews, + renamedOrMoved: renamedViews, // renamed or moved + } = await viewsResolver({ + created: viewsDiff.filter((it) => it.$diffType === 'create'), + deleted: viewsDiff.filter((it) => it.$diffType === 'drop'), + }); + + for (const rename of renamedViews) { + ddl1.views.update({ + set: { + name: rename.to.name, + }, + where: { + name: rename.from.name, + }, + }); + } + + const checksDiff = diff(ddl1, ddl2, 'checks'); + const indexesDiff = diff(ddl1, ddl2, 'indexes'); + const fksDiff = diff(ddl1, ddl2, 'fks'); + const pksDiff = diff(ddl1, ddl2, 'pks'); + + const alters = diff.alters(ddl1, ddl2); + + const createTableStatements = createdTables.map((it) => { + const full = fullTableFromDDL(it, ddl2); + if (createdTables.length > 1) full.fks = []; // fks have to be created after all tables created + return prepareStatement('create_table', { table: full }); + }); + + const dropTableStatements = deletedTables.map((it) => { + return prepareStatement('drop_table', { table: it.name }); + }); + + const renameTableStatements = renamedTables.map((it) => { + return prepareStatement('rename_table', { from: it.from.name, to: it.to.name }); + }); + + const renameColumnsStatement = columnRenames.map((it) => { + return prepareStatement('rename_column', { + table: it.to.table, + from: it.from.name, + to: it.to.name, + }); + }); + + const createViewStatements = createdViews.map((it) => prepareStatement('create_view', { view: it, replace: false })); + + const dropViewStatements = deletedViews.map((it) => { + return prepareStatement('drop_view', { name: it.name }); + }); + + const renameViewStatements = renamedViews.map((it) => { + return prepareStatement('rename_view', { + from: it.from.name, + to: it.to.name, + }); + }); + + const alterViewStatements = alters.filter((it) => it.entityType === 'views') + .map((it) => { + // TODO: We should probably print a CLI hint for the user too + if (it.definition && mode === 'push') delete it.definition; + + /* + UNDEFINED lets the server pick at execution time (often it still runs as a merge if the query is “mergeable”). + Specifying MERGE when it’s not possible causes MySQL to store UNDEFINED with a warning, + but the reverse (forcing UNDEFINED to overwrite MERGE) doesn’t happen via ALTER. + + https://dev.mysql.com/doc/refman/8.4/en/view-algorithms.html + + TODO: We should probably print a hint in CLI for the user + */ + if (it.algorithm && it.algorithm.to === 'undefined') delete it.algorithm; + return it; + }) + .filter((it) => ddl2.views.hasDiff(it)) + .map((it) => { + const view = ddl2.views.one({ name: it.name })!; + if (it.definition) return prepareStatement('create_view', { view, replace: true }); + return prepareStatement('alter_view', { diff: it, view }); + }); + + const dropCheckStatements = checksDiff.filter((it) => it.$diffType === 'drop') + .filter((it) => !deletedTables.some((x) => x.name === it.table)) + .map((it) => prepareStatement('drop_constraint', { constraint: it.name, table: it.table, dropAutoIndex: false })); + + const dropIndexeStatements = indexesDiff.filter((it) => it.$diffType === 'drop').filter((it) => + !deletedTables.some((x) => x.name === it.table) + ).map((it) => prepareStatement('drop_index', { index: it })); + + const dropFKStatements = fksDiff.filter((it) => it.$diffType === 'drop') + .filter((it) => { + const tableDeteled = deletedTables.some((x) => x.name === it.table); + const tableToDeleted = deletedTables.some((x) => x.name === it.tableTo); + return !(tableDeteled && !tableToDeleted); + }) + .map((it) => { + let dropAutoIndex = ddl2.indexes.one({ table: it.table, name: it.name }) === null; + dropAutoIndex &&= !deletedTables.some((x) => x.name === it.table); + return prepareStatement('drop_constraint', { table: it.table, constraint: it.name, dropAutoIndex }); + }); + + const dropPKStatements = pksDiff.filter((it) => it.$diffType === 'drop') + .filter((it) => !deletedTables.some((x) => x.name === it.table)) + /* + we can't do `create table a(id int auto_increment);` + but we can do `ALTER TABLE `table1` MODIFY COLUMN `column1` int AUTO_INCREMENT` + and database implicitly makes column a Primary Key + */ + .filter((it) => { + if (it.columns.length === 1 && ddl2.columns.one({ table: it.table, name: it.columns[0] })?.autoIncrement) { + return false; + } + return true; + }) + .map((it) => prepareStatement('drop_pk', { pk: it })); + + const createCheckStatements = checksDiff.filter((it) => it.$diffType === 'create') + .filter((it) => !createdTables.some((x) => x.name === it.table)) + .map((it) => prepareStatement('create_check', { check: it })); + + const createIndexesStatements = indexesDiff.filter((it) => it.$diffType === 'create') + .filter((it) => !it.isUnique || !createdTables.some((x) => x.name === it.table)) + .map((it) => prepareStatement('create_index', { index: it })); + + const createFKsStatements = fksDiff.filter((it) => it.$diffType === 'create') + .filter((x) => createdTables.length >= 2 || !createdTables.some((it) => it.name === x.table)) + .map((it) => prepareStatement('create_fk', { fk: it })); + + const createPKStatements = pksDiff.filter((it) => it.$diffType === 'create') + .filter((it) => !createdTables.some((x) => x.name === it.table)) + .map((it) => prepareStatement('create_pk', { pk: it })); + + const addColumnsStatemets = columnCreates.filter((it) => it.entityType === 'columns').map((it) => { + const pk = ddl2.pks.one({ table: it.table }); + const isPK = pk && pk.columns.length === 1 && pk.columns[0] === it.name; + return prepareStatement('add_column', { column: it, isPK: isPK ?? false }); + }); + + const dropColumnStatements = columnDeletes + .filter((it) => !deletedTables.some((x) => x.name === it.table)) + .filter((it) => it.entityType === 'columns').map((it) => { + return prepareStatement('drop_column', { column: it }); + }); + + const alterColumnPredicate: (it: DiffEntities['columns']) => boolean = (it) => { + if (it.generated) { + if (it.generated.from && it.generated.to) return false; + if (it.generated.from && it.generated.from.type === 'virtual') return false; + if (it.generated.to && it.generated.to.type === 'virtual') return false; + } + return true; + }; + + const columnAlterStatements = alters.filter((it) => it.entityType === 'columns') + .filter((it) => { + if (it.type && commutative(it.type.from, it.type.to, mode)) { + delete it.type; + } + + if (it.default && it.default.from && it.default.to && commutative(it.default.from, it.default.to, mode)) { + delete it.default; + } + + if (it.autoIncrement && it.autoIncrement.to && it.$right.type === 'serial') delete it.autoIncrement; + if (it.notNull && it.notNull.from && (it.$right.type === 'serial' || it.$right.autoIncrement)) delete it.notNull; + + if (it.default) { + let deleteDefault = false; + deleteDefault ||= it.default.from === it.default.to; + deleteDefault ||= it.default.from === `(${it.default.to})`; + deleteDefault ||= it.default.to === `(${it.default.from})`; + + // varbinary + deleteDefault ||= it.default.from === `(${it.default.to?.toLowerCase()})`; + deleteDefault ||= it.default.to === `(${it.default.from?.toLowerCase()})`; + + if (deleteDefault) { + delete it.default; + } + } + + if ( + mode === 'push' && it.generated && it.generated.from && it.generated.to + && it.generated.from.as !== it.generated.to.as + ) { + delete it.generated; + } + + if ( + it.notNull + ) { + const isPk = !!ddl2.pks.one({ table: it.table, columns: { CONTAINS: it.name } }); + const wasPk = !!ddl1.pks.one({ table: it.table, columns: { CONTAINS: it.name } }); + + // only if column is no longer pk, but new declaration is not not null, we need to set column not null + if (!isPk && wasPk) {} + else if (isPk || wasPk) delete it.notNull; // if there's a change in notnull but column is a part of a pk - we don't care + } + + if ( + mode === 'push' && (it.charSet || it.collation) + && charSetAndCollationCommutative( + { charSet: it.$left.charSet ?? null, collation: it.$left.collation ?? null }, + { charSet: it.$right.charSet ?? null, collation: it.$right.collation ?? null }, + ) + ) { + delete it.charSet; + delete it.collation; + } + + if ( + mode === 'push' && !it.type && it.default && it.default.from && it.default.to + && (it.$right.type === 'datetime' || it.$right.type === 'timestamp') + ) { + const c1 = Date.parse(trimChar(it.default.from, "'")); + const c2 = Date.parse(trimChar(it.default.to, "'")); + if (c1 === c2) delete it.default; + } + + return ddl2.columns.hasDiff(it) && alterColumnPredicate(it); + }).map((it) => { + // const { $diffType: _1, $left: _2, $right: _3, entityType: _4, table: _5, ...rest } = it; + const isPK = !!ddl2.pks.one({ table: it.table, columns: [it.name] }); + const wasPK = !!ddl1.pks.one({ table: it.table, columns: [it.name] }); + + const potentialTableRename = renamedTables.find((x) => x.to.name === it.$left.table); + const originTableName = potentialTableRename?.from.name ?? it.$left.table; + + const potentialRename = columnRenames.find((x) => x.from.table === it.$left.table && x.to.name === it.$left.name); + const originColumnName = potentialRename?.from.name ?? it.$left.name; + + return prepareStatement('alter_column', { + diff: it, + column: it.$right, + isPK: isPK, + wasPK, + origin: { + table: originTableName, + column: originColumnName, + }, + }); + }); + + const columnRecreateStatatements = alters.filter((it) => it.entityType === 'columns').filter((it) => + !alterColumnPredicate(it) + ).map((it) => { + const column = ddl2.columns.one({ name: it.name, table: it.table })!; + const pk = ddl2.pks.one({ table: it.table }); + const isPK = pk && pk.columns.length === 1 && pk.columns[0] === column.name; + return prepareStatement('recreate_column', { column, isPK: isPK ?? false, diff: it }); + }); + + for (const pk of alters.filter((x) => x.entityType === 'pks')) { + if (pk.columns) { + dropPKStatements.push({ type: 'drop_pk', pk: pk.$left }); + createPKStatements.push({ type: 'create_pk', pk: pk.$right }); + } + } + + for (const fk of alters.filter((x) => x.entityType === 'fks')) { + if (fk.onDelete || fk.onUpdate) { + const dropAutoIndex = false; + dropFKStatements.push({ type: 'drop_constraint', table: fk.table, constraint: fk.name, dropAutoIndex }); + createFKsStatements.push({ type: 'create_fk', fk: fk.$right }); + } + } + + const statements = [ + ...createTableStatements, + ...dropFKStatements, + ...dropTableStatements, + ...renameTableStatements, + + ...renameColumnsStatement, + + ...dropViewStatements, + ...renameViewStatements, + ...alterViewStatements, + + ...dropCheckStatements, + + ...dropIndexeStatements, + ...dropPKStatements, + + ...columnAlterStatements, + ...columnRecreateStatatements, + + ...addColumnsStatemets, + ...createPKStatements, + + ...createIndexesStatements, + ...createFKsStatements, + ...createCheckStatements, + + ...dropColumnStatements, + ...createViewStatements, + ]; + + const res = fromJson(statements); + + return { + statements: statements, + sqlStatements: res.sqlStatements, + groupedStatements: res.groupedStatements, + renames: [], + }; +}; diff --git a/drizzle-kit/src/dialects/mysql/drizzle.ts b/drizzle-kit/src/dialects/mysql/drizzle.ts new file mode 100644 index 0000000000..e8bb7179da --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/drizzle.ts @@ -0,0 +1,333 @@ +import type { Casing } from 'drizzle-orm'; +import { getTableName, is, SQL } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { AnyMySqlColumn, AnyMySqlTable } from 'drizzle-orm/mysql-core'; +import { + getTableConfig, + getViewConfig, + MySqlChar, + MySqlColumn, + MySqlCustomColumn, + MySqlDialect, + MySqlEnumColumn, + MySqlTable, + MySqlText, + MySqlTimestamp, + MySqlVarChar, + MySqlView, +} from 'drizzle-orm/mysql-core'; +import type { CasingType } from 'src/cli/validations/common'; +import { safeRegister } from '../../utils/utils-node'; +import { getColumnCasing, sqlToStr } from '../drizzle'; +import type { Column, InterimSchema } from './ddl'; +import { defaultNameForFK, nameForUnique, typeFor } from './grammar'; + +export const defaultFromColumn = ( + column: AnyMySqlColumn, + casing?: Casing, +): Column['default'] => { + if (typeof column.default === 'undefined') return null; + let value = column.default; + + if (is(column.default, SQL)) { + let str = sqlToStr(column.default, casing); + // we need to wrap unknown statements in () otherwise there's not enough info in Type.toSQL + if (!str.startsWith('(')) return `(${str})`; + return str; + } + + if (is(column, MySqlCustomColumn)) { + const res = column.mapToDriverValue(column.default); + if (typeof res === 'string') value = res; + value = String(res); + } + + const grammarType = typeFor(column.getSQLType().toLowerCase()); + if (grammarType) return grammarType.defaultFromDrizzle(value); + + throw new Error(`unexpected default: ${column.getSQLType().toLowerCase()} ${column.default}`); +}; + +export const upper = (value: T | undefined): Uppercase | null => { + if (!value) return null; + return value.toUpperCase() as Uppercase; +}; + +export const fromDrizzleSchema = ( + tables: AnyMySqlTable[], + views: MySqlView[], + casing: CasingType | undefined, +): InterimSchema => { + const dialect = new MySqlDialect({ casing }); + const result: InterimSchema = { + tables: [], + columns: [], + pks: [], + fks: [], + indexes: [], + checks: [], + views: [], + viewColumns: [], + }; + + for (const table of tables) { + const { + name: tableName, + columns, + indexes, + foreignKeys, + schema, + checks, + primaryKeys, + uniqueConstraints, + } = getTableConfig(table); + + if (schema) continue; + + result.tables.push({ + entityType: 'tables', + name: tableName, + }); + + for (const column of columns) { + const name = getColumnCasing(column, casing); + const notNull: boolean = column.notNull; + + const sqlType = column.getSQLType().replace(', ', ','); // TODO: remove, should be redundant real(6, 3)->real(6,3) + + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? false + : (column as any).autoIncrement; + + const generated: Column['generated'] = column.generated + ? { + as: is(column.generated.as, SQL) + ? dialect.sqlToQuery(column.generated.as as SQL).sql + : typeof column.generated.as === 'function' + ? dialect.sqlToQuery(column.generated.as() as SQL).sql + : (column.generated.as as any), + type: column.generated.mode === 'virtual' ? 'virtual' : 'stored', + } + : null; + + const defaultValue = defaultFromColumn(column, casing); + const type = is(column, MySqlEnumColumn) + ? `enum(${column.enumValues?.map((it) => `'${it.replaceAll("'", "''")}'`).join(',')})` + : sqlType; + + let onUpdateNow: boolean = false; + let onUpdateNowFsp: number | null = null; + if (is(column, MySqlTimestamp)) { + onUpdateNow = column.hasOnUpdateNow ?? false; // TODO + onUpdateNowFsp = column.onUpdateNowFsp ?? null; + } + + let charSet: string | null = null; + let collation: string | null = null; + if (is(column, MySqlChar) || is(column, MySqlVarChar) || is(column, MySqlText) || is(column, MySqlEnumColumn)) { + charSet = column.charSet; + collation = column.collation ?? null; + } + + result.columns.push({ + entityType: 'columns', + table: tableName, + name, + type, + notNull, + autoIncrement, + onUpdateNow, + onUpdateNowFsp, + charSet, + collation, + generated, + isPK: column.primary, + isUnique: column.isUnique, + uniqueName: column.uniqueName ?? null, + default: defaultValue, + }); + } + + for (const pk of primaryKeys) { + const columnNames = pk.columns.map((c: any) => getColumnCasing(c, casing)); + + result.pks.push({ + entityType: 'pks', + table: tableName, + name: 'PRIMARY', + columns: columnNames, + }); + } + + for (const unique of uniqueConstraints) { + const columns = unique.columns.map((c) => { + if (is(c, SQL)) { + const sql = dialect.sqlToQuery(c).sql; + return { value: sql, isExpression: true }; + } + return { value: getColumnCasing(c, casing), isExpression: false }; + }); + + const name = unique.isNameExplicit + ? unique.name + : nameForUnique(tableName, unique.columns.filter((c) => !is(c, SQL)).map((c) => c.name)); + + result.indexes.push({ + entityType: 'indexes', + table: tableName, + name: name, + columns: columns, + isUnique: true, + algorithm: null, + lock: null, + using: null, + nameExplicit: unique.isNameExplicit, + }); + } + + for (const fk of foreignKeys) { + const reference = fk.reference(); + const referenceFT = reference.foreignTable; + + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + const tableTo = getTableName(referenceFT); + + const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); + const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); + + let name = fk.isNameExplicit() + ? fk.getName() + : defaultNameForFK({ table: tableName, columns: columnsFrom, tableTo, columnsTo }); + + result.fks.push({ + entityType: 'fks', + table: tableName, + name, + columns: columnsFrom, + tableTo, + columnsTo, + onUpdate: upper(fk.onUpdate) ?? 'NO ACTION', + onDelete: upper(fk.onDelete) ?? 'NO ACTION', + nameExplicit: fk.isNameExplicit(), + }); + } + + for (const index of indexes) { + const columns = index.config.columns; + const name = index.config.name; + + result.indexes.push({ + entityType: 'indexes', + table: tableName, + name, + columns: columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + return { value: sql, isExpression: true }; + } else { + return { value: `${getColumnCasing(it, casing)}`, isExpression: false }; + } + }), + algorithm: index.config.algorithm ?? null, + lock: index.config.lock ?? null, + isUnique: index.config.unique ?? false, + using: index.config.using ?? null, + nameExplicit: index.isNameExplicit, + }); + } + + for (const check of checks) { + const name = check.name; + const value = check.value; + + result.checks.push({ + entityType: 'checks', + table: tableName, + name, + value: dialect.sqlToQuery(value).sql, + }); + } + } + + for (const view of views) { + const cfg = getViewConfig(view); + const { + isExisting, + name, + query, + selectedFields, + algorithm, + sqlSecurity, + withCheckOption, + } = cfg; + + if (isExisting) continue; + + for (const key in selectedFields) { + if (is(selectedFields[key], MySqlColumn)) { + const column = selectedFields[key]; + const notNull: boolean = column.notNull; + + result.viewColumns.push({ + view: name, + name: column.name, + type: column.getSQLType(), + notNull: notNull, + }); + } + } + + result.views.push({ + entityType: 'views', + name, + definition: query ? dialect.sqlToQuery(query).sql : '', + withCheckOption: withCheckOption ?? null, + algorithm: algorithm ?? 'undefined', // set default values + sqlSecurity: sqlSecurity ?? 'definer', // set default values + }); + } + + return result; +}; + +export const prepareFromSchemaFiles = async (imports: string[]) => { + const tables: AnyMySqlTable[] = []; + const views: MySqlView[] = []; + const relations: Relations[] = []; + + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + const i0: Record = require(`${it}`); + const prepared = prepareFromExports(i0); + + tables.push(...prepared.tables); + views.push(...prepared.views); + relations.push(...prepared.relations); + } + }); + return { tables: Array.from(new Set(tables)), views, relations }; +}; + +export const prepareFromExports = (exports: Record) => { + const tables: AnyMySqlTable[] = []; + const views: MySqlView[] = []; + const relations: Relations[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (is(t, MySqlTable)) { + tables.push(t); + } + + if (is(t, MySqlView)) { + views.push(t); + } + + if (is(t, Relations)) { + relations.push(t); + } + }); + + return { tables, views, relations }; +}; diff --git a/drizzle-kit/src/dialects/mysql/grammar.ts b/drizzle-kit/src/dialects/mysql/grammar.ts new file mode 100644 index 0000000000..dbd3e099da --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/grammar.ts @@ -0,0 +1,1017 @@ +import { assertUnreachable, trimChar } from '../../utils'; +import { parse, stringify } from '../../utils/when-json-met-bigint'; +import { hash } from '../common'; +import { escapeForSqlDefault, escapeForTsLiteral, parseParams, unescapeFromSqlDefault } from '../utils'; +import type { Column, ForeignKey } from './ddl'; +import type { Import } from './typescript'; + +/* + TODO: revise handling of float/double in both orm and kit + in orm we can limit 0-23 precision for float and 24-53 in float/double types + in kit we can trim default values based on scale param with .toFixed(scale ?? defaultScale) + + MySQL also supports this optional precision specification, + but the precision value in FLOAT(p) is used only to determine storage size. + A precision from 0 to 23 results in a 4-byte single-precision FLOAT column. + A precision from 24 to 53 results in an 8-byte double-precision DOUBLE column. + + MySQL performs rounding when storing values, so if you insert 999.00009 into a FLOAT(7,4) column, the approximate result is 999.0001. +*/ + +/* + TODO: + Drizzle ORM allows real/double({ precision: 6 }) which is only allowed with scale +*/ + +const checkNumber = (it: string) => { + const check = Number(it); + + if (Number.isNaN(check)) return 'NaN'; + if (check >= Number.MIN_SAFE_INTEGER && check <= Number.MAX_SAFE_INTEGER) return 'number'; + return 'bigint'; +}; + +export interface SqlType { + is(type: string): boolean; + drizzleImport(vendor?: 'singlestore' | 'mysql'): Import; + defaultFromDrizzle(value: unknown, mode?: MODE): Column['default']; + defaultFromIntrospect(value: string): Column['default']; + toTs( + type: string, + value: Column['default'], + ): { options?: Record; default: string; customType?: string }; // customType for Custom +} + +const IntOps: Pick = { + defaultFromDrizzle: function(value: unknown): Column['default'] { + if (typeof value === 'number') { + return String(value); + } + return String(value); + }, + defaultFromIntrospect: function(value: string): Column['default'] { + return value; + }, +}; + +export const Int: SqlType = { + is: (type: string) => /^(?:int)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'int', + defaultFromDrizzle: IntOps.defaultFromDrizzle, + defaultFromIntrospect: IntOps.defaultFromIntrospect, + toTs: (type, value) => { + const options = type.includes('unsigned') ? { unsigned: true } : undefined; + const check = Number(value); + if (Number.isNaN(check)) return { options, default: `sql\`${value}\`` }; + return { options, default: value ?? '' }; + }, +}; + +export const Boolean: SqlType = { + is: (type) => type === 'tinyint(1)' || type === 'boolean', + drizzleImport: () => 'boolean', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultFromIntrospect: (value) => { + return value === '1' || value === 'true' ? 'true' : 'false'; + }, + toTs: (_, value) => { + return { default: value ?? '' }; + }, +}; + +export const TinyInt: SqlType = { + is: (type: string) => /^(?:tinyint)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'tinyint', + defaultFromDrizzle: IntOps.defaultFromDrizzle, + defaultFromIntrospect: IntOps.defaultFromIntrospect, + toTs: Int.toTs, +}; + +export const SmallInt: SqlType = { + is: (type: string) => /^(?:smallint)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'smallint', + defaultFromDrizzle: IntOps.defaultFromDrizzle, + defaultFromIntrospect: IntOps.defaultFromIntrospect, + toTs: Int.toTs, +}; + +export const MediumInt: SqlType = { + is: (type: string) => /^(?:mediumint)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'mediumint', + defaultFromDrizzle: IntOps.defaultFromDrizzle, + defaultFromIntrospect: IntOps.defaultFromIntrospect, + toTs: Int.toTs, +}; + +export const BigInt: SqlType = { + is: (type: string) => /^(?:bigint)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'bigint', + defaultFromDrizzle: (value) => { + if (typeof value === 'bigint') { + return `${value}`; + } + if (typeof value === 'number') { + return value.toString(); + } + return String(value); + }, + defaultFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const options = type.includes('unsigned') ? { unsigned: true } : {}; + if (value === null) return { options: { ...options, mode: 'number' }, default: '' }; + + const trimmed = trimChar(value, "'"); + const numType = checkNumber(trimmed); + if (numType === 'NaN') return { options: { ...options, mode: 'number' }, default: `sql\`${value}\`` }; + if (numType === 'number') return { options: { ...options, mode: 'number' }, default: trimmed }; + if (numType === 'bigint') return { options: { ...options, mode: 'bigint' }, default: `${trimmed}n` }; + assertUnreachable(numType); + }, +}; + +export const Serial: SqlType = { + is: (type: string) => /^(?:serial)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'serial', + defaultFromDrizzle: (_value) => { + return ''; // handled in interim to ddl + }, + defaultFromIntrospect: (value) => value, + toTs: (_type, _value) => { + return { default: '' }; + }, +}; + +export const Decimal: SqlType = { + // NUMERIC|DECIMAL[(1,1)] [UNSIGNED] [ZEROFILL] + is: (type) => /^(?:numeric|decimal)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'decimal', + defaultFromDrizzle: (value) => { + return `(${String(value)})`; + }, + defaultFromIntrospect: (value) => value, + toTs: (type, value) => { + const options: any = type.includes('unsigned') || type.includes('UNSIGNED') ? { unsigned: true } : {}; + const [precision, scale] = parseParams(type); + if (precision) options['precision'] = Number(precision); + if (scale) options['scale'] = Number(scale); + + if (!value) return { options, default: '' }; + + const numType = checkNumber(value); + if (numType === 'NaN') return { options: options, default: `sql\`${value}\`` }; + if (numType === 'number') return { options: { ...options, mode: 'number' }, default: value }; + if (numType === 'bigint') return { options: { ...options, mode: 'bigint' }, default: `${value}n` }; + assertUnreachable(numType); + }, +}; + +export const Real: SqlType = { + // REAL[(1,1)] [UNSIGNED] [ZEROFILL] + is: (type) => /^(?:real)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'real', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultFromIntrospect: (value) => { + const trimmed = trimChar(trimChar(trimChar(value, '('), ')'), "'"); + return trimmed; + }, + toTs: (type, value) => { + const options: any = type.includes('unsigned') || type.includes('UNSIGNED') ? { unsigned: true } : {}; + const [precision, scale] = parseParams(type); + if (precision) options['precision'] = Number(precision); + if (scale) options['scale'] = Number(scale); + + if (!value) return { options, default: '' }; + + const numType = checkNumber(value); + if (numType === 'NaN') return { options, default: `sql\`${value}\`` }; + if (numType === 'number') return { options, default: value }; + if (numType === 'bigint') return { options, default: `${value}n` }; + assertUnreachable(numType); + }, +}; + +export const Double: SqlType = { + // DOUBLE [PRECISION][(1,1)] [UNSIGNED] [ZEROFILL] + is: (type) => /^(?:double)(?:[\s(].*)?$/i.test(type) || /^(?:double precision)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'double', + defaultFromDrizzle: Real.defaultFromDrizzle, + defaultFromIntrospect: Real.defaultFromIntrospect, + toTs: Real.toTs, +}; + +export const Float: SqlType = { + // FLOAT[(1,1)] [UNSIGNED] [ZEROFILL] + is: (type) => /^(?:float)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'float', + defaultFromDrizzle: Real.defaultFromDrizzle, + defaultFromIntrospect: Real.defaultFromIntrospect, + toTs: Real.toTs, +}; + +export const Char: SqlType = { + is: (type) => /^(?:char)(?:[\s(].*)?$/i.test(type) || /^(?:character)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'char', + defaultFromDrizzle: (value) => { + return `'${escapeForSqlDefault(String(value))}'`; + }, + // 'text''text' -> text'text, we need to make match on introspect + defaultFromIntrospect: (value) => { + if (value.startsWith('(')) return value; + + const trimmed = trimChar(value, "'"); + return `'${escapeForSqlDefault(trimmed)}'`; + }, + toTs: (type, value) => { + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + if (!value) return { options, default: '' }; + if (value.startsWith('(')) return { options, default: `sql\`${value}\`` }; + + const escaped = escapeForTsLiteral(unescapeFromSqlDefault(trimChar(value, "'"))); + return { options, default: escaped }; + }, +}; + +export const Varchar: SqlType = { + is: (type) => { + return /^(?:varchar)(?:[\s(].*)?$/i.test(type) + || /^(?:nvarchar)(?:[\s(].*)?$/i.test(type) + || /^(?:character varying)(?:[\s(].*)?$/i.test(type); + }, + drizzleImport: () => 'varchar', + defaultFromDrizzle: Char.defaultFromDrizzle, + defaultFromIntrospect: Char.defaultFromIntrospect, + toTs: Char.toTs, +}; + +export const TinyText: SqlType = { + is: (type) => /^\s*tinytext\s*$/i.test(type), + drizzleImport: () => 'tinytext', + defaultFromDrizzle: (value) => { + return `('${escapeForSqlDefault(value as string)}')`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + if (!value) return { options, default: '' }; + if (value.startsWith('(') || !value.startsWith("'")) return { options, default: `sql\`${value}\`` }; + + const trimmed = trimChar(value, "'"); + const escaped = value ? escapeForTsLiteral(unescapeFromSqlDefault(trimmed)) : ''; + return { options, default: escaped }; + }, +}; + +export const MediumText: SqlType = { + is: (type) => /^\s*mediumtext\s*$/i.test(type), + drizzleImport: () => 'mediumtext', + defaultFromDrizzle: TinyText.defaultFromDrizzle, + defaultFromIntrospect: TinyText.defaultFromIntrospect, + toTs: TinyText.toTs, +}; + +export const Text: SqlType = { + is: (type) => /^\s*text\s*$/i.test(type), + drizzleImport: () => 'text', + defaultFromDrizzle: TinyText.defaultFromDrizzle, + defaultFromIntrospect: TinyText.defaultFromIntrospect, + toTs: TinyText.toTs, +}; + +export const LongText: SqlType = { + is: (type) => /^\s*longtext\s*$/i.test(type), + drizzleImport: () => 'longtext', + defaultFromDrizzle: TinyText.defaultFromDrizzle, + defaultFromIntrospect: TinyText.defaultFromIntrospect, + toTs: TinyText.toTs, +}; + +export const TinyBlob: SqlType = { + is: (type) => /^\s*tinyblob\s*$/i.test(type), + drizzleImport: () => 'tinyblob', + defaultFromDrizzle: (value) => { + if (typeof Buffer !== 'undefined' && typeof Buffer.isBuffer === 'function' && Buffer.isBuffer(value)) { + return `(0x${value.toString('hex').toLowerCase()})`; + } + if (Array.isArray(value) || typeof value === 'object' || typeof value === 'string') { + return Text.defaultFromDrizzle(value); + } + throw new Error('unexpected'); + }, + defaultFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + if (value === null) return { default: '' }; + + if (typeof Buffer !== 'undefined' && value.startsWith('0x')) { + const parsed = Buffer.from(value.slice(2, value.length), 'hex').toString('utf-8'); + const escaped = parsed.replaceAll('\\', '\\\\').replace('"', '\\"'); + return { options: { mode: 'buffer' }, default: `Buffer.from("${escaped}")` }; + } + + const { default: stringDef } = Text.toTs(type, value); + + return { default: stringDef, options: { mode: 'string' } }; + }, +}; + +export const MediumBlob: SqlType = { + is: (type) => /^\s*mediumblob\s*$/i.test(type), + drizzleImport: () => 'mediumblob', + defaultFromDrizzle: TinyBlob.defaultFromDrizzle, + defaultFromIntrospect: TinyBlob.defaultFromIntrospect, + toTs: TinyBlob.toTs, +}; + +export const LongBlob: SqlType = { + is: (type) => /^\s*longblob\s*$/i.test(type), + drizzleImport: () => 'longblob', + defaultFromDrizzle: TinyBlob.defaultFromDrizzle, + defaultFromIntrospect: TinyBlob.defaultFromIntrospect, + toTs: TinyBlob.toTs, +}; + +export const Blob: SqlType = { + is: (type) => /^\s*blob\s*$/i.test(type), + drizzleImport: () => 'blob', + defaultFromDrizzle: TinyBlob.defaultFromDrizzle, + defaultFromIntrospect: TinyBlob.defaultFromIntrospect, + toTs: TinyBlob.toTs, +}; + +export const Binary: SqlType = { + is: (type) => /^(?:binary)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'binary', + defaultFromDrizzle: TinyText.defaultFromDrizzle, + defaultFromIntrospect: (value) => { + // when you do `binary default 'text'` instead of `default ('text')` + if (value.startsWith('0x')) { + return `'${Buffer.from(value.slice(2), 'hex').toString('utf-8')}'`; + } + return value; + }, + toTs: TinyText.toTs, +}; + +export const Varbinary: SqlType = { + is: (type) => /^(?:varbinary)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'varbinary', + defaultFromDrizzle: (value) => { + return `(0x${Buffer.from(value as string).toString('hex').toLowerCase()})`; + }, + defaultFromIntrospect: (value) => value, + toTs: (type, value) => { + if (!value) return { default: '' }; + + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + + let trimmed = value.startsWith('(') ? value.substring(1, value.length - 1) : value; + trimmed = trimChar(value, "'"); + if (trimmed.startsWith('0x')) { + trimmed = Buffer.from(trimmed.slice(2), 'hex').toString('utf-8'); + return { options, default: `"${trimmed.replaceAll('"', '\\"')}"` }; + } else { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Json: SqlType = { + is: (type) => /^\s*json\s*$/i.test(type), + drizzleImport: () => 'json', + defaultFromDrizzle: (value) => { + const stringified = stringify(value, (key, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("'", "''"); + }); + return `('${stringified}')`; + }, + defaultFromIntrospect: (value) => value, + toTs: (_, def) => { + if (!def) return { default: '' }; + const trimmed = trimChar(def, "'"); + try { + const parsed = parse(trimmed); + const stringified = stringify( + parsed, + (_, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("''", "'"); + }, + undefined, + true, + )!; + return { default: stringified }; + } catch {} + return { default: `sql\`${def}\`` }; + }, +}; + +export const Timestamp: SqlType = { + is: (type) => /^(?:timestamp)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'timestamp', + defaultFromDrizzle: (value) => { + if (value instanceof Date) { + const converted = value.toISOString().replace('T', ' ').slice(0, 23); + return `'${converted}'`; + } + // TODO: we can handle fsp 6 here too + return `'${value}'`; + }, + defaultFromIntrospect: (value) => { + if (!isNaN(Date.parse(value))) { + return `'${value}'`; + } + return value; + }, + toTs: (type, def) => { + const options: any = {}; + const [fsp] = parseParams(type); + if (fsp) options['fsp'] = Number(fsp); + + if (!def) return { options, default: '' }; + const trimmed = trimChar(def, "'"); + if (trimmed === 'now()' || trimmed === '(now())' || trimmed === '(CURRENT_TIMESTAMP)') { + return { options, default: '.defaultNow()' }; + } + + if (fsp && Number(fsp) > 3) return { options, default: `sql\`'${trimmed}'\`` }; + // TODO: we can handle fsp 6 here too, using sql`` + return { options, default: `new Date("${trimmed}Z")` }; + }, +}; + +export const DateTime: SqlType = { + is: (type) => /^(?:datetime)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'datetime', + defaultFromDrizzle: Timestamp.defaultFromDrizzle, + defaultFromIntrospect: Timestamp.defaultFromIntrospect, + toTs: Timestamp.toTs, +}; + +export const Time: SqlType = { + is: (type) => /^(?:time)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'time', + defaultFromDrizzle: (value) => { + return `'${String(value)}'`; + }, + defaultFromIntrospect: (value) => { + if (!value.startsWith("'")) return `'${value}'`; + return value; + }, + toTs: (type, def) => { + const options: any = {}; + const [fsp] = parseParams(type); + if (fsp) options['fsp'] = Number(fsp); + + if (!def) return { options, default: '' }; + + const trimmed = trimChar(def, "'"); + return { options, default: `"${trimmed}"` }; + }, +}; + +export const Date_: SqlType = { + is: (type) => /^\s*date\s*$/i.test(type), + drizzleImport: () => 'date', + defaultFromDrizzle: (value) => { + if (value instanceof Date) { + const converted = value.toISOString().split('T')[0]; + return `'${converted}'`; + } + return `'${value}'`; + }, + defaultFromIntrospect: (value) => { + if (!value.startsWith("'")) return `'${value}'`; + return value; + }, + toTs: (type, def) => { + const options: any = {}; + const [fsp] = parseParams(type); + if (fsp) options['fsp'] = Number(fsp); + if (!def) return { options, default: '' }; + return { options, default: `new Date("${trimChar(def, "'")}")` }; + }, +}; + +export const Year: SqlType = { + is: (type) => /^\s*year\s*$/i.test(type), + drizzleImport: () => 'year', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultFromIntrospect: (value) => { + return value; + }, + toTs: (type, def) => { + const options: any = {}; + const [fsp] = parseParams(type); + if (fsp) options['fsp'] = Number(fsp); + + if (!def) return { options, default: '' }; + return { options, default: `${def}` }; + }, +}; + +export const Enum: SqlType = { + is: (type) => /^(?:enum)(?:[\s(].*)?$/i.test(type), + drizzleImport: (vendor) => vendor === 'mysql' ? 'mysqlEnum' : 'singlestoreEnum', + defaultFromDrizzle: (value) => { + return `'${escapeForSqlDefault(value as string)}'`; + }, + defaultFromIntrospect: (value) => { + return `'${escapeForSqlDefault(value)}'`; + }, + toTs: (_, def) => { + if (!def) return { default: '' }; + const unescaped = escapeForTsLiteral(unescapeFromSqlDefault(trimChar(def, "'"))); + return { default: unescaped }; + }, +}; + +export const Custom: SqlType = { + is: () => { + throw Error('Mocked'); + }, + drizzleImport: () => 'customType', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultFromIntrospect: (value) => { + return value; + }, + toTs: (type, def) => { + if (!def) return { default: '', customType: type }; + const unescaped = escapeForTsLiteral(unescapeFromSqlDefault(trimChar(def, "'"))); + return { default: unescaped, customType: type }; + }, +}; + +export const typeFor = (sqlType: string): SqlType => { + if (Boolean.is(sqlType)) return Boolean; + if (TinyInt.is(sqlType)) return TinyInt; + if (SmallInt.is(sqlType)) return SmallInt; + if (MediumInt.is(sqlType)) return MediumInt; + if (Int.is(sqlType)) return Int; + if (BigInt.is(sqlType)) return BigInt; + if (Serial.is(sqlType)) return Serial; + if (Decimal.is(sqlType)) return Decimal; + if (Real.is(sqlType)) return Real; + if (Double.is(sqlType)) return Double; + if (Float.is(sqlType)) return Float; + if (Char.is(sqlType)) return Char; + if (Varchar.is(sqlType)) return Varchar; + if (TinyText.is(sqlType)) return TinyText; + if (MediumText.is(sqlType)) return MediumText; + if (Text.is(sqlType)) return Text; + if (LongText.is(sqlType)) return LongText; + if (Binary.is(sqlType)) return Binary; + if (Varbinary.is(sqlType)) return Varbinary; + if (Json.is(sqlType)) return Json; + if (Timestamp.is(sqlType)) return Timestamp; + if (DateTime.is(sqlType)) return DateTime; + if (Date_.is(sqlType)) return Date_; + if (Time.is(sqlType)) return Time; + if (Year.is(sqlType)) return Year; + if (Enum.is(sqlType)) return Enum; + if (TinyBlob.is(sqlType)) return TinyBlob; + if (MediumBlob.is(sqlType)) return MediumBlob; + if (LongBlob.is(sqlType)) return LongBlob; + if (Blob.is(sqlType)) return Blob; + return Custom; +}; + +type InvalidDefault = 'text_no_parentecies'; +export const checkDefault = (value: string, type: string): InvalidDefault | null => { + if ( + (type === 'tinytext' || type === 'mediumtext' || type === 'text' || type === 'longtext' + || type === 'binary' || type === 'varbinary' + || type === 'json') && !value.startsWith('(') && !value.endsWith(')') + ) { + return 'text_no_parentecies'; + } + + if (type === 'binary' || type === 'varbinary') { + } + + return null; +}; + +export const defaultNameForFK = (fk: Pick) => { + const desired = `${fk.table}_${fk.columns.join('_')}_${fk.tableTo}_${fk.columnsTo.join('_')}_fkey`; + const res = desired.length > 63 + ? fk.table.length < 63 - 18 // _{hash(12)}_fkey + ? `${fk.table}_${hash(desired)}_fkey` + : `${hash(desired)}_fkey` // 1/~3e21 collision chance within single schema, it's fine + : desired; + return res; +}; + +export const nameForUnique = (tableName: string, columns: string[]) => { + return `${columns.join('_')}_unique`; +}; + +const stripCollation = (defaultValue: string): string => { + const coll = 'utf8mb4'; + const escaped = coll.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); + const regex = new RegExp(`_${escaped}(?=(?:\\\\['"]|['"]))`, 'g'); + const res = defaultValue.replace(regex, '').replaceAll("\\'", "'").replaceAll("\\\\'", "''"); + return res; +}; + +export const parseEnum = (it: string) => { + return Array.from(it.matchAll(/'((?:[^']|'')*)'/g), (m) => m[1]); +}; + +export const parseDefaultValue = ( + columnType: string, + value: string | undefined, +): Column['default'] => { + if (value === null || typeof value === 'undefined') return null; + + value = stripCollation(value); + + const grammarType = typeFor(columnType); + if (grammarType) return grammarType.defaultFromIntrospect(value); + + console.error(`unknown default: ${columnType} ${value}`); + return null; +}; + +const commutativeTypes = [ + ['tinyint(1)', 'boolean'], + ['binary(1)', 'binary'], + ['char(1)', 'char'], + ['now()', '(now())', 'CURRENT_TIMESTAMP', '(CURRENT_TIMESTAMP)', 'CURRENT_TIMESTAMP()'], +]; + +export const commutative = (left: string, right: string, mode: 'push' | 'default' = 'default') => { + for (const it of commutativeTypes) { + const leftIn = it.some((x) => x === left); + const rightIn = it.some((x) => x === right); + + if (leftIn && rightIn) return true; + } + + const leftPatched = left.replace(', ', ','); + const rightPatched = right.replace(', ', ','); + if (leftPatched === rightPatched) return true; + + if (mode === 'push') { + if (left === 'double' && right === 'real') return true; + if (left.startsWith('double(') && right.startsWith('real(') && right.replace('real', 'double') === left) { + return true; + } + if (left.startsWith('real(') && right.startsWith('double(') && right.replace('double', 'real') === left) { + return true; + } + if (left.replace(',0)', ')') === right.replace(',0)', ')')) return true; // { from: 'decimal(19,0)', to: 'decimal(19)' } + } + + if ( + (left.startsWith('float(') && right === 'float') + || (right.startsWith('float(') && left === 'float') + ) { + return true; // column type is float regardless of float(M,D), always stored as 7 digits precision + } + return false; +}; + +const commutativeCharSetAndCollation: { charSet: string; collation: string; isDefault: boolean }[] = [ + { collation: 'armscii8_bin', charSet: 'armscii8', isDefault: false }, + { collation: 'armscii8_general_ci', charSet: 'armscii8', isDefault: true }, + { collation: 'ascii_bin', charSet: 'ascii', isDefault: false }, + { collation: 'ascii_general_ci', charSet: 'ascii', isDefault: true }, + { collation: 'big5_bin', charSet: 'big5', isDefault: false }, + { collation: 'big5_chinese_ci', charSet: 'big5', isDefault: true }, + { collation: 'binary', charSet: 'binary', isDefault: true }, + { collation: 'cp1250_bin', charSet: 'cp1250', isDefault: false }, + { collation: 'cp1250_croatian_ci', charSet: 'cp1250', isDefault: false }, + { collation: 'cp1250_czech_cs', charSet: 'cp1250', isDefault: false }, + { collation: 'cp1250_general_ci', charSet: 'cp1250', isDefault: true }, + { collation: 'cp1250_polish_ci', charSet: 'cp1250', isDefault: false }, + { collation: 'cp1251_bin', charSet: 'cp1251', isDefault: false }, + { collation: 'cp1251_bulgarian_ci', charSet: 'cp1251', isDefault: false }, + { collation: 'cp1251_general_ci', charSet: 'cp1251', isDefault: true }, + { collation: 'cp1251_general_cs', charSet: 'cp1251', isDefault: false }, + { collation: 'cp1251_ukrainian_ci', charSet: 'cp1251', isDefault: false }, + { collation: 'cp1256_bin', charSet: 'cp1256', isDefault: false }, + { collation: 'cp1256_general_ci', charSet: 'cp1256', isDefault: true }, + { collation: 'cp1257_bin', charSet: 'cp1257', isDefault: false }, + { collation: 'cp1257_general_ci', charSet: 'cp1257', isDefault: true }, + { collation: 'cp1257_lithuanian_ci', charSet: 'cp1257', isDefault: false }, + { collation: 'cp850_bin', charSet: 'cp850', isDefault: false }, + { collation: 'cp850_general_ci', charSet: 'cp850', isDefault: true }, + { collation: 'cp852_bin', charSet: 'cp852', isDefault: false }, + { collation: 'cp852_general_ci', charSet: 'cp852', isDefault: true }, + { collation: 'cp866_bin', charSet: 'cp866', isDefault: false }, + { collation: 'cp866_general_ci', charSet: 'cp866', isDefault: true }, + { collation: 'cp932_bin', charSet: 'cp932', isDefault: false }, + { collation: 'cp932_japanese_ci', charSet: 'cp932', isDefault: true }, + { collation: 'dec8_bin', charSet: 'dec8', isDefault: false }, + { collation: 'dec8_swedish_ci', charSet: 'dec8', isDefault: true }, + { collation: 'eucjpms_bin', charSet: 'eucjpms', isDefault: false }, + { collation: 'eucjpms_japanese_ci', charSet: 'eucjpms', isDefault: true }, + { collation: 'euckr_bin', charSet: 'euckr', isDefault: false }, + { collation: 'euckr_korean_ci', charSet: 'euckr', isDefault: true }, + { collation: 'gb18030_bin', charSet: 'gb18030', isDefault: false }, + { collation: 'gb18030_chinese_ci', charSet: 'gb18030', isDefault: true }, + { collation: 'gb18030_unicode_520_ci', charSet: 'gb18030', isDefault: false }, + { collation: 'gb2312_bin', charSet: 'gb2312', isDefault: false }, + { collation: 'gb2312_chinese_ci', charSet: 'gb2312', isDefault: true }, + { collation: 'gbk_bin', charSet: 'gbk', isDefault: false }, + { collation: 'gbk_chinese_ci', charSet: 'gbk', isDefault: true }, + { collation: 'geostd8_bin', charSet: 'geostd8', isDefault: false }, + { collation: 'geostd8_general_ci', charSet: 'geostd8', isDefault: true }, + { collation: 'greek_bin', charSet: 'greek', isDefault: false }, + { collation: 'greek_general_ci', charSet: 'greek', isDefault: true }, + { collation: 'hebrew_bin', charSet: 'hebrew', isDefault: false }, + { collation: 'hebrew_general_ci', charSet: 'hebrew', isDefault: true }, + { collation: 'hp8_bin', charSet: 'hp8', isDefault: false }, + { collation: 'hp8_english_ci', charSet: 'hp8', isDefault: true }, + { collation: 'keybcs2_bin', charSet: 'keybcs2', isDefault: false }, + { collation: 'keybcs2_general_ci', charSet: 'keybcs2', isDefault: true }, + { collation: 'koi8r_bin', charSet: 'koi8r', isDefault: false }, + { collation: 'koi8r_general_ci', charSet: 'koi8r', isDefault: true }, + { collation: 'koi8u_bin', charSet: 'koi8u', isDefault: false }, + { collation: 'koi8u_general_ci', charSet: 'koi8u', isDefault: true }, + { collation: 'latin1_bin', charSet: 'latin1', isDefault: false }, + { collation: 'latin1_danish_ci', charSet: 'latin1', isDefault: false }, + { collation: 'latin1_general_ci', charSet: 'latin1', isDefault: false }, + { collation: 'latin1_general_cs', charSet: 'latin1', isDefault: false }, + { collation: 'latin1_german1_ci', charSet: 'latin1', isDefault: false }, + { collation: 'latin1_german2_ci', charSet: 'latin1', isDefault: false }, + { collation: 'latin1_spanish_ci', charSet: 'latin1', isDefault: false }, + { collation: 'latin1_swedish_ci', charSet: 'latin1', isDefault: true }, + { collation: 'latin2_bin', charSet: 'latin2', isDefault: false }, + { collation: 'latin2_croatian_ci', charSet: 'latin2', isDefault: false }, + { collation: 'latin2_czech_cs', charSet: 'latin2', isDefault: false }, + { collation: 'latin2_general_ci', charSet: 'latin2', isDefault: true }, + { collation: 'latin2_hungarian_ci', charSet: 'latin2', isDefault: false }, + { collation: 'latin5_bin', charSet: 'latin5', isDefault: false }, + { collation: 'latin5_turkish_ci', charSet: 'latin5', isDefault: true }, + { collation: 'latin7_bin', charSet: 'latin7', isDefault: false }, + { collation: 'latin7_estonian_cs', charSet: 'latin7', isDefault: false }, + { collation: 'latin7_general_ci', charSet: 'latin7', isDefault: true }, + { collation: 'latin7_general_cs', charSet: 'latin7', isDefault: false }, + { collation: 'macce_bin', charSet: 'macce', isDefault: false }, + { collation: 'macce_general_ci', charSet: 'macce', isDefault: true }, + { collation: 'macroman_bin', charSet: 'macroman', isDefault: false }, + { collation: 'macroman_general_ci', charSet: 'macroman', isDefault: true }, + { collation: 'sjis_bin', charSet: 'sjis', isDefault: false }, + { collation: 'sjis_japanese_ci', charSet: 'sjis', isDefault: true }, + { collation: 'swe7_bin', charSet: 'swe7', isDefault: false }, + { collation: 'swe7_swedish_ci', charSet: 'swe7', isDefault: true }, + { collation: 'tis620_bin', charSet: 'tis620', isDefault: false }, + { collation: 'tis620_thai_ci', charSet: 'tis620', isDefault: true }, + { collation: 'ucs2_bin', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_croatian_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_czech_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_danish_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_esperanto_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_estonian_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_general_ci', charSet: 'ucs2', isDefault: true }, + { collation: 'ucs2_general_mysql500_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_german2_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_hungarian_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_icelandic_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_latvian_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_lithuanian_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_persian_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_polish_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_romanian_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_roman_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_sinhala_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_slovak_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_slovenian_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_spanish2_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_spanish_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_swedish_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_turkish_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_unicode_520_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_unicode_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ucs2_vietnamese_ci', charSet: 'ucs2', isDefault: false }, + { collation: 'ujis_bin', charSet: 'ujis', isDefault: false }, + { collation: 'ujis_japanese_ci', charSet: 'ujis', isDefault: true }, + { collation: 'utf16_bin', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_croatian_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_czech_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_danish_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_esperanto_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_estonian_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_general_ci', charSet: 'utf16', isDefault: true }, + { collation: 'utf16_german2_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_hungarian_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_icelandic_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_latvian_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_lithuanian_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_persian_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_polish_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_romanian_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_roman_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_sinhala_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_slovak_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_slovenian_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_spanish2_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_spanish_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_swedish_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_turkish_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_unicode_520_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_unicode_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16_vietnamese_ci', charSet: 'utf16', isDefault: false }, + { collation: 'utf16le_bin', charSet: 'utf16le', isDefault: false }, + { collation: 'utf16le_general_ci', charSet: 'utf16le', isDefault: true }, + { collation: 'utf32_bin', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_croatian_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_czech_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_danish_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_esperanto_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_estonian_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_general_ci', charSet: 'utf32', isDefault: true }, + { collation: 'utf32_german2_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_hungarian_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_icelandic_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_latvian_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_lithuanian_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_persian_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_polish_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_romanian_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_roman_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_sinhala_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_slovak_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_slovenian_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_spanish2_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_spanish_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_swedish_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_turkish_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_unicode_520_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_unicode_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf32_vietnamese_ci', charSet: 'utf32', isDefault: false }, + { collation: 'utf8mb3_bin', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_croatian_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_czech_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_danish_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_esperanto_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_estonian_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_general_ci', charSet: 'utf8mb3', isDefault: true }, + { collation: 'utf8mb3_general_mysql500_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_german2_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_hungarian_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_icelandic_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_latvian_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_lithuanian_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_persian_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_polish_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_romanian_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_roman_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_sinhala_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_slovak_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_slovenian_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_spanish2_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_spanish_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_swedish_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_tolower_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_turkish_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_unicode_520_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_unicode_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb3_vietnamese_ci', charSet: 'utf8mb3', isDefault: false }, + { collation: 'utf8mb4_0900_ai_ci', charSet: 'utf8mb4', isDefault: true }, + { collation: 'utf8mb4_0900_as_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_0900_bin', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_bg_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_bg_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_bin', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_bs_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_bs_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_croatian_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_cs_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_cs_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_czech_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_danish_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_da_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_da_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_de_pb_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_de_pb_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_eo_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_eo_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_esperanto_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_estonian_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_es_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_es_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_es_trad_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_es_trad_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_et_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_et_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_general_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_german2_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_gl_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_gl_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_hr_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_hr_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_hungarian_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_hu_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_hu_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_icelandic_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_is_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_is_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_ja_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_ja_0900_as_cs_ks', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_latvian_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_la_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_la_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_lithuanian_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_lt_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_lt_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_lv_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_lv_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_mn_cyrl_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_mn_cyrl_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_nb_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_nb_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_nn_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_nn_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_persian_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_pl_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_pl_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_polish_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_romanian_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_roman_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_ro_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_ro_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_ru_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_ru_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sinhala_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sk_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sk_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_slovak_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_slovenian_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sl_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sl_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_spanish2_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_spanish_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sr_latn_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sr_latn_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sv_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_sv_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_swedish_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_tr_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_tr_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_turkish_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_unicode_520_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_unicode_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_vietnamese_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_vi_0900_ai_ci', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_vi_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, + { collation: 'utf8mb4_zh_0900_as_cs', charSet: 'utf8mb4', isDefault: false }, +]; +export const charSetAndCollationCommutative = ( + left: { charSet: string | null; collation: string | null }, + right: { collation: string | null; charSet: string | null }, +): boolean => { + if (!left.charSet && !left.collation && !right.charSet && !right.collation) return true; + + const normalize = (input: { charSet: string | null; collation: string | null }) => { + let { charSet, collation } = input; + + if (!charSet && collation) { + const match = commutativeCharSetAndCollation.find((x) => x.collation === collation); + if (!match) return null; + charSet = match.charSet; + } + + if (charSet && !collation) { + const match = commutativeCharSetAndCollation.find((x) => x.charSet === charSet && x.isDefault); + if (!match) return null; + collation = match.collation; + } + + if (charSet && collation) { + const match = commutativeCharSetAndCollation.find((x) => x.charSet === charSet && x.collation === collation); + if (!match) return null; // invalid combination + } + + return { charSet, collation }; + }; + + const leftNorm = normalize(left); + const rightNorm = normalize(right); + + if (!leftNorm || !rightNorm) return false; + + return leftNorm.charSet === rightNorm.charSet && leftNorm.collation === rightNorm.collation; +}; diff --git a/drizzle-kit/src/dialects/mysql/introspect.ts b/drizzle-kit/src/dialects/mysql/introspect.ts new file mode 100644 index 0000000000..3afcde7546 --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/introspect.ts @@ -0,0 +1,439 @@ +import type { IntrospectStage, IntrospectStatus } from 'src/cli/views'; +import type { DB } from '../../utils'; +import type { EntityFilter } from '../pull-utils'; +import type { ForeignKey, Index, InterimSchema, PrimaryKey } from './ddl'; +import { parseDefaultValue } from './grammar'; + +export const fromDatabaseForDrizzle = async ( + db: DB, + schema: string, + filter: EntityFilter = () => true, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, +): Promise => { + const res = await fromDatabase(db, schema, filter, progressCallback); + res.indexes = res.indexes.filter((x) => { + let skip = x.isUnique === true && x.columns.length === 1 && x.columns[0].isExpression === false; + skip &&= res.columns.some((c) => c.type === 'serial' && c.table === x.table && c.name === x.columns[0].value); + skip ||= res.fks.some((fk) => x.table === fk.table && x.name === fk.name); + return !skip; + }); + + return res; +}; + +export const fromDatabase = async ( + db: DB, + schema: string, + filter: EntityFilter, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, + queryCallback: ( + id: string, + rows: Record[], + error: Error | null, + ) => void = () => {}, +): Promise => { + const res: InterimSchema = { + tables: [], + columns: [], + pks: [], + fks: [], + checks: [], + indexes: [], + views: [], + viewColumns: [], + }; + + // TODO revise: perfomance_schema contains 'users' table + const tablesAndViews = await db.query<{ name: string; type: 'BASE TABLE' | 'VIEW' }>(` + SELECT + TABLE_NAME as name, + TABLE_TYPE as type + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_SCHEMA = '${schema}' + ORDER BY lower(TABLE_NAME); + `).then((rows) => { + queryCallback('tables', rows, null); + return rows.filter((it) => { + return filter({ type: 'table', schema: false, name: it.name }); + }); + }).catch((err) => { + queryCallback('tables', [], err); + throw err; + }); + + const columns = await db.query(` + SELECT + * + FROM information_schema.columns + WHERE table_schema = '${schema}' and table_name != '__drizzle_migrations' + ORDER BY lower(table_name), ordinal_position; + `).then((rows) => { + const filtered = rows.filter((it) => tablesAndViews.some((x) => it['TABLE_NAME'] === x.name)); + queryCallback('columns', filtered, null); + return filtered; + }).catch((err) => { + queryCallback('columns', [], err); + throw err; + }); + + const idxs = await db.query(` + SELECT + * + FROM INFORMATION_SCHEMA.STATISTICS + WHERE INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = '${schema}' + AND INFORMATION_SCHEMA.STATISTICS.TABLE_NAME != '__drizzle_migrations' + AND INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY' + ORDER BY lower(INDEX_NAME); + `).then((rows) => { + const filtered = rows.filter((it) => tablesAndViews.some((x) => it['TABLE_NAME'] === x.name)); + queryCallback('indexes', filtered, null); + return filtered; + }).catch((err) => { + queryCallback('indexes', [], err); + throw err; + }); + + const defaultCharSetAndCollation = await db.query<{ default_charset: string; default_collation: string }>(` + SELECT + DEFAULT_CHARACTER_SET_NAME AS default_charset, + DEFAULT_COLLATION_NAME AS default_collation + FROM information_schema.SCHEMATA + WHERE SCHEMA_NAME = '${schema}'; + `); + + const filteredTablesAndViews = tablesAndViews.filter((it) => columns.some((x) => x['TABLE_NAME'] === it.name)); + const tables = filteredTablesAndViews.filter((it) => it.type === 'BASE TABLE').map((it) => it.name); + for (const table of tables) { + res.tables.push({ + entityType: 'tables', + name: table, + }); + } + + let columnsCount = 0; + let indexesCount = 0; + let foreignKeysCount = 0; + let checksCount = 0; + let viewsCount = 0; + + for (const column of columns.filter((it) => tables.some((x) => x === it['TABLE_NAME']))) { + columnsCount += 1; + progressCallback('columns', columnsCount, 'fetching'); + + const table = column['TABLE_NAME']; + const name: string = column['COLUMN_NAME']; + const isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO' + const columnType = column['COLUMN_TYPE']; // varchar(256) + const columnDefault: string = column['COLUMN_DEFAULT'] ?? null; + const dbCollation: string = column['COLLATION_NAME']; + const dbCharSet: string = column['CHARACTER_SET_NAME']; + const geenratedExpression: string = column['GENERATION_EXPRESSION']; + + const extra = column['EXTRA'] ?? ''; + // const isDefaultAnExpression = extra.includes('DEFAULT_GENERATED'); // 'auto_increment', '' + // const dataType = column['DATA_TYPE']; // varchar + const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', '' + // const numericPrecision = column['NUMERIC_PRECISION']; + // const numericScale = column['NUMERIC_SCALE']; + const isAutoincrement = extra === 'auto_increment'; + const onUpdateNow: boolean = extra.includes('on update CURRENT_TIMESTAMP'); + + const onUpdateNowFspMatch = typeof extra === 'string' + ? extra.match(/\bON\s+UPDATE\s+CURRENT_TIMESTAMP(?:\((\d+)\))?/i) + : null; + const onUpdateNowFsp = onUpdateNow && onUpdateNowFspMatch && onUpdateNowFspMatch[1] + ? Number(onUpdateNowFspMatch[1]) + : null; + + let changedType = columnType.replace('decimal(10,0)', 'decimal'); + + if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) { + const uniqueIdx = idxs.filter( + (it) => + it['COLUMN_NAME'] === name + && it['TABLE_NAME'] === table + && it['NON_UNIQUE'] === 0, + ); + if (uniqueIdx && uniqueIdx.length === 1) { + changedType = columnType.replace('bigint unsigned', 'serial'); + } + } + + const def = parseDefaultValue(changedType, columnDefault); + + const { default_charset: defDbCharSet, default_collation: defDbCollation } = defaultCharSetAndCollation[0]; + let charSet: string | null = dbCharSet; + let collation: string | null = dbCollation; + if (defDbCharSet === dbCharSet && defDbCollation === dbCollation) { + charSet = null; + collation = null; + } + + res.columns.push({ + entityType: 'columns', + table: table, + name: name, + type: changedType, + isPK: isPrimary, // isPK is an interim flag we use in Drizzle Schema and ignore in database introspect + notNull: !isNullable, + autoIncrement: isAutoincrement, + collation: collation, + charSet: charSet, + onUpdateNow, + onUpdateNowFsp, + default: def, + generated: geenratedExpression + ? { + as: geenratedExpression, + type: extra === 'VIRTUAL GENERATED' ? 'virtual' : 'stored', + } + : null, + isUnique: false, + uniqueName: null, + }); + } + + const pks = await db.query(` + SELECT + CONSTRAINT_NAME, table_name, column_name, ordinal_position + FROM information_schema.table_constraints t + LEFT JOIN information_schema.key_column_usage k USING(constraint_name,table_schema,table_name) + WHERE t.constraint_type='PRIMARY KEY' + AND table_name != '__drizzle_migrations' + AND t.table_schema = '${schema}' + ORDER BY ordinal_position + `).then((rows) => { + queryCallback('pks', rows, null); + return rows; + }).catch((err) => { + queryCallback('pks', [], err); + throw err; + }); + + const tableToPKs = pks.filter((it) => tables.some((x) => x === it['TABLE_NAME'])).reduce>( + (acc, it) => { + const table: string = it['TABLE_NAME']; + const column: string = it['COLUMN_NAME']; + // const position: string = it['ordinal_position']; + + if (table in acc) { + acc[table].columns.push(column); + } else { + acc[table] = { + entityType: 'pks', + table, + name: it['CONSTRAINT_NAME'], + columns: [column], + }; + } + return acc; + }, + {} as Record, + ); + + for (const pk of Object.values(tableToPKs)) { + res.pks.push(pk); + } + + progressCallback('columns', columnsCount, 'done'); + progressCallback('tables', tables.length, 'done'); + + const fks = await db.query(` + SELECT + kcu.TABLE_SCHEMA, + kcu.TABLE_NAME, + kcu.CONSTRAINT_NAME, + kcu.COLUMN_NAME, + kcu.REFERENCED_TABLE_SCHEMA, + kcu.REFERENCED_TABLE_NAME, + kcu.REFERENCED_COLUMN_NAME, + rc.UPDATE_RULE, + rc.DELETE_RULE + FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu + LEFT JOIN information_schema.referential_constraints rc ON kcu.CONSTRAINT_NAME = rc.CONSTRAINT_NAME + WHERE kcu.TABLE_SCHEMA = '${schema}' + AND kcu.CONSTRAINT_NAME != 'PRIMARY' + AND kcu.REFERENCED_TABLE_NAME IS NOT NULL; + `).then((rows) => { + queryCallback('fks', rows, null); + return rows; + }).catch((err) => { + queryCallback('fks', [], err); + throw err; + }); + + const filteredFKs = fks.filter((it) => tables.some((x) => x === it['TABLE_NAME'])); + const groupedFKs = filteredFKs.reduce>( + (acc, it) => { + const name = it['CONSTRAINT_NAME']; + const table: string = it['TABLE_NAME']; + const column: string = it['COLUMN_NAME']; + const refTable: string = it['REFERENCED_TABLE_NAME']; + const refColumn: string = it['REFERENCED_COLUMN_NAME']; + const updateRule: string = it['UPDATE_RULE']; + const deleteRule: string = it['DELETE_RULE']; + + const key = `${table}:${name}`; + + if (key in acc) { + const entry = acc[key]; + entry.columns.push(column); + entry.columnsTo.push(refColumn); + } else { + acc[key] = { + entityType: 'fks', + name, + table, + tableTo: refTable, + columns: [column], + columnsTo: [refColumn], + onDelete: deleteRule?.toUpperCase() as ForeignKey['onUpdate'] ?? 'NO ACTION', + onUpdate: updateRule?.toUpperCase() as ForeignKey['onUpdate'] ?? 'NO ACTION', + nameExplicit: true, + } satisfies ForeignKey; + } + return acc; + }, + {} as Record, + ); + + for (const fk of Object.values(groupedFKs)) { + foreignKeysCount += 1; + progressCallback('fks', foreignKeysCount, 'fetching'); + res.fks.push(fk); + } + + progressCallback('fks', foreignKeysCount, 'done'); + + const groupedIndexes = idxs.reduce>((acc, it) => { + const name = it['INDEX_NAME']; + const table = it['TABLE_NAME']; + const column: string = it['COLUMN_NAME']; + const isUnique = it['NON_UNIQUE'] === 0; + const expression = it['EXPRESSION']; + + const key = `${table}:${name}`; + + if (key in acc) { + const entry = acc[key]; + entry.columns.push({ + value: expression ? expression : column, + isExpression: !!expression, + }); + } else { + acc[key] = { + entityType: 'indexes', + table, + name, + columns: [{ + value: expression ? expression : column, + isExpression: !!expression, + }], + isUnique, + algorithm: null, + lock: null, + using: null, + nameExplicit: true, + } satisfies Index; + } + return acc; + }, {} as Record); + + for (const index of Object.values(groupedIndexes)) { + res.indexes.push(index); + indexesCount += 1; + progressCallback('indexes', indexesCount, 'fetching'); + } + + const views = await db.query( + `select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema = '${schema}';`, + ); + + viewsCount = views.length; + progressCallback('views', viewsCount, 'fetching'); + + for await (const view of views) { + const name = view['TABLE_NAME']; + const definition = view['VIEW_DEFINITION']; + + const checkOption = view['CHECK_OPTION'] as string | undefined; + + const withCheckOption = !checkOption || checkOption === 'NONE' + ? null + : checkOption.toLowerCase(); + + const sqlSecurity = view['SECURITY_TYPE'].toLowerCase(); + + const [createSqlStatement] = await db.query(`SHOW CREATE VIEW \`${name}\`;`); + const algorithmMatch = createSqlStatement['Create View'].match(/ALGORITHM=([^ ]+)/); + const algorithm = algorithmMatch ? algorithmMatch[1].toLowerCase() : null; + + const viewColumns = columns.filter((it) => it['TABLE_NAME'] === name); + + for (const column of viewColumns) { + res.viewColumns.push({ + view: name, + name: column['COLUMN_NAME'], + notNull: column['IS_NULLABLE'] === 'NO', + type: column['DATA_TYPE'], + }); + } + + res.views.push({ + entityType: 'views', + name, + definition, + algorithm: algorithm, + sqlSecurity, + withCheckOption: withCheckOption as 'local' | 'cascaded' | null, + }); + } + + progressCallback('indexes', indexesCount, 'done'); + progressCallback('views', viewsCount, 'done'); + + const checks = await db.query(` + SELECT + tc.table_name, + tc.constraint_name, + cc.check_clause + FROM information_schema.table_constraints tc + JOIN information_schema.check_constraints cc ON tc.constraint_name = cc.constraint_name + WHERE tc.constraint_schema = '${schema}' + AND tc.constraint_type = 'CHECK'; + `).then((rows) => { + queryCallback('checks', rows, null); + return rows; + }).catch((err) => { + queryCallback('checks', [], err); + throw err; + }); + + checksCount += checks.length; + progressCallback('checks', checksCount, 'fetching'); + + for (const check of checks.filter((it) => tables.some((x) => x === it['TABLE_NAME']))) { + const table = check['TABLE_NAME']; + const name = check['CONSTRAINT_NAME']; + const value = check['CHECK_CLAUSE']; + + res.checks.push({ + entityType: 'checks', + table, + name, + value, + }); + } + + progressCallback('checks', checksCount, 'done'); + + return res; +}; diff --git a/drizzle-kit/src/dialects/mysql/serializer.ts b/drizzle-kit/src/dialects/mysql/serializer.ts new file mode 100644 index 0000000000..4728211ea5 --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/serializer.ts @@ -0,0 +1,82 @@ +import { mysqlSchemaError as schemaError } from 'src/cli/views'; +import type { CasingType } from '../../cli/validations/common'; +import { prepareFilenames } from '../../utils/utils-node'; +import type { MysqlDDL, SchemaError } from './ddl'; +import { createDDL, interimToDDL } from './ddl'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from './drizzle'; +import type { MysqlSnapshot } from './snapshot'; +import { drySnapshot, snapshotValidator } from './snapshot'; +export const prepareSnapshot = async ( + snapshots: string[], + schemaPath: string | string[], + casing: CasingType | undefined, +): Promise< + { + ddlPrev: MysqlDDL; + ddlCur: MysqlDDL; + snapshot: MysqlSnapshot; + snapshotPrev: MysqlSnapshot; + custom: MysqlSnapshot; + errors2: SchemaError[]; + } +> => { + const { readFileSync } = await import('fs'); + const { randomUUID } = await import('crypto'); + const prevSnapshot = snapshots.length === 0 + ? drySnapshot + : snapshotValidator.strict(JSON.parse(readFileSync(snapshots[snapshots.length - 1]).toString())); + + const ddlPrev = createDDL(); + for (const entry of prevSnapshot.ddl) { + ddlPrev.entities.push(entry); + } + const filenames = prepareFilenames(schemaPath); + const res = await prepareFromSchemaFiles(filenames); + + const interim = fromDrizzleSchema( + res.tables, + res.views, + casing, + ); + + // TODO: errors + // if (warnings.length > 0) { + // console.log(warnings.map((it) => schemaWarning(it)).join('\n\n')); + // } + + // if (errors.length > 0) { + // console.log(errors.map((it) => schemaError(it)).join('\n')); + // process.exit(1); + // } + + const { ddl: ddlCur, errors: errors2 } = interimToDDL(interim); + + // TODO: handle errors + if (errors2.length > 0) { + console.log(errors2.map((it) => schemaError(it)).join('\n')); + process.exit(1); + } + + const id = randomUUID(); + const prevIds = [prevSnapshot.id]; + + const snapshot = { + version: '6', + dialect: 'mysql', + id, + prevIds, + ddl: ddlCur.entities.list(), + renames: [], + } satisfies MysqlSnapshot; + + const { id: _ignoredId, prevIds: _ignoredPrevIds, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: MysqlSnapshot = { + id, + prevIds, + ...prevRest, + }; + + return { ddlPrev, ddlCur, snapshot, snapshotPrev: prevSnapshot, custom, errors2 }; +}; diff --git a/drizzle-kit/src/dialects/mysql/snapshot.ts b/drizzle-kit/src/dialects/mysql/snapshot.ts new file mode 100644 index 0000000000..83c6edf22f --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/snapshot.ts @@ -0,0 +1,253 @@ +import { randomUUID } from 'crypto'; +import { any, array as zArray, boolean, enum as enumType, literal, object, record, string, type TypeOf } from 'zod'; +import { originUUID } from '../../utils'; +import { array, validator } from '../simpleValidator'; +import type { MysqlDDL, MysqlEntity } from './ddl'; +import { createDDL } from './ddl'; + +// ------- V3 -------- +const index = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + using: enumType(['btree', 'hash']).optional(), + algorithm: enumType(['default', 'inplace', 'copy']).optional(), + lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + autoincrement: boolean().optional(), + default: any().optional(), + onUpdate: any().optional(), + generated: object({ + type: enumType(['stored', 'virtual']), + as: string(), + }).optional(), +}).strict(); + +const tableV3 = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), +}).strict(); + +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + +const tableV4 = object({ + name: string(), + schema: string().optional(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), +}).strict(); + +const table = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), + checkConstraint: record(string(), checkConstraint).default({}), +}).strict(); + +const viewMeta = object({ + algorithm: enumType(['undefined', 'merge', 'temptable']), + sqlSecurity: enumType(['definer', 'invoker']), + withCheckOption: enumType(['local', 'cascaded']).optional(), +}).strict(); + +export const view = object({ + name: string(), + columns: record(string(), column), + definition: string().optional(), + isExisting: boolean(), +}).strict().merge(viewMeta); +// type SquasherViewMeta = Omit, 'definer'>; + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ isDefaultAnExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), + indexes: record( + string(), + object({ + columns: record( + string(), + object({ isExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), +}).optional(); + +// use main dialect +const dialect = literal('mysql'); + +const schemaHash = object({ + id: string(), + prevIds: zArray(string()), +}); + +const schemaHashV6 = object({ + id: string(), + prevId: string(), +}); + +export const schemaInternalV3 = object({ + version: literal('3'), + dialect: dialect, + tables: record(string(), tableV3), +}).strict(); + +export const schemaInternalV4 = object({ + version: literal('4'), + dialect: dialect, + tables: record(string(), tableV4), + schemas: record(string(), string()), +}).strict(); + +export const schemaInternalV5 = object({ + version: literal('5'), + dialect: dialect, + tables: record(string(), table), + schemas: record(string(), string()), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schemaInternal = object({ + version: literal('5'), + dialect: dialect, + tables: record(string(), table), + views: record(string(), view).default({}), + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schemaV3 = schemaInternalV3.merge(schemaHash); +export const schemaV4 = schemaInternalV4.merge(schemaHash); +export const schemaV5 = schemaInternalV5.merge(schemaHash); +export const schemaV6 = schemaInternal.merge(schemaHashV6); +export const schema = schemaInternal.merge(schemaHash); + +export type Table = TypeOf; +export type Column = TypeOf; +export type SchemaV4 = TypeOf; +export type SchemaV5 = TypeOf; +export type SchemaV6 = TypeOf; +export type Schema = TypeOf; + +const tableSquashedV4 = object({ + name: string(), + schema: string().optional(), + columns: record(string(), column), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), +}).strict(); + +const tableSquashed = object({ + name: string(), + schema: string().optional(), + columns: record(string(), column), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()).default({}), + checkConstraints: record(string(), string()).default({}), +}).strict(); + +const viewSquashed = view.omit({ + algorithm: true, + sqlSecurity: true, + withCheckOption: true, +}).extend({ meta: string() }); + +export const schemaSquashed = object({ + version: literal('5'), + dialect: dialect, + tables: record(string(), tableSquashed), + views: record(string(), viewSquashed), +}).strict(); + +export const schemaSquashedV4 = object({ + version: literal('4'), + dialect: dialect, + tables: record(string(), tableSquashedV4), + schemas: record(string(), string()), +}).strict(); + +export const mysqlSchema = schema; +export const mysqlSchemaV3 = schemaV3; +export const mysqlSchemaV4 = schemaV4; +export const mysqlSchemaV5 = schemaV5; +export const mysqlSchemaSquashed = schemaSquashed; +export type MysqlSchemaV6 = SchemaV6; +export type MysqlSchema = Schema; + +const ddl = createDDL(); +export const snapshotValidator = validator({ + version: ['6'], + dialect: ['mysql'], + id: 'string', + prevIds: array((_) => true), + ddl: array((it) => ddl.entities.validate(it)), + renames: array((_) => true), +}); + +export type MysqlSnapshot = typeof snapshotValidator.shape; + +export const toJsonSnapshot = (ddl: MysqlDDL, prevIds: string[], renames: string[]): MysqlSnapshot => { + return { dialect: 'mysql', id: randomUUID(), prevIds, version: '6', ddl: ddl.entities.list(), renames }; +}; + +export const drySnapshot = snapshotValidator.strict( + { + version: '6', + dialect: 'mysql', + id: originUUID, + prevIds: [], + ddl: [], + renames: [], + } satisfies MysqlSnapshot, +); diff --git a/drizzle-kit/src/dialects/mysql/statements.ts b/drizzle-kit/src/dialects/mysql/statements.ts new file mode 100644 index 0000000000..555f0cc053 --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/statements.ts @@ -0,0 +1,153 @@ +import type { Simplify } from '../../utils'; +import type { CheckConstraint, Column, DiffEntities, ForeignKey, Index, PrimaryKey, TableFull, View } from './ddl'; + +export interface CreateTable { + type: 'create_table'; + table: TableFull; +} + +export interface DropTable { + type: 'drop_table'; + table: string; +} +export interface RenameTable { + type: 'rename_table'; + from: string; + to: string; +} + +export interface AddColumn { + type: 'add_column'; + column: Column; + isPK: boolean; +} + +export interface DropColumn { + type: 'drop_column'; + column: Column; +} + +export interface RenameColumn { + type: 'rename_column'; + table: string; + from: string; + to: string; +} + +export interface AlterColumn { + type: 'alter_column'; + diff: DiffEntities['columns']; + column: Column; + isPK: boolean; + wasPK: boolean; + origin: { + column: string; + table: string; + }; +} + +export interface RecreateColumn { + type: 'recreate_column'; + column: Column; + isPK: boolean; + diff: DiffEntities['columns']; +} + +export interface CreateIndex { + type: 'create_index'; + index: Index; +} + +export interface DropIndex { + type: 'drop_index'; + index: Index; +} + +export interface CreateFK { + type: 'create_fk'; + fk: ForeignKey; +} + +export interface CreatePK { + type: 'create_pk'; + pk: PrimaryKey; +} + +export interface CreatePK { + type: 'create_pk'; + pk: PrimaryKey; +} + +export interface DropPK { + type: 'drop_pk'; + pk: PrimaryKey; +} + +export interface DropConstraint { + type: 'drop_constraint'; + table: string; + constraint: string; + dropAutoIndex: boolean; +} + +export interface CreateView { + type: 'create_view'; + view: View; + replace: boolean; +} + +export interface DropView { + type: 'drop_view'; + name: string; +} + +export interface RenameView { + type: 'rename_view'; + from: string; + to: string; +} + +export interface AlterView { + type: 'alter_view'; + diff: DiffEntities['views']; + view: View; +} + +export interface CreateCheck { + type: 'create_check'; + check: CheckConstraint; +} + +export type JsonStatement = + | CreateTable + | DropTable + | RenameTable + | AddColumn + | DropColumn + | RenameColumn + | AlterColumn + | RecreateColumn + | CreateIndex + | DropIndex + | CreateFK + | CreatePK + | DropPK + | CreateView + | DropView + | RenameView + | AlterView + | DropConstraint + | CreateCheck; + +export const prepareStatement = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + args: Omit, +): Simplify => { + return { + type, + ...args, + } as TStatement; +}; diff --git a/drizzle-kit/src/dialects/mysql/typescript.ts b/drizzle-kit/src/dialects/mysql/typescript.ts new file mode 100644 index 0000000000..fc201852a5 --- /dev/null +++ b/drizzle-kit/src/dialects/mysql/typescript.ts @@ -0,0 +1,435 @@ +import { toCamelCase } from 'drizzle-orm/casing'; +import type { Casing } from 'src/cli/validations/common'; +import { assertUnreachable } from '../../utils'; +import { inspect } from '../utils'; +import type { CheckConstraint, Column, ForeignKey, Index, MysqlDDL, PrimaryKey, ViewColumn } from './ddl'; +import { Enum, parseEnum, typeFor } from './grammar'; + +export const imports = [ + 'boolean', + 'tinyint', + 'smallint', + 'mediumint', + 'int', + 'bigint', + 'binary', + 'char', + 'date', + 'datetime', + 'decimal', + 'double', + 'float', + 'json', + 'real', + 'serial', + 'text', + 'tinytext', + 'mediumtext', + 'longtext', + 'time', + 'timestamp', + 'varbinary', + 'varchar', + 'year', + 'mysqlEnum', + 'singlestoreEnum', + 'customType', + 'mediumblob', + 'blob', + 'tinyblob', + 'longblob', + // TODO: add new type BSON + // TODO: add new type UUID + // TODO: add new type GUID + // TODO: add new type Vector + // TODO: add new type GeoPoint +] as const; +export type Import = typeof imports[number]; + +const mysqlImportsList = new Set([ + 'mysqlTable', + 'singlestoreTable', + ...imports, +]); + +const objToStatement2 = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const relations = new Set(); + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const prepareCasing = (casing?: Casing) => (value: string) => { + if (casing === 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(toCamelCase(value)); + } + + assertUnreachable(casing); +}; + +const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { + if (casing === 'preserve') { + return ''; + } + if (casing === 'camel') { + return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; + } + + assertUnreachable(casing); +}; + +export const ddlToTypeScript = ( + ddl: MysqlDDL, + viewColumns: ViewColumn[], + casing: Casing, + vendor: 'mysql' | 'singlestore', +) => { + const withCasing = prepareCasing(casing); + + for (const fk of ddl.fks.list()) { + const relation = `${fk.table}-${fk.tableTo}`; + relations.add(relation); + } + + const imports = new Set([ + vendor === 'mysql' ? 'mysqlTable' : 'signlestoreTable', + vendor === 'mysql' ? 'mysqlSchema' : 'singlestoreSchema', + vendor === 'mysql' ? 'AnyMySqlColumn' : 'AnySinsgleStoreColumn', + ]); + + const viewEntities = viewColumns.map((it) => { + return { + entityType: 'viewColumn', + ...it, + } as const; + }); + + for (const it of [...ddl.entities.list(), ...viewEntities]) { + if (it.entityType === 'indexes') imports.add(it.isUnique ? 'uniqueIndex' : 'index'); + if (it.entityType === 'fks') imports.add('foreignKey'); + if (it.entityType === 'pks' && (it.columns.length > 1)) imports.add('primaryKey'); + if (it.entityType === 'checks') imports.add('check'); + if (it.entityType === 'views') imports.add(vendor === 'mysql' ? 'mysqlView' : 'singlestoreView'); + + if (it.entityType === 'columns' || it.entityType === 'viewColumn') { + const grammarType = typeFor(it.type); + imports.add(grammarType.drizzleImport(vendor)); + if (mysqlImportsList.has(it.type)) imports.add(it.type); + } + } + + const tableStatements = [] as string[]; + for (const table of ddl.tables.list()) { + let statement = `export const ${withCasing(table.name)} = ${vendor}Table("${table.name}", {\n`; + statement += createTableColumns( + ddl.columns.list({ table: table.name }), + ddl.pks.one({ table: table.name }), + ddl.fks.list({ table: table.name }), + withCasing, + casing, + vendor, + ); + statement += '}'; + + const fks = ddl.fks.list({ table: table.name }); + const indexes = ddl.indexes.list({ table: table.name }); + const checks = ddl.checks.list({ table: table.name }); + const pk = ddl.pks.one({ table: table.name }); + + // more than 2 fields or self reference or cyclic + const filteredFKs = fks.filter((it) => { + return it.columns.length > 1 || isSelf(it) || isCyclic(it) || it.nameExplicit; + }); + + const hasIndexes = indexes.length > 0; + const hasFKs = filteredFKs.length > 0; + const hasPK = pk && pk.columns.length > 1; + const hasChecks = checks.length > 0; + const hasCallbackParams = hasIndexes || hasFKs || hasPK || hasChecks; + + if (hasCallbackParams) { + statement += ',\n'; + statement += '(table) => [\n'; + statement += hasPK ? createTablePK(pk, withCasing) : ''; + statement += createTableIndexes(indexes, withCasing); + statement += createTableFKs(filteredFKs, withCasing); + statement += createTableChecks(checks); + statement += ']'; + } + + statement += ');'; + + tableStatements.push(statement); + } + + const viewsStatements = [] as string[]; + for (const view of ddl.views.list()) { + const { name, algorithm, definition, sqlSecurity, withCheckOption } = view; + const columns = viewColumns.filter((x) => x.view === view.name); + + let statement = ''; + statement += `export const ${withCasing(name)} = ${vendor}View("${name}", {\n`; + statement += createViewColumns(columns, withCasing, casing, vendor); + statement += '})'; + + statement += algorithm ? `.algorithm("${algorithm}")` : ''; + statement += sqlSecurity ? `.sqlSecurity("${sqlSecurity}")` : ''; + statement += withCheckOption ? `.withCheckOption("${withCheckOption}")` : ''; + statement += `.as(sql\`${definition?.replaceAll('`', '\\`')}\`);`; + + viewsStatements.push(statement); + } + + const importsTs = `import { ${ + [...imports].join( + ', ', + ) + } } from "drizzle-orm/${vendor}-core"\nimport { sql } from "drizzle-orm"\n\n`; + + let decalrations = ''; + decalrations += tableStatements.join('\n\n'); + decalrations += '\n'; + decalrations += viewsStatements.join('\n\n'); + + const file = importsTs + decalrations; + + const schemaEntry = ` + { + ${ + Object.values(ddl.tables) + .map((it) => withCasing(it.name)) + .join(',') + } + } + `; + + return { + file, // backward compatible, print to file + imports: importsTs, + decalrations, + schemaEntry, + }; +}; + +const isCyclic = (fk: ForeignKey) => { + const key = `${fk.table}-${fk.tableTo}`; + const reverse = `${fk.tableTo}-${fk.table}`; + return relations.has(key) && relations.has(reverse); +}; + +const isSelf = (fk: ForeignKey) => { + return fk.table === fk.tableTo; +}; + +const column = ( + type: string, + name: string, + casing: (value: string) => string, + rawCasing: Casing, + defaultValue: Column['default'], + autoincrement: boolean, + onUpdateNow: Column['onUpdateNow'], + onUpdateNowFsp: Column['onUpdateNowFsp'], + collation: Column['collation'], + charSet: Column['charSet'], + vendor: 'mysql' | 'singlestore', +) => { + let lowered = type.startsWith('enum(') ? type : type.toLowerCase(); + if (lowered.startsWith('enum')) { + const values = parseEnum(lowered).map((it) => `"${it.replaceAll("''", "'").replaceAll('"', '\\"')}"`).join(','); + let out = `${casing(name)}: ${vendor}Enum(${dbColumnName({ name, casing: rawCasing, withMode: true })}[${values}])`; + + const { default: def } = Enum.toTs('', defaultValue) as any; + out += def ? `.default(${def})` : ''; + out += charSet ? `.charSet("${charSet}")` : ''; + out += collation ? `.collate("${collation}")` : ''; + + return out; + } + + if (lowered === 'serial') { + return `${casing(name)}: serial(${dbColumnName({ name, casing: rawCasing })})`; + } + + const grammarType = typeFor(lowered); + const key = casing(name); + const columnName = dbColumnName({ name, casing: rawCasing }); + const ts = grammarType.toTs(lowered, defaultValue); + const { default: def, options, customType } = typeof ts === 'string' ? { default: ts, options: {} } : ts; + + const drizzleType = grammarType.drizzleImport(); + const defaultStatement = def ? def.startsWith('.') ? def : `.default(${def})` : ''; + const paramsString = inspect(options); + const comma = columnName && paramsString ? ', ' : ''; + + let res = `${key}: ${drizzleType}${ + customType ? `({ dataType: () => '${customType}' })` : '' + }(${columnName}${comma}${paramsString})`; + res += autoincrement ? `.autoincrement()` : ''; + res += defaultStatement; + res += onUpdateNow ? `.onUpdateNow(${onUpdateNowFsp ? '{ fsp: ' + onUpdateNowFsp + ' }' : ''})` : ''; + res += charSet ? `.charSet("${charSet}")` : ''; + res += collation ? `.collate("${collation}")` : ''; + + return res; +}; + +const createTableColumns = ( + columns: Column[], + pk: PrimaryKey | null, + fks: ForeignKey[], + casing: (val: string) => string, + rawCasing: Casing, + vendor: 'mysql' | 'singlestore', +): string => { + let statement = ''; + + for (const it of columns) { + const isPK = pk && pk.columns.length === 1 && pk.columns[0] === it.name; + + statement += '\t'; + statement += column( + it.type, + it.name, + casing, + rawCasing, + it.default, + it.autoIncrement, + it.onUpdateNow, + it.onUpdateNowFsp, + it.collation, + it.charSet, + vendor, + ); + + statement += isPK ? '.primaryKey()' : ''; + statement += it.notNull && !isPK ? '.notNull()' : ''; + + statement += it.generated + ? `.generatedAlwaysAs(sql\`${ + it.generated.as.replace( + /`/g, + '\\`', + ) + }\`, { mode: "${it.generated.type}" })` + : ''; + + const columnFKs = fks.filter((x) => !x.nameExplicit && x.columns.length === 1 && x.columns[0] === it.name); + + for (const fk of columnFKs) { + const onDelete = fk.onDelete !== 'NO ACTION' ? fk.onDelete?.toLowerCase() : null; + const onUpdate = fk.onUpdate !== 'NO ACTION' ? fk.onUpdate?.toLowerCase() : null; + const params = { onDelete, onUpdate }; + + const typeSuffix = isCyclic(fk) ? vendor === 'mysql' ? ': AnyMySqlColumn' : ': AnySinsgleStoreColumn' : ''; + + const paramsStr = objToStatement2(params); + if (paramsStr) { + statement += `.references(()${typeSuffix} => ${ + casing( + fk.tableTo, + ) + }.${casing(fk.columnsTo[0])}, ${paramsStr} )`; + } else { + statement += `.references(()${typeSuffix} => ${casing(fk.tableTo)}.${ + casing( + fk.columnsTo[0], + ) + })`; + } + } + statement += ',\n'; + } + + return statement; +}; + +const createViewColumns = ( + columns: ViewColumn[], + casing: (value: string) => string, + rawCasing: Casing, + vendor: 'mysql' | 'singlestore', +) => { + let statement = ''; + + for (const it of columns) { + statement += '\n'; + statement += column(it.type, it.name, casing, rawCasing, null, false, false, null, null, null, vendor); + statement += it.notNull ? '.notNull()' : ''; + statement += ',\n'; + } + return statement; +}; + +const createTableIndexes = ( + idxs: Index[], + casing: (value: string) => string, +): string => { + let statement = ''; + for (const it of idxs) { + const columns = it.columns.map((x) => + x.isExpression ? `sql\`${x.value.replaceAll('`', '\\`')}\`` : `table.${casing(x.value)}` + ).join(', '); + statement += it.isUnique ? '\tuniqueIndex(' : '\tindex('; + statement += `"${it.name}")`; + statement += `.on(${columns}),\n`; + } + return statement; +}; + +const createTableChecks = ( + checks: CheckConstraint[], +): string => { + let statement = ''; + + for (const it of checks) { + statement += `\tcheck("${it.name}", sql\`${it.value.replace(/`/g, '\\`')}\`),\n`; + } + + return statement; +}; + +const createTablePK = (pk: PrimaryKey, casing: (value: string) => string): string => { + const columns = pk.columns.map((x) => `table.${casing(x)}`).join(', '); + let statement = `\tprimaryKey({ columns: [${columns}] }),`; + return statement; +}; + +const createTableFKs = ( + fks: ForeignKey[], + casing: (value: string) => string, +): string => { + let statement = ''; + + for (const it of fks) { + const tableTo = isSelf(it) ? 'table' : `${casing(it.tableTo)}`; + const columnsFrom = it.columns.map((x) => `table.${casing(x)}`).join(', '); + const columnsTo = it.columnsTo.map((x) => `${tableTo}.${casing(x)}`).join(', '); + statement += `\tforeignKey({\n`; + statement += `\t\tcolumns: [${columnsFrom}],\n`; + statement += `\t\tforeignColumns: [${columnsTo}],\n`; + statement += `\t\tname: "${it.name}"\n`; + statement += `\t})`; + statement += it.onUpdate !== 'NO ACTION' ? `.onUpdate("${it.onUpdate?.toLowerCase()}")` : ''; + statement += it.onDelete !== 'NO ACTION' ? `.onDelete("${it.onDelete?.toLowerCase()}")` : ''; + statement += `,\n`; + } + + return statement; +}; diff --git a/drizzle-kit/src/dialects/postgres/aws-introspect.ts b/drizzle-kit/src/dialects/postgres/aws-introspect.ts new file mode 100644 index 0000000000..1e513a3d39 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/aws-introspect.ts @@ -0,0 +1,1238 @@ +import camelcase from 'camelcase'; +import type { IntrospectStage, IntrospectStatus } from '../../cli/views'; +import { type DB, splitExpressions, trimChar } from '../../utils'; +import type { EntityFilter } from '../pull-utils'; +import type { + CheckConstraint, + Enum, + ForeignKey, + Index, + InterimColumn, + InterimIndex, + InterimSchema, + Policy, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, + ViewColumn, +} from './ddl'; +import { + defaultForColumn, + isSerialExpression, + isSystemNamespace, + parseOnType, + parseViewDefinition, + stringFromDatabaseIdentityProperty as parseIdentityProperty, + wrapRecord, +} from './grammar'; + +export const fromDatabase = async ( + db: DB, + filter: EntityFilter, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, + queryCallback: ( + id: string, + rows: Record[], + error: Error | null, + ) => void = () => {}, +): Promise => { + const schemas: Schema[] = []; + const enums: Enum[] = []; + const tables: PostgresEntities['tables'][] = []; + const columns: InterimColumn[] = []; + const indexes: InterimIndex[] = []; + const pks: PrimaryKey[] = []; + const fks: ForeignKey[] = []; + const uniques: UniqueConstraint[] = []; + const checks: CheckConstraint[] = []; + const sequences: Sequence[] = []; + const roles: Role[] = []; + const privileges: Privilege[] = []; + const policies: Policy[] = []; + const views: View[] = []; + const viewColumns: ViewColumn[] = []; + + // type OP = { + // oid: string; + // name: string; + // default: boolean; + // }; + + type Namespace = { + oid: string; + name: string; + }; + + // ! Use `pg_catalog` for system functions + + // TODO: potential improvements + // use pg_catalog.has_table_privilege(pg_class.oid, 'SELECT') for tables + // --- default access method + // SHOW default_table_access_method; + // SELECT current_setting('default_table_access_method') AS default_am; + + const accessMethodsQuery = db.query<{ oid: string; name: string }>( + `SELECT oid, amname as name FROM pg_catalog.pg_am WHERE amtype OPERATOR(pg_catalog.=) 't' ORDER BY pg_catalog.lower(amname);`, + ).then((rows) => { + queryCallback('accessMethods', rows, null); + return rows; + }).catch((error) => { + queryCallback('accessMethods', [], error); + throw error; + }); + + const tablespacesQuery = db.query<{ + oid: string; + name: string; + }>( + `SELECT oid, spcname as "name" FROM pg_catalog.pg_tablespace ORDER BY pg_catalog.lower(spcname)`, + ).then((rows) => { + queryCallback('tablespaces', rows, null); + return rows; + }).catch((error) => { + queryCallback('tablespaces', [], error); + throw error; + }); + + const namespacesQuery = db.query( + 'SELECT oid, nspname as name FROM pg_catalog.pg_namespace ORDER BY pg_catalog.lower(nspname)', + ) + .then((rows) => { + queryCallback('namespaces', rows, null); + return rows; + }).catch((error) => { + queryCallback('namespaces', [], error); + throw error; + }); + + const defaultsQuery = db.query<{ + tableId: string; + ordinality: number; + expression: string; + }>(` + SELECT + adrelid AS "tableId", + adnum AS "ordinality", + pg_catalog.pg_get_expr(adbin, adrelid) AS "expression" + FROM + pg_catalog.pg_attrdef; + `).then((rows) => { + queryCallback('defaults', rows, null); + return rows; + }).catch((error) => { + queryCallback('defaults', [], error); + throw error; + }); + + const [ams, tablespaces, namespaces, defaultsList] = await Promise.all([ + accessMethodsQuery, + tablespacesQuery, + namespacesQuery, + defaultsQuery, + ]); + + const { other } = namespaces.reduce<{ system: Namespace[]; other: Namespace[] }>( + (acc, it) => { + if (isSystemNamespace(it.name)) { + acc.system.push(it); + } else { + acc.other.push(it); + } + return acc; + }, + { system: [], other: [] }, + ); + + const filteredNamespaces = other.filter((it) => filter({ type: 'schema', name: it.name })); + const filteredNamespacesStringForSQL = filteredNamespaces.map((ns) => `'${ns.name}'`).join(','); + + schemas.push(...filteredNamespaces.map((it) => ({ entityType: 'schemas', name: it.name }))); + + type TableListItem = { + oid: string; + schema: string; + name: string; + /* r - table, p - partitioned table, v - view, m - materialized view */ + kind: 'r' | 'p' | 'v' | 'm'; + accessMethod: string; + options: string[] | null; + rlsEnabled: boolean; + tablespaceid: string; + definition: string | null; + }; + const tablesList = filteredNamespacesStringForSQL + ? await db + .query(` + SELECT + pg_class.oid, + nspname as "schema", + relname AS "name", + relkind::text AS "kind", + relam as "accessMethod", + reloptions::text[] as "options", + reltablespace as "tablespaceid", + relrowsecurity AS "rlsEnabled", + CASE + WHEN relkind OPERATOR(pg_catalog.=) 'v' OR relkind OPERATOR(pg_catalog.=) 'm' + THEN pg_catalog.pg_get_viewdef(pg_class.oid, true) + ELSE null + END AS "definition" + FROM + pg_catalog.pg_class + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) relnamespace + WHERE + relkind IN ('r', 'p', 'v', 'm') + AND nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_catalog.lower(nspname), pg_catalog.lower(relname); + `).then((rows) => { + queryCallback('tables', rows, null); + return rows; + }).catch((error) => { + queryCallback('tables', [], error); + throw error; + }) + : [] as TableListItem[]; + + const viewsList = tablesList.filter((it) => it.kind === 'v' || it.kind === 'm'); + + const filteredTables = tablesList.filter((it) => { + if (!((it.kind === 'r' || it.kind === 'p') && filter({ type: 'table', schema: it.schema, name: it.name }))) { + return false; + } + it.schema = trimChar(it.schema, '"'); // when camel case name e.x. mySchema -> it gets wrapped to "mySchema" + return true; + }); + + const filteredTableIds = filteredTables.map((it) => it.oid); + const viewsIds = viewsList.map((it) => it.oid); + const filteredViewsAndTableIds = [...filteredTableIds, ...viewsIds]; + + const filterByTableIds = filteredTableIds.length > 0 ? `(${filteredTableIds.join(',')})` : ''; + const filterByTableAndViewIds = filteredViewsAndTableIds.length > 0 ? `(${filteredViewsAndTableIds.join(',')})` : ''; + + for (const table of filteredTables) { + tables.push({ + entityType: 'tables', + schema: trimChar(table.schema, "'"), + name: table.name, + isRlsEnabled: table.rlsEnabled, + }); + } + + const dependQuery = db.query<{ + oid: string; + tableId: string; + ordinality: number; + + /* + a - An “auto” dependency means the dependent object can be dropped separately, + and will be automatically removed if the referenced object is dropped—regardless of CASCADE or RESTRICT. + Example: A named constraint on a table is auto-dependent on the table, so it vanishes when the table is dropped + + i - An “internal” dependency marks objects that were created as part of building another object. + Directly dropping the dependent is disallowed—you must drop the referenced object instead. + Dropping the referenced object always cascades to the dependent + Example: A trigger enforcing a foreign-key constraint is internally dependent on its pg_constraint entry + */ + deptype: 'a' | 'i'; + }>( + `SELECT + objid as oid, + refobjid as "tableId", + refobjsubid as "ordinality", + deptype::text + FROM + pg_catalog.pg_depend + where ${filterByTableIds ? ` refobjid IN ${filterByTableIds}` : 'false'};`, + ).then((rows) => { + queryCallback('depend', rows, null); + return rows; + }).catch((error) => { + queryCallback('depend', [], error); + throw error; + }); + + type EnumListItem = { + oid: string; + name: string; + schema: string; + arrayTypeId: number; + ordinality: number; + value: string; + }; + const enumsQuery = filteredNamespacesStringForSQL + ? db + .query(`SELECT + pg_type.oid as "oid", + typname as "name", + nspname as "schema", + pg_type.typarray as "arrayTypeId", + pg_enum.enumsortorder AS "ordinality", + pg_enum.enumlabel AS "value" + FROM + pg_catalog.pg_type + JOIN pg_catalog.pg_enum ON pg_enum.enumtypid OPERATOR(pg_catalog.=) pg_type.oid + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) pg_type.typnamespace + WHERE + pg_type.typtype OPERATOR(pg_catalog.=) 'e' + AND nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_type.oid, pg_enum.enumsortorder + `).then((rows) => { + queryCallback('enums', rows, null); + return rows; + }).catch((error) => { + queryCallback('enums', [], error); + throw error; + }) + : [] as EnumListItem[]; + + // fetch for serials, adrelid = tableid + const serialsQuery = db + .query<{ + oid: string; + tableId: string; + ordinality: number; + expression: string; + }>(`SELECT + oid, + adrelid as "tableId", + adnum as "ordinality", + pg_catalog.pg_get_expr(adbin, adrelid) as "expression" + FROM + pg_catalog.pg_attrdef + WHERE ${filterByTableIds ? ` adrelid IN ${filterByTableIds}` : 'false'} + `).then((rows) => { + queryCallback('serials', rows, null); + return rows; + }).catch((error) => { + queryCallback('serials', [], error); + throw error; + }); + + type SequenceListItem = { + schema: string; + oid: string; + name: string; + startWith: string; + minValue: string; + maxValue: string; + incrementBy: string; + cycle: boolean; + cacheSize: number; + }; + const sequencesQuery = filteredNamespacesStringForSQL + ? db.query(`SELECT + nspname as "schema", + relname as "name", + seqrelid as "oid", + seqstart as "startWith", + seqmin as "minValue", + seqmax as "maxValue", + seqincrement as "incrementBy", + seqcycle as "cycle", + seqcache as "cacheSize" + FROM pg_catalog.pg_sequence + JOIN pg_catalog.pg_class ON pg_sequence.seqrelid OPERATOR(pg_catalog.=) pg_class.oid + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) pg_class.relnamespace + WHERE nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_catalog.lower(nspname), pg_catalog.lower(relname); + `).then((rows) => { + queryCallback('sequences', rows, null); + return rows; + }).catch((error) => { + queryCallback('sequences', [], error); + throw error; + }) + : [] as SequenceListItem[]; + + // I'm not yet aware of how we handle policies down the pipeline for push, + // and since postgres does not have any default policies, we can safely fetch all of them for now + // and filter them out in runtime, simplifying filterings + const policiesQuery = db.query< + { + schema: string; + table: string; + name: string; + as: Policy['as']; + to: string | string[]; + for: Policy['for']; + using: string | undefined | null; + withCheck: string | undefined | null; + } + >(`SELECT + schemaname as "schema", + tablename as "table", + policyname as "name", + permissive as "as", + roles as "to", + cmd as "for", + qual as "using", + with_check as "withCheck" + FROM pg_catalog.pg_policies + ORDER BY + pg_catalog.lower(schemaname), + pg_catalog.lower(tablename), + pg_catalog.lower(policyname); + `).then((rows) => { + queryCallback('policies', rows, null); + return rows; + }).catch((error) => { + queryCallback('policies', [], error); + throw error; + }); + + const rolesQuery = db.query< + { + rolname: string; + rolsuper: boolean; + rolinherit: boolean; + rolcreaterole: boolean; + rolcreatedb: boolean; + rolcanlogin: boolean; + rolreplication: boolean; + rolconnlimit: number; + rolvaliduntil: string | null; + rolbypassrls: boolean; + } + >( + `SELECT + rolname, + rolsuper, + rolinherit, + rolcreaterole, + rolcreatedb, + rolcanlogin, + rolreplication, + rolconnlimit, + rolvaliduntil::text, + rolbypassrls + FROM pg_catalog.pg_roles + ORDER BY pg_catalog.lower(rolname);`, + ).then((rows) => { + queryCallback('roles', rows, null); + return rows; + }).catch((error) => { + queryCallback('roles', [], error); + throw error; + }); + + type PrivilegeListItem = { + grantor: string; + grantee: string; + schema: string; + table: string; + type: 'SELECT' | 'INSERT' | 'UPDATE' | 'DELETE' | 'TRUNCATE' | 'REFERENCES' | 'TRIGGER'; + isGrantable: boolean; + }; + const privilegesQuery = filteredNamespacesStringForSQL + ? db.query(` + SELECT + grantor, + grantee, + table_schema AS "schema", + table_name AS "table", + privilege_type AS "type", + CASE is_grantable WHEN 'YES' THEN true ELSE false END AS "isGrantable" + FROM information_schema.role_table_grants + WHERE table_schema IN (${filteredNamespacesStringForSQL}) + ORDER BY + pg_catalog.lower(table_schema), + pg_catalog.lower(table_name), + pg_catalog.lower(grantee); + `).then((rows) => { + queryCallback('privileges', rows, null); + return rows; + }).catch((error) => { + queryCallback('privileges', [], error); + throw error; + }) + : [] as PrivilegeListItem[]; + + const constraintsQuery = db.query<{ + oid: string; + schemaId: string; + tableId: string; + name: string; + type: 'p' | 'u' | 'f' | 'c'; // p - primary key, u - unique, f - foreign key, c - check + definition: string; + indexId: number; + columnsOrdinals: number[]; + tableToId: string; + columnsToOrdinals: number[]; + onUpdate: 'a' | 'd' | 'r' | 'c' | 'n'; + onDelete: 'a' | 'd' | 'r' | 'c' | 'n'; + }>(` + SELECT + oid, + connamespace AS "schemaId", + conrelid AS "tableId", + conname AS "name", + contype::text AS "type", + pg_catalog.pg_get_constraintdef(oid) AS "definition", + conindid AS "indexId", + conkey AS "columnsOrdinals", + confrelid AS "tableToId", + confkey AS "columnsToOrdinals", + confupdtype::text AS "onUpdate", + confdeltype::text AS "onDelete" + FROM + pg_catalog.pg_constraint + WHERE ${filterByTableIds ? ` conrelid IN ${filterByTableIds}` : 'false'} + ORDER BY conrelid, contype, pg_catalog.lower(conname); + `).then((rows) => { + queryCallback('constraints', rows, null); + return rows; + }).catch((error) => { + queryCallback('constraints', [], error); + throw error; + }); + + type ColumnMetadata = { + seqId: string | null; + generation: string | null; + start: string | null; + increment: string | null; + max: string | null; + min: string | null; + cycle: string; + generated: 'ALWAYS' | 'BY DEFAULT'; + expression: string | null; + }; + + // for serials match with pg_attrdef via attrelid(tableid)+adnum(ordinal position), for enums with pg_enum above + const columnsQuery = db.query<{ + tableId: string; + kind: 'r' | 'p' | 'v' | 'm'; + name: string; + ordinality: number; + notNull: boolean; + type: string; + dimensions: number; + typeId: string; + /* s - stored */ + generatedType: 's' | ''; + /* + 'a' for GENERATED ALWAYS + 'd' for GENERATED BY DEFAULT + */ + identityType: 'a' | 'd' | ''; + metadata: string | null; + }>(`SELECT + attrelid AS "tableId", + relkind::text AS "kind", + attname AS "name", + attnum AS "ordinality", + attnotnull AS "notNull", + attndims as "dimensions", + atttypid as "typeId", + attgenerated::text as "generatedType", + attidentity::text as "identityType", + pg_catalog.format_type(atttypid, atttypmod) as "type", + CASE + WHEN attidentity IN ('a', 'd') or attgenerated OPERATOR(pg_catalog.=) 's' THEN ( + SELECT + pg_catalog.row_to_json(c.*) + FROM + ( + SELECT + pg_catalog.pg_get_serial_sequence('"' OPERATOR(pg_catalog.||) "table_schema" OPERATOR(pg_catalog.||) '"."' OPERATOR(pg_catalog.||) "table_name" OPERATOR(pg_catalog.||) '"', "attname")::regclass::oid as "seqId", + "identity_generation" AS generation, + "identity_start" AS "start", + "identity_increment" AS "increment", + "identity_maximum" AS "max", + "identity_minimum" AS "min", + "identity_cycle" AS "cycle", + "generation_expression" AS "expression" + FROM + information_schema.columns c + WHERE + c.column_name OPERATOR(pg_catalog.=) attname + AND c.table_schema OPERATOR(pg_catalog.=) nspname + AND c.table_name OPERATOR(pg_catalog.=) cls.relname + ) c + ) + ELSE NULL + END AS "metadata" + FROM + pg_catalog.pg_attribute attr + JOIN pg_catalog.pg_class cls ON cls.oid OPERATOR(pg_catalog.=) attr.attrelid + JOIN pg_catalog.pg_namespace nsp ON nsp.oid OPERATOR(pg_catalog.=) cls.relnamespace + WHERE + ${filterByTableAndViewIds ? ` attrelid IN ${filterByTableAndViewIds}` : 'false'} + AND attnum OPERATOR(pg_catalog.>) 0 + AND attisdropped OPERATOR(pg_catalog.=) FALSE + ORDER BY attnum; + `).then((rows) => { + queryCallback('columns', rows, null); + return rows; + }).catch((error) => { + queryCallback('columns', [], error); + throw error; + }); + + const [ + dependList, + enumsList, + serialsList, + sequencesList, + policiesList, + rolesList, + privilegesList, + constraintsList, + columnsList, + ] = await Promise + .all([ + dependQuery, + enumsQuery, + serialsQuery, + sequencesQuery, + policiesQuery, + rolesQuery, + privilegesQuery, + constraintsQuery, + columnsQuery, + ]); + + const groupedEnums = enumsList.reduce((acc, it) => { + if (!(it.oid in acc)) { + acc[it.oid] = { + oid: it.oid, + schema: it.schema, + name: it.name, + values: [it.value], + }; + } else { + acc[it.oid].values.push(it.value); + } + return acc; + }, {} as Record); + + const groupedArrEnums = enumsList.reduce((acc, it) => { + if (!(it.arrayTypeId in acc)) { + acc[it.arrayTypeId] = { + oid: it.oid, + schema: it.schema, + name: it.name, + values: [it.value], + }; + } else { + acc[it.arrayTypeId].values.push(it.value); + } + return acc; + }, {} as Record); + + for (const it of Object.values(groupedEnums)) { + enums.push({ + entityType: 'enums', + schema: it.schema, + name: it.name, + values: it.values, + }); + } + + let columnsCount = 0; + let indexesCount = 0; + let foreignKeysCount = 0; + let tableCount = 0; + let checksCount = 0; + let viewsCount = 0; + + for (const seq of sequencesList) { + const depend = dependList.find((it) => it.oid === seq.oid); + + if (depend && (depend.deptype === 'a' || depend.deptype === 'i')) { + // TODO: add type field to sequence in DDL + // skip fo sequences or identity columns + // console.log('skip for auto created', seq.name); + continue; + } + + sequences.push({ + entityType: 'sequences', + schema: seq.schema, + name: seq.name, + startWith: parseIdentityProperty(seq.startWith), + minValue: parseIdentityProperty(seq.minValue), + maxValue: parseIdentityProperty(seq.maxValue), + incrementBy: parseIdentityProperty(seq.incrementBy), + cycle: seq.cycle, + cacheSize: Number(parseIdentityProperty(seq.cacheSize) ?? 1), + }); + } + + progressCallback('enums', Object.keys(groupedEnums).length, 'done'); + + // TODO: drizzle link + const filteredRoles = rolesList.filter((x) => filter({ type: 'role', name: x.rolname })); + for (const dbRole of filteredRoles) { + roles.push({ + entityType: 'roles', + name: dbRole.rolname, + superuser: dbRole.rolsuper, + inherit: dbRole.rolinherit, + createRole: dbRole.rolcreatedb, + createDb: dbRole.rolcreatedb, + canLogin: dbRole.rolcanlogin, + replication: dbRole.rolreplication, + connLimit: dbRole.rolconnlimit, + password: null, + validUntil: dbRole.rolvaliduntil, + bypassRls: dbRole.rolbypassrls, + }); + } + + for (const privilege of privilegesList) { + privileges.push({ + entityType: 'privileges', + // TODO: remove name and implement custom pk + name: `${privilege.grantor}_${privilege.grantee}_${privilege.schema}_${privilege.table}_${privilege.type}`, + grantor: privilege.grantor, + grantee: privilege.grantee, + schema: privilege.schema, + table: privilege.table, + type: privilege.type, + isGrantable: privilege.isGrantable, + }); + } + + for (const it of policiesList) { + policies.push({ + entityType: 'policies', + schema: it.schema, + table: it.table, + name: it.name, + as: it.as, + for: it.for, + roles: typeof it.to === 'string' ? it.to.slice(1, -1).split(',') : it.to, + using: it.using ?? null, + withCheck: it.withCheck ?? null, + }); + } + + progressCallback('policies', policiesList.length, 'done'); + + type DBColumn = (typeof columnsList)[number]; + + // supply serials + for (const column of columnsList.filter((x) => x.kind === 'r' || x.kind === 'p')) { + const type = column.type; + + if (!(type === 'smallint' || type === 'bigint' || type === 'integer')) { + continue; + } + + const expr = serialsList.find( + (it) => it.tableId === column.tableId && it.ordinality === column.ordinality, + ); + + if (expr) { + const table = tablesList.find((it) => it.oid === column.tableId)!; + + const isSerial = isSerialExpression(expr.expression, table.schema); + column.type = isSerial ? type === 'bigint' ? 'bigserial' : type === 'integer' ? 'serial' : 'smallserial' : type; + } + } + + for (const column of columnsList.filter((x) => x.kind === 'r' || x.kind === 'p')) { + const table = tablesList.find((it) => it.oid === column.tableId)!; + + // supply enums + const enumType = column.typeId in groupedEnums + ? groupedEnums[column.typeId] + : column.typeId in groupedArrEnums + ? groupedArrEnums[column.typeId] + : null; + + let columnTypeMapped = enumType ? enumType.name : column.type.replace('[]', ''); + + if (columnTypeMapped.startsWith('numeric(')) { + columnTypeMapped = columnTypeMapped.replace(',', ', '); + } + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace(' with time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char'); + + columnTypeMapped = trimChar(columnTypeMapped, '"'); + + const columnDefault = defaultsList.find( + (it) => it.tableId === column.tableId && it.ordinality === column.ordinality, + ); + + const defaultValue = defaultForColumn( + columnTypeMapped, + columnDefault?.expression, + column.dimensions, + Boolean(enumType), + ); + + const unique = constraintsList.find((it) => { + return it.type === 'u' && it.tableId === column.tableId && it.columnsOrdinals.length === 1 + && it.columnsOrdinals.includes(column.ordinality); + }) ?? null; + + const pk = constraintsList.find((it) => { + return it.type === 'p' && it.tableId === column.tableId && it.columnsOrdinals.length === 1 + && it.columnsOrdinals.includes(column.ordinality); + }) ?? null; + + const metadata = column.metadata ? JSON.parse(column.metadata) as ColumnMetadata : null; + if (column.generatedType === 's' && (!metadata || !metadata.expression)) { + throw new Error( + `Generated ${table.schema}.${table.name}.${column.name} columns missing expression: \n${ + JSON.stringify(column.metadata) + }`, + ); + } + + if (column.identityType !== '' && !metadata) { + throw new Error( + `Identity ${table.schema}.${table.name}.${column.name} columns missing metadata: \n${ + JSON.stringify(column.metadata) + }`, + ); + } + + const sequence = metadata?.seqId ? sequencesList.find((it) => it.oid === metadata.seqId) ?? null : null; + + columnTypeMapped += '[]'.repeat(column.dimensions); + + columns.push({ + entityType: 'columns', + schema: table.schema, + table: table.name, + name: column.name, + type: columnTypeMapped, + typeSchema: enumType ? enumType.schema ?? 'public' : null, + dimensions: column.dimensions, + default: column.generatedType === 's' ? null : defaultValue, + unique: !!unique, + uniqueName: unique ? unique.name : null, + uniqueNullsNotDistinct: unique?.definition.includes('NULLS NOT DISTINCT') ?? false, + notNull: column.notNull, + pk: pk !== null, + pkName: pk !== null ? pk.name : null, + generated: column.generatedType === 's' ? { type: 'stored', as: metadata!.expression! } : null, + identity: column.identityType !== '' + ? { + type: column.identityType === 'a' ? 'always' : 'byDefault', + name: sequence?.name ?? '', + increment: parseIdentityProperty(metadata?.increment), + minValue: parseIdentityProperty(metadata?.min), + maxValue: parseIdentityProperty(metadata?.max), + startWith: parseIdentityProperty(metadata?.start), + cycle: metadata?.cycle === 'YES', + cache: Number(parseIdentityProperty(sequence?.cacheSize ?? 1)), + } + : null, + }); + } + + for (const unique of constraintsList.filter((it) => it.type === 'u')) { + const table = tablesList.find((it) => it.oid === unique.tableId)!; + const schema = namespaces.find((it) => it.oid === unique.schemaId)!; + + const columns = unique.columnsOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === unique.tableId && column.ordinality === it)!; + return column.name; + }); + + uniques.push({ + entityType: 'uniques', + schema: schema.name, + table: table.name, + name: unique.name, + nameExplicit: true, + columns, + nullsNotDistinct: unique.definition.includes('NULLS NOT DISTINCT'), + }); + } + + for (const pk of constraintsList.filter((it) => it.type === 'p')) { + const table = tablesList.find((it) => it.oid === pk.tableId)!; + const schema = namespaces.find((it) => it.oid === pk.schemaId)!; + + const columns = pk.columnsOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === pk.tableId && column.ordinality === it)!; + return column.name; + }); + + pks.push({ + entityType: 'pks', + schema: schema.name, + table: table.name, + name: pk.name, + columns, + nameExplicit: true, + }); + } + + for (const fk of constraintsList.filter((it) => it.type === 'f')) { + const table = tablesList.find((it) => it.oid === fk.tableId)!; + const schema = namespaces.find((it) => it.oid === fk.schemaId)!; + const tableTo = tablesList.find((it) => it.oid === fk.tableToId)!; + + const columns = fk.columnsOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === fk.tableId && column.ordinality === it)!; + return column.name; + }); + + const columnsTo = fk.columnsToOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === fk.tableToId && column.ordinality === it)!; + return column.name; + }); + + fks.push({ + entityType: 'fks', + schema: schema.name, + table: table.name, + name: fk.name, + nameExplicit: true, + columns, + tableTo: tableTo.name, + schemaTo: tableTo.schema, + columnsTo, + onUpdate: parseOnType(fk.onUpdate), + onDelete: parseOnType(fk.onDelete), + }); + } + + for (const check of constraintsList.filter((it) => it.type === 'c')) { + const table = tablesList.find((it) => it.oid === check.tableId)!; + const schema = namespaces.find((it) => it.oid === check.schemaId)!; + + checks.push({ + entityType: 'checks', + schema: schema.name, + table: table.name, + name: check.name, + value: check.definition, + }); + } + + type IndexMetadata = { + tableId: number; + expression: string | null; + where: string; + columnOrdinals: number[]; + opclasses: { oid: number; name: string; default: boolean }[]; + options: number[]; + isUnique: boolean; + isPrimary: boolean; + }; + + const idxs = await db.query<{ + oid: number; + schema: string; + name: string; + accessMethod: string; + with?: string[]; + metadata: string; + }>(` + SELECT + pg_class.oid, + nspname as "schema", + relname AS "name", + am.amname AS "accessMethod", + reloptions AS "with", + pg_catalog.row_to_json(metadata.*) as "metadata" + FROM + pg_catalog.pg_class + JOIN pg_catalog.pg_am am ON am.oid OPERATOR(pg_catalog.=) pg_class.relam + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) relnamespace + JOIN LATERAL ( + SELECT + pg_catalog.pg_get_expr(indexprs, indrelid) AS "expression", + pg_catalog.pg_get_expr(indpred, indrelid) AS "where", + indrelid::int AS "tableId", + indkey::int[] as "columnOrdinals", + indoption::int[] as "options", + indisunique as "isUnique", + indisprimary as "isPrimary", + array( + SELECT + pg_catalog.json_build_object( + 'oid', opclass.oid, + 'name', pg_am.amname, + 'default', pg_opclass.opcdefault + ) + FROM + pg_catalog.unnest(indclass) WITH ORDINALITY AS opclass(oid, ordinality) + JOIN pg_catalog.pg_opclass ON opclass.oid OPERATOR(pg_catalog.=) pg_opclass.oid + JOIN pg_catalog.pg_am ON pg_opclass.opcmethod OPERATOR(pg_catalog.=) pg_am.oid + ORDER BY opclass.ordinality + ) as "opclasses" + FROM + pg_catalog.pg_index + WHERE + pg_index.indexrelid OPERATOR(pg_catalog.=) pg_class.oid + ) metadata ON TRUE + WHERE + relkind OPERATOR(pg_catalog.=) 'i' + AND ${filterByTableIds ? `metadata."tableId" IN ${filterByTableIds}` : 'false'} + ORDER BY pg_catalog.lower(nspname), pg_catalog.lower(relname); + `).then((rows) => { + queryCallback('indexes', rows, null); + return rows; + }).catch((error) => { + queryCallback('indexes', [], error); + throw error; + }); + + for (const idx of idxs) { + const metadata = JSON.parse(idx.metadata) as IndexMetadata; + + // filter for drizzle only? + const forUnique = metadata.isUnique && constraintsList.some((x) => x.type === 'u' && x.indexId === idx.oid); + const forPK = metadata.isPrimary && constraintsList.some((x) => x.type === 'p' && x.indexId === idx.oid); + + const expr = splitExpressions(metadata.expression); + + const table = tablesList.find((it) => it.oid === String(metadata.tableId))!; + + const nonColumnsCount = metadata.columnOrdinals.reduce((acc, it) => { + if (it === 0) acc += 1; + return acc; + }, 0); + + if (expr.length !== nonColumnsCount) { + throw new Error( + `expression split doesn't match non-columns count: [${ + metadata.columnOrdinals.join( + ', ', + ) + }] '${metadata.expression}':${expr.length}:${nonColumnsCount}`, + ); + } + + const opts = metadata.options.map((it) => { + return { + descending: (it & 1) === 1, + nullsFirst: (it & 2) === 2, + }; + }); + + const res = [] as ( + & ( + | { type: 'expression'; value: string } + | { type: 'column'; value: DBColumn } + ) + & { options: (typeof opts)[number]; opclass: { name: string; default: boolean } } + )[]; + + let k = 0; + for (let i = 0; i < metadata.columnOrdinals.length; i++) { + const ordinal = metadata.columnOrdinals[i]; + if (ordinal === 0) { + res.push({ + type: 'expression', + value: expr[k], + options: opts[i], + opclass: metadata.opclasses[i], + }); + k += 1; + } else { + const column = columnsList.find((column) => { + return column.tableId === String(metadata.tableId) && column.ordinality === ordinal; + }); + if (!column) throw new Error(`missing column: ${metadata.tableId}:${ordinal}`); + + // ! options and opclass can be undefined when index have "INCLUDE" columns (columns from "INCLUDE" don't have options and opclass) + const options = opts[i] as typeof opts[number] | undefined; + const opclass = metadata.opclasses[i] as { name: string; default: boolean } | undefined; + if (options && opclass) { + res.push({ + type: 'column', + value: column, + options: opts[i], + opclass: metadata.opclasses[i], + }); + } + } + } + + const columns = res.map((it) => { + return { + asc: !it.options.descending, + nullsFirst: it.options.nullsFirst, + opclass: it.opclass.default ? null : { + name: it.opclass.name, + default: it.opclass.default, + }, + isExpression: it.type === 'expression', + value: it.type === 'expression' ? it.value : it.value.name, // column name + } satisfies Index['columns'][number]; + }); + + indexes.push({ + entityType: 'indexes', + schema: idx.schema, + table: table.name, + name: idx.name, + nameExplicit: true, + method: idx.accessMethod, + isUnique: metadata.isUnique, + with: idx.with?.join(', ') ?? '', + where: metadata.where, + columns: columns, + concurrently: false, + forUnique, + forPK, + }); + } + + progressCallback('columns', columnsCount, 'fetching'); + progressCallback('checks', checksCount, 'fetching'); + progressCallback('indexes', indexesCount, 'fetching'); + progressCallback('tables', tableCount, 'done'); + + for (const it of columnsList.filter((x) => x.kind === 'm' || x.kind === 'v')) { + const view = viewsList.find((x) => x.oid === it.tableId)!; + + const enumType = it.typeId in groupedEnums + ? groupedEnums[it.typeId] + : it.typeId in groupedArrEnums + ? groupedArrEnums[it.typeId] + : null; + + let columnTypeMapped = enumType ? enumType.name : it.type.replace('[]', ''); + columnTypeMapped = trimChar(columnTypeMapped, '"'); + if (columnTypeMapped.startsWith('numeric(')) { + columnTypeMapped = columnTypeMapped.replace(',', ', '); + } + + columnTypeMapped += '[]'.repeat(it.dimensions); + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char'); + + const typeDimensions = it.type.split('[]').length - 1; + + viewColumns.push({ + schema: view.schema, + view: view.name, + name: it.name, + type: columnTypeMapped, + notNull: it.notNull, + dimensions: it.dimensions, + typeDimensions, + typeSchema: enumType ? enumType.schema : null, + }); + } + + for (const view of viewsList) { + if (!filter({ type: 'table', schema: view.schema, name: view.name })) continue; + tableCount += 1; + + const accessMethod = view.accessMethod === '0' ? null : ams.find((it) => it.oid === view.accessMethod); + const tablespace = view.tablespaceid === '0' ? null : tablespaces.find((it) => it.oid === view.tablespaceid)!.name; + + const definition = parseViewDefinition(view.definition); + const withOpts = wrapRecord( + view.options?.reduce((acc, it) => { + const opt = it.split('='); + if (opt.length !== 2) { + throw new Error(`Unexpected view option: ${it}`); + } + + const key = camelcase(opt[0].trim()); + const value = opt[1].trim(); + acc[key] = value; + return acc; + }, {} as Record) ?? {}, + ); + + const opts = { + checkOption: withOpts.literal('checkOption', ['local', 'cascaded']), + securityBarrier: withOpts.bool('securityBarrier'), + securityInvoker: withOpts.bool('securityInvoker'), + fillfactor: withOpts.num('fillfactor'), + toastTupleTarget: withOpts.num('toastTupleTarget'), + parallelWorkers: withOpts.num('parallelWorkers'), + autovacuumEnabled: withOpts.bool('autovacuumEnabled'), + vacuumIndexCleanup: withOpts.literal('vacuumIndexCleanup', ['auto', 'on', 'off']), + vacuumTruncate: withOpts.bool('vacuumTruncate'), + autovacuumVacuumThreshold: withOpts.num('autovacuumVacuumThreshold'), + autovacuumVacuumScaleFactor: withOpts.num('autovacuumVacuumScaleFactor'), + autovacuumVacuumCostDelay: withOpts.num('autovacuumVacuumCostDelay'), + autovacuumVacuumCostLimit: withOpts.num('autovacuumVacuumCostLimit'), + autovacuumFreezeMinAge: withOpts.num('autovacuumFreezeMinAge'), + autovacuumFreezeMaxAge: withOpts.num('autovacuumFreezeMaxAge'), + autovacuumFreezeTableAge: withOpts.num('autovacuumFreezeTableAge'), + autovacuumMultixactFreezeMinAge: withOpts.num('autovacuumMultixactFreezeMinAge'), + autovacuumMultixactFreezeMaxAge: withOpts.num('autovacuumMultixactFreezeMaxAge'), + autovacuumMultixactFreezeTableAge: withOpts.num('autovacuumMultixactFreezeTableAge'), + logAutovacuumMinDuration: withOpts.num('logAutovacuumMinDuration'), + userCatalogTable: withOpts.bool('userCatalogTable'), + }; + + const hasNonNullOpt = Object.values(opts).some((x) => x !== null); + views.push({ + entityType: 'views', + schema: view.schema, + name: view.name, + definition, + with: hasNonNullOpt ? opts : null, + materialized: view.kind === 'm', + tablespace, + using: accessMethod?.name ?? null, + withNoData: null, + }); + } + + // TODO: update counts! + progressCallback('columns', columnsCount, 'done'); + progressCallback('indexes', indexesCount, 'done'); + progressCallback('fks', foreignKeysCount, 'done'); + progressCallback('checks', checksCount, 'done'); + progressCallback('views', viewsCount, 'done'); + + return { + schemas, + tables, + enums, + columns, + indexes, + pks, + fks, + uniques, + checks, + sequences, + roles, + privileges, + policies, + views, + viewColumns, + } satisfies InterimSchema; +}; + +export const fromDatabaseForDrizzle = async ( + db: DB, + filter: EntityFilter, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, +) => { + const res = await fromDatabase(db, filter, progressCallback); + res.schemas = res.schemas.filter((it) => it.name !== 'public'); + res.indexes = res.indexes.filter((it) => !it.forPK && !it.forUnique); + + return res; +}; diff --git a/drizzle-kit/src/dialects/postgres/commutativity.ts b/drizzle-kit/src/dialects/postgres/commutativity.ts new file mode 100644 index 0000000000..df5359017e --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/commutativity.ts @@ -0,0 +1,787 @@ +import { existsSync, readFileSync } from 'fs'; +import { dirname } from 'path'; +import { assertUnreachable } from 'src/utils'; +import { createDDL, type PostgresDDL } from './ddl'; +import { ddlDiffDry } from './diff'; +import { drySnapshot, type PostgresSnapshot } from './snapshot'; +import type { JsonStatement } from './statements'; + +export type BranchConflict = { + parentId: string; + parentPath?: string; + branchA: { headId: string; path: string; statement: JsonStatement }; + branchB: { headId: string; path: string; statement: JsonStatement }; +}; + +export type PostgresNonCommutativityReport = { + conflicts: BranchConflict[]; + leafNodes: string[]; // IDs of all leaf nodes (terminal nodes with no children) +}; + +type SnapshotNode = { + id: string; + prevIds: string[]; + path: string; // full path to snapshot.json + folderPath: string; // folder containing snapshot.json + raw: TSnapshot; +}; + +const footprintMap: Record = { + // Table operations + create_table: [ + 'create_table', + 'drop_table', + 'rename_table', + 'move_table', + 'remove_from_schema', + 'set_new_schema', + ], + drop_table: [ + 'create_table', + 'drop_table', + 'rename_table', + 'move_table', + 'remove_from_schema', + 'set_new_schema', + 'add_column', + 'drop_column', + 'alter_column', + 'recreate_column', + 'rename_column', + 'alter_rls', + 'create_index', + 'recreate_index', + ], + rename_table: [ + 'create_table', + 'drop_table', + 'rename_table', + 'move_table', + 'remove_from_schema', + 'set_new_schema', + ], + move_table: [ + 'create_table', + 'drop_table', + 'rename_table', + 'move_table', + 'remove_from_schema', + 'set_new_schema', + ], + remove_from_schema: [ + 'create_table', + 'drop_table', + 'rename_table', + 'move_table', + 'remove_from_schema', + 'set_new_schema', + ], + set_new_schema: [ + 'create_table', + 'drop_table', + 'rename_table', + 'move_table', + 'remove_from_schema', + 'set_new_schema', + ], + + // Column operations + add_column: ['add_column', 'alter_column', 'drop_column', 'rename_column', 'recreate_column'], + drop_column: ['add_column', 'drop_column', 'alter_column', 'rename_column', 'recreate_column'], + alter_column: ['add_column', 'drop_column', 'alter_column', 'rename_column', 'recreate_column'], + recreate_column: ['add_column', 'drop_column', 'alter_column', 'recreate_column', 'rename_column'], + rename_column: ['add_column', 'drop_column', 'alter_column', 'recreate_column', 'rename_column'], + + // Index operations + create_index: ['create_index', 'drop_index', 'rename_index'], + drop_index: ['create_index', 'drop_index', 'rename_index'], + rename_index: ['create_index', 'drop_index', 'rename_index'], + recreate_index: ['create_index', 'drop_index', 'rename_index'], + + // Primary key operations + add_pk: ['add_pk', 'drop_pk', 'alter_pk'], + drop_pk: ['add_pk', 'drop_pk', 'alter_pk'], + alter_pk: ['add_pk', 'drop_pk', 'alter_pk'], + + // Foreign key operations + create_fk: ['create_fk', 'drop_fk', 'recreate_fk'], + drop_fk: ['create_fk', 'drop_fk', 'recreate_fk'], + recreate_fk: ['create_fk', 'drop_fk', 'recreate_fk'], + + // Unique constraint operations + add_unique: ['add_unique', 'drop_unique', 'alter_unique'], + drop_unique: ['add_unique', 'drop_unique', 'alter_unique'], + alter_unique: ['add_unique', 'drop_unique', 'alter_unique'], + + // Check constraint operations + add_check: ['add_check', 'drop_check', 'alter_check'], + drop_check: ['add_check', 'drop_check', 'alter_check'], + alter_check: ['add_check', 'drop_check', 'alter_check'], + + // Constraint operations + rename_constraint: [ + 'rename_constraint', + 'add_pk', + 'drop_pk', + 'alter_pk', + 'add_unique', + 'drop_unique', + 'alter_unique', + 'add_check', + 'drop_check', + 'alter_check', + 'create_fk', + 'drop_fk', + 'recreate_fk', + ], + + // Enum operations + create_enum: ['create_enum', 'drop_enum', 'rename_enum', 'alter_enum', 'recreate_enum', 'move_enum'], + drop_enum: [ + 'create_enum', + 'drop_enum', + 'rename_enum', + 'alter_enum', + 'recreate_enum', + 'move_enum', + 'alter_type_drop_value', + ], + rename_enum: ['create_enum', 'drop_enum', 'rename_enum', 'alter_enum', 'recreate_enum', 'move_enum'], + alter_enum: [ + 'create_enum', + 'drop_enum', + 'rename_enum', + 'alter_enum', + 'recreate_enum', + 'move_enum', + 'alter_type_drop_value', + ], + recreate_enum: ['create_enum', 'drop_enum', 'rename_enum', 'alter_enum', 'recreate_enum', 'move_enum'], + move_enum: ['create_enum', 'drop_enum', 'rename_enum', 'alter_enum', 'recreate_enum', 'move_enum'], + alter_type_drop_value: ['drop_enum', 'alter_enum', 'alter_type_drop_value'], + + // Sequence operations + create_sequence: ['create_sequence', 'drop_sequence', 'rename_sequence', 'alter_sequence', 'move_sequence'], + drop_sequence: ['create_sequence', 'drop_sequence', 'rename_sequence', 'alter_sequence', 'move_sequence'], + rename_sequence: ['create_sequence', 'drop_sequence', 'rename_sequence', 'alter_sequence', 'move_sequence'], + alter_sequence: ['create_sequence', 'drop_sequence', 'rename_sequence', 'alter_sequence', 'move_sequence'], + move_sequence: ['create_sequence', 'drop_sequence', 'rename_sequence', 'alter_sequence', 'move_sequence'], + + // View operations + create_view: ['create_view', 'drop_view', 'rename_view', 'alter_view', 'recreate_view', 'move_view'], + drop_view: ['create_view', 'drop_view', 'rename_view', 'alter_view', 'recreate_view', 'move_view'], + rename_view: ['create_view', 'drop_view', 'rename_view', 'alter_view', 'recreate_view', 'move_view'], + alter_view: ['create_view', 'drop_view', 'rename_view', 'alter_view', 'recreate_view', 'move_view'], + recreate_view: ['create_view', 'drop_view', 'rename_view', 'alter_view', 'recreate_view', 'move_view'], + move_view: ['create_view', 'drop_view', 'rename_view', 'alter_view', 'recreate_view', 'move_view'], + + // Schema operations + create_schema: ['create_schema', 'drop_schema', 'rename_schema'], + drop_schema: ['create_schema', 'drop_schema', 'rename_schema'], + rename_schema: ['create_schema', 'drop_schema', 'rename_schema'], + + // Policy operations + create_policy: ['create_policy', 'drop_policy', 'rename_policy', 'alter_policy', 'recreate_policy'], + drop_policy: ['create_policy', 'drop_policy', 'rename_policy', 'alter_policy', 'recreate_policy'], + rename_policy: ['create_policy', 'drop_policy', 'rename_policy', 'alter_policy', 'recreate_policy'], + alter_policy: ['create_policy', 'drop_policy', 'rename_policy', 'alter_policy', 'recreate_policy'], + recreate_policy: ['create_policy', 'drop_policy', 'rename_policy', 'alter_policy', 'recreate_policy'], + + // RLS operations + alter_rls: ['alter_rls', 'create_policy', 'drop_policy', 'alter_policy', 'recreate_policy'], + + // Role operations + create_role: ['create_role', 'drop_role', 'rename_role', 'alter_role'], + drop_role: [ + 'create_role', + 'drop_role', + 'rename_role', + 'alter_role', + 'grant_privilege', + 'revoke_privilege', + 'regrant_privilege', + ], + rename_role: ['create_role', 'drop_role', 'rename_role', 'alter_role'], + alter_role: ['create_role', 'drop_role', 'rename_role', 'alter_role'], + + // Privilege operations + grant_privilege: ['grant_privilege', 'revoke_privilege', 'regrant_privilege'], + revoke_privilege: ['grant_privilege', 'revoke_privilege', 'regrant_privilege'], + regrant_privilege: ['grant_privilege', 'revoke_privilege', 'regrant_privilege'], +}; + +function formatFootprint(action: string, schema: string, objectName: string, columnName: string): string { + return `${action};${schema};${objectName};${columnName}`; +} + +function extractStatementInfo( + statement: JsonStatement, +): { action: string; schema: string; objectName: string; columnName: string } { + const action = statement.type; + let schema = ''; + let objectName = ''; + let columnName = ''; + + switch (statement.type) { + // Table operations + case 'create_table': + case 'drop_table': + schema = statement.table.schema; + objectName = statement.table.name; + break; + case 'rename_table': + schema = statement.schema; + objectName = statement.from; + break; + case 'move_table': + schema = statement.from; + objectName = statement.name; + break; + case 'remove_from_schema': + schema = statement.schema; + objectName = statement.table; + break; + case 'set_new_schema': + schema = statement.from; + objectName = statement.table; + break; + + // Column operations + case 'add_column': + case 'drop_column': + schema = statement.column.schema; + objectName = statement.column.table; + columnName = statement.column.name; + break; + case 'recreate_column': + schema = statement.diff.schema; + objectName = statement.diff.table; + columnName = statement.diff.name; + break; + case 'alter_column': + schema = statement.to.schema; + objectName = statement.to.table; + columnName = statement.to.name; + break; + case 'rename_column': + schema = statement.from.schema; + objectName = statement.from.table; + columnName = statement.from.name; + break; + + // Index operations + case 'create_index': + break; + case 'drop_index': + schema = statement.index.schema; + objectName = statement.index.name; + break; + case 'rename_index': + schema = statement.schema; + objectName = statement.from; + break; + case 'recreate_index': + schema = statement.diff.schema; + objectName = statement.diff.name; + break; + + // Primary key operations + case 'add_pk': + case 'drop_pk': + case 'alter_pk': + schema = statement.pk.schema; + objectName = statement.pk.table; + break; + + // Foreign key operations + case 'create_fk': + case 'drop_fk': + case 'recreate_fk': + schema = statement.fk.schema; + objectName = statement.fk.table; + break; + + // Unique constraint operations + case 'add_unique': + case 'drop_unique': + schema = statement.unique.schema; + objectName = statement.unique.table; + break; + case 'alter_unique': + schema = (statement as any).diff.schema; + objectName = (statement as any).diff.table; + break; + + // Check constraint operations + case 'add_check': + schema = statement.check.schema; + objectName = statement.check.table; + break; + case 'drop_check': + schema = statement.check.schema; + objectName = statement.check.table; + break; + case 'alter_check': + schema = statement.diff.schema; + objectName = statement.diff.table; + break; + + // Constraint operations + case 'rename_constraint': + schema = statement.schema; + objectName = statement.table; + break; + + // Enum operations + case 'create_enum': + schema = statement.enum.schema; + objectName = statement.enum.name; + break; + case 'drop_enum': + schema = statement.enum.schema; + objectName = statement.enum.name; + break; + case 'alter_enum': + schema = statement.to.schema; + objectName = statement.to.name; + break; + case 'recreate_enum': + schema = statement.to.schema; + objectName = statement.to.name; + break; + case 'rename_enum': + schema = statement.schema; + objectName = statement.from; + break; + case 'move_enum': + schema = statement.from.schema || 'public'; + objectName = statement.from.name; + break; + case 'alter_type_drop_value': + schema = statement.enum.schema; + objectName = statement.enum.name; + break; + + // Sequence operations + case 'create_sequence': + case 'drop_sequence': + case 'alter_sequence': + schema = statement.sequence.schema; + objectName = statement.sequence.name; + break; + case 'rename_sequence': + schema = statement.from.schema; + objectName = statement.from.name; + break; + case 'move_sequence': + schema = statement.from.schema || 'public'; + objectName = statement.from.name; + break; + + // View operations + case 'create_view': + case 'drop_view': + schema = statement.view.schema; + objectName = statement.view.name; + break; + case 'alter_view': + schema = statement.view.schema; + objectName = statement.view.name; + break; + case 'recreate_view': + schema = statement.to.schema; + objectName = statement.to.name; + break; + case 'rename_view': + schema = statement.from.schema; + objectName = statement.from.name; + break; + case 'move_view': + schema = statement.fromSchema; + objectName = statement.view.name; + break; + + // Schema operations + case 'create_schema': + case 'drop_schema': + objectName = statement.name; + break; + case 'rename_schema': + objectName = statement.from.name; + break; + + // Policy operations + case 'create_policy': + case 'drop_policy': + case 'alter_policy': + case 'recreate_policy': + schema = statement.policy.schema; + objectName = statement.policy.table; + break; + case 'rename_policy': + schema = statement.from.schema; + objectName = statement.from.table; + break; + + // RLS operations + case 'alter_rls': + schema = (statement as any).schema; + objectName = (statement as any).name; + break; + + // Role operations + case 'create_role': + case 'drop_role': + case 'alter_role': + objectName = statement.role.name; + break; + case 'rename_role': + objectName = statement.from.name; + break; + + // Privilege operations + case 'grant_privilege': + case 'revoke_privilege': + case 'regrant_privilege': + schema = statement.privilege.schema || ''; + objectName = statement.privilege.table || ''; + break; + + default: + assertUnreachable(statement); + } + + return { action, schema, objectName, columnName }; +} + +export function footprint(statement: JsonStatement, snapshot?: PostgresSnapshot): [string[], string[]] { + const info = extractStatementInfo(statement); + const conflictingTypes = footprintMap[statement.type]; + + const statementFootprint = [formatFootprint(statement.type, info.schema, info.objectName, info.columnName)]; + + let conflictFootprints = conflictingTypes.map((conflictType) => + formatFootprint(conflictType, info.schema, info.objectName, info.columnName) + ); + + if (snapshot) { + const expandedFootprints = expandFootprintsFromSnapshot(statement, info, conflictingTypes, snapshot); + conflictFootprints = [...conflictFootprints, ...expandedFootprints]; + } + + return [statementFootprint, conflictFootprints]; +} + +function generateLeafFootprints(statements: JsonStatement[], snapshot?: PostgresSnapshot): { + statementHashes: Array<{ hash: string; statement: JsonStatement }>; + conflictFootprints: Array<{ hash: string; statement: JsonStatement }>; +} { + const statementHashes: Array<{ hash: string; statement: JsonStatement }> = []; + const conflictFootprints: Array<{ hash: string; statement: JsonStatement }> = []; + + for (let i = 0; i < statements.length; i++) { + const statement = statements[i]; + const [hashes, conflicts] = footprint(statement, snapshot); + + for (const hash of hashes) { + statementHashes.push({ hash, statement }); + } + + for (const conflict of conflicts) { + conflictFootprints.push({ hash: conflict, statement }); + } + } + + return { statementHashes, conflictFootprints }; +} + +function expandFootprintsFromSnapshot( + statement: JsonStatement, + info: { action: string; schema: string; objectName: string; columnName: string }, + conflictingTypes: JsonStatement['type'][], + snapshot: PostgresSnapshot, +): string[] { + const expandedFootprints: string[] = []; + + // For schemas - include all tables/views/enums/sequences in that schema + if (statement.type === 'drop_schema' || statement.type === 'rename_schema') { + const childEntities = findChildEntitiesInSchemaFromSnapshot(info.objectName, snapshot); + for (const entity of childEntities) { + for (const conflictType of conflictingTypes) { + expandedFootprints.push(formatFootprint(conflictType, entity.schema, entity.objectName, entity.columnName)); + } + } + } // For tables - include all columns/indexes/constraints in that table + else if ( + statement.type === 'drop_table' || statement.type === 'rename_table' + ) { + const childEntities = findChildEntitiesInTableFromSnapshot(info.schema, info.objectName, snapshot); + for (const entity of childEntities) { + for (const conflictType of conflictingTypes) { + expandedFootprints.push(formatFootprint(conflictType, entity.schema, entity.objectName, entity.columnName)); + } + } + // all indexes in changed tables should make a conflict in this case + // maybe we need to make other fields optional + // TODO: revise formatFootprint + expandedFootprints.push(formatFootprint('create_index', '', '', '')); + } + + return expandedFootprints; +} + +function findChildEntitiesInSchemaFromSnapshot( + schemaName: string, + snapshot: PostgresSnapshot, +): Array<{ schema: string; objectName: string; columnName: string }> { + const entities: Array<{ schema: string; objectName: string; columnName: string }> = []; + + for (const entity of snapshot.ddl) { + if (entity.entityType === 'tables' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.name, columnName: '' }); + } else if (entity.entityType === 'columns' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: entity.name }); + } else if (entity.entityType === 'views' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.name, columnName: '' }); + } else if (entity.entityType === 'enums' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.name, columnName: '' }); + } else if (entity.entityType === 'sequences' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.name, columnName: '' }); + } else if (entity.entityType === 'indexes' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.name, columnName: '' }); + } else if (entity.entityType === 'pks' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: '' }); + } else if (entity.entityType === 'fks' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: '' }); + } else if (entity.entityType === 'uniques' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: '' }); + } else if (entity.entityType === 'checks' && entity.schema === schemaName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: '' }); + } + } + + return entities; +} + +function findChildEntitiesInTableFromSnapshot( + schemaName: string, + tableName: string, + snapshot: PostgresSnapshot, +): Array<{ schema: string; objectName: string; columnName: string }> { + const entities: Array<{ schema: string; objectName: string; columnName: string }> = []; + + for (const entity of snapshot.ddl) { + if (entity.entityType === 'columns' && entity.schema === schemaName && entity.table === tableName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: entity.name }); + } else if (entity.entityType === 'indexes' && entity.schema === schemaName && entity.table === tableName) { + entities.push({ schema: entity.schema, objectName: entity.name, columnName: '' }); + } else if (entity.entityType === 'pks' && entity.schema === schemaName && entity.table === tableName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: '' }); + } else if (entity.entityType === 'fks' && entity.schema === schemaName && entity.table === tableName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: '' }); + } else if (entity.entityType === 'uniques' && entity.schema === schemaName && entity.table === tableName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: '' }); + } else if (entity.entityType === 'checks' && entity.schema === schemaName && entity.table === tableName) { + entities.push({ schema: entity.schema, objectName: entity.table, columnName: '' }); + } + } + + return entities; +} + +function findFootprintIntersections( + branchAHashes: Array<{ hash: string; statement: JsonStatement }>, + branchAConflicts: Array<{ hash: string; statement: JsonStatement }>, + branchBHashes: Array<{ hash: string; statement: JsonStatement }>, + branchBConflicts: Array<{ hash: string; statement: JsonStatement }>, +) { + // const intersections: { leftStatement: string; rightStatement: string }[] = []; + + for (const hashInfoA of branchAHashes) { + for (const conflictInfoB of branchBConflicts) { + if (hashInfoA.hash === conflictInfoB.hash) { + // Decided to return a first issue. You should run check and fix them until you have 0 + // intersections.push({ leftStatement: hashInfoA.hash, rightStatement: conflictInfoB.hash }); + return { leftStatement: hashInfoA.statement, rightStatement: conflictInfoB.statement }; + } + } + } + + for (const hashInfoB of branchBHashes) { + for (const conflictInfoA of branchAConflicts) { + if (hashInfoB.hash === conflictInfoA.hash) { + // Decided to return a first issue. You should run check and fix them until you have 0 + // intersections.push({ leftStatement: hashInfoB.hash, rightStatement: conflictInfoA.hash }); + return { leftStatement: hashInfoB.statement, rightStatement: conflictInfoA.statement }; + } + } + } + + // return intersections; +} + +export const getReasonsFromStatements = async ( + aStatements: JsonStatement[], + bStatements: JsonStatement[], + snapshot?: PostgresSnapshot, +) => { + const parentSnapshot = snapshot ?? drySnapshot; + const branchAFootprints = generateLeafFootprints( + aStatements, + parentSnapshot, + ); + const branchBFootprints = generateLeafFootprints( + bStatements, + parentSnapshot, + ); + + return findFootprintIntersections( + branchAFootprints.statementHashes, + branchAFootprints.conflictFootprints, + branchBFootprints.statementHashes, + branchBFootprints.conflictFootprints, + ); +}; + +export const detectNonCommutative = async ( + snapshots: string[], +): Promise => { + const nodes = buildSnapshotGraph(snapshots); + + // Build parent -> children mapping (a child can have multiple parents) + const prevToChildren: Record = {}; + for (const node of Object.values(nodes)) { + for (const parentId of node.prevIds) { + const arr = prevToChildren[parentId] ?? []; + arr.push(node.id); + prevToChildren[parentId] = arr; + } + } + + const conflicts: BranchConflict[] = []; + + for (const [prevId, childIds] of Object.entries(prevToChildren)) { + if (childIds.length <= 1) continue; + + const parentNode = nodes[prevId]; + + const childToLeaves: Record = {}; + for (const childId of childIds) { + childToLeaves[childId] = collectLeaves(nodes, childId); + } + + const leafStatements: Record = {}; + for (const leaves of Object.values(childToLeaves)) { + for (const leafId of leaves) { + const leafNode = nodes[leafId]!; + const parentSnapshot = parentNode ? parentNode.raw : drySnapshot; + const { statements } = await diffPostgres(parentSnapshot, leafNode.raw); + leafStatements[leafId] = { statements, path: leafNode.folderPath }; + } + } + + for (let i = 0; i < childIds.length; i++) { + for (let j = i + 1; j < childIds.length; j++) { + const groupA = childToLeaves[childIds[i]] ?? []; + const groupB = childToLeaves[childIds[j]] ?? []; + for (const aId of groupA) { + for (const bId of groupB) { + const aStatements = leafStatements[aId]!.statements; + const bStatements = leafStatements[bId]!.statements; + + const parentSnapshot = parentNode ? parentNode.raw : drySnapshot; + + // function that accepts statements are respond with conflicts + const intersectedHashed = await getReasonsFromStatements(aStatements, bStatements, parentSnapshot); + + if (intersectedHashed) { + // parentId and parentPath is a head of a branched leaves + conflicts.push({ + parentId: prevId, + parentPath: parentNode?.folderPath, + branchA: { headId: aId, path: leafStatements[aId]!.path, statement: intersectedHashed.leftStatement }, + branchB: { headId: bId, path: leafStatements[bId]!.path, statement: intersectedHashed.rightStatement }, + }); + } + } + } + } + } + } + + // Collect all leaf nodes (nodes with no children) + const allNodeIds = new Set(Object.keys(nodes)); + const nodesWithChildren = new Set(Object.values(prevToChildren).flat()); + const leafNodes = Array.from(allNodeIds).filter((id) => !nodesWithChildren.has(id)); + + return { conflicts, leafNodes }; +}; + +function buildSnapshotGraph( + snapshotFiles: string[], +): Record> { + const byId: Record> = {}; + for (const file of snapshotFiles) { + if (!existsSync(file)) continue; + const raw = JSON.parse(readFileSync(file, 'utf8')) as TSnapshot; + const node: SnapshotNode = { + id: raw.id, + prevIds: raw.prevIds, + path: file, + folderPath: dirname(file), + raw, + }; + byId[node.id] = node; + } + return byId; +} + +function collectLeaves( + graph: Record>, + startId: string, +): string[] { + const leaves: string[] = []; + const stack: string[] = [startId]; + const prevToChildren: Record = {}; + + // Build parent -> children mapping (a child can have multiple parents) + for (const node of Object.values(graph)) { + for (const parentId of node.prevIds) { + const arr = prevToChildren[parentId] ?? []; + arr.push(node.id); + prevToChildren[parentId] = arr; + } + } + + while (stack.length) { + const id = stack.pop()!; + const children = prevToChildren[id] ?? []; + if (children.length === 0) { + leaves.push(id); + } else { + for (const c of children) stack.push(c); + } + } + return leaves; +} + +async function diffPostgres( + fromSnap: PostgresSnapshot | 'dry', + toSnap: PostgresSnapshot, +): Promise<{ statements: JsonStatement[] }>; +async function diffPostgres( + fromSnap: PostgresSnapshot, + toSnap: PostgresSnapshot, +): Promise<{ statements: JsonStatement[] }>; +async function diffPostgres(fromSnap: any, toSnap: any): Promise<{ statements: JsonStatement[] }> { + const fromDDL: PostgresDDL = createDDL(); + const toDDL: PostgresDDL = createDDL(); + + if (fromSnap !== 'dry') { + for (const e of fromSnap.ddl) fromDDL.entities.push(e); + } + for (const e of toSnap.ddl) toDDL.entities.push(e); + + const { statements } = await ddlDiffDry(fromDDL, toDDL, 'default'); + return { statements }; +} diff --git a/drizzle-kit/src/dialects/postgres/convertor.ts b/drizzle-kit/src/dialects/postgres/convertor.ts new file mode 100644 index 0000000000..c791e1b668 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/convertor.ts @@ -0,0 +1,1107 @@ +import { escapeSingleQuotes, type Simplify, wrapWith } from '../../utils'; +import { defaultNameForPK, defaults, defaultToSQL, isDefaultAction, isSerialType } from './grammar'; +import type { JsonStatement } from './statements'; + +export const convertor = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + convertor: (statement: Simplify>) => string | string[], +) => { + return { + type, + can: (st: JsonStatement) => { + return st.type === type; + }, + convert: convertor, + }; +}; + +const createSchemaConvertor = convertor('create_schema', (st) => { + return `CREATE SCHEMA "${st.name}";\n`; +}); + +const dropSchemaConvertor = convertor('drop_schema', (st) => { + return `DROP SCHEMA "${st.name}";\n`; +}); + +const renameSchemaConvertor = convertor('rename_schema', (st) => { + return `ALTER SCHEMA "${st.from.name}" RENAME TO "${st.to.name}";\n`; +}); + +const createViewConvertor = convertor('create_view', (st) => { + const { definition, name: viewName, schema, with: withOption, materialized, withNoData, tablespace, using } = st.view; + + const name = schema !== 'public' ? `"${schema}"."${viewName}"` : `"${viewName}"`; + let statement = materialized ? `CREATE MATERIALIZED VIEW ${name}` : `CREATE VIEW ${name}`; + if (using) statement += ` USING "${using}"`; + + const options: string[] = []; + if (withOption) { + statement += ` WITH (`; + for (const [key, value] of Object.entries(withOption)) { + if (value === null) continue; + options.push(`${key.snake_case()} = ${value}`); + } + statement += options.join(', '); + statement += `)`; + } + + if (tablespace) statement += ` TABLESPACE ${tablespace}`; + statement += ` AS (${definition})`; + if (withNoData) statement += ` WITH NO DATA`; + statement += `;`; + + return statement; +}); + +const dropViewConvertor = convertor('drop_view', (st) => { + const { name: viewName, schema, materialized } = st.view; + const name = schema !== 'public' ? `"${schema}"."${viewName}"` : `"${viewName}"`; + return `DROP${materialized ? ' MATERIALIZED' : ''} VIEW ${name};`; +}); + +const renameViewConvertor = convertor('rename_view', (st) => { + const materialized = st.from.materialized; + const nameFrom = st.from.schema !== 'public' ? `"${st.from.schema}"."${st.from.name}"` : `"${st.from.name}"`; + + return `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW ${nameFrom} RENAME TO "${st.to.name}";`; +}); + +const moveViewConvertor = convertor('move_view', (st) => { + const { fromSchema, toSchema, view } = st; + const from = fromSchema === 'public' ? `"${view.name}"` : `"${fromSchema}"."${view.name}"`; + return `ALTER${view.materialized ? ' MATERIALIZED' : ''} VIEW ${from} SET SCHEMA "${toSchema}";`; +}); + +const alterViewConvertor = convertor('alter_view', (st) => { + const diff = st.diff; + + const statements = [] as string[]; + const key = st.view.schema !== 'public' ? `"${st.view.schema}"."${st.view.name}"` : `"${st.view.name}"`; + const viewClause = st.view.materialized ? `MATERIALIZED VIEW ${key}` : `VIEW ${key}`; + + const withFrom = diff.with?.from || {}; + const withTo = diff.with?.to || {}; + + const resetOptions = Object.entries(withFrom).filter(([key, val]) => { + return val !== null && (key in withTo ? withTo[key as keyof typeof withTo] === null : true); + }).map((it) => it[0].snake_case()); + + const setOptions = Object.entries(withTo).filter(([key, val]) => { + const from = key in withFrom ? withFrom[key as keyof typeof withFrom] : null; + return val !== null && from !== val; + }).map((it) => `${it[0].snake_case()} = ${it[1]}`).join(', '); + + if (setOptions.length > 0) statements.push(`ALTER ${viewClause} SET (${setOptions});`); + if (resetOptions.length > 0) statements.push(`ALTER ${viewClause} RESET (${resetOptions.join(', ')});`); + // TODO: reset missing options, set changed options and new options? + + if (diff.tablespace) { + const to = diff.tablespace.to || defaults.tablespace; + statements.push(`ALTER ${viewClause} SET TABLESPACE "${to}";`); + } + + if (diff.using) { + const toUsing = diff.using.to ?? defaults.accessMethod; + statements.push(`ALTER ${viewClause} SET ACCESS METHOD "${toUsing}";`); + } + + return statements; +}); + +const recreateViewConvertor = convertor('recreate_view', (st) => { + const drop = dropViewConvertor.convert({ view: st.from }) as string; + const create = createViewConvertor.convert({ view: st.to }) as string; + return [drop, create]; +}); + +const createTableConvertor = convertor('create_table', (st) => { + const { schema, name, columns, pk, uniques, checks, policies, isRlsEnabled } = st.table; + + const statements = [] as string[]; + let statement = ''; + const key = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + + statement += `CREATE TABLE ${key} (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const isPK = pk && pk.columns.length === 1 && pk.columns[0] === column.name + && pk.name === defaultNameForPK(column.table); + const isSerial = isSerialType(column.type); + + const primaryKeyStatement = isPK ? ' PRIMARY KEY' : ''; + const notNullStatement = pk?.columns.includes(column.name) || isSerial + ? '' + : column.notNull && !column.identity + ? ' NOT NULL' + : ''; + const defaultStatement = column.default ? ` DEFAULT ${defaultToSQL(column)}` : ''; + + const unique = uniques.find((u) => u.columns.length === 1 && u.columns[0] === column.name); + + const unqiueConstraintPrefix = unique + ? unique.nameExplicit ? `CONSTRAINT "${unique.name}" UNIQUE` : 'UNIQUE' + : ''; + + const uniqueConstraintStatement = unique + ? ` ${unqiueConstraintPrefix}${unique.nullsNotDistinct ? ' NULLS NOT DISTINCT' : ''}` + : ''; + + const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' + ? `"${column.typeSchema}".` + : ''; + + const colType = column.typeSchema + ? `"${column.type}"` + : column.type; + const type = `${schemaPrefix}${colType}${'[]'.repeat(column.dimensions)}`; + + const generated = column.generated; + + const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) STORED` : ''; + + const identityWithSchema = schema !== 'public' + ? `"${schema}"."${column.identity?.name}"` + : `"${column.identity?.name}"`; + + const identity = column.identity + ? ` GENERATED ${ + column.identity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + } AS IDENTITY (sequence name ${identityWithSchema}${ + column.identity.increment + ? ` INCREMENT BY ${column.identity.increment}` + : '' + }${ + column.identity.minValue + ? ` MINVALUE ${column.identity.minValue}` + : '' + }${ + column.identity.maxValue + ? ` MAXVALUE ${column.identity.maxValue}` + : '' + }${ + column.identity.startWith + ? ` START WITH ${column.identity.startWith}` + : '' + }${column.identity.cache ? ` CACHE ${column.identity.cache}` : ''}${column.identity.cycle ? ` CYCLE` : ''})` + : ''; + + statement += '\t' + + `"${column.name}" ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${uniqueConstraintStatement}${identity}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (pk && (pk.columns.length > 1 || pk.name !== defaultNameForPK(st.table.name))) { + statement += ',\n'; + statement += `\tCONSTRAINT "${pk.name}" PRIMARY KEY("${pk.columns.join(`","`)}")`; + } + + for (const it of uniques.filter((u) => u.columns.length > 1)) { + statement += ',\n'; + statement += `\tCONSTRAINT "${it.name}" UNIQUE${it.nullsNotDistinct ? ' NULLS NOT DISTINCT' : ''}("${ + it.columns.join(`","`) + }")`; + } + + for (const check of checks) { + statement += ',\n'; + statement += `\tCONSTRAINT "${check.name}" CHECK (${check.value})`; + } + + statement += `\n);`; + statement += `\n`; + statements.push(statement); + + if (policies && policies.length > 0 || isRlsEnabled) { + statements.push(toggleRlsConvertor.convert({ + isRlsEnabled: true, + name: st.table.name, + schema: st.table.schema, + }) as string); + } + + return statements; +}); + +const dropTableConvertor = convertor('drop_table', (st) => { + const { name, schema, policies } = st.table; + + const tableNameWithSchema = schema !== 'public' + ? `"${schema}"."${name}"` + : `"${name}"`; + + const droppedPolicies = policies.map((policy) => dropPolicyConvertor.convert({ policy }) as string); + + return [ + ...droppedPolicies, + `DROP TABLE ${tableNameWithSchema};`, + ]; +}); + +const renameTableConvertor = convertor('rename_table', (st) => { + const schemaPrefix = st.schema !== 'public' + ? `"${st.schema}".` + : ''; + + return `ALTER TABLE ${schemaPrefix}"${st.from}" RENAME TO "${st.to}";`; +}); + +const moveTableConvertor = convertor('move_table', (st) => { + const from = st.from !== 'public' ? `"${st.from}"."${st.name}"` : `"${st.name}"`; + + return `ALTER TABLE ${from} SET SCHEMA "${st.to}";\n`; +}); + +const addColumnConvertor = convertor('add_column', (st) => { + const { schema, table, name, identity, generated } = st.column; + const column = st.column; + + const primaryKeyStatement = st.isPK ? ' PRIMARY KEY' : ''; + + const tableNameWithSchema = schema !== 'public' + ? `"${schema}"."${table}"` + : `"${table}"`; + + const defaultStatement = column.default ? ` DEFAULT ${defaultToSQL(column)}` : ''; + + const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' + ? `"${column.typeSchema}".` + : ''; + + const type = column.typeSchema + ? `"${column.type}"` + : column.type; + let fixedType = `${schemaPrefix}${type}${'[]'.repeat(column.dimensions)}`; + + const isSerial = isSerialType(column.type); + + const notNullStatement = column.notNull && !identity && !generated && !isSerial && !st.isCompositePK + ? ' NOT NULL' + : ''; + + const identityWithSchema = schema !== 'public' + ? `"${schema}"."${identity?.name}"` + : `"${identity?.name}"`; + + const identityStatement = identity + ? ` GENERATED ${ + identity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + } AS IDENTITY (sequence name ${identityWithSchema}${ + identity.increment + ? ` INCREMENT BY ${identity.increment}` + : '' + }${ + identity.minValue + ? ` MINVALUE ${identity.minValue}` + : '' + }${ + identity.maxValue + ? ` MAXVALUE ${identity.maxValue}` + : '' + }${ + identity.startWith + ? ` START WITH ${identity.startWith}` + : '' + }${identity.cache ? ` CACHE ${identity.cache}` : ''}${identity.cycle ? ` CYCLE` : ''})` + : ''; + + const generatedStatement = column.generated ? ` GENERATED ALWAYS AS (${column.generated.as}) STORED` : ''; + + return `ALTER TABLE ${tableNameWithSchema} ADD COLUMN "${name}" ${fixedType}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${identityStatement};`; +}); + +const dropColumnConvertor = convertor('drop_column', (st) => { + const { schema, table, name } = st.column; + + const tableNameWithSchema = schema !== 'public' + ? `"${schema}"."${table}"` + : `"${table}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP COLUMN "${name}";`; +}); + +const renameColumnConvertor = convertor('rename_column', (st) => { + const { table, schema } = st.from; + const tableNameWithSchema = schema !== 'public' + ? `"${schema}"."${table}"` + : `"${table}"`; + + return `ALTER TABLE ${tableNameWithSchema} RENAME COLUMN "${st.from.name}" TO "${st.to.name}";`; +}); + +const recreateColumnConvertor = convertor('recreate_column', (st) => { + // AlterTableAlterColumnSetExpressionConvertor + // AlterTableAlterColumnAlterGeneratedConvertor + + const r = st.diff.$right; + const drop = dropColumnConvertor.convert({ column: r }) as string; + const add = addColumnConvertor.convert({ column: r, isPK: st.isPK, isCompositePK: false }) as string; + + return [drop, add]; +}); + +const recreateIndexConvertor = convertor('recreate_index', (st) => { + // AlterTableAlterColumnSetExpressionConvertor + // AlterTableAlterColumnAlterGeneratedConvertor + + const drop = dropIndexConvertor.convert({ index: st.index }) as string; + const add = createIndexConvertor.convert({ index: st.index }) as string; + + return [drop, add]; +}); + +const alterColumnConvertor = convertor('alter_column', (st) => { + const { diff, to: column, isEnum, wasEnum } = st; + const statements = [] as string[]; + + const key = column.schema !== 'public' + ? `"${column.schema}"."${column.table}"` + : `"${column.table}"`; + + const recreateDefault = diff.type && (isEnum || wasEnum) && (column.default || (diff.default && diff.default.from)); + if (recreateDefault) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP DEFAULT;`); + } + + if (diff.type) { + const typeSchema = column.typeSchema && column.typeSchema !== 'public' ? `"${column.typeSchema}".` : ''; + const textProxy = wasEnum && isEnum ? 'text::' : ''; // using enum1::text::enum2 + const suffix = isEnum + ? ` USING "${column.name}"::${textProxy}${typeSchema}"${column.type}"${'[]'.repeat(column.dimensions)}` + : ''; + let type: string; + + if (diff.type) { + type = diff.typeSchema?.to && diff.typeSchema.to !== 'public' + ? `"${diff.typeSchema.to}"."${diff.type.to}"` + : isEnum + ? `"${diff.type.to}"` + : diff.type.to; + } else { + type = `${typeSchema}${column.typeSchema ? `"${column.type}"` : column.type}`; + } + + statements.push( + `ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DATA TYPE ${type}${ + '[]'.repeat(column.dimensions) + }${suffix};`, + ); + + if (recreateDefault) { + statements.push( + `ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DEFAULT ${defaultToSQL(column)};`, + ); + } + } + + if (diff.default && !recreateDefault) { + if (diff.default.to) { + statements.push( + `ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DEFAULT ${defaultToSQL(diff.$right)};`, + ); + } else { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP DEFAULT;`); + } + } + + if (diff.generated && diff.generated.to === null) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP EXPRESSION;`); + } + + if (diff.identity) { + if (diff.identity.from === null) { + const identity = column.identity!; + const identityWithSchema = column.schema !== 'public' + ? `"${column.schema}"."${identity.name}"` + : `"${identity.name}"`; + const typeClause = identity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT'; + const incrementClause = identity.increment ? ` INCREMENT BY ${identity.increment}` : ''; + const minClause = identity.minValue ? ` MINVALUE ${identity.minValue}` : ''; + const maxClause = identity.maxValue ? ` MAXVALUE ${identity.maxValue}` : ''; + const startWith = identity.startWith ? ` START WITH ${identity.startWith}` : ''; + const cache = identity.cache ? ` CACHE ${identity.cache}` : ''; + const cycle = identity.cycle ? ` CYCLE` : ''; + const identityStatement = + `GENERATED ${typeClause} AS IDENTITY (sequence name ${identityWithSchema}${incrementClause}${minClause}${maxClause}${startWith}${cache}${cycle})`; + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" ADD ${identityStatement};`); + } else if (diff.identity.to === null) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP IDENTITY;`); + } else { + const { from, to } = diff.identity; + + if (from.type !== to.type) { + const typeClause = to.type === 'always' ? 'ALWAYS' : 'BY DEFAULT'; + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET GENERATED ${typeClause};`); + } + if (from.minValue !== to.minValue) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET MINVALUE ${to.minValue};`); + } + + if (from.maxValue !== to.maxValue) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET MAXVALUE ${to.maxValue};`); + } + + if (from.increment !== to.increment) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET INCREMENT BY ${to.increment};`); + } + + if (from.startWith !== to.startWith) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET START WITH ${to.startWith};`); + } + + if (from.cache !== to.cache) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET CACHE ${to.cache};`); + } + + if (from.cycle !== to.cycle) { + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET ${to.cycle ? `CYCLE` : 'NO CYCLE'};`); + } + } + } + + if (diff.notNull && !(diff.notNull.to && diff.identity && diff.identity.to)) { + const clause = diff.notNull.to ? 'SET NOT NULL' : 'DROP NOT NULL'; + statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" ${clause};`); + } + + return statements; +}); + +const createIndexConvertor = convertor('create_index', (st) => { + const { + schema, + table, + name, + columns, + isUnique, + concurrently, + with: w, + method, + where, + } = st.index; + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + const value = columns + .map((it) => { + const expr = it.isExpression ? it.value : `"${it.value}"`; + const opcl = it.opclass && !it.opclass.default ? ` ${it.opclass.name}` : ''; + + // ASC - default + const ord = it.asc ? '' : ' DESC'; + + // skip if asc+nulls last or desc+nulls first + const nulls = (it.asc && !it.nullsFirst) || (!it.asc && it.nullsFirst) + ? '' + : it.nullsFirst + ? ' NULLS FIRST' + : ' NULLS LAST'; + + return `${expr}${opcl}${ord}${nulls}`; + }).join(','); + + const key = schema !== 'public' + ? `"${schema}"."${table}"` + : `"${table}"`; + + const concur = concurrently ? ' CONCURRENTLY' : ''; + const withClause = w ? ` WITH (${w})` : ''; + const whereClause = where ? ` WHERE ${where}` : ''; + const using = method !== defaults.index.method ? ` USING ${method}` : ''; + return `CREATE ${indexPart}${concur} "${name}" ON ${key}${using} (${value})${withClause}${whereClause};`; +}); + +const dropIndexConvertor = convertor('drop_index', (st) => { + return `DROP INDEX "${st.index.name}";`; +}); + +const renameIndexConvertor = convertor('rename_index', (st) => { + const key = st.schema !== 'public' ? `"${st.schema}"."${st.from}"` : `"${st.from}"`; + + return `ALTER INDEX ${key} RENAME TO "${st.to}";`; +}); + +const addPrimaryKeyConvertor = convertor('add_pk', (st) => { + const { pk } = st; + const key = pk.schema !== 'public' + ? `"${pk.schema}"."${pk.table}"` + : `"${pk.table}"`; + + if (!pk.nameExplicit) { + return `ALTER TABLE ${key} ADD PRIMARY KEY ("${pk.columns.join('","')}");`; + } + return `ALTER TABLE ${key} ADD CONSTRAINT "${pk.name}" PRIMARY KEY("${pk.columns.join('","')}");`; +}); + +const dropPrimaryKeyConvertor = convertor('drop_pk', (st) => { + const pk = st.pk; + const key = pk.schema !== 'public' + ? `"${pk.schema}"."${pk.table}"` + : `"${pk.table}"`; + + return `ALTER TABLE ${key} DROP CONSTRAINT "${pk.name}";`; +}); + +const recreatePrimaryKeyConvertor = convertor('alter_pk', (it) => { + const drop = dropPrimaryKeyConvertor.convert({ pk: it.pk }) as string; + const create = addPrimaryKeyConvertor.convert({ pk: it.pk }) as string; + return [drop, create]; +}); + +const renameConstraintConvertor = convertor('rename_constraint', (st) => { + const key = st.schema !== 'public' + ? `"${st.schema}"."${st.table}"` + : `"${st.table}"`; + + return `ALTER TABLE ${key} RENAME CONSTRAINT "${st.from}" TO "${st.to}";`; +}); + +const createForeignKeyConvertor = convertor('create_fk', (st) => { + const { schema, table, name, tableTo, columns, columnsTo, onDelete, onUpdate, schemaTo } = st.fk; + + const onDeleteStatement = onDelete && !isDefaultAction(onDelete) ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate && !isDefaultAction(onUpdate) ? ` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columns.map((it) => `"${it}"`).join(','); + const toColumnsString = columnsTo.map((it) => `"${it}"`).join(','); + + const tableNameWithSchema = schema !== 'public' + ? `"${schema}"."${table}"` + : `"${table}"`; + + const tableToNameWithSchema = schemaTo !== 'public' + ? `"${schemaTo}"."${tableTo}"` + : `"${tableTo}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${name}" FOREIGN KEY (${fromColumnsString}) REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; +}); + +const recreateFKConvertor = convertor('recreate_fk', (st) => { + const { fk } = st; + + const key = fk.schema !== 'public' + ? `"${fk.schema}"."${fk.table}"` + : `"${fk.table}"`; + + const onDeleteStatement = fk.onDelete !== 'NO ACTION' + ? ` ON DELETE ${fk.onDelete}` + : ''; + const onUpdateStatement = fk.onUpdate !== 'NO ACTION' + ? ` ON UPDATE ${fk.onUpdate}` + : ''; + + const fromColumnsString = fk.columns + .map((it) => `"${it}"`) + .join(','); + const toColumnsString = fk.columnsTo.map((it) => `"${it}"`).join(','); + + const tableToNameWithSchema = fk.schemaTo !== 'public' + ? `"${fk.schemaTo}"."${fk.tableTo}"` + : `"${fk.tableTo}"`; + + let sql = `ALTER TABLE ${key} DROP CONSTRAINT "${fk.name}", `; + sql += `ADD CONSTRAINT "${fk.name}" FOREIGN KEY (${fromColumnsString}) `; + sql += `REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; + + return sql; +}); + +const dropForeignKeyConvertor = convertor('drop_fk', (st) => { + const { schema, table, name } = st.fk; + + const tableNameWithSchema = schema !== 'public' + ? `"${schema}"."${table}"` + : `"${table}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${name}";`; +}); + +const addCheckConvertor = convertor('add_check', (st) => { + const { check } = st; + const tableNameWithSchema = check.schema !== 'public' + ? `"${check.schema}"."${check.table}"` + : `"${check.table}"`; + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${check.name}" CHECK (${check.value});`; +}); + +const dropCheckConvertor = convertor('drop_check', (st) => { + const { check } = st; + const tableNameWithSchema = check.schema !== 'public' + ? `"${check.schema}"."${check.table}"` + : `"${check.table}"`; + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${check.name}";`; +}); + +const recreateCheckConvertor = convertor('alter_check', (st) => { + const { diff } = st; + + const check = diff.$right; + + const key = check.schema !== 'public' + ? `"${check.schema}"."${check.table}"` + : `"${check.table}"`; + + let sql = `ALTER TABLE ${key} DROP CONSTRAINT "${check.name}", `; + sql += `ADD CONSTRAINT "${check.name}" CHECK (${check.value});`; + + return sql; +}); + +const addUniqueConvertor = convertor('add_unique', (st) => { + const { unique } = st; + const tableNameWithSchema = unique.schema !== 'public' + ? `"${unique.schema}"."${unique.table}"` + : `"${unique.table}"`; + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${unique.name}" UNIQUE${ + unique.nullsNotDistinct ? ' NULLS NOT DISTINCT' : '' + }("${unique.columns.join('","')}");`; +}); + +const dropUniqueConvertor = convertor('drop_unique', (st) => { + const { unique } = st; + const tableNameWithSchema = unique.schema !== 'public' + ? `"${unique.schema}"."${unique.table}"` + : `"${unique.table}"`; + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${unique.name}";`; +}); + +const createEnumConvertor = convertor('create_enum', (st) => { + const { name, schema, values } = st.enum; + const enumNameWithSchema = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + + let valuesStatement = '('; + valuesStatement += values.map((it) => wrapWith(it.replaceAll("'", "''"), "'")).join(', '); + valuesStatement += ')'; + + return `CREATE TYPE ${enumNameWithSchema} AS ENUM${valuesStatement};`; +}); + +const dropEnumConvertor = convertor('drop_enum', (st) => { + const { name, schema } = st.enum; + const enumNameWithSchema = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + return `DROP TYPE ${enumNameWithSchema};`; +}); + +const renameEnumConvertor = convertor('rename_enum', (st) => { + const from = st.schema !== 'public' ? `"${st.schema}"."${st.from}"` : `"${st.from}"`; + return `ALTER TYPE ${from} RENAME TO "${st.to}";`; +}); + +const moveEnumConvertor = convertor('move_enum', (st) => { + const { from, to } = st; + + const enumNameWithSchema = from.schema !== 'public' ? `"${from.schema}"."${from.name}"` : `"${from.name}"`; + return `ALTER TYPE ${enumNameWithSchema} SET SCHEMA "${to.schema || 'public'}";`; +}); + +const alterEnumConvertor = convertor('alter_enum', (st) => { + const { diff, to: e } = st; + const key = e.schema !== 'public' ? `"${e.schema}"."${e.name}"` : `"${e.name}"`; + + const statements = [] as string[]; + for (const d of diff.filter((it) => it.type === 'added')) { + if (d.beforeValue) { + statements.push(`ALTER TYPE ${key} ADD VALUE '${d.value}' BEFORE '${d.beforeValue}';`); + } else { + statements.push(`ALTER TYPE ${key} ADD VALUE '${d.value}';`); + } + } + return statements; +}); + +const recreateEnumConvertor = convertor('recreate_enum', (st) => { + const { to, columns } = st; + const statements: string[] = []; + for (const column of columns) { + const key = column.schema !== 'public' ? `"${column.schema}"."${column.table}"` : `"${column.table}"`; + statements.push( + `ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DATA TYPE text${'[]'.repeat(column.dimensions)};`, + ); + if (column.default) statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column.name}" DROP DEFAULT;`); + } + statements.push(dropEnumConvertor.convert({ enum: to }) as string); + statements.push(createEnumConvertor.convert({ enum: to }) as string); + + for (const column of columns) { + const key = column.schema !== 'public' ? `"${column.schema}"."${column.table}"` : `"${column.table}"`; + const enumType = to.schema !== 'public' ? `"${to.schema}"."${to.name}"` : `"${to.name}"`; + statements.push( + `ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DATA TYPE ${enumType}${ + '[]'.repeat(column.dimensions) + } USING "${column.name}"::${enumType}${'[]'.repeat(column.dimensions)};`, + ); + if (column.default) { + statements.push( + `ALTER TABLE ${key} ALTER COLUMN "${column.name}" SET DEFAULT ${defaultToSQL(column)};`, + ); + } + } + + return statements; +}); + +const createSequenceConvertor = convertor('create_sequence', (st) => { + const { name, schema, minValue, maxValue, incrementBy, startWith, cacheSize, cycle } = st.sequence; + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `CREATE SEQUENCE ${sequenceWithSchema}${incrementBy ? ` INCREMENT BY ${incrementBy}` : ''}${ + minValue ? ` MINVALUE ${minValue}` : '' + }${maxValue ? ` MAXVALUE ${maxValue}` : ''}${startWith ? ` START WITH ${startWith}` : ''}${ + cacheSize ? ` CACHE ${cacheSize}` : '' + }${cycle ? ` CYCLE` : ''};`; +}); + +const dropSequenceConvertor = convertor('drop_sequence', (st) => { + const { name, schema } = st.sequence; + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + return `DROP SEQUENCE ${sequenceWithSchema};`; +}); + +const renameSequenceConvertor = convertor('rename_sequence', (st) => { + const sequenceWithSchemaFrom = st.from.schema !== 'public' + ? `"${st.from.schema}"."${st.from.name}"` + : `"${st.from.name}"`; + return `ALTER SEQUENCE ${sequenceWithSchemaFrom} RENAME TO "${st.to.name}";`; +}); + +const moveSequenceConvertor = convertor('move_sequence', (st) => { + const { from, to } = st; + const sequenceWithSchema = from.schema !== 'public' + ? `"${from.schema}"."${from.name}"` + : `"${from.name}"`; + const seqSchemaTo = `"${to.schema}"`; + return `ALTER SEQUENCE ${sequenceWithSchema} SET SCHEMA ${seqSchemaTo};`; +}); + +const alterSequenceConvertor = convertor('alter_sequence', (st) => { + const { schema, name, incrementBy, minValue, maxValue, startWith, cacheSize, cycle } = st.sequence; + + const sequenceWithSchema = schema !== 'public' ? `"${schema}"."${name}"` : `"${name}"`; + + return `ALTER SEQUENCE ${sequenceWithSchema}${incrementBy ? ` INCREMENT BY ${incrementBy}` : ''}${ + minValue ? ` MINVALUE ${minValue}` : '' + }${maxValue ? ` MAXVALUE ${maxValue}` : ''}${startWith ? ` START WITH ${startWith}` : ''}${ + cacheSize ? ` CACHE ${cacheSize}` : '' + }${cycle ? ` CYCLE` : ''};`; +}); + +const createRoleConvertor = convertor('create_role', (st) => { + const { + name, + superuser, + createDb, + createRole, + inherit, + canLogin, + replication, + bypassRls, + connLimit, + password, + validUntil, + } = st.role; + const withClause = + superuser || createDb || createRole || !inherit || canLogin || replication || bypassRls || validUntil + || (typeof connLimit === 'number' && connLimit !== -1) || password + ? ` WITH${superuser ? ' SUPERUSER' : ''}${createDb ? ' CREATEDB' : ''}${createRole ? ' CREATEROLE' : ''}${ + inherit ? '' : ' NOINHERIT' + }${canLogin ? ' LOGIN' : ''}${replication ? ' REPLICATION' : ''}${bypassRls ? ' BYPASSRLS' : ''}${ + typeof connLimit === 'number' && connLimit !== -1 ? ` CONNECTION LIMIT ${connLimit}` : '' + }${password ? ` PASSWORD '${escapeSingleQuotes(password)}'` : ''}${ + validUntil ? ` VALID UNTIL '${validUntil}'` : '' + }` + : ''; + + return `CREATE ROLE "${name}"${withClause};`; +}); + +const dropRoleConvertor = convertor('drop_role', (st) => { + return `DROP ROLE "${st.role.name}";`; +}); + +const renameRoleConvertor = convertor('rename_role', (st) => { + return `ALTER ROLE "${st.from.name}" RENAME TO "${st.to.name}";`; +}); + +const alterRoleConvertor = convertor('alter_role', ({ diff, role }) => { + const { + name, + } = role; + const st1 = diff.superuser + ? diff.superuser.to + ? ' SUPERUSER' + : ' NOSUPERUSER' + : ''; + const st2 = diff.createDb + ? diff.createDb.to + ? ' CREATEDB' + : ' NOCREATEDB' + : ''; + const st3 = diff.createRole + ? diff.createRole.to + ? ' CREATEROLE' + : ' NOCREATEROLE' + : ''; + const st4 = diff.inherit + ? diff.inherit.to + ? ' INHERIT' + : ' NOINHERIT' + : ''; + const st5 = diff.canLogin + ? diff.canLogin.to + ? ' LOGIN' + : ' NOLOGIN' + : ''; + const st6 = diff.replication + ? diff.replication.to + ? ' REPLICATION' + : ' NOREPLICATION' + : ''; + const st7 = diff.bypassRls + ? diff.bypassRls.to + ? ' BYPASSRLS' + : ' NOBYPASSRLS' + : ''; + const st8 = diff.connLimit + ? typeof diff.connLimit.to === 'number' + ? ` CONNECTION LIMIT ${diff.connLimit.to}` + : ' CONNECTION LIMIT -1' + : ''; + const st9 = diff.password + ? diff.password.to + ? ` PASSWORD '${escapeSingleQuotes(diff.password.to)}'` + : ' PASSWORD NULL' + : ''; + const st10 = diff.validUntil + ? diff.validUntil.to + ? ` VALID UNTIL '${diff.validUntil.to}'` + : ` VALID UNTIL 'infinity'` + : ''; + + return `ALTER ROLE "${name}" WITH${st1}${st2}${st3}${st4}${st5}${st6}${st7}${st8}${st9}${st10};`; + + // return `ALTER ROLE "${name}"${` WITH${diff.superuser ? ' SUPERUSER' : ' NOSUPERUSER'}${ + // createDb ? ' CREATEDB' : ' NOCREATEDB' + // }${createRole ? ' CREATEROLE' : ' NOCREATEROLE'}${inherit ? ' INHERIT' : ' NOINHERIT'}${ + // canLogin ? ' LOGIN' : ' NOLOGIN' + // }${replication ? ' REPLICATION' : ' NOREPLICATION'}${bypassRls ? ' BYPASSRLS' : ' NOBYPASSRLS'}${ + // typeof connLimit === 'number' ? ` CONNECTION LIMIT ${connLimit}` : ' CONNECTION LIMIT -1' + // }${password ? ` PASSWORD '${escapeSingleQuotes(password)}'` : ' PASSWORD NULL'}${ + // validUntil ? ` VALID UNTIL '${validUntil}'` : ` VALID UNTIL 'infinity'` + // }`};`; +}); + +const grantPrivilegeConvertor = convertor('grant_privilege', (st) => { + const { schema, table } = st.privilege; + const privilege = st.privilege; + + return `GRANT ${privilege.type} ON ${ + schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"` + } TO ${privilege.grantee}${privilege.isGrantable ? ' WITH GRANT OPTION' : ''} GRANTED BY ${privilege.grantor};`; +}); + +const revokePrivilegeConvertor = convertor('revoke_privilege', (st) => { + const { schema, table } = st.privilege; + const privilege = st.privilege; + + return `REVOKE ${privilege.type} ON ${ + schema !== 'public' ? `"${schema}"."${table}"` : `"${table}"` + } FROM ${privilege.grantee};`; +}); + +const regrantPrivilegeConvertor = convertor('regrant_privilege', (st) => { + const privilege = st.privilege; + const revokeStatement = revokePrivilegeConvertor.convert({ privilege }) as string; + const grantStatement = grantPrivilegeConvertor.convert({ privilege }) as string; + return [revokeStatement, grantStatement]; +}); + +const createPolicyConvertor = convertor('create_policy', (st) => { + const { schema, table } = st.policy; + const policy = st.policy; + + const tableNameWithSchema = schema !== 'public' + ? `"${schema}"."${table}"` + : `"${table}"`; + + const usingPart = policy.using ? ` USING (${policy.using})` : ''; + + const withCheckPart = policy.withCheck ? ` WITH CHECK (${policy.withCheck})` : ''; + + const policyToPart = policy.roles?.map((v) => + ['current_user', 'current_role', 'session_user', 'public'].includes(v) ? v : `"${v}"` + ).join(', '); + + return `CREATE POLICY "${policy.name}" ON ${tableNameWithSchema} AS ${policy.as?.toUpperCase()} FOR ${policy.for?.toUpperCase()} TO ${policyToPart}${usingPart}${withCheckPart};`; +}); + +const dropPolicyConvertor = convertor('drop_policy', (st) => { + const policy = st.policy; + + const tableNameWithSchema = policy.schema !== 'public' + ? `"${policy.schema}"."${policy.table}"` + : `"${policy.table}"`; + + return `DROP POLICY "${policy.name}" ON ${tableNameWithSchema};`; +}); + +const renamePolicyConvertor = convertor('rename_policy', (st) => { + const { from, to } = st; + + const tableNameWithSchema = to.schema !== 'public' + ? `"${to.schema}"."${to.table}"` + : `"${to.table}"`; + + return `ALTER POLICY "${from.name}" ON ${tableNameWithSchema} RENAME TO "${to.name}";`; +}); + +const alterPolicyConvertor = convertor('alter_policy', (st) => { + const { policy } = st; + + const tableNameWithSchema = policy.schema !== 'public' + ? `"${policy.schema}"."${policy.table}"` + : `"${policy.table}"`; + + const usingPart = policy.using + ? ` USING (${policy.using})` + : ''; + + const withCheckPart = policy.withCheck + ? ` WITH CHECK (${policy.withCheck})` + : ''; + + const toClause = policy.roles?.map((v) => + ['current_user', 'current_role', 'session_user', 'public'].includes(v) ? v : `"${v}"` + ).join(', '); + + return `ALTER POLICY "${policy.name}" ON ${tableNameWithSchema} TO ${toClause}${usingPart}${withCheckPart};`; +}); + +const recreatePolicy = convertor('recreate_policy', (st) => { + return [ + dropPolicyConvertor.convert({ policy: st.policy }) as string, + createPolicyConvertor.convert({ policy: st.policy }) as string, + ]; +}); + +const toggleRlsConvertor = convertor('alter_rls', (st) => { + const { schema, name, isRlsEnabled } = st; + + const tableNameWithSchema = schema !== 'public' + ? `"${schema}"."${name}"` + : `"${name}"`; + + return `ALTER TABLE ${tableNameWithSchema} ${isRlsEnabled ? 'ENABLE' : 'DISABLE'} ROW LEVEL SECURITY;`; +}); + +const convertors = [ + createSchemaConvertor, + dropSchemaConvertor, + renameSchemaConvertor, + createViewConvertor, + dropViewConvertor, + renameViewConvertor, + moveViewConvertor, + alterViewConvertor, + recreateViewConvertor, + createTableConvertor, + dropTableConvertor, + renameTableConvertor, + moveTableConvertor, + addColumnConvertor, + dropColumnConvertor, + renameColumnConvertor, + recreateColumnConvertor, + alterColumnConvertor, + createIndexConvertor, + dropIndexConvertor, + recreateIndexConvertor, + renameIndexConvertor, + addPrimaryKeyConvertor, + dropPrimaryKeyConvertor, + recreatePrimaryKeyConvertor, + createForeignKeyConvertor, + recreateFKConvertor, + dropForeignKeyConvertor, + addCheckConvertor, + dropCheckConvertor, + recreateCheckConvertor, + addUniqueConvertor, + dropUniqueConvertor, + renameConstraintConvertor, + createEnumConvertor, + dropEnumConvertor, + renameEnumConvertor, + moveEnumConvertor, + alterEnumConvertor, + recreateEnumConvertor, + createSequenceConvertor, + dropSequenceConvertor, + renameSequenceConvertor, + moveSequenceConvertor, + alterSequenceConvertor, + createRoleConvertor, + dropRoleConvertor, + renameRoleConvertor, + alterRoleConvertor, + grantPrivilegeConvertor, + revokePrivilegeConvertor, + regrantPrivilegeConvertor, + createPolicyConvertor, + dropPolicyConvertor, + renamePolicyConvertor, + alterPolicyConvertor, + recreatePolicy, + toggleRlsConvertor, +]; + +export function fromJson( + statements: JsonStatement[], +) { + const grouped = statements + .map((statement) => { + const filtered = convertors.filter((it) => { + return it.can(statement); + }); + + const convertor = filtered.length === 1 ? filtered[0] : undefined; + if (!convertor) throw new Error(`No convertor for: ${statement.type} statement`); + + const sqlStatements = convertor.convert(statement as any); + const statements = typeof sqlStatements === 'string' ? [sqlStatements] : sqlStatements; + return { jsonStatement: statement, sqlStatements: statements }; + }) + .filter((it) => it !== null); + + const result = { + sqlStatements: grouped.map((it) => it.sqlStatements).flat(), + groupedStatements: grouped, + }; + return result; +} + +// blog.yo1.dog/updating-enum-values-in-postgresql-the-safe-and-easy-way/ +// test case for enum altering +// oxlint-disable-next-line no-unused-expressions +` +create table users ( + id int, + name character varying(128) +); + +create type venum as enum('one', 'two', 'three'); +alter table users add column typed venum; + +insert into users(id, name, typed) values (1, 'name1', 'one'); +insert into users(id, name, typed) values (2, 'name2', 'two'); +insert into users(id, name, typed) values (3, 'name3', 'three'); + +alter type venum rename to __venum; +create type venum as enum ('one', 'two', 'three', 'four', 'five'); + +ALTER TABLE users ALTER COLUMN typed TYPE venum USING typed::text::venum; + +insert into users(id, name, typed) values (4, 'name4', 'four'); +insert into users(id, name, typed) values (5, 'name5', 'five'); + +drop type __venum; +`; diff --git a/drizzle-kit/src/dialects/postgres/ddl.ts b/drizzle-kit/src/dialects/postgres/ddl.ts new file mode 100644 index 0000000000..4c0c5bccf3 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/ddl.ts @@ -0,0 +1,595 @@ +import type { SchemaForPull } from 'src/cli/commands/pull-common'; +import { create } from '../dialect'; +import { defaultNameForPK, defaultNameForUnique } from './grammar'; + +export const createDDL = () => { + return create({ + schemas: {}, + tables: { schema: 'required', isRlsEnabled: 'boolean' }, + enums: { + schema: 'required', + values: 'string[]', + }, + columns: { + schema: 'required', + table: 'required', + type: 'string', + typeSchema: 'string?', + notNull: 'boolean', + dimensions: 'number', + default: 'string?', + generated: { + type: ['stored'], + as: 'string', + }, + identity: { + name: 'string', + type: ['always', 'byDefault'], + increment: 'string?', + minValue: 'string?', + maxValue: 'string?', + startWith: 'string?', + cache: 'number?', + cycle: 'boolean?', + }, + }, + indexes: { + schema: 'required', + table: 'required', + nameExplicit: 'boolean', + columns: [ + { + value: 'string', + isExpression: 'boolean', + asc: 'boolean', + nullsFirst: 'boolean', + opclass: { + name: 'string', + default: 'boolean', + }, + }, + ], + isUnique: 'boolean', + where: 'string?', + with: 'string', + method: 'string', + concurrently: 'boolean', + }, + fks: { + schema: 'required', + table: 'required', + nameExplicit: 'boolean', + columns: 'string[]', + schemaTo: 'string', + tableTo: 'string', + columnsTo: 'string[]', + onUpdate: ['NO ACTION', 'RESTRICT', 'SET NULL', 'CASCADE', 'SET DEFAULT', null], + onDelete: ['NO ACTION', 'RESTRICT', 'SET NULL', 'CASCADE', 'SET DEFAULT', null], + }, + pks: { + schema: 'required', + table: 'required', + columns: 'string[]', + nameExplicit: 'boolean', + }, + uniques: { + schema: 'required', + table: 'required', + nameExplicit: 'boolean', + columns: 'string[]', + nullsNotDistinct: 'boolean', + }, + checks: { + schema: 'required', + table: 'required', + value: 'string', + }, + sequences: { + schema: 'required', + incrementBy: 'string?', + minValue: 'string?', + maxValue: 'string?', + startWith: 'string?', + cacheSize: 'number?', + cycle: 'boolean?', + }, + roles: { + superuser: 'boolean?', + createDb: 'boolean?', + createRole: 'boolean?', + inherit: 'boolean?', + canLogin: 'boolean?', + replication: 'boolean?', + bypassRls: 'boolean?', + connLimit: 'number?', + password: 'string?', + validUntil: 'string?', + }, + privileges: { + grantor: 'string', + grantee: 'string', + schema: 'required', + table: 'required', + type: ['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE', 'REFERENCES', 'TRIGGER'], + isGrantable: 'boolean', + }, + policies: { + schema: 'required', + table: 'required', + as: ['PERMISSIVE', 'RESTRICTIVE'], + for: ['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE'], + roles: 'string[]', // TO { role_name | PUBLIC | CURRENT_ROLE | SESSION_USER } + using: 'string?', + withCheck: 'string?', + }, + views: { + schema: 'required', + definition: 'string?', + with: { + checkOption: ['local', 'cascaded', null], + securityBarrier: 'boolean?', + securityInvoker: 'boolean?', + fillfactor: 'number?', + toastTupleTarget: 'number?', + parallelWorkers: 'number?', + autovacuumEnabled: 'boolean?', + vacuumIndexCleanup: ['auto', 'off', 'on', null], + vacuumTruncate: 'boolean?', + autovacuumVacuumThreshold: 'number?', + autovacuumVacuumScaleFactor: 'number?', + autovacuumVacuumCostDelay: 'number?', + autovacuumVacuumCostLimit: 'number?', + autovacuumFreezeMinAge: 'number?', + autovacuumFreezeMaxAge: 'number?', + autovacuumFreezeTableAge: 'number?', + autovacuumMultixactFreezeMinAge: 'number?', + autovacuumMultixactFreezeMaxAge: 'number?', + autovacuumMultixactFreezeTableAge: 'number?', + logAutovacuumMinDuration: 'number?', + userCatalogTable: 'boolean?', + }, + withNoData: 'boolean?', + using: 'string?', + tablespace: 'string?', + materialized: 'boolean', + }, + }); +}; + +export type PostgresDDL = ReturnType; + +export type PostgresEntities = PostgresDDL['_']['types']; +export type PostgresEntity = PostgresEntities[keyof PostgresEntities]; + +export type DiffEntities = PostgresDDL['_']['diffs']['alter']; + +export type Schema = PostgresEntities['schemas']; +export type Enum = PostgresEntities['enums']; +export type Sequence = PostgresEntities['sequences']; +export type Column = PostgresEntities['columns']; +export type Identity = Column['identity']; +export type Role = PostgresEntities['roles']; +export type Privilege = PostgresEntities['privileges']; +export type Index = PostgresEntities['indexes']; +export type IndexColumn = Index['columns'][number]; +export type ForeignKey = PostgresEntities['fks']; +export type PrimaryKey = PostgresEntities['pks']; +export type UniqueConstraint = PostgresEntities['uniques']; +export type CheckConstraint = PostgresEntities['checks']; +export type Policy = PostgresEntities['policies']; +export type View = PostgresEntities['views']; + +export type ViewColumn = { + schema: string; + view: string; + type: string; + typeDimensions: number; + typeSchema: string | null; + notNull: boolean; + dimensions: number; + name: string; +}; + +export type Table = { + schema: string; + name: string; + columns: Column[]; + indexes: Index[]; + pk: PrimaryKey | null; + fks: ForeignKey[]; + uniques: UniqueConstraint[]; + checks: CheckConstraint[]; + policies: Policy[]; + isRlsEnabled: boolean; +}; + +export type InterimColumn = Omit & { + pk: boolean; + pkName: string | null; +} & { + unique: boolean; + uniqueName: string | null; + uniqueNullsNotDistinct: boolean; +}; + +export type InterimIndex = Index & { + forPK: boolean; + forUnique: boolean; +}; + +export interface InterimSchema { + schemas: Schema[]; + enums: Enum[]; + tables: PostgresEntities['tables'][]; + columns: InterimColumn[]; + indexes: InterimIndex[]; + pks: PrimaryKey[]; + fks: ForeignKey[]; + uniques: UniqueConstraint[]; + checks: CheckConstraint[]; + sequences: Sequence[]; + roles: Role[]; + privileges: Privilege[]; + policies: Policy[]; + views: View[]; + viewColumns: ViewColumn[]; +} + +export function postgresToRelationsPull(schema: PostgresDDL): SchemaForPull { + return Object.values(schema.tables.list()).map((table) => { + const rawTable = tableFromDDL(table, schema); + return { + schema: rawTable.schema, + foreignKeys: rawTable.fks, + uniques: [ + ...Object.values(rawTable.uniques).map((unq) => ({ + columns: unq.columns, + })), + ...Object.values(rawTable.indexes).map((idx) => ({ + columns: idx.columns.map((idxc) => { + if (!idxc.isExpression && idx.isUnique) { + return idxc.value; + } + }).filter((item) => item !== undefined), + })), + ], + }; + }); +} + +export const tableFromDDL = ( + table: PostgresEntities['tables'], + ddl: PostgresDDL, +): Table => { + const filter = { schema: table.schema, table: table.name } as const; + const columns = ddl.columns.list(filter); + const pk = ddl.pks.one(filter); + const fks = ddl.fks.list(filter); + const uniques = ddl.uniques.list(filter); + const checks = ddl.checks.list(filter); + const indexes = ddl.indexes.list(filter); + const policies = ddl.policies.list(filter); + return { + ...table, + columns, + pk, + fks, + uniques, + checks, + indexes, + policies, + }; +}; + +interface SchemaDuplicate { + type: 'schema_name_duplicate'; + name: string; +} + +interface EnumDuplicate { + type: 'enum_name_duplicate'; + name: string; + schema: string; +} + +interface TableDuplicate { + type: 'table_name_duplicate'; + name: string; + schema: string; +} +interface ColumnDuplicate { + type: 'column_name_duplicate'; + schema: string; + table: string; + name: string; +} + +interface ConstraintDuplicate { + type: 'constraint_name_duplicate'; + schema: string; + table: string; + name: string; +} +interface SequenceDuplicate { + type: 'sequence_name_duplicate'; + schema: string; + name: string; +} + +interface ViewDuplicate { + type: 'view_name_duplicate'; + schema: string; + name: string; +} + +interface IndexWithoutName { + type: 'index_no_name'; + schema: string; + table: string; + sql: string; +} + +interface IndexDuplicate { + type: 'index_duplicate'; + schema: string; + table: string; + name: string; +} + +interface PgVectorIndexNoOp { + type: 'pgvector_index_noop'; + table: string; + column: string; + indexName: string; + method: string; +} + +interface PolicyDuplicate { + type: 'policy_duplicate'; + schema: string; + table: string; + policy: string; +} + +interface RoleDuplicate { + type: 'role_duplicate'; + name: string; +} + +interface PrivilegeDuplicate { + type: 'privilege_duplicate'; + name: string; +} + +export type SchemaError = + | SchemaDuplicate + | EnumDuplicate + | TableDuplicate + | ColumnDuplicate + | ViewDuplicate + | ConstraintDuplicate + | SequenceDuplicate + | IndexWithoutName + | IndexDuplicate + | PgVectorIndexNoOp + | RoleDuplicate + | PolicyDuplicate + | PrivilegeDuplicate; + +interface PolicyNotLinked { + type: 'policy_not_linked'; + policy: string; +} +export type SchemaWarning = PolicyNotLinked; + +export const fromEntities = (entities: PostgresEntity[]) => { + const ddl = createDDL(); + for (const it of entities) { + ddl.entities.push(it); + } + + return ddl; +}; + +export const interimToDDL = ( + schema: InterimSchema, +): { ddl: PostgresDDL; errors: SchemaError[] } => { + const ddl = createDDL(); + const errors: SchemaError[] = []; + + for (const it of schema.schemas) { + const res = ddl.schemas.push(it); + if (res.status === 'CONFLICT') { + errors.push({ type: 'schema_name_duplicate', name: it.name }); + } + } + + for (const it of schema.enums) { + const res = ddl.enums.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'enum_name_duplicate', + schema: it.schema, + name: it.name, + }); + } + } + + for (const it of schema.tables) { + const res = ddl.tables.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'table_name_duplicate', + schema: it.schema, + name: it.name, + }); + } + } + + for (const column of schema.columns) { + const { pk: _1, pkName: _2, unique: _3, uniqueName: _4, uniqueNullsNotDistinct: _5, ...rest } = column; + + const res = ddl.columns.push(rest); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'column_name_duplicate', + schema: column.schema, + table: column.table, + name: column.name, + }); + } + } + + for (const it of schema.indexes) { + const { forPK: _1, forUnique: _2, ...rest } = it; + // TODO: check within schema, pk =[schema, table, name], we need only [schema, table] + const res = ddl.indexes.push(rest); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'index_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + } + + for (const it of schema.fks) { + const res = ddl.fks.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'constraint_name_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + } + + for (const it of schema.pks) { + const res = ddl.pks.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'constraint_name_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + } + + for (const column of schema.columns.filter((it) => it.pk)) { + const name = column.pkName !== null ? column.pkName : defaultNameForPK(column.table); + const exists = ddl.pks.one({ schema: column.schema, table: column.table }) !== null; + if (exists) continue; + + ddl.pks.push({ + schema: column.schema, + table: column.table, + name, + nameExplicit: column.pkName !== null, + columns: [column.name], + }); + } + + for (const it of schema.uniques) { + const res = ddl.uniques.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'constraint_name_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + } + + for (const column of schema.columns.filter((it) => it.unique)) { + const name = column.uniqueName !== null ? column.uniqueName : defaultNameForUnique(column.table, column.name); + const exists = ddl.uniques.one({ schema: column.schema, table: column.table, columns: [column.name] }) !== null; + if (exists) continue; + + ddl.uniques.push({ + schema: column.schema, + table: column.table, + name, + nameExplicit: column.uniqueName !== null, + nullsNotDistinct: column.uniqueNullsNotDistinct, + columns: [column.name], + }); + } + + for (const it of schema.checks) { + const res = ddl.checks.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'constraint_name_duplicate', + schema: it.schema, + table: it.table, + name: it.name, + }); + } + } + + for (const it of schema.sequences) { + const res = ddl.sequences.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'sequence_name_duplicate', + schema: it.schema, + name: it.name, + }); + } + } + + for (const it of schema.roles) { + const res = ddl.roles.push(it); + if (res.status === 'CONFLICT') { + errors.push({ type: 'role_duplicate', name: it.name }); + } + } + + for (const it of schema.privileges) { + const res = ddl.privileges.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'privilege_duplicate', + name: it.name, + }); + } + } + + for (const it of schema.policies) { + const res = ddl.policies.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'policy_duplicate', + schema: it.schema, + table: it.table, + policy: it.name, + }); + } + } + for (const it of schema.views) { + const res = ddl.views.push(it); + if (res.status === 'CONFLICT') { + errors.push({ + type: 'view_name_duplicate', + schema: it.schema, + name: it.name, + }); + } + } + + for (const it of ddl.entities.list()) { + let err = false; + + if (!ddl.entities.validate(it)) { + console.log('invalid entity:', it); + err = true; + } + if (err) throw new Error(); + } + + return { ddl, errors }; +}; diff --git a/drizzle-kit/src/dialects/postgres/diff.ts b/drizzle-kit/src/dialects/postgres/diff.ts new file mode 100644 index 0000000000..1e5d0ae17f --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/diff.ts @@ -0,0 +1,1277 @@ +import { parse, stringify } from 'src/utils/when-json-met-bigint'; +import { prepareMigrationRenames, trimChar } from '../../utils'; +import { mockResolver } from '../../utils/mocks'; +import { diffStringArrays } from '../../utils/sequence-matcher'; +import type { Resolver } from '../common'; +import { diff } from '../dialect'; +import { groupDiffs, preserveEntityNames } from '../utils'; +import { fromJson } from './convertor'; +import type { + CheckConstraint, + Column, + DiffEntities, + Enum, + ForeignKey, + Index, + IndexColumn, + Policy, + PostgresDDL, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, +} from './ddl'; +import { createDDL, tableFromDDL } from './ddl'; +import { defaults, defaultsCommutative } from './grammar'; +import type { JsonRecreateIndex, JsonStatement } from './statements'; +import { prepareStatement } from './statements'; + +export const ddlDiffDry = async (ddlFrom: PostgresDDL, ddlTo: PostgresDDL, mode: 'default' | 'push') => { + const mocks = new Set(); + return ddlDiff( + ddlFrom, + ddlTo, + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mockResolver(mocks), + mode, + ); +}; + +export const ddlDiff = async ( + ddl1: PostgresDDL, + ddl2: PostgresDDL, + schemasResolver: Resolver, + enumsResolver: Resolver, + sequencesResolver: Resolver, + policyResolver: Resolver, + roleResolver: Resolver, + privilegesResolver: Resolver, + tablesResolver: Resolver, + columnsResolver: Resolver, + viewsResolver: Resolver, + uniquesResolver: Resolver, + indexesResolver: Resolver, + checksResolver: Resolver, + pksResolver: Resolver, + fksResolver: Resolver, + mode: 'default' | 'push', +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + groupedStatements: { jsonStatement: JsonStatement; sqlStatements: string[] }[]; + renames: string[]; +}> => { + const ddl1Copy = createDDL(); + for (const entity of ddl1.entities.list()) { + ddl1Copy.entities.push(entity); + } + + const schemasDiff = diff(ddl1, ddl2, 'schemas'); + const { + created: createdSchemas, + deleted: deletedSchemas, + renamedOrMoved: renamedSchemas, + } = await schemasResolver({ + created: schemasDiff.filter((it) => it.$diffType === 'create'), + deleted: schemasDiff.filter((it) => it.$diffType === 'drop'), + }); + + for (const rename of renamedSchemas) { + ddl1.entities.update({ + set: { + schema: rename.to.name, + }, + where: { + schema: rename.from.name, + }, + }); + + ddl1.fks.update({ + set: { + schemaTo: rename.to.name, + }, + where: { + schemaTo: rename.from.name, + }, + }); + } + + const enumsDiff = diff(ddl1, ddl2, 'enums'); + const { + created: createdEnums, + deleted: deletedEnums, + renamedOrMoved: renamedOrMovedEnums, + } = await enumsResolver({ + created: enumsDiff.filter((it) => it.$diffType === 'create'), + deleted: enumsDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedEnums = renamedOrMovedEnums.filter((it) => it.from.name !== it.to.name); + const movedEnums = renamedOrMovedEnums.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedEnums) { + ddl1.enums.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + ddl1.columns.update({ + set: { + type: rename.to.name, + typeSchema: rename.to.schema, + }, + where: { + type: rename.from.name, + typeSchema: rename.from.schema, + }, + }); + } + for (const move of movedEnums) { + ddl1.enums.update({ + set: { + schema: move.to.schema, + }, + where: { + name: move.from.name, + schema: move.from.schema, + }, + }); + ddl1.columns.update({ + set: { + typeSchema: move.to.schema, + }, + where: { + type: move.from.name, + typeSchema: move.from.schema, + }, + }); + } + + const sequencesDiff = diff(ddl1, ddl2, 'sequences'); + const { + created: createdSequences, + deleted: deletedSequences, + renamedOrMoved: renamedOrMovedSequences, + } = await sequencesResolver({ + created: sequencesDiff.filter((it) => it.$diffType === 'create'), + deleted: sequencesDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedSequences = renamedOrMovedSequences.filter((it) => it.from.schema === it.to.schema); + const movedSequences = renamedOrMovedSequences.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedSequences) { + ddl1.sequences.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + for (const move of movedSequences) { + ddl1.sequences.update({ + set: { + schema: move.to.schema, + }, + where: { + name: move.from.name, + schema: move.from.schema, + }, + }); + } + + const rolesDiff = diff(ddl1, ddl2, 'roles'); + const { + created: createdRoles, + deleted: deletedRoles, + renamedOrMoved: renamedRoles, + } = await roleResolver({ + created: rolesDiff.filter((it) => it.$diffType === 'create'), + deleted: rolesDiff.filter((it) => it.$diffType === 'drop'), + }); + for (const rename of renamedRoles) { + ddl1.roles.update({ + set: { + name: rename.to.name, + }, + where: { + name: rename.from.name, + }, + }); + } + + const privilegesDiff = diff(ddl1, ddl2, 'privileges'); + const { + created: createdPrivileges, + deleted: deletedPrivileges, + } = await privilegesResolver({ + created: privilegesDiff.filter((it) => it.$diffType === 'create'), + deleted: privilegesDiff.filter((it) => it.$diffType === 'drop'), + }); + + const tablesDiff = diff(ddl1, ddl2, 'tables'); + const { + created: createdTables, + deleted: deletedTables, + renamedOrMoved: renamedOrMovedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.filter((it) => it.$diffType === 'create'), + deleted: tablesDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedTables = renamedOrMovedTables.filter((it) => it.from.name !== it.to.name); + const movedTables = renamedOrMovedTables.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedOrMovedTables) { + ddl1.tables.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + + ddl1.fks.update({ + set: { + schemaTo: rename.to.schema, + tableTo: rename.to.name, + }, + where: { + schemaTo: rename.from.schema, + tableTo: rename.from.name, + }, + }); + ddl2.fks.update({ + set: { + schemaTo: rename.to.schema, + tableTo: rename.to.name, + }, + where: { + schemaTo: rename.from.schema, + tableTo: rename.from.name, + }, + }); + + ddl1.fks.update({ + set: { + schema: rename.to.schema, + table: rename.to.name, + }, + where: { + schema: rename.from.schema, + table: rename.from.name, + }, + }); + + ddl1.entities.update({ + set: { + table: rename.to.name, + schema: rename.to.schema, + }, + where: { + table: rename.from.name, + schema: rename.from.schema, + }, + }); + + ddl2.entities.update({ + set: { + table: rename.to.name, + schema: rename.to.schema, + }, + where: { + table: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const columnsDiff = diff(ddl1, ddl2, 'columns'); + const columnRenames = [] as { from: Column; to: Column }[]; + const columnsToCreate = [] as Column[]; + const columnsToDelete = [] as Column[]; + + const groupedByTable = groupDiffs(columnsDiff); + + for (let it of groupedByTable) { + const { created, deleted, renamedOrMoved } = await columnsResolver({ + created: it.inserted, + deleted: it.deleted, + }); + + columnsToCreate.push(...created); + columnsToDelete.push(...deleted); + columnRenames.push(...renamedOrMoved); + } + + for (const rename of columnRenames) { + ddl1.columns.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + + // DDL2 updates are needed for Drizzle Studio + const update1 = { + set: { + columns: (it: IndexColumn) => { + if (!it.isExpression && it.value === rename.from.name) { + return { ...it, value: rename.to.name }; + } + return it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + } as const; + ddl1.indexes.update(update1); + ddl2.indexes.update(update1); + + const update2 = { + set: { + columns: (it: string) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + } as const; + ddl1.pks.update(update2); + ddl2.pks.update(update2); + + const update3 = { + set: { + columns: (it: string) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + } as const; + ddl1.fks.update(update3); + ddl2.fks.update(update3); + + const update4 = { + set: { + columnsTo: (it: string) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schemaTo: rename.from.schema, + tableTo: rename.from.table, + }, + } as const; + ddl1.fks.update(update4); + ddl2.fks.update(update4); + + const update5 = { + set: { + columns: (it: string) => { + return it === rename.from.name ? rename.to.name : it; + }, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + }, + } as const; + ddl1.uniques.update(update5); + ddl2.uniques.update(update5); + + const update6 = { + set: { + value: rename.to.name, + }, + where: { + schema: rename.from.schema, + table: rename.from.table, + value: rename.from.name, + }, + } as const; + ddl1.checks.update(update6); + ddl2.checks.update(update6); + } + + preserveEntityNames(ddl1.uniques, ddl2.uniques, mode); + preserveEntityNames(ddl1.fks, ddl2.fks, mode); + preserveEntityNames(ddl1.pks, ddl2.pks, mode); + preserveEntityNames(ddl1.indexes, ddl2.indexes, mode); + + const uniquesDiff = diff(ddl1, ddl2, 'uniques'); + const groupedUniquesDiff = groupDiffs(uniquesDiff); + + const uniqueRenames = [] as { from: UniqueConstraint; to: UniqueConstraint }[]; + const uniqueCreates = [] as UniqueConstraint[]; + const uniqueDeletes = [] as UniqueConstraint[]; + + for (const entry of groupedUniquesDiff) { + const { renamedOrMoved: renamed, created, deleted } = await uniquesResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + uniqueCreates.push(...created); + uniqueDeletes.push(...deleted); + uniqueRenames.push(...renamed); + } + + for (const rename of uniqueRenames) { + ddl1.uniques.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffChecks = diff(ddl1, ddl2, 'checks'); + const groupedChecksDiff = groupDiffs(diffChecks); + const checkRenames = [] as { from: CheckConstraint; to: CheckConstraint }[]; + const checkCreates = [] as CheckConstraint[]; + const checkDeletes = [] as CheckConstraint[]; + + for (const entry of groupedChecksDiff) { + const { renamedOrMoved, created, deleted } = await checksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + checkCreates.push(...created); + checkDeletes.push(...deleted); + checkRenames.push(...renamedOrMoved); + } + + for (const rename of checkRenames) { + ddl1.checks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffIndexes = diff(ddl1, ddl2, 'indexes'); + const groupedIndexesDiff = groupDiffs(diffIndexes); + const indexesRenames = [] as { from: Index; to: Index }[]; + const indexesCreates = [] as Index[]; + const indexesDeletes = [] as Index[]; + + for (const entry of groupedIndexesDiff) { + const { renamedOrMoved, created, deleted } = await indexesResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + indexesCreates.push(...created); + indexesDeletes.push(...deleted); + indexesRenames.push(...renamedOrMoved); + } + + for (const rename of indexesRenames) { + ddl1.indexes.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffPKs = diff(ddl1, ddl2, 'pks'); + const groupedPKsDiff = groupDiffs(diffPKs); + const pksRenames = [] as { from: PrimaryKey; to: PrimaryKey }[]; + const pksCreates = [] as PrimaryKey[]; + const pksDeletes = [] as PrimaryKey[]; + + for (const entry of groupedPKsDiff) { + const { renamedOrMoved, created, deleted } = await pksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + pksCreates.push(...created); + pksDeletes.push(...deleted); + pksRenames.push(...renamedOrMoved); + } + + for (const rename of pksRenames) { + ddl1.pks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const diffFKs = diff(ddl1, ddl2, 'fks'); + const groupedFKsDiff = groupDiffs(diffFKs); + const fksRenames = [] as { from: ForeignKey; to: ForeignKey }[]; + const fksCreates = [] as ForeignKey[]; + const fksDeletes = [] as ForeignKey[]; + + for (const entry of groupedFKsDiff) { + const { renamedOrMoved, created, deleted } = await fksResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + fksCreates.push(...created); + fksDeletes.push(...deleted); + fksRenames.push(...renamedOrMoved); + } + + for (const rename of fksRenames) { + ddl1.fks.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const policiesDiff = diff(ddl1, ddl2, 'policies'); + const policiesDiffGrouped = groupDiffs(policiesDiff); + + const policyRenames = [] as { from: Policy; to: Policy }[]; + const policyCreates = [] as Policy[]; + const policyDeletes = [] as Policy[]; + + for (const entry of policiesDiffGrouped) { + const { renamedOrMoved, created, deleted } = await policyResolver({ + created: entry.inserted, + deleted: entry.deleted, + }); + + policyCreates.push(...created); + policyDeletes.push(...deleted); + policyRenames.push(...renamedOrMoved); + } + + for (const rename of policyRenames) { + ddl1.policies.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + + const viewsDiff = diff(ddl1, ddl2, 'views'); + + const { + created: createdViews, + deleted: deletedViews, + renamedOrMoved: renamedOrMovedViews, + } = await viewsResolver({ + created: viewsDiff.filter((it) => it.$diffType === 'create'), + deleted: viewsDiff.filter((it) => it.$diffType === 'drop'), + }); + + const renamedViews = renamedOrMovedViews.filter((it) => it.from.schema === it.to.schema); + const movedViews = renamedOrMovedViews.filter((it) => it.from.schema !== it.to.schema); + + for (const rename of renamedViews) { + ddl1.views.update({ + set: { + name: rename.to.name, + schema: rename.to.schema, + }, + where: { + name: rename.from.name, + schema: rename.from.schema, + }, + }); + } + for (const move of movedViews) { + ddl1.views.update({ + set: { + schema: move.to.schema, + }, + where: { + name: move.from.name, + schema: move.from.schema, + }, + }); + } + + const alters = diff.alters(ddl1, ddl2); + + const jsonStatements: JsonStatement[] = []; + + /* + with new DDL when table gets created with constraints, etc. + or existing table with constraints and indexes gets deleted, + those entites are treated by diff as newly created or deleted + + we filter them out, because we either create them on table creation + or they get automatically deleted when table is deleted + */ + const tablesFilter = (type: 'deleted' | 'created') => { + return (it: { schema: string; table: string }) => { + if (type === 'created') { + return !createdTables.some((t) => t.schema === it.schema && t.name === it.table); + } else { + return !deletedTables.some((t) => t.schema === it.schema && t.name === it.table); + } + }; + }; + + const jsonCreateIndexes = indexesCreates.map((index) => prepareStatement('create_index', { index })); + const jsonDropIndexes = indexesDeletes.filter(tablesFilter('deleted')).map((index) => + prepareStatement('drop_index', { index }) + ); + + const jsonRenameIndexes = indexesRenames.map((r) => { + return prepareStatement('rename_index', { schema: r.to.schema, from: r.from.name, to: r.to.name }); + }); + + const indexesAlters = alters.filter((it): it is DiffEntities['indexes'] => { + if (it.entityType !== 'indexes') return false; + + delete it.concurrently; + + return ddl2.indexes.hasDiff(it); + }); + + const jsonRecreateIndex: JsonRecreateIndex[] = []; + for (const idx of indexesAlters) { + const forWhere = !!idx.where && (idx.where.from !== null && idx.where.to !== null ? mode !== 'push' : true); + const forColumns = !!idx.columns && (idx.columns.from.length === idx.columns.to.length ? mode !== 'push' : true); + + if (idx.isUnique || idx.concurrently || idx.method || idx.with || forColumns || forWhere) { + const index = ddl2.indexes.one({ schema: idx.schema, table: idx.table, name: idx.name })!; + jsonRecreateIndex.push(prepareStatement('recreate_index', { index, diff: idx })); + } + } + + const jsonDropTables = deletedTables.map((it) => { + const oldSchema = renamedSchemas.find((x) => x.to.name === it.schema); + const key = oldSchema ? `"${oldSchema.from.name}"."${it.name}"` : `"${it.schema}"."${it.name}"`; + return prepareStatement('drop_table', { table: tableFromDDL(it, ddl2), key }); + }); + const jsonRenameTables = renamedTables.map((it) => + prepareStatement('rename_table', { + schema: it.from.schema, + from: it.from.name, + to: it.to.name, + }) + ); + + const jsonRenameColumnsStatements = columnRenames.map((it) => prepareStatement('rename_column', it)); + const jsonDropColumnsStatemets = columnsToDelete.filter(tablesFilter('deleted')).map((it) => + prepareStatement('drop_column', { column: it }) + ); + const jsonAddColumnsStatemets = columnsToCreate.filter(tablesFilter('created')).map((it) => + prepareStatement('add_column', { + column: it, + isPK: ddl2.pks.one({ schema: it.schema, table: it.table, columns: [it.name] }) !== null, + isCompositePK: ddl2.pks.one({ schema: it.schema, table: it.table, columns: { CONTAINS: it.name } }) !== null, + }) + ); + + const columnAlters = alters.filter((it) => it.entityType === 'columns').filter((it) => { + if ( + it.default + && ((it.$left.type === 'json' && it.$right.type === 'json') + || (it.$left.type === 'jsonb' && it.$right.type === 'jsonb')) + ) { + if (it.default.from !== null && it.default.to !== null) { + const left = stringify(parse(trimChar(it.default.from, "'"))); + const right = stringify(parse(trimChar(it.default.to, "'"))); + if (left === right) { + delete it.default; + } + } + } + + if (!it.type && it.default && defaultsCommutative(it.default, it.$right.type, it.$right.dimensions)) { + delete it.default; + } + + // commutative types + if (it.type) { + if ( + it.type.from === it.type.to.replace('numeric', 'decimal') + || it.type.to === it.type.from.replace('numeric', 'decimal') + ) { + delete it.type; + } + } + + // geometry + if (it.type && it.$right.type.startsWith('geometry(point') && it.$left.type.startsWith('geometry(point')) { + // geometry(point,0) + const leftSrid = it.$left.type.split(',')[1]?.replace(')', ''); + const rightSrid = it.$right.type.split(',')[1]?.replace(')', ''); + + // undefined or 0 are defaults srids + if (typeof leftSrid === 'undefined' && rightSrid === '0') delete it.type; + if (typeof rightSrid === 'undefined' && leftSrid === '0') delete it.type; + } + + // numeric(19) === numeric(19,0) + if (it.type && it.type.from.replace(',0)', ')') === it.type.to) { + delete it.type; + } + + return ddl2.columns.hasDiff(it); + }); + + const alteredUniques = alters.filter((it) => it.entityType === 'uniques').filter((it) => { + if (it.nameExplicit) { + delete it.nameExplicit; + } + + return ddl2.uniques.hasDiff(it); + }); + + const jsonAlteredUniqueConstraints = alteredUniques.map((it) => prepareStatement('alter_unique', { diff: it })); + + const jsonAddedUniqueConstraints = uniqueCreates.filter(tablesFilter('created')).map((it) => + prepareStatement('add_unique', { unique: it }) + ); + + const jsonDropUniqueConstraints = uniqueDeletes.filter(tablesFilter('deleted')).map((it) => + prepareStatement('drop_unique', { unique: it }) + ); + const jsonRenamedUniqueConstraints = uniqueRenames.map((it) => + prepareStatement('rename_constraint', { + schema: it.to.schema, + table: it.to.table, + from: it.from.name, + to: it.to.name, + }) + ); + + const jsonAddPrimaryKeys = pksCreates.filter(tablesFilter('created')).map((it) => + prepareStatement('add_pk', { pk: it }) + ); + + const jsonDropPrimaryKeys = pksDeletes.filter(tablesFilter('deleted')).map((it) => + prepareStatement('drop_pk', { pk: it }) + ); + + const jsonRenamePrimaryKey = pksRenames.map((it) => { + return prepareStatement('rename_constraint', { + schema: it.to.schema, + table: it.to.table, + from: it.from.name, + to: it.to.name, + }); + }); + + const jsonSetTableSchemas = movedTables.map((it) => + prepareStatement('move_table', { + name: it.to.name, // rename of table comes first + from: it.from.schema, + to: it.to.schema, + }) + ); + + const jsonCreatedCheckConstraints = checkCreates.filter(tablesFilter('created')).map((it) => + prepareStatement('add_check', { check: it }) + ); + const jsonDropCheckConstraints = checkDeletes.filter(tablesFilter('deleted')).map((it) => + prepareStatement('drop_check', { check: it }) + ); + + // group by tables? + const alteredPKs = alters.filter((it) => it.entityType === 'pks').filter((it) => { + return !!it.columns; // ignore explicit name change + }); + + const alteredChecks = alters.filter((it) => it.entityType === 'checks'); + const jsonAlteredPKs = alteredPKs.map((it) => prepareStatement('alter_pk', { diff: it, pk: it.$right })); + + const jsonRecreateFKs = alters.filter((it) => it.entityType === 'fks').filter((x) => { + if ( + x.nameExplicit + && ((mode === 'push' && x.nameExplicit.from && !x.nameExplicit.to) + || x.nameExplicit.to && !x.nameExplicit.from) + ) { + delete x.nameExplicit; + } + + return ddl2.fks.hasDiff(x); + }).map((it) => prepareStatement('recreate_fk', { fk: it.$right, diff: it })); + + const jsonCreateFKs = fksCreates.map((it) => prepareStatement('create_fk', { fk: it })); + + const jsonDropFKs = fksDeletes.filter((fk) => { + const fromDeletedTable = deletedTables.some((x) => x.schema === fk.schema && x.name === fk.table); + const toDeletedTable = fk.table !== fk.tableTo + && deletedTables.some((x) => x.schema === fk.schemaTo && x.name === fk.tableTo); + if (fromDeletedTable && !toDeletedTable) return false; + return true; + }).map((it) => prepareStatement('drop_fk', { fk: it })); + + const jsonRenameReferences = fksRenames.map((it) => + prepareStatement('rename_constraint', { + schema: it.to.schema, + table: it.to.table, + from: it.from.name, + to: it.to.name, + }) + ); + + const jsonAlterCheckConstraints = alteredChecks.filter((it) => it.value && mode !== 'push').map((it) => + prepareStatement('alter_check', { diff: it }) + ); + const jsonCreatePoliciesStatements = policyCreates.map((it) => prepareStatement('create_policy', { policy: it })); + const jsonDropPoliciesStatements = policyDeletes.map((it) => prepareStatement('drop_policy', { policy: it })); + const jsonRenamePoliciesStatements = policyRenames.map((it) => prepareStatement('rename_policy', it)); + + const alteredPolicies = alters.filter((it) => it.entityType === 'policies').filter((it) => { + if (it.withCheck && it.withCheck.from && it.withCheck.to) { + if (it.withCheck.from === `(${it.withCheck.to})` || it.withCheck.to === `(${it.withCheck.from})`) { + delete it.withCheck; + } + } + return ddl1.policies.hasDiff(it); + }); + + // if I drop policy/ies, I should check if table only had this policy/ies and turn off + // for non explicit rls = + + // using/withcheck in policy is a SQL expression which can be formatted by database in a different way, + // thus triggering recreations/alternations on push + const jsonAlterOrRecreatePoliciesStatements = alteredPolicies.filter((it) => { + return it.as || it.for || it.roles || !((it.using || it.withCheck) && mode === 'push'); + }).map( + (it) => { + const to = ddl2.policies.one({ + schema: it.schema, + table: it.table, + name: it.name, + })!; + if (it.for || it.as) { + return prepareStatement('recreate_policy', { + diff: it, + policy: to, + }); + } else { + return prepareStatement('alter_policy', { + diff: it, + policy: to, + }); + } + }, + ); + + // explicit rls alters + const rlsAlters = alters.filter((it) => it.entityType === 'tables').filter((it) => it.isRlsEnabled); + + const jsonAlterRlsStatements = rlsAlters.map((it) => + prepareStatement('alter_rls', { + schema: it.schema, + name: it.name, + isRlsEnabled: it.isRlsEnabled?.to || false, + }) + ); + + for (const it of policyDeletes) { + if (rlsAlters.some((alter) => alter.schema === it.schema && alter.name === it.table)) continue; // skip for explicit + + const had = ddl1.policies.list({ schema: it.schema, table: it.table }).length; + const has = ddl2.policies.list({ schema: it.schema, table: it.table }).length; + + const prevTable = ddl1.tables.one({ schema: it.schema, name: it.table }); + const table = ddl2.tables.one({ schema: it.schema, name: it.table }); + + // I don't want dedup here, not a valuable optimisation + if ( + table !== null // not external table + && (had > 0 && has === 0 && prevTable && prevTable.isRlsEnabled === false) + && !jsonAlterRlsStatements.some((st) => st.schema === it.schema && st.name === it.table) + ) { + jsonAlterRlsStatements.push(prepareStatement('alter_rls', { + schema: it.schema, + name: it.table, + isRlsEnabled: false, + })); + } + } + + for (const it of policyCreates) { + if (rlsAlters.some((alter) => alter.schema === it.schema && alter.name === it.table)) continue; // skip for explicit + if (createdTables.some((t) => t.schema === it.schema && t.name === it.table)) continue; // skip for created tables + if (jsonAlterRlsStatements.some((st) => st.schema === it.schema && st.name === it.table)) continue; // skip for existing rls toggles + + const had = ddl1.policies.list({ schema: it.schema, table: it.table }).length; + const has = ddl2.policies.list({ schema: it.schema, table: it.table }).length; + + const table = ddl2.tables.one({ schema: it.schema, name: it.table }); + + if ( + table !== null // not external table + && (had === 0 && has > 0 && !table.isRlsEnabled) + ) { + jsonAlterRlsStatements.push(prepareStatement('alter_rls', { + schema: it.schema, + name: it.table, + isRlsEnabled: true, + })); + } + } + + const jsonCreateEnums = createdEnums.map((it) => prepareStatement('create_enum', { enum: it })); + const jsonDropEnums = deletedEnums.map((it) => prepareStatement('drop_enum', { enum: it })); + const jsonMoveEnums = movedEnums.map((it) => prepareStatement('move_enum', it)); + const jsonRenameEnums = renamedEnums.map((it) => + prepareStatement('rename_enum', { + schema: it.to.schema, + from: it.from.name, + to: it.to.name, + }) + ); + const enumsAlters = alters.filter((it) => it.entityType === 'enums'); + + const recreateEnums = [] as Extract[]; + const jsonAlterEnums = [] as Extract[]; + + for (const alter of enumsAlters) { + const values = alter.values!; + const res = diffStringArrays(values.from, values.to); + const e = { ...alter, values: values.to }; + + if (res.some((it) => it.type === 'removed')) { + // recreate enum + const columns = ddl1.columns.list({ typeSchema: alter.schema, type: alter.name }) + .map((it) => { + const c2 = ddl2.columns.one({ schema: it.schema, table: it.table, name: it.name }); + if (c2 === null) return null; + it.default = c2.default; + return it; + }) + .filter((x) => x !== null); + recreateEnums.push(prepareStatement('recreate_enum', { to: e, columns, from: alter.$left })); + } else { + jsonAlterEnums.push(prepareStatement('alter_enum', { diff: res, to: e, from: alter.$left })); + } + } + + const jsonAlterColumns = columnAlters.filter((it) => !(it.generated && it.generated.to !== null)) + .filter((it) => { + // if column is of type enum we're about to recreate - we will reset default anyway + if ( + it.default + && recreateEnums.some((x) => + x.columns.some((c) => it.schema === c.schema && it.table === c.table && it.name === c.name) + ) + ) { + delete it.default; + } + + if (it.notNull && it.notNull.to && (it.$right.generated || it.$right.identity)) { + delete it.notNull; + } + + const pkIn2 = ddl2.pks.one({ schema: it.schema, table: it.table, columns: { CONTAINS: it.name } }); + if (it.notNull && pkIn2) { + delete it.notNull; + } + + return ddl2.columns.hasDiff(it); + }) + .map((it) => { + const column = it.$right; + return prepareStatement('alter_column', { + diff: it, + isEnum: ddl2.enums.one({ schema: column.typeSchema ?? 'public', name: column.type }) !== null, + wasEnum: (it.type && ddl1.enums.one({ schema: column.typeSchema ?? 'public', name: it.type.from }) !== null) + ?? false, + to: column, + }); + }); + + const createSequences = createdSequences.map((it) => prepareStatement('create_sequence', { sequence: it })); + const dropSequences = deletedSequences.map((it) => prepareStatement('drop_sequence', { sequence: it })); + const moveSequences = movedSequences.map((it) => prepareStatement('move_sequence', it)); + const renameSequences = renamedSequences.map((it) => prepareStatement('rename_sequence', it)); + const sequencesAlter = alters.filter((it) => it.entityType === 'sequences'); + const jsonAlterSequences = sequencesAlter.map((it) => + prepareStatement('alter_sequence', { diff: it, sequence: it.$right }) + ); + + const jsonCreateRoles = createdRoles.map((it) => prepareStatement('create_role', { role: it })); + const jsonDropRoles = deletedRoles.map((it) => prepareStatement('drop_role', { role: it })); + const jsonRenameRoles = renamedRoles.map((it) => prepareStatement('rename_role', it)); + const jsonAlterRoles = alters.filter((it) => it.entityType === 'roles').map((it) => + prepareStatement('alter_role', { diff: it, role: it.$right }) + ); + + const jsonGrantPrivileges = createdPrivileges.map((it) => prepareStatement('grant_privilege', { privilege: it })); + const jsonRevokePrivileges = deletedPrivileges.map((it) => prepareStatement('revoke_privilege', { privilege: it })); + const jsonAlterPrivileges = alters.filter((it) => it.entityType === 'privileges').map((it) => + prepareStatement('regrant_privilege', { privilege: it.$right, diff: it }) + ); + + const createSchemas = createdSchemas.map((it) => prepareStatement('create_schema', it)); + const dropSchemas = deletedSchemas.map((it) => prepareStatement('drop_schema', it)); + const renameSchemas = renamedSchemas.map((it) => prepareStatement('rename_schema', it)); + + const createTables = createdTables.map((it) => prepareStatement('create_table', { table: tableFromDDL(it, ddl2) })); + + const createViews = createdViews.map((it) => prepareStatement('create_view', { view: it })); + + const jsonDropViews = deletedViews.map((it) => prepareStatement('drop_view', { view: it })); + + const jsonRenameViews = renamedViews.map((it) => prepareStatement('rename_view', it)); + + const jsonMoveViews = movedViews.map((it) => + prepareStatement('move_view', { fromSchema: it.from.schema, toSchema: it.to.schema, view: it.to }) + ); + + const filteredViewAlters = alters.filter((it): it is DiffEntities['views'] => { + if (it.entityType !== 'views') return false; + + if (it.definition && mode === 'push') { + delete it.definition; + } + + // default access method + // from db -> heap, + // drizzle schema -> null + if (mode === 'push' && it.using && !it.using.to && it.using.from === defaults.accessMethod) { + delete it.using; + } + + if (mode === 'push' && it.tablespace && it.tablespace.from === null && it.tablespace.to === defaults.tablespace) { + delete it.tablespace; + } + + return ddl2.views.hasDiff(it); + }); + + const viewsAlters = filteredViewAlters.map((it) => ({ diff: it, view: it.$right })); + + const jsonAlterViews = viewsAlters.filter((it) => !it.diff.definition).map((it) => { + return prepareStatement('alter_view', { + diff: it.diff, + view: it.view, + }); + }); + + const jsonRecreateViews = viewsAlters.filter((it) => it.diff.definition).map((entry) => { + const it = entry.view; + const schemaRename = renamedSchemas.find((r) => r.to.name === it.schema); + const schema = schemaRename ? schemaRename.from.name : it.schema; + const viewRename = renamedViews.find((r) => r.to.schema === it.schema && r.to.name === it.name); + const name = viewRename ? viewRename.from.name : it.name; + const from = ddl1Copy.views.one({ schema, name }); + + if (!from) { + throw new Error(` + Missing view in original ddl: + ${it.schema}:${it.name} + ${schema}:${name} + `); + } + return prepareStatement('recreate_view', { from, to: it }); + }); + + const columnsToRecreate = columnAlters.filter((it) => it.generated && it.generated.to !== null).filter((it) => { + // if push and definition changed + return !(it.generated?.to && it.generated.from && mode === 'push'); + }); + + const jsonRecreateColumns = columnsToRecreate.map((it) => { + const indexes = ddl2.indexes.list({ table: it.table, schema: it.schema }).filter((index) => + index.columns.some((column) => trimChar(column.value, '`') === it.name) + ); + for (const index of indexes) { + jsonCreateIndexes.push({ type: 'create_index', index }); + } + + const uniques = ddl2.uniques.list({ table: it.table, schema: it.schema, columns: { CONTAINS: it.name } }); + for (const unique of uniques) { + jsonAddedUniqueConstraints.push({ type: 'add_unique', unique }); + } + + // Not sure if anyone tries to add fk on generated column or from it, but still... + const fksFrom = ddl2.fks.list({ table: it.table, schema: it.schema, columns: { CONTAINS: it.name } }); + const fksTo = ddl2.fks.list({ tableTo: it.table, schemaTo: it.schema, columnsTo: { CONTAINS: it.name } }); + for (const fkFrom of fksFrom) { + jsonDropFKs.push({ type: 'drop_fk', fk: fkFrom }); + } + for (const fkTo of fksTo) { + jsonDropFKs.push({ type: 'drop_fk', fk: fkTo }); + jsonCreateFKs.push({ type: 'create_fk', fk: fkTo }); + } + + return prepareStatement('recreate_column', { + diff: it, + isPK: ddl2.pks.one({ schema: it.schema, table: it.table, columns: [it.name] }) !== null, + }); + }); + + jsonStatements.push(...createSchemas); + jsonStatements.push(...renameSchemas); + jsonStatements.push(...jsonCreateEnums); + jsonStatements.push(...jsonMoveEnums); + jsonStatements.push(...jsonRenameEnums); + jsonStatements.push(...jsonAlterEnums); + + jsonStatements.push(...createSequences); + jsonStatements.push(...moveSequences); + jsonStatements.push(...renameSequences); + jsonStatements.push(...jsonAlterSequences); + + jsonStatements.push(...jsonRenameRoles); + jsonStatements.push(...jsonDropRoles); + jsonStatements.push(...jsonCreateRoles); + jsonStatements.push(...jsonAlterRoles); + + jsonStatements.push(...jsonRevokePrivileges); + jsonStatements.push(...jsonGrantPrivileges); + jsonStatements.push(...jsonAlterPrivileges); + + jsonStatements.push(...createTables); + + jsonStatements.push(...jsonDropViews); + jsonStatements.push(...jsonRenameViews); + jsonStatements.push(...jsonMoveViews); + jsonStatements.push(...jsonRecreateViews); + jsonStatements.push(...jsonAlterViews); + + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonDropPoliciesStatements); // before drop tables + jsonStatements.push(...jsonDropFKs); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonAlterRlsStatements); + jsonStatements.push(...jsonSetTableSchemas); + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...jsonDropUniqueConstraints); + jsonStatements.push(...jsonDropCheckConstraints); + + // TODO: ? will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonRenameIndexes); + jsonStatements.push(...jsonDropIndexes); + jsonStatements.push(...jsonDropPrimaryKeys); + + jsonStatements.push(...jsonAddPrimaryKeys); + jsonStatements.push(...jsonRenamePrimaryKey); + jsonStatements.push(...jsonRenameReferences); + jsonStatements.push(...jsonAddColumnsStatemets); + jsonStatements.push(...recreateEnums); + jsonStatements.push(...jsonRecreateColumns); + jsonStatements.push(...jsonAlterColumns); + jsonStatements.push(...jsonRecreateIndex); + + jsonStatements.push(...jsonRenamedUniqueConstraints); + jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...jsonAlteredUniqueConstraints); + jsonStatements.push(...jsonCreateIndexes); // above fks for uniqueness constraint to come first + + jsonStatements.push(...jsonCreateFKs); + jsonStatements.push(...jsonRecreateFKs); + + jsonStatements.push(...jsonDropColumnsStatemets); + jsonStatements.push(...jsonAlteredPKs); + + jsonStatements.push(...jsonCreatedCheckConstraints); + + jsonStatements.push(...jsonAlterCheckConstraints); + + jsonStatements.push(...createViews); + + jsonStatements.push(...jsonRenamePoliciesStatements); + jsonStatements.push(...jsonCreatePoliciesStatements); + jsonStatements.push(...jsonAlterOrRecreatePoliciesStatements); + + jsonStatements.push(...jsonDropEnums); + jsonStatements.push(...dropSequences); + jsonStatements.push(...dropSchemas); + + const { groupedStatements, sqlStatements } = fromJson(jsonStatements); + + const renames = prepareMigrationRenames([ + ...renameSchemas, + ...renamedEnums, + ...renamedOrMovedTables, + ...columnRenames, + ...uniqueRenames, + ...checkRenames, + ...indexesRenames, + ...pksRenames, + ...fksRenames, + ...policyRenames, + ...renamedOrMovedViews, + ...renamedRoles, + ...renamedOrMovedSequences, + ]); + + return { + statements: jsonStatements, + sqlStatements, + groupedStatements: groupedStatements, + renames: renames, + }; +}; diff --git a/drizzle-kit/src/dialects/postgres/drizzle.ts b/drizzle-kit/src/dialects/postgres/drizzle.ts new file mode 100644 index 0000000000..6decf5db61 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/drizzle.ts @@ -0,0 +1,882 @@ +import { getTableName, is, SQL } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { AnyGelColumn, GelDialect, GelPolicy } from 'drizzle-orm/gel-core'; +import type { + AnyPgColumn, + AnyPgTable, + PgEnum, + PgMaterializedView, + PgMaterializedViewWithConfig, + PgSequence, + UpdateDeleteAction, + ViewWithConfig, +} from 'drizzle-orm/pg-core'; +import { + getMaterializedViewConfig, + getTableConfig, + getViewConfig, + IndexedColumn, + isPgEnum, + isPgMaterializedView, + isPgSequence, + isPgView, + PgArray, + PgDialect, + PgEnumColumn, + PgGeometry, + PgGeometryObject, + PgLineABC, + PgLineTuple, + PgPointObject, + PgPointTuple, + PgPolicy, + PgRole, + PgSchema, + PgTable, + PgView, + uniqueKeyName, +} from 'drizzle-orm/pg-core'; +import type { CasingType } from 'src/cli/validations/common'; +import { safeRegister } from 'src/utils/utils-node'; +import { assertUnreachable } from '../../utils'; +import { getColumnCasing } from '../drizzle'; +import type { EntityFilter } from '../pull-utils'; +import { getOrNull } from '../utils'; +import type { + CheckConstraint, + Column, + Enum, + ForeignKey, + Index, + InterimColumn, + InterimIndex, + InterimSchema, + Policy, + PostgresEntities, + PrimaryKey, + Schema, + SchemaError, + SchemaWarning, + UniqueConstraint, +} from './ddl'; +import { + defaultNameForFK, + defaultNameForPK, + GeometryPoint, + indexName, + Line, + maxRangeForIdentityBasedOn, + minRangeForIdentityBasedOn, + Point, + splitSqlType, + stringFromIdentityProperty, + trimDefaultValueSuffix, + typeFor, +} from './grammar'; + +export const policyFrom = (policy: PgPolicy | GelPolicy, dialect: PgDialect | GelDialect) => { + const mappedTo = !policy.to + ? ['public'] + : typeof policy.to === 'string' + ? [policy.to] + : is(policy.to, PgRole) + ? [(policy.to as PgRole).name] + : Array.isArray(policy.to) + ? policy.to.map((it) => { + if (typeof it === 'string') { + return it; + } else if (is(it, PgRole)) { + return it.name; + } + return '' as never; // unreachable unless error in types + }) + : ('' as never); // unreachable unless error in types + + const policyAs = (policy.as?.toUpperCase() as Policy['as']) ?? 'PERMISSIVE'; + const policyFor = (policy.for?.toUpperCase() as Policy['for']) ?? 'ALL'; + const policyTo = mappedTo.sort(); + const policyUsing = is(policy.using, SQL) + ? dialect.sqlToQuery(policy.using).sql + : null; + const withCheck = is(policy.withCheck, SQL) + ? dialect.sqlToQuery(policy.withCheck).sql + : null; + + return { + name: policy.name, + as: policyAs, + for: policyFor, + roles: policyTo, + using: policyUsing, + withCheck, + }; +}; + +export const unwrapColumn = (column: AnyPgColumn | AnyGelColumn) => { + const { baseColumn, dimensions } = is(column, PgArray) + ? unwrapArray(column) + : { baseColumn: column, dimensions: 0 }; + + const isEnum = is(baseColumn, PgEnumColumn); + const typeSchema = isEnum + ? baseColumn.enum.schema || 'public' + : null; + + let sqlBaseType = baseColumn.getSQLType(); + // numeric(6, 2) -> numeric(6,2) + sqlBaseType = sqlBaseType.replace(', ', ','); + + /* legacy, for not to patch orm and don't up snapshot */ + sqlBaseType = sqlBaseType.startsWith('timestamp (') ? sqlBaseType.replace('timestamp (', 'timestamp(') : sqlBaseType; + + const { type, options } = splitSqlType(sqlBaseType); + const sqlType = dimensions > 0 ? `${sqlBaseType}${'[]'.repeat(dimensions)}` : sqlBaseType; + return { + baseColumn, + dimensions, + isEnum, + typeSchema, + sqlType, + baseType: type, + options, + }; +}; + +export const unwrapArray = ( + column: PgArray, + dimensions: number = 1, +): { baseColumn: AnyPgColumn; dimensions: number } => { + const baseColumn = column.baseColumn; + if (is(baseColumn, PgArray)) return unwrapArray(baseColumn, dimensions + 1); + + return { baseColumn, dimensions }; +}; + +export const transformOnUpdateDelete = (on: UpdateDeleteAction): ForeignKey['onUpdate'] => { + if (on === 'no action') return 'NO ACTION'; + if (on === 'cascade') return 'CASCADE'; + if (on === 'restrict') return 'RESTRICT'; + if (on === 'set default') return 'SET DEFAULT'; + if (on === 'set null') return 'SET NULL'; + + assertUnreachable(on); +}; + +type JsonValue = string | number | boolean | null | JsonObject | JsonArray; +type JsonObject = { [key: string]: JsonValue }; +type JsonArray = JsonValue[]; + +export const defaultFromColumn = ( + base: AnyPgColumn | AnyGelColumn, + def: unknown, + dimensions: number, + dialect: PgDialect | GelDialect, +): Column['default'] => { + if (typeof def === 'undefined') return null; + + if (is(def, SQL)) { + let sql = dialect.sqlToQuery(def).sql; + sql = trimDefaultValueSuffix(sql); + + // TODO: check if needed + // const isText = /^'(?:[^']|'')*'$/.test(sql); + // sql = isText ? trimChar(sql, "'") : sql; + + return sql; + } + + const { baseColumn, isEnum } = unwrapColumn(base); + const grammarType = typeFor(base.getSQLType(), isEnum); + if (is(baseColumn, PgPointTuple) || is(baseColumn, PgPointObject)) { + return dimensions > 0 && Array.isArray(def) + ? def.flat(5).length === 0 + ? "'{}'" + : Point.defaultArrayFromDrizzle(def, dimensions, baseColumn.mode) + : Point.defaultFromDrizzle(def, baseColumn.mode); + } + if (is(baseColumn, PgLineABC) || is(baseColumn, PgLineTuple)) { + return dimensions > 0 && Array.isArray(def) + ? def.flat(5).length === 0 + ? "'{}'" + : Line.defaultArrayFromDrizzle(def, dimensions, baseColumn.mode) + : Line.defaultFromDrizzle(def, baseColumn.mode); + } + if (is(baseColumn, PgGeometry) || is(baseColumn, PgGeometryObject)) { + return dimensions > 0 && Array.isArray(def) + ? def.flat(5).length === 0 + ? "'{}'" + : GeometryPoint.defaultArrayFromDrizzle(def, dimensions, baseColumn.mode, baseColumn.srid) + : GeometryPoint.defaultFromDrizzle(def, baseColumn.mode, baseColumn.srid); + } + if (dimensions > 0 && Array.isArray(def)) { + if (def.flat(5).length === 0) return "'{}'"; + + return grammarType.defaultArrayFromDrizzle(def, dimensions); + } + + return grammarType.defaultFromDrizzle(def); +}; + +/* + We map drizzle entities into interim schema entities, + so that both Drizzle Kit and Drizzle Studio are able to share + common business logic of composing and diffing InternalSchema + + By having interim schemas based on arrays instead of records - we can postpone + collissions(duplicate indexes, columns, etc.) checking/or printing via extra `errors` field upwards, + while trimming serializer.ts of Hanji & Chalk dependencies +*/ +export const fromDrizzleSchema = ( + schema: { + schemas: PgSchema[]; + tables: AnyPgTable[]; + enums: PgEnum[]; + sequences: PgSequence[]; + roles: PgRole[]; + policies: PgPolicy[]; + views: PgView[]; + matViews: PgMaterializedView[]; + }, + casing: CasingType | undefined, + filter: EntityFilter, +): { + schema: InterimSchema; + errors: SchemaError[]; + warnings: SchemaWarning[]; +} => { + const dialect = new PgDialect({ casing }); + const errors: SchemaError[] = []; + const warnings: SchemaWarning[] = []; + + const res: InterimSchema = { + indexes: [], + pks: [], + fks: [], + uniques: [], + checks: [], + columns: [], + policies: [], + enums: [], + roles: [], + privileges: [], + schemas: [], + sequences: [], + tables: [], + viewColumns: [], + views: [], + }; + + res.schemas = schema.schemas + .filter((it) => { + return !it.isExisting && it.schemaName !== 'public' && filter({ type: 'schema', name: it.schemaName }); + }) + .map((it) => ({ + entityType: 'schemas', + name: it.schemaName, + })); + + const tableConfigPairs = schema.tables.map((it) => { + return { config: getTableConfig(it), table: it }; + }).filter((x) => { + return filter({ type: 'table', schema: x.config.schema ?? 'public', name: x.config.name }); + }); + + for (const policy of schema.policies) { + if ( + !('_linkedTable' in policy) + || typeof policy._linkedTable === 'undefined' + ) { + warnings.push({ type: 'policy_not_linked', policy: policy.name }); + continue; + } + + // @ts-ignore + const { schema: configSchema, name: tableName } = getTableConfig(policy._linkedTable); + + const p = policyFrom(policy, dialect); + res.policies.push({ + entityType: 'policies', + schema: configSchema ?? 'public', + table: tableName, + name: p.name, + as: p.as, + for: p.for, + roles: p.roles, + using: p.using, + withCheck: p.withCheck, + }); + } + + res.tables = tableConfigPairs.map((it) => { + const config = it.config; + const schema = config.schema ?? 'public'; + const isRlsEnabled = config.enableRLS || config.policies.length > 0 + || res.policies.some((x) => x.schema === schema && x.table === config.name); + + return { + entityType: 'tables', + schema, + name: config.name, + isRlsEnabled, + } satisfies PostgresEntities['tables']; + }); + + for (const { table, config } of tableConfigPairs) { + const { + name: tableName, + columns: drizzleColumns, + indexes: drizzleIndexes, + foreignKeys: drizzleFKs, + checks: drizzleChecks, + schema: drizzleSchema, + primaryKeys: drizzlePKs, + uniqueConstraints: drizzleUniques, + policies: drizzlePolicies, + } = config; + + const schema = drizzleSchema || 'public'; + + res.columns.push( + ...drizzleColumns.map((column) => { + const name = getColumnCasing(column, casing); + + const isPk = column.primary + || config.primaryKeys.find((pk) => + pk.columns.some((col) => col.name ? col.name === column.name : col.keyAsName === column.keyAsName) + ) !== undefined; + + const notNull = column.notNull || isPk; + + const generated = column.generated; + const identity = column.generatedIdentity; + + const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) + ?? '1'; + const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) + ?? (parseFloat(increment) < 0 + ? minRangeForIdentityBasedOn(column.columnType) + : '1'); + const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) + ?? (parseFloat(increment) < 0 + ? '-1' + : maxRangeForIdentityBasedOn(column.getSQLType())); + const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = Number(stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? 1); + + const generatedValue: Column['generated'] = generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : String(generated.as), + + type: 'stored', + } + : null; + + const identityValue = identity + ? { + type: identity.type, + name: identity.sequenceName ?? `${tableName}_${name}_seq`, + increment, + startWith, + minValue, + maxValue, + cache, + cycle: identity?.sequenceOptions?.cycle ?? false, + } + : null; + + const { baseColumn, dimensions, typeSchema, sqlType } = unwrapColumn(column); + const columnDefault = defaultFromColumn(baseColumn, column.default, dimensions, dialect); + + return { + entityType: 'columns', + schema: schema, + table: tableName, + name, + type: sqlType.replaceAll('[]', ''), + typeSchema: typeSchema ?? null, + dimensions: dimensions, + pk: column.primary, + pkName: null, + notNull: notNull, + default: columnDefault, + generated: generatedValue, + unique: column.isUnique, + uniqueName: column.uniqueName ?? null, + uniqueNullsNotDistinct: column.uniqueType === 'not distinct', + identity: identityValue, + } satisfies InterimColumn; + }), + ); + + res.pks.push( + ...drizzlePKs.map((pk) => { + const columnNames = pk.columns.map((c) => getColumnCasing(c, casing)); + + const name = pk.name || defaultNameForPK(tableName); + + return { + entityType: 'pks', + schema: schema, + table: tableName, + name: name, + columns: columnNames, + nameExplicit: pk.isNameExplicit, + }; + }), + ); + + res.uniques.push( + ...drizzleUniques.map((unq) => { + const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); + const name = unq.isNameExplicit ? unq.name! : uniqueKeyName(table, columnNames); + return { + entityType: 'uniques', + schema: schema, + table: tableName, + name, + nameExplicit: unq.isNameExplicit, + nullsNotDistinct: unq.nullsNotDistinct, + columns: columnNames, + } satisfies UniqueConstraint; + }), + ); + + res.fks.push( + ...drizzleFKs.map((fk) => { + const onDelete = fk.onDelete; + const onUpdate = fk.onUpdate; + const reference = fk.reference(); + + const tableTo = getTableName(reference.foreignTable); + const schemaTo = getTableConfig(reference.foreignTable).schema || 'public'; + const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); + const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); + + const name = fk.isNameExplicit() ? fk.getName() : defaultNameForFK(tableName, columnsFrom, tableTo, columnsTo); + + return { + entityType: 'fks', + schema: schema, + table: tableName, + name, + nameExplicit: fk.isNameExplicit(), + tableTo, + schemaTo, + columns: columnsFrom, + columnsTo, + onDelete: onDelete ? transformOnUpdateDelete(onDelete) : null, + onUpdate: onUpdate ? transformOnUpdateDelete(onUpdate) : null, + } satisfies ForeignKey; + }), + ); + + for (const index of drizzleIndexes) { + const columns = index.config.columns; + for (const column of columns) { + if (is(column, IndexedColumn) && column.type !== 'PgVector') continue; + + if (is(column, SQL) && !index.config.name) { + errors.push({ + type: 'index_no_name', + schema: schema, + table: getTableName(index.config.table), + sql: dialect.sqlToQuery(column).sql, + }); + continue; + } + + if ( + is(column, IndexedColumn) + && column.type === 'PgVector' + && !column.indexConfig.opClass + ) { + const columnName = getColumnCasing(column, casing); + errors.push({ + type: 'pgvector_index_noop', + table: tableName, + column: columnName, + indexName: index.config.name!, + method: index.config.method!, + }); + } + } + } + + res.indexes.push( + ...drizzleIndexes.map((value) => { + const columns = value.config.columns; + + let indexColumnNames = columns.map((it) => { + const name = getColumnCasing(it as IndexedColumn, casing); + return name; + }); + + const name = value.config.name ?? indexName(tableName, indexColumnNames); + + let indexColumns = columns.map((it) => { + if (is(it, SQL)) { + return { + value: dialect.sqlToQuery(it, 'indexes').sql, + isExpression: true, + asc: true, + nullsFirst: false, + opclass: null, + } satisfies Index['columns'][number]; + } else { + it = it as IndexedColumn; + return { + value: getColumnCasing(it as IndexedColumn, casing), + isExpression: false, + asc: it.indexConfig?.order === 'asc', + nullsFirst: it.indexConfig?.nulls + ? it.indexConfig?.nulls === 'first' + ? true + : false + : false, + opclass: it.indexConfig?.opClass + ? { + name: it.indexConfig.opClass, + default: false, + } + : null, + } satisfies Index['columns'][number]; + } + }); + + const withOpt = Object.entries(value.config.with || {}) + .map((it) => `${it[0]}=${it[1]}`) + .join(', '); + + let where = value.config.where ? dialect.sqlToQuery(value.config.where.inlineParams(), 'indexes').sql : ''; + where = where === 'true' ? '' : where; + + return { + entityType: 'indexes', + schema, + table: tableName, + name, + nameExplicit: value.isNameExplicit, + columns: indexColumns, + isUnique: value.config.unique, + where: where ? where : null, + concurrently: value.config.concurrently ?? false, + method: value.config.method ?? 'btree', + with: withOpt, + forPK: false, + forUnique: false, + } satisfies InterimIndex; + }), + ); + + res.policies.push( + ...drizzlePolicies.map((policy) => { + const p = policyFrom(policy, dialect); + return { + entityType: 'policies', + schema: schema, + table: tableName, + name: p.name, + as: p.as, + for: p.for, + roles: p.roles, + using: p.using, + withCheck: p.withCheck, + }; + }), + ); + + res.checks.push( + ...drizzleChecks.map((check) => { + const value = dialect.sqlToQuery(check.value.inlineParams(), 'indexes').sql; + + const checkName = check.name; + return { + entityType: 'checks', + schema, + table: tableName, + name: checkName, + value, + }; + }), + ); + } + + for (const sequence of schema.sequences) { + const name = sequence.seqName!; + const increment = stringFromIdentityProperty(sequence.seqOptions?.increment) ?? '1'; + const minValue = stringFromIdentityProperty(sequence.seqOptions?.minValue) + ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); + const maxValue = stringFromIdentityProperty(sequence.seqOptions?.maxValue) + ?? (parseFloat(increment) < 0 ? '-1' : '9223372036854775807'); + const startWith = stringFromIdentityProperty(sequence.seqOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = Number(stringFromIdentityProperty(sequence.seqOptions?.cache) ?? 1); + res.sequences.push({ + entityType: 'sequences', + name, + schema: sequence.schema ?? 'public', + incrementBy: increment, + startWith, + minValue, + maxValue, + cacheSize: cache, + cycle: sequence.seqOptions?.cycle ?? false, + }); + } + + for (const _role of schema.roles) { + const role = _role as any; + if (role._existing) continue; + + res.roles.push({ + entityType: 'roles', + name: role.name, + superuser: role.superuser ?? false, + inherit: role.inherit ?? true, + createRole: role.createRole ?? false, + createDb: role.createDb ?? false, + canLogin: role.canLogin ?? false, + replication: role.replication ?? false, + bypassRls: role.bypassRls ?? false, + connLimit: role.connLimit ?? -1, + password: role.password ?? null, + validUntil: role.validUntil ?? null, + }); + } + + const combinedViews = [...schema.views, ...schema.matViews].map((it) => { + if (is(it, PgView)) { + return { + ...getViewConfig(it), + materialized: false, + tablespace: undefined, + using: undefined, + withNoData: undefined, + }; + } else { + return { ...getMaterializedViewConfig(it), materialized: true }; + } + }); + + for (const view of combinedViews) { + if (view.isExisting || !filter({ type: 'table', schema: view.schema ?? 'public', name: view.name })) continue; + + const { + name: viewName, + schema, + query, + tablespace, + using, + withNoData, + materialized, + } = view; + + const viewSchema = schema ?? 'public'; + + type MergerWithConfig = keyof ( + & ViewWithConfig + & PgMaterializedViewWithConfig + ); + const opt = view.with as + | { + [K in MergerWithConfig]: ( + & ViewWithConfig + & PgMaterializedViewWithConfig + )[K]; + } + | null; + + const withOpt = opt + ? { + checkOption: getOrNull(opt, 'checkOption'), + securityBarrier: getOrNull(opt, 'securityBarrier'), + securityInvoker: getOrNull(opt, 'securityInvoker'), + autovacuumEnabled: getOrNull(opt, 'autovacuumEnabled'), + autovacuumFreezeMaxAge: getOrNull(opt, 'autovacuumFreezeMaxAge'), + autovacuumFreezeMinAge: getOrNull(opt, 'autovacuumFreezeMinAge'), + autovacuumFreezeTableAge: getOrNull( + opt, + 'autovacuumFreezeTableAge', + ), + autovacuumMultixactFreezeMaxAge: getOrNull( + opt, + 'autovacuumMultixactFreezeMaxAge', + ), + autovacuumMultixactFreezeMinAge: getOrNull( + opt, + 'autovacuumMultixactFreezeMinAge', + ), + autovacuumMultixactFreezeTableAge: getOrNull( + opt, + 'autovacuumMultixactFreezeTableAge', + ), + autovacuumVacuumCostDelay: getOrNull( + opt, + 'autovacuumVacuumCostDelay', + ), + autovacuumVacuumCostLimit: getOrNull( + opt, + 'autovacuumVacuumCostLimit', + ), + autovacuumVacuumScaleFactor: getOrNull( + opt, + 'autovacuumVacuumScaleFactor', + ), + autovacuumVacuumThreshold: getOrNull( + opt, + 'autovacuumVacuumThreshold', + ), + fillfactor: getOrNull(opt, 'fillfactor'), + logAutovacuumMinDuration: getOrNull( + opt, + 'logAutovacuumMinDuration', + ), + parallelWorkers: getOrNull(opt, 'parallelWorkers'), + toastTupleTarget: getOrNull(opt, 'toastTupleTarget'), + userCatalogTable: getOrNull(opt, 'userCatalogTable'), + vacuumIndexCleanup: getOrNull(opt, 'vacuumIndexCleanup'), + vacuumTruncate: getOrNull(opt, 'vacuumTruncate'), + } + : null; + + const hasNonNullOpts = Object.values(withOpt ?? {}).filter((x) => x !== null).length > 0; + + res.views.push({ + entityType: 'views', + definition: dialect.sqlToQuery(query!).sql, + name: viewName, + schema: viewSchema, + with: hasNonNullOpts ? withOpt : null, + withNoData: withNoData ?? null, + materialized, + tablespace: tablespace ?? null, + using: using ?? null, + }); + } + + res.enums = schema.enums.map((e) => { + return { + entityType: 'enums', + name: e.enumName, + schema: e.schema || 'public', + values: e.enumValues, + }; + }); + + return { + schema: res, + errors, + warnings, + }; +}; + +export const fromExports = (exports: Record) => { + const tables: AnyPgTable[] = []; + const enums: PgEnum[] = []; + const schemas: PgSchema[] = []; + const sequences: PgSequence[] = []; + const roles: PgRole[] = []; + const policies: PgPolicy[] = []; + const views: PgView[] = []; + const matViews: PgMaterializedView[] = []; + const relations: Relations[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (isPgEnum(t)) { + enums.push(t); + return; + } + if (is(t, PgTable)) { + tables.push(t); + } + + if (is(t, PgSchema)) { + schemas.push(t); + } + + if (isPgView(t)) { + views.push(t); + } + + if (isPgMaterializedView(t)) { + matViews.push(t); + } + + if (isPgSequence(t)) { + sequences.push(t); + } + + if (is(t, PgRole)) { + roles.push(t); + } + + if (is(t, PgPolicy)) { + policies.push(t); + } + + if (is(t, Relations)) { + relations.push(t); + } + }); + + return { + tables, + enums, + schemas, + sequences, + views, + matViews, + roles, + policies, + relations, + }; +}; + +export const prepareFromSchemaFiles = async (imports: string[]) => { + const tables: AnyPgTable[] = []; + const enums: PgEnum[] = []; + const schemas: PgSchema[] = []; + const sequences: PgSequence[] = []; + const views: PgView[] = []; + const roles: PgRole[] = []; + const policies: PgPolicy[] = []; + const matViews: PgMaterializedView[] = []; + const relations: Relations[] = []; + + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const prepared = fromExports(i0); + + tables.push(...prepared.tables); + enums.push(...prepared.enums); + schemas.push(...prepared.schemas); + sequences.push(...prepared.sequences); + views.push(...prepared.views); + matViews.push(...prepared.matViews); + roles.push(...prepared.roles); + policies.push(...prepared.policies); + relations.push(...prepared.relations); + } + }); + + return { + tables, + enums, + schemas, + sequences, + views, + matViews, + roles, + policies, + relations, + }; +}; diff --git a/drizzle-kit/src/dialects/postgres/duckdb-introspect.ts b/drizzle-kit/src/dialects/postgres/duckdb-introspect.ts new file mode 100644 index 0000000000..efad2b6685 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/duckdb-introspect.ts @@ -0,0 +1,878 @@ +import type { IntrospectStage, IntrospectStatus } from '../../cli/views'; +import { type DB, trimChar } from '../../utils'; +import type { EntityFilter } from '../pull-utils'; +import type { + CheckConstraint, + Enum, + ForeignKey, + InterimColumn, + InterimIndex, + InterimSchema, + Policy, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, + ViewColumn, +} from './ddl'; +import { defaultForColumn, isSystemNamespace, parseViewDefinition } from './grammar'; + +// TODO: tables/schema/entities -> filter: (entity: {type: ... , metadata: ... }) => boolean; +// TODO: since we by default only introspect public +export const fromDatabase = async ( + db: DB, + database: string, + filter: EntityFilter, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, + queryCallback: ( + id: string, + rows: Record[], + error: Error | null, + ) => void = () => {}, +): Promise => { + const schemas: Schema[] = []; + const enums: Enum[] = []; + const tables: PostgresEntities['tables'][] = []; + const columns: InterimColumn[] = []; + const indexes: InterimIndex[] = []; + const pks: PrimaryKey[] = []; + const fks: ForeignKey[] = []; + const uniques: UniqueConstraint[] = []; + const checks: CheckConstraint[] = []; + const sequences: Sequence[] = []; + const roles: Role[] = []; + const privileges: Privilege[] = []; + const policies: Policy[] = []; + const views: View[] = []; + const viewColumns: ViewColumn[] = []; + + // type OP = { + // oid: number; + // name: string; + // default: boolean; + // }; + + type Namespace = { + oid: number; + name: string; + }; + + // TODO: potential improvements + // --- default access method + // SHOW default_table_access_method; + // SELECT current_setting('default_table_access_method') AS default_am; + + const namespacesQuery = db.query( + `SELECT oid, schema_name as name FROM duckdb_schemas() WHERE database_name = '${database}' ORDER BY lower(schema_name)`, + ) + .then((rows) => { + queryCallback('namespaces', rows, null); + return rows; + }).catch((err) => { + queryCallback('namespaces', [], err); + throw err; + }); + + const namespaces = await namespacesQuery; + + const { other } = namespaces.reduce<{ system: Namespace[]; other: Namespace[] }>( + (acc, it) => { + if (isSystemNamespace(it.name)) { + acc.system.push(it); + } else { + acc.other.push(it); + } + return acc; + }, + { system: [], other: [] }, + ); + + const filteredNamespaces = other.filter((it) => filter({ type: 'schema', name: it.name })); + + if (filteredNamespaces.length === 0) { + return { + schemas, + tables, + enums, + columns, + indexes, + pks, + fks, + uniques, + checks, + sequences, + roles, + privileges, + policies, + views, + viewColumns, + } satisfies InterimSchema; + } + + const filteredNamespacesIds = filteredNamespaces.map((it) => it.oid); + + schemas.push(...filteredNamespaces.map((it) => ({ entityType: 'schemas', name: it.name }))); + + const tablesList = await db + .query<{ + oid: number; + schema: string; + name: string; + definition: string | null; + type: 'table' | 'view'; + }>(` + SELECT + table_oid AS "oid", + schema_name AS "schema", + table_name AS "name", + NULL AS "definition", + 'table' AS "type" + FROM + duckdb_tables() + WHERE database_name = '${database}' + AND schema_oid IN (${filteredNamespacesIds.join(', ')}) + + UNION ALL + + SELECT + view_oid AS "oid", + schema_name AS "schema", + view_name AS "name", + sql AS "definition", + 'view' AS "type" + FROM + duckdb_views() + WHERE database_name = '${database}' + AND schema_oid IN (${filteredNamespacesIds.join(', ')}) + ORDER BY schema_name, name + `).then((rows) => { + queryCallback('tables', rows, null); + return rows; + }).catch((err) => { + queryCallback('tables', [], err); + throw err; + }); + + const viewsList = tablesList.filter((it) => it.type === 'view'); + + const filteredTables = tablesList.filter((it) => { + if (!(it.type === 'table' && filter({ type: 'table', schema: it.schema, name: it.name }))) return false; + it.schema = trimChar(it.schema, '"'); // when camel case name e.x. mySchema -> it gets wrapped to "mySchema" + return true; + }); + + const filteredTableIds = filteredTables.map((it) => it.oid); + const viewsIds = viewsList.map((it) => it.oid); + const filteredViewsAndTableIds = [...filteredTableIds, ...viewsIds]; + + const filterByTableIds = filteredTableIds.length > 0 ? `(${filteredTableIds.join(',')})` : ''; + const filterByTableAndViewIds = filteredViewsAndTableIds.length > 0 ? `(${filteredViewsAndTableIds.join(',')})` : ''; + + for (const table of filteredTables) { + tables.push({ + entityType: 'tables', + schema: trimChar(table.schema, "'"), + name: table.name, + isRlsEnabled: false, + }); + } + + // const dependQuery = db.query<{ + // oid: number; + // tableId: number; + // ordinality: number; + + // /* + // a - An “auto” dependency means the dependent object can be dropped separately, + // and will be automatically removed if the referenced object is dropped—regardless of CASCADE or RESTRICT. + // Example: A named constraint on a table is auto-dependent on the table, so it vanishes when the table is dropped + + // i - An “internal” dependency marks objects that were created as part of building another object. + // Directly dropping the dependent is disallowed—you must drop the referenced object instead. + // Dropping the referenced object always cascades to the dependent + // Example: A trigger enforcing a foreign-key constraint is internally dependent on its pg_constraint entry + // */ + // deptype: 'a' | 'i'; + // }>( + // `SELECT + // -- sequence id + // objid as oid, + // refobjid as "tableId", + // refobjsubid as "ordinality", + + // -- a = auto + // deptype + // FROM + // duckdb_dependencies() + // where ${filterByTableIds ? ` refobjid in ${filterByTableIds}` : 'false'}`, + // ).then((rows) => { + // queryCallback('depend', rows, null); + // return rows; + // }).catch((err) => { + // queryCallback('depend', [], err); + // throw err; + // }); + + // const enumsQuery = db + // .query<{ + // oid: number; + // name: string; + // schemaId: number; + // arrayTypeId: number; + // ordinality: number; + // value: string; + // }>(`SELECT + // pg_type.oid as "oid", + // typname as "name", + // typnamespace as "schemaId", + // pg_type.typarray as "arrayTypeId", + // pg_enum.enumsortorder AS "ordinality", + // pg_enum.enumlabel AS "value" + // FROM + // pg_type + // JOIN pg_enum on pg_enum.enumtypid=pg_type.oid + // WHERE + // pg_type.typtype = 'e' + // AND typnamespace IN (${filteredNamespacesIds.join(',')}) + // ORDER BY pg_type.oid, pg_enum.enumsortorder + // `).then((rows) => { + // queryCallback('enums', rows, null); + // return rows; + // }).catch((err) => { + // queryCallback('enums', [], err); + // throw err; + // }); + + // fetch for serials, adrelid = tableid + // const serialsQuery = db + // .query<{ + // oid: number; + // tableId: number; + // ordinality: number; + // expression: string; + // }>(`SELECT + // oid, + // adrelid as "tableId", + // adnum as "ordinality", + // pg_get_expr(adbin, adrelid) as "expression" + // FROM + // pg_attrdef + // WHERE ${filterByTableIds ? ` adrelid in ${filterByTableIds}` : 'false'} + // `).then((rows) => { + // queryCallback('serials', rows, null); + // return rows; + // }).catch((err) => { + // queryCallback('serials', [], err); + // throw err; + // }); + + // const sequencesQuery = db.query<{ + // schema: string; + // oid: number; + // name: string; + // startWith: string; + // minValue: string; + // maxValue: string; + // incrementBy: string; + // cycle: boolean; + // cacheSize: number; + // }>(`SELECT + // n.nspname as "schema", + // c.relname as "name", + // seqrelid as "oid", + // seqstart as "startWith", + // seqmin as "minValue", + // seqmax as "maxValue", + // seqincrement as "incrementBy", + // seqcycle as "cycle", + // seqcache as "cacheSize" + // FROM pg_sequence + // LEFT JOIN pg_class c ON pg_sequence.seqrelid=c.oid + // LEFT JOIN pg_namespace n ON c.relnamespace=n.oid + // WHERE relnamespace IN (${filteredNamespacesIds.join(',')}) + // ORDER BY relnamespace, lower(relname); + // `).then((rows) => { + // queryCallback('sequences', rows, null); + // return rows; + // }).catch((err) => { + // queryCallback('sequences', [], err); + // throw err; + // }); + + const constraintsQuery = db.query<{ + schemaId: number; + tableId: number; + name: string; + type: 'PRIMARY KEY' | 'UNIQUE' | 'FOREIGN KEY' | 'CHECK'; // p - primary key, u - unique, f - foreign key, c - check + definition: string; + tableToName: string; + columnsNames: string[]; + columnsToNames: string[]; + }>(` + SELECT + schema_oid AS "schemaId", + table_oid AS "tableId", + constraint_name AS "name", + constraint_type AS "type", + constraint_text AS "definition", + referenced_table AS "tableToName", + constraint_column_names AS "columnsNames", + referenced_column_names AS "columnsToNames" + FROM + duckdb_constraints() + WHERE ${filterByTableIds ? ` table_oid in ${filterByTableIds}` : 'false'} + AND database_name = '${database}' + ORDER BY constraint_type, lower(name); + `).then((rows) => { + queryCallback('constraints', rows, null); + return rows; + }).catch((err) => { + queryCallback('constraints', [], err); + throw err; + }); + + // for serials match with pg_attrdef via attrelid(tableid)+adnum(ordinal position), for enums with pg_enum above + const columnsQuery = db.query<{ + tableId: number; + name: string; + ordinality: number; + notNull: boolean; + typeId: number; + type: string; + default: string | null; + }>(`SELECT + table_oid AS "tableId", + column_name AS "name", + column_index AS "ordinality", + is_nullable = false AS "notNull", + data_type_id AS "typeId", + lower(data_type) AS "type", + column_default AS "default" + FROM + duckdb_columns() + WHERE + ${filterByTableAndViewIds ? ` table_oid in ${filterByTableAndViewIds}` : 'false'} + AND database_name = '${database}' + ORDER BY column_index; + `).then((rows) => { + queryCallback('columns', rows, null); + return rows; + }).catch((err) => { + queryCallback('columns', [], err); + throw err; + }); + + const [ + // dependList, + // enumsList, + // serialsList, + // sequencesList, + constraintsList, + columnsList, + ] = await Promise + .all([ + // dependQuery, + // enumsQuery, + // serialsQuery, + // sequencesQuery, + constraintsQuery, + columnsQuery, + ]); + + // const groupedEnums = enumsList.reduce((acc, it) => { + // if (!(it.oid in acc)) { + // const schemaName = filteredNamespaces.find((sch) => sch.oid === it.schemaId)!.name; + // acc[it.oid] = { + // oid: it.oid, + // schema: schemaName, + // name: it.name, + // values: [it.value], + // }; + // } else { + // acc[it.oid].values.push(it.value); + // } + // return acc; + // }, {} as Record); + + // const groupedArrEnums = enumsList.reduce((acc, it) => { + // if (!(it.arrayTypeId in acc)) { + // const schemaName = filteredNamespaces.find((sch) => sch.oid === it.schemaId)!.name; + // acc[it.arrayTypeId] = { + // oid: it.oid, + // schema: schemaName, + // name: it.name, + // values: [it.value], + // }; + // } else { + // acc[it.arrayTypeId].values.push(it.value); + // } + // return acc; + // }, {} as Record); + + // for (const it of Object.values(groupedEnums)) { + // enums.push({ + // entityType: 'enums', + // schema: it.schema, + // name: it.name, + // values: it.values, + // }); + // } + + let columnsCount = 0; + let indexesCount = 0; + let foreignKeysCount = 0; + let tableCount = 0; + let checksCount = 0; + let viewsCount = 0; + + // for (const seq of sequencesList) { + // const depend = dependList.find((it) => it.oid === seq.oid); + + // if (depend && (depend.deptype === 'a' || depend.deptype === 'i')) { + // // TODO: add type field to sequence in DDL + // // skip fo sequences or identity columns + // // console.log('skip for auto created', seq.name); + // continue; + // } + + // sequences.push({ + // entityType: 'sequences', + // schema: seq.schema, + // name: seq.name, + // startWith: parseIdentityProperty(seq.startWith), + // minValue: parseIdentityProperty(seq.minValue), + // maxValue: parseIdentityProperty(seq.maxValue), + // incrementBy: parseIdentityProperty(seq.incrementBy), + // cycle: seq.cycle, + // cacheSize: Number(parseIdentityProperty(seq.cacheSize) ?? 1), + // }); + // } + + // progressCallback('enums', Object.keys(groupedEnums).length, 'done'); + + // type DBColumn = (typeof columnsList)[number]; + + const tableColumns = columnsList.filter((it) => { + const table = tablesList.find((tbl) => tbl.oid === it.tableId); + return !!table; + }); + + // supply serials + for (const column of tableColumns) { + const type = column.type; + + if (!(type === 'smallint' || type === 'bigint' || type === 'integer')) { + continue; + } + + // const expr = serialsList.find( + // (it) => it.tableId === column.tableId && it.ordinality === column.ordinality, + // ); + + // if (expr) { + // const table = tablesList.find((it) => it.oid === column.tableId)!; + + // const isSerial = isSerialExpression(expr.expression, table.schema); + // column.type = isSerial ? type === 'bigint' ? 'bigserial' : type === 'integer' ? 'serial' : 'smallserial' : type; + // } + } + + for (const column of tableColumns) { + const table = tablesList.find((it) => it.oid === column.tableId)!; + + // supply enums + // const enumType = column.typeId in groupedEnums + // ? groupedEnums[column.typeId] + // : column.typeId in groupedArrEnums + // ? groupedArrEnums[column.typeId] + // : null; + + // let columnTypeMapped = enumType ? enumType.name : column.type.replace('[]', ''); + let columnTypeMapped = column.type; + let dimensions = 0; + + // check if column is array + const arrayRegex = /\[(\d+)?\]$/; + if (arrayRegex.test(columnTypeMapped)) { + columnTypeMapped = columnTypeMapped.replace(arrayRegex, ''); + dimensions = 1; + } + + if (columnTypeMapped.startsWith('numeric(')) { + columnTypeMapped = columnTypeMapped.replace(',', ', '); + } + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace(' with time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char'); + + columnTypeMapped = trimChar(columnTypeMapped, '"'); + + const columnDefault = column.default; + + const defaultValue = defaultForColumn( + columnTypeMapped, + columnDefault, + 0, + false, // TODO + ); + + const unique = constraintsList.find((it) => { + return it.type === 'UNIQUE' && it.tableId === column.tableId && it.columnsNames.length === 1 + && it.columnsNames.includes(column.name); + }) ?? null; + + const pk = constraintsList.find((it) => { + return it.type === 'PRIMARY KEY' && it.tableId === column.tableId && it.columnsNames.length === 1 + && it.columnsNames.includes(column.name); + }) ?? null; + + columns.push({ + entityType: 'columns', + schema: table.schema, + table: table.name, + name: column.name, + type: columnTypeMapped, + // typeSchema: enumType ? enumType.schema ?? 'public' : null, + typeSchema: null, + dimensions, + default: defaultValue, + unique: !!unique, + uniqueName: unique ? unique.name : null, + uniqueNullsNotDistinct: unique?.definition.includes('NULLS NOT DISTINCT') ?? false, + notNull: column.notNull, + pk: pk !== null, + pkName: pk !== null ? pk.name : null, + generated: null, + identity: null, + }); + } + + for (const unique of constraintsList.filter((it) => it.type === 'UNIQUE')) { + const table = tablesList.find((it) => it.oid === unique.tableId)!; + const schema = namespaces.find((it) => it.oid === unique.schemaId)!; + + const columns = unique.columnsNames.map((it) => { + const column = columnsList.find((column) => column.tableId === unique.tableId && column.name === it)!; + return column.name; + }); + + uniques.push({ + entityType: 'uniques', + schema: schema.name, + table: table.name, + name: unique.name, + nameExplicit: true, + columns, + nullsNotDistinct: unique.definition.includes('NULLS NOT DISTINCT'), + }); + } + + for (const pk of constraintsList.filter((it) => it.type === 'PRIMARY KEY')) { + const table = tablesList.find((it) => it.oid === pk.tableId)!; + const schema = namespaces.find((it) => it.oid === pk.schemaId)!; + + const columns = pk.columnsNames.map((it) => { + const column = columnsList.find((column) => column.tableId === pk.tableId && column.name === it)!; + return column.name; + }); + + pks.push({ + entityType: 'pks', + schema: schema.name, + table: table.name, + name: pk.name, + columns, + nameExplicit: true, + }); + } + + for (const fk of constraintsList.filter((it) => it.type === 'FOREIGN KEY')) { + const table = tablesList.find((it) => it.oid === fk.tableId)!; + const schema = namespaces.find((it) => it.oid === fk.schemaId)!; + const tableTo = tablesList.find((it) => it.schema === schema.name && it.name === fk.tableToName)!; + + const columns = fk.columnsNames.map((it) => { + const column = columnsList.find((column) => column.tableId === fk.tableId && column.name === it)!; + return column.name; + }); + + const columnsTo = fk.columnsToNames.map((it) => { + const column = columnsList.find((column) => column.tableId === tableTo.oid && column.name === it)!; + return column.name; + }); + + fks.push({ + entityType: 'fks', + schema: schema.name, + table: table.name, + name: fk.name, + nameExplicit: true, + columns, + tableTo: tableTo.name, + schemaTo: schema.name, + columnsTo, + onUpdate: 'NO ACTION', + onDelete: 'NO ACTION', + }); + } + + for (const check of constraintsList.filter((it) => it.type === 'CHECK')) { + const table = tablesList.find((it) => it.oid === check.tableId)!; + const schema = namespaces.find((it) => it.oid === check.schemaId)!; + + checks.push({ + entityType: 'checks', + schema: schema.name, + table: table.name, + name: check.name, + value: check.definition, + }); + } + + // const idxs = await db.query<{ + // oid: number; + // schema: string; + // name: string; + // accessMethod: string; + // with?: string[]; + // metadata: { + // tableId: number; + // expression: string | null; + // where: string; + // columnOrdinals: number[]; + // options: number[]; + // isUnique: boolean; + // isPrimary: boolean; + // }; + // }>(` + // SELECT + // pg_class.oid, + // n.nspname as "schema", + // relname AS "name", + // am.amname AS "accessMethod", + // reloptions AS "with", + // row_to_json(metadata) as "metadata" + // FROM + // pg_class + // JOIN pg_am am ON am.oid = pg_class.relam + // JOIN pg_namespace n ON relnamespace = n.oid + // LEFT JOIN LATERAL ( + // SELECT + // pg_get_expr(indexprs, indrelid) AS "expression", + // pg_get_expr(indpred, indrelid) AS "where", + // indrelid::int AS "tableId", + // indkey::int[] as "columnOrdinals", + // indoption::int[] as "options", + // indisunique as "isUnique", + // indisprimary as "isPrimary" + // FROM + // pg_index + // WHERE + // pg_index.indexrelid = pg_class.oid + // ) metadata ON TRUE + // WHERE + // relkind = 'i' and ${filterByTableIds ? `metadata."tableId" in ${filterByTableIds}` : 'false'} + // ORDER BY relnamespace, lower(relname); + // `).then((rows) => { + // queryCallback('indexes', rows, null); + // return rows; + // }).catch((err) => { + // queryCallback('indexes', [], err); + // throw err; + // }); + + // for (const idx of idxs) { + // const { metadata } = idx; + + // const expr = splitExpressions(metadata.expression); + + // const table = tablesList.find((it) => it.oid === idx.metadata.tableId)!; + + // const nonColumnsCount = metadata.columnOrdinals.reduce((acc, it) => { + // if (it === 0) acc += 1; + // return acc; + // }, 0); + + // if (expr.length !== nonColumnsCount) { + // throw new Error( + // `expression split doesn't match non-columns count: [${ + // metadata.columnOrdinals.join( + // ', ', + // ) + // }] '${metadata.expression}':${expr.length}:${nonColumnsCount}`, + // ); + // } + + // const opts = metadata.options.map((it) => { + // return { + // descending: (it & 1) === 1, + // nullsFirst: (it & 2) === 2, + // }; + // }); + + // const res = [] as ( + // & ( + // | { type: 'expression'; value: string } + // | { type: 'column'; value: DBColumn } + // ) + // & { options: (typeof opts)[number] } + // )[]; + + // let k = 0; + // for (let i = 0; i < metadata.columnOrdinals.length; i++) { + // const ordinal = metadata.columnOrdinals[i]; + // if (ordinal === 0) { + // res.push({ + // type: 'expression', + // value: expr[k], + // options: opts[i], + // }); + // k += 1; + // } else { + // const column = columnsList.find((column) => { + // return column.tableId === metadata.tableId && column.ordinality === ordinal; + // }); + // if (!column) throw new Error(`missing column: ${metadata.tableId}:${ordinal}`); + + // // ! options and opclass can be undefined when index have "INCLUDE" columns (columns from "INCLUDE" don't have options and opclass) + // const options = opts[i] as typeof opts[number] | undefined; + // if (options) { + // res.push({ + // type: 'column', + // value: column, + // options: opts[i], + // }); + // } + // } + // } + + // const columns = res.map((it) => { + // return { + // asc: !it.options.descending, + // nullsFirst: it.options.nullsFirst, + // opclass: null, + // isExpression: it.type === 'expression', + // value: it.type === 'expression' ? it.value : it.value.name, // column name + // } satisfies Index['columns'][number]; + // }); + + // indexes.push({ + // entityType: 'indexes', + // schema: idx.schema, + // table: table.name, + // name: idx.name, + // nameExplicit: true, + // method: idx.accessMethod, + // isUnique: metadata.isUnique, + // with: idx.with?.join(', ') ?? '', + // where: idx.metadata.where, + // columns: columns, + // concurrently: false, + // forUnique: false, + // forPK: false, + // }); + // } + + progressCallback('columns', columnsCount, 'fetching'); + progressCallback('checks', checksCount, 'fetching'); + progressCallback('indexes', indexesCount, 'fetching'); + progressCallback('tables', tableCount, 'done'); + + for ( + const it of columnsList.filter((it) => { + const view = viewsList.find((x) => x.oid === it.tableId); + return !!view; + }) + ) { + const view = viewsList.find((x) => x.oid === it.tableId)!; + + // const enumType = it.typeId in groupedEnums + // ? groupedEnums[it.typeId] + // : it.typeId in groupedArrEnums + // ? groupedArrEnums[it.typeId] + // : null; + + // let columnTypeMapped = enumType ? enumType.name : it.type.replace('[]', ''); + let columnTypeMapped = it.type.replace('[]', ''); + columnTypeMapped = trimChar(columnTypeMapped, '"'); + if (columnTypeMapped.startsWith('numeric(')) { + columnTypeMapped = columnTypeMapped.replace(',', ', '); + } + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char'); + + const typeDimensions = it.type.split('[]').length - 1; + + viewColumns.push({ + schema: view.schema, + view: view.name, + name: it.name, + type: columnTypeMapped, + notNull: it.notNull, + dimensions: 0, + // typeSchema: enumType ? enumType.schema : null, + typeSchema: null, + typeDimensions, + }); + } + + for (const view of viewsList) { + if (!filter({ type: 'table', schema: view.schema, name: view.name })) continue; + tableCount += 1; + + const definition = parseViewDefinition(view.definition); + + views.push({ + entityType: 'views', + schema: view.schema, + name: view.name, + definition, + with: null, + materialized: false, + tablespace: null, + using: null, + withNoData: null, + }); + } + + // TODO: update counts! + progressCallback('columns', columnsCount, 'done'); + progressCallback('indexes', indexesCount, 'done'); + progressCallback('fks', foreignKeysCount, 'done'); + progressCallback('checks', checksCount, 'done'); + progressCallback('views', viewsCount, 'done'); + + return { + schemas, + tables, + enums, + columns, + indexes, + pks, + fks, + uniques, + checks, + sequences, + roles, + privileges, + policies, + views, + viewColumns, + } satisfies InterimSchema; +}; diff --git a/drizzle-kit/src/dialects/postgres/grammar.ts b/drizzle-kit/src/dialects/postgres/grammar.ts new file mode 100644 index 0000000000..4aa8df01f7 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/grammar.ts @@ -0,0 +1,2206 @@ +import { Temporal } from '@js-temporal/polyfill'; +import type { possibleIntervals } from '../../utils'; +import { + hasTimeZoneSuffix, + isDate, + isTime, + isTimestamp, + parseEWKB, + parseIntervalFields, + stringifyArray, + stringifyTuplesArray, + trimChar, + wrapWith, +} from '../../utils'; +import { parseArray, parseExpressionArray } from '../../utils/parse-pgarray'; +import { parse, stringify } from '../../utils/when-json-met-bigint'; +import { hash } from '../common'; +import { escapeForSqlDefault, escapeForTsLiteral, numberForTs, parseParams, unescapeFromSqlDefault } from '../utils'; +import type { Column, DiffEntities, PostgresEntities } from './ddl'; +import type { Import } from './typescript'; + +export interface SqlType { + is(type: string): boolean; + drizzleImport(): Import; + defaultFromDrizzle(value: unknown, mode?: MODE, config?: unknown): Column['default']; + defaultArrayFromDrizzle( + value: any[], + dimensions: number, + mode?: MODE, + config?: unknown, + ): Column['default']; + defaultFromIntrospect(value: string): Column['default']; + defaultArrayFromIntrospect(value: string): Column['default']; // todo: remove? + toTs(type: string, value: string | null): { options?: Record; default: string; customType?: string }; // customType for Custom + toArrayTs( + type: string, + value: string | null, + ): { options?: Record; default: string; customType?: string }; +} + +export const SmallInt: SqlType = { + is: (type: string) => /^\s*smallint(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'smallint', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + return trimChar(value, "'"); // 10, but '-10' + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => ({ default: value ?? '' }), + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return `${v}`; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Int: SqlType = { + is: (type: string) => /^\s*integer(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'integer', + defaultFromDrizzle: SmallInt.defaultFromDrizzle, + defaultArrayFromDrizzle: SmallInt.defaultArrayFromDrizzle, + defaultFromIntrospect: SmallInt.defaultFromIntrospect, + defaultArrayFromIntrospect: SmallInt.defaultArrayFromIntrospect, + toTs: SmallInt.toTs, + toArrayTs: SmallInt.toArrayTs, +}; + +export const BigInt: SqlType = { + is: (type: string) => /^\s*bigint(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'bigint', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + return trimChar(value, "'"); // 10, but '-10' + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (_, value) => { + if (!value) return { options: { mode: 'number' }, default: '' }; + const { mode, value: def } = numberForTs(value); + return { options: { mode }, default: def }; + }, + toArrayTs: (_, value) => { + if (!value) return { options: { mode: 'number' }, default: '' }; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + return { + options: { mode: 'bigint' }, + default: stringifyArray(res, 'ts', (v) => `${v}n`), + }; + } catch { + return { options: { mode: 'bigint' }, default: `sql\`${value}\`` }; + } + }, +}; + +export const Numeric: SqlType = { + is: (type: string) => /^\s*numeric|decimal(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'numeric', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + // 10.123, but '9223372036854775807' + return `'${trimChar(value, "'")}'`; + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const [precision, scale] = parseParams(type); + const options = {} as any; + if (precision) options['precision'] = Number(precision); + if (scale) options['scale'] = Number(scale); + + if (!value) return { options, default: '' }; + + const trimmed = trimChar(value, "'"); + + const { mode, value: def } = numberForTs(trimmed); + return { options: { mode, ...options }, default: def }; + }, + toArrayTs: (type, value) => { + const [precision, scale] = parseParams(type); + const options = {} as any; + if (precision) options['precision'] = Number(precision); + if (scale) options['scale'] = Number(scale); + + if (!value) return { options, default: '' }; + /* + If we'd want it to be smart - we need to check if numeric array has + any bigints recuresively, it's waaaaay easier to just do sql`` + */ + // try { + // const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + // const res = parseArray(trimmed); + + // return { + // options: { mode: 'bigint', ...options }, + // default: stringifyArray(res, 'ts', (v) => { + + // return `${v}`; + // }), + // }; + // } catch { + return { options, default: `sql\`${value}\`` }; + // } + }, +}; + +export const Real: SqlType = { + is: (type: string) => /^\s*real(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'real', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + return trimChar(value, "'"); // 10, but '-10' + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => ({ default: value ?? '' }), + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return `${v}`; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Double: SqlType = { + is: (type: string) => /^\s*(?:double|double precision)(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'doublePrecision', + defaultFromDrizzle: Real.defaultFromDrizzle, + defaultArrayFromDrizzle: Real.defaultArrayFromDrizzle, + defaultFromIntrospect: Real.defaultFromIntrospect, + defaultArrayFromIntrospect: Real.defaultArrayFromIntrospect, + toTs: Real.toTs, + toArrayTs: Real.toArrayTs, +}; + +export const Boolean: SqlType = { + is: (type: string) => /^\s*boolean(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'boolean', + defaultFromDrizzle: (value) => { + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return `'${stringifyArray(value, 'sql', (v) => (v === true ? 't' : 'f'))}'`; + }, + defaultFromIntrospect: (value) => { + return trimChar(value, "'"); + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => ({ default: value ?? '' }), + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return v === 't' ? 'true' : 'false'; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Char: SqlType = { + is: (type: string) => /^\s*char|character(?:\(\d+\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'char', + defaultFromDrizzle: (value) => { + const escaped = escapeForSqlDefault(value as string); + return `'${escaped}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + const escaped = v + .replaceAll("'", "''") + .replaceAll('\\', '\\\\') + .replaceAll('"', '\\"'); + if (v.includes('\\') || v.includes('"') || v.includes(',')) { + return `"${escaped}"`; + } + return escaped; + }); + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + if (!value) return { options, default: '' }; + if (!value.startsWith("'") && !value.endsWith("'")) return { options, default: `sql\`${value}\`` }; + const escaped = escapeForTsLiteral(unescapeFromSqlDefault(trimChar(value, "'"))); + return { options, default: escaped }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + const escaped = escapeForTsLiteral(unescapeFromSqlDefault(trimChar(v, "'"), 'arr')); + return escaped; + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Varchar: SqlType = { + is: (type: string) => /^\s*varchar|character varying(?:\(\d+\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'varchar', + defaultFromDrizzle: Char.defaultFromDrizzle, + defaultArrayFromDrizzle: Char.defaultArrayFromDrizzle, + defaultFromIntrospect: Char.defaultFromIntrospect, + defaultArrayFromIntrospect: Char.defaultArrayFromIntrospect, + toTs: Char.toTs, + toArrayTs: Char.toArrayTs, +}; + +export const Text: SqlType = { + is: (type: string) => /^\s*text(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'text', + defaultFromDrizzle: Char.defaultFromDrizzle, + defaultArrayFromDrizzle: Char.defaultArrayFromDrizzle, + defaultFromIntrospect: Char.defaultFromIntrospect, + defaultArrayFromIntrospect: Char.defaultArrayFromIntrospect, + toTs: Char.toTs, + toArrayTs: Char.toArrayTs, +}; + +export const toDefaultArray = ( + value: any[], + dimensions: number, + cb: (it: unknown) => string, + depth: number = 0, +): string => { + if (depth === dimensions) { + const res = cb(value); + if (res.includes('"')) return `"${res.replaceAll('"', '\\"')}"`; + return `"${res}"`; + } + + if (Array.isArray(value)) { + const inner = value.map((v) => { + return toDefaultArray(v, dimensions, cb, depth + 1); + }).join(','); + if (depth === 0) return `{${inner}}`; + return `${inner}`; + } + + return cb(value); +}; + +export const Json: SqlType = { + is: (type: string) => /^\s*json(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'json', + defaultFromDrizzle: (value) => { + const stringified = stringify(value, (_, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("'", "''"); + }); + return `'${stringified}'`; + }, + defaultArrayFromDrizzle: (def, dimensions) => { + const value = toDefaultArray(def, dimensions, (it) => + stringify(it, (_, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("'", "''"); + })); + return `'${value}'`; + }, + defaultFromIntrospect: (value) => value, + defaultArrayFromIntrospect: (value) => value, + toTs: (_, value) => { + if (!value) return { default: '' }; + + const trimmed = trimChar(value, "'"); + try { + const parsed = parse(trimmed); + const stringified = stringify( + parsed, + (_, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("''", "'"); + }, + undefined, + true, + )!; + return { default: stringified }; + } catch {} + return { default: `sql\`${value}\`` }; + }, + toArrayTs: (_, def) => { + if (!def) return { default: '' }; + return { default: `sql\`${def.replaceAll('\\"', '\\\\"')}\`` }; + }, +}; + +export const Jsonb: SqlType = { + is: (type: string) => /^\s*jsonb(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'jsonb', + defaultFromDrizzle: (value) => { + const stringified = stringify(value, (_, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("'", "''"); + }); + return `'${stringified}'`; + }, + defaultArrayFromDrizzle: (def, dimensions) => { + const value = toDefaultArray(def, dimensions, (it) => + stringify(it, (_, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("'", "''"); + })); + return `'${value}'`; + }, + /* + TODO: make less hacky, + from: `'{"key": "value"}'`, + to: `'{"key":"value"}'` + */ + defaultFromIntrospect: (value) => value.replaceAll(`": "`, `":"`), + defaultArrayFromIntrospect: (value) => value, + toTs: Json.toTs, + toArrayTs: Json.toArrayTs, +}; + +export const Time: SqlType = { + is: (type: string) => /^\s*time(?:\(\d+\))?(?:\[\])*?\s*$/i.test(type), + drizzleImport: () => 'time', + defaultFromDrizzle: (value) => { + return wrapWith(String(value), "'"); + }, + defaultArrayFromDrizzle: (value) => { + return wrapWith( + stringifyArray(value, 'sql', (v) => String(v)), + "'", + ); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + + if (!value) return { options, default: '' }; + const trimmed = trimChar(value, "'"); + if (!isTime(trimmed)) return { options, default: `sql\`${value}\`` }; + + return { options, default: value }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + return { + options, + default: stringifyArray(res, 'ts', (v) => { + const trimmed = trimChar(v, "'"); + + if (!isTime(trimmed)) return `sql\`${trimmed}\``; + return wrapWith(v, "'"); + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; +export const TimeTz: SqlType = { + is: (type: string) => /^\s*time(?:\(\d+\))?\s+with time zone(?:\[\])*?\s*$/i.test(type), + drizzleImport: () => 'time', + defaultFromDrizzle: (value) => { + const v = String(value); + const def = hasTimeZoneSuffix(v) ? v : v + '+00'; + return wrapWith(def, "'"); + }, + defaultArrayFromDrizzle: (value) => { + return wrapWith( + stringifyArray(value, 'sql', (v) => { + return hasTimeZoneSuffix(v) ? v : v + '+00'; + }), + "'", + ); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + options['withTimezone'] = true; + + if (!value) return { options, default: '' }; + const trimmed = trimChar(value, "'"); + if (!isTime(trimmed)) return { options, default: `sql\`${value}\`` }; + + return { options, default: value }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + options['withTimezone'] = true; + + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + let isDrizzleSql: boolean = false; + const def = stringifyArray(res, 'ts', (v) => { + const trimmed = trimChar(v, "'"); + + if (!isTime(trimmed)) isDrizzleSql = true; + return wrapWith(v, "'"); + }); + return { + options, + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const DateType: SqlType = { + is: (type: string) => /^\s*date(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'date', + defaultFromDrizzle: (value) => { + if (typeof value === 'string') return wrapWith(value, "'"); + if (!(value instanceof Date)) { + throw new Error( + '"date" default value must be instance of Date or String', + ); + } + + const mapped = value.toISOString().split('T')[0]; + return wrapWith(mapped, "'"); + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v === 'string') return v; + if (v instanceof Date) { + return v.toISOString().split('T')[0]; + } + throw new Error( + 'Unexpected default value for "date", must be String or Date', + ); + }); + return wrapWith(res, "'"); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + if (!value) return { default: '' }; + const trimmed = trimChar(value, "'"); + if (!isDate(trimmed)) return { default: `sql\`${value}\`` }; + + return { default: value }; + }, + toArrayTs: (type, value) => { + if (!value) return { default: '' }; + + let isDrizzleSql: boolean = false; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + const mapped = stringifyArray(res, 'ts', (v) => { + const trimmed = trimChar(v, "'"); + + if (!isDate(trimmed)) isDrizzleSql = true; + return wrapWith(v, "'"); + }); + return { + default: isDrizzleSql ? mapped : `sql\`${value}\``, + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Timestamp: SqlType = { + // TODO + // ORM returns precision with space before type, why? + // timestamp or timestamp[] or timestamp (3) or timestamp (3)[] + is: (type: string) => /^\s*timestamp(?:\s)?(?:\(\d+\))?(?:\[\])*?\s*$/i.test(type), + drizzleImport: () => 'timestamp', + defaultFromDrizzle: (value, _type) => { + if (typeof value === 'string') return wrapWith(value, "'"); + if (!(value instanceof Date)) { + throw new Error( + 'Timestamp default value must be instance of Date or String', + ); + } + + const mapped = value + .toISOString() + .replace('T', ' ') + .replace('Z', ' ') + .slice(0, 23); + return wrapWith(mapped, "'"); + }, + defaultArrayFromDrizzle: (value, _type) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v === 'string') return wrapWith(v, '"'); + + if (v instanceof Date) { + return wrapWith( + v.toISOString().replace('T', ' ').replace('Z', ' ').slice(0, 23), + '"', + ); + } + throw new Error( + 'Unexpected default value for Timestamp, must be String or Date', + ); + }); + return wrapWith(res, "'"); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + + if (!value) return { options, default: '' }; + let patched = trimChar(value, "'"); + patched = patched.includes('T') ? patched : patched.replace(' ', 'T') + 'Z'; + + const test = new Date(patched); + + if (isNaN(test.getTime())) return { options, default: `sql\`${value}\`` }; + + return { options, default: `new Date('${patched}')` }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + + if (!value) return { options, default: '' }; + + let isDrizzleSql: boolean = false; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + const def = stringifyArray(res, 'ts', (v) => { + const patched = v.includes('T') ? v : v.replace(' ', 'T') + 'Z'; + const check = new Date(patched); + if (isNaN(check.getTime())) isDrizzleSql = true; + return `new Date("${patched}")`; + }); + return { + options, + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; +export const TimestampTz: SqlType = { + // TODO + // ORM returns precision with space before type, why? + // timestamp with time zone or timestamp with time zone[] or timestamp (3) with time zone or timestamp (3) with time zone[] + is: (type: string) => + /^\s*timestamp(?:\s)?(?:\(\d+\))?\s+with time zone(?:\[\])*?\s*$/i.test( + type, + ), + drizzleImport: () => 'timestamp', + defaultFromDrizzle: (value, _type) => { + if (typeof value === 'string') { + const mapped = hasTimeZoneSuffix(value) ? value : value + '+00'; + return wrapWith(mapped, "'"); + } + + if (!(value instanceof Date)) { + throw new Error( + 'Timestamp default value must be instance of Date or String', + ); + } + + const mapped = value.toISOString().replace('T', ' ').replace('Z', '+00'); + + return wrapWith(mapped, "'"); + }, + defaultArrayFromDrizzle: (value, _type) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v === 'string') { + const mapped = hasTimeZoneSuffix(v) ? v : v + '+00'; + return wrapWith(mapped, '"'); + } + + if (v instanceof Date) { + return wrapWith( + v.toISOString().replace('T', ' ').replace('Z', '+00'), + '"', + ); + } + throw new Error( + 'Unexpected default value for Timestamp, must be String or Date', + ); + }); + + return wrapWith(res, "'"); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + options['withTimezone'] = true; + + if (!value) return { options, default: '' }; + let patched = trimChar(value, "'"); + + const test = new Date(patched); + + if (isNaN(test.getTime())) return { options, default: `sql\`${value}\`` }; + + return { options, default: `new Date('${patched}')` }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + options['withTimezone'] = true; + + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + let isDrizzleSql: boolean = false; + const def = stringifyArray(res, 'ts', (v) => { + const trimmed = trimChar(v, "'"); + const check = new Date(trimmed); + + if (isNaN(check.getTime())) isDrizzleSql = true; + return `new Date("${trimmed}")`; + }); + return { + options, + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Uuid: SqlType = { + is: (type: string) => /^\s*uuid(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'uuid', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + return v; + }); + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + if (!value) return { options, default: '' }; + + value = trimChar(value, "'"); + if (value === 'gen_random_uuid()') return { options, default: '.defaultRandom()' }; + return { options, default: `"${trimChar(value, "'")}"` }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + return `"${v}"`; + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Interval: SqlType = { + is: (type: string) => + /^interval(\s+(year|month|day|hour|minute|second)(\s+to\s+(month|day|hour|minute|second))?)?(?:\((\d+)\))?(?:\s*\[\s*\])*\s*$/i + .test(type), + drizzleImport: () => 'interval', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + return `"${v}"`; + }); + + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: { precision?: number; fields?: typeof possibleIntervals[number] } = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + const fields = parseIntervalFields(type); + if (fields.fields) options['fields'] = fields.fields; + + if (!value) return { options, default: '' }; + + return { options, default: `"${trimChar(value, "'")}"` }; + }, + toArrayTs: (type, value) => { + const options: any = {}; + const [precision] = parseParams(type); + if (precision) options['precision'] = Number(precision); + const fields = parseIntervalFields(type); + if (fields.fields) options['fields'] = fields.fields; + + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + return `"${v}"`; + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Inet: SqlType = { + is: (type: string) => + /^inet(?:\((\d+)\))?(\[\])?$/i + .test(type), + drizzleImport: () => 'inet', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + return v; + }); + + return wrapWith(res, "'"); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => { + if (!value) return { default: '' }; + return { default: `"${trimChar(value, "'")}"` }; + }, + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return `"${v}"`; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Cidr: SqlType = { + is: (type: string) => + /^cidr(?:\((\d+)\))?(\[\])?$/i + .test(type), + drizzleImport: () => 'cidr', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + return v; + }); + + return wrapWith(res, "'"); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => { + if (!value) return { default: '' }; + return { default: `"${trimChar(value, "'")}"` }; + }, + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return `"${v}"`; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const MacAddr: SqlType = { + is: (type: string) => /^macaddr(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'macaddr', + defaultFromDrizzle: (value) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + return v; + }); + + return wrapWith(res, "'"); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (_, value) => { + if (!value) return { default: '' }; + return { default: `"${trimChar(value, "'")}"` }; + }, + toArrayTs: (_, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + return `"${v}"`; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; +export const MacAddr8: SqlType = { + is: (type: string) => /^macaddr8(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'macaddr8', + defaultFromDrizzle: MacAddr.defaultFromDrizzle, + defaultArrayFromDrizzle: MacAddr.defaultArrayFromDrizzle, + defaultFromIntrospect: MacAddr.defaultFromIntrospect, + defaultArrayFromIntrospect: MacAddr.defaultArrayFromIntrospect, + toTs: MacAddr.toTs, + toArrayTs: MacAddr.toArrayTs, +}; + +export const Vector: SqlType = { + is: (type: string) => /^\s*vector(?:\(\d+\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'vector', + defaultFromDrizzle: (value) => { + return `'[${String(value).replaceAll(' ', '')}]'`; + }, + defaultArrayFromDrizzle: (value, _dimensions) => { + const res = stringifyTuplesArray(value, 'sql', (v: number[]) => { + const res = v.length > 0 ? `"[${String(v).replaceAll(' ', '')}]"` : '"[]"'; + return res; + }); + + return wrapWith(res.replaceAll(' ', ''), "'"); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const options: { dimensions?: number } = {}; + const [dimensions] = parseParams(type); + if (dimensions) options['dimensions'] = Number(dimensions); + + if (!value) return { options, default: '' }; + + return { options, default: trimChar(value, "'") }; + }, + toArrayTs: (type, value) => { + const options: { dimensions?: number } = {}; + const [dimensions] = parseParams(type); + if (dimensions) options['dimensions'] = Number(dimensions); + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + return v; + }, Number(dimensions)), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; +export const HalfVec: SqlType = { + is: (type: string) => /^\s*halfvec(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'halfvec', + defaultFromDrizzle: Vector.defaultFromDrizzle, + defaultArrayFromDrizzle: Vector.defaultArrayFromDrizzle, + defaultFromIntrospect: Vector.defaultFromIntrospect, + defaultArrayFromIntrospect: Vector.defaultArrayFromIntrospect, + toTs: Vector.toTs, + toArrayTs: Vector.toArrayTs, +}; +export const SparseVec: SqlType = { + is: (type: string) => /^\s*sparsevec(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'sparsevec', + defaultFromDrizzle: (value) => { + return wrapWith(String(value), "'"); + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + return `"${String(v).replaceAll(' ', '')}"`; + }); + + return wrapWith(res, "'"); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value; + }, + toTs: (type, value) => { + const options: { dimensions?: number } = {}; + const [dimensions] = parseParams(type); + if (dimensions) options['dimensions'] = Number(dimensions); + + if (!value) return { options, default: '' }; + + return { options, default: value }; + }, + toArrayTs: (type, value) => { + const options: { dimensions?: number } = {}; + const [dimensions] = parseParams(type); + if (dimensions) options['dimensions'] = Number(dimensions); + if (!value) return { options, default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + options, + default: stringifyArray(res, 'ts', (v) => { + return wrapWith(v, "'"); + }), + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Bit: SqlType = { + is: (type: string) => /^\s*bit(?:\(\d+(?:,\d+)?\))?(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'bit', + defaultFromDrizzle: (value, _) => { + return `'${value}'`; + }, + defaultArrayFromDrizzle: (value, _type) => { + return `'${stringifyArray(value, 'sql', (v) => String(v))}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const [dimensions] = parseParams(type); + const options = dimensions ? { dimensions: Number(dimensions) } : {}; + + if (!value) return { options, default: '' }; + + if (/^'[01]+'$/.test(value)) { + return { options, default: value }; + } + + return { options, default: `sql\`${value}\`` }; + }, + toArrayTs: (type, value) => { + const [dimensions] = parseParams(type); + const options = dimensions ? { dimensions: Number(dimensions) } : {}; + + if (!value) return { default: '' }; + + let isDrizzleSql: boolean = false; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + const def = stringifyArray(res, 'ts', (v) => { + if (!/^[01]+$/.test(v)) isDrizzleSql = true; + return `"${v}"`; + }); + + return { + options, + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Point: SqlType = { + is: (type: string) => /^\s*point(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'point', + defaultFromDrizzle: (value, mode) => { + if (!value) return ''; + + if (mode === 'xy') { + const v: { x: number; y: number } = value as { x: number; y: number }; + return Object.values(v).length > 0 ? `'(${v.x},${v.y})'` : ''; + } + if (mode === 'tuple') { + const v: number[] = value as number[]; + return v.length > 0 ? `'(${v[0]},${v[1]})'` : ''; + } + + throw new Error('unknown point type'); + }, + defaultArrayFromDrizzle: function(value: any[], dimensions: number, mode): Column['default'] { + let res; + + if (mode === 'tuple') { + res = stringifyTuplesArray(value, 'sql', (x: number[]) => { + const res = x.length > 0 ? `(${x[0]},${x[1]})` : '{}'; + return `"${res}"`; + }); + } else if (mode === 'xy') { + res = stringifyArray(value, 'sql', (x: { x: number; y: number }, _depth: number) => { + const res = Object.values(x).length > 0 ? `(${x.x},${x.y})` : '{}'; + return `"${res}"`; + }); + } else throw new Error('unknown point type'); + + return wrapWith(res, "'"); + }, + defaultFromIntrospect: function(value: string): string { + return value; + }, + defaultArrayFromIntrospect: function(value: string): string { + return value; + }, + toTs: function( + type: string, + value: string | null, + ): { options?: Record; default: string } { + if (!value) return { default: '' }; + + if (/^'\(\d+,\d+\)'$/.test(value)) { + return { default: trimChar(value, "'").replace('(', '[').replace(')', ']'), options: {} }; + } + + return { default: `sql\`${value}\``, options: {} }; + }, + toArrayTs: function(type: string, value: string | null): { options?: Record; default: string } { + if (!value) return { default: '' }; + + let isDrizzleSql: boolean = false; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + const def = stringifyArray(res, 'ts', (v) => { + if (!/^\(\d+,\d+\)$/.test(v)) isDrizzleSql = true; + return v.replace('(', '[').replace(')', ']'); + }); + + return { + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Line: SqlType = { + is: (type: string) => /^\s*line(?:\s*\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'line', + defaultFromDrizzle: (value, mode) => { + if (!value) return ''; + + if (mode === 'tuple') { + const v: number[] = value as number[]; + return v.length > 0 ? `'{${v[0]},${v[1]},${v[2]}}'` : ''; + } + + if (mode === 'abc') { + const v: { a: number; b: number; c: number } = value as { + a: number; + b: number; + c: number; + }; + return Object.values(v).length > 0 ? `'{${v.a},${v.b},${v.c}}'` : ''; + } + + throw new Error('unknown line type'); + }, + defaultArrayFromDrizzle: function(value: any[], dimensions: number, mode): Column['default'] { + let res; + + if (mode === 'tuple') { + res = stringifyTuplesArray(value, 'sql', (x: number[]) => { + const res = x.length > 0 ? `{${x[0]},${x[1]},${x[2]}}` : '{}'; + return `"${res}"`; + }); + } else if (mode === 'abc') { + res = stringifyArray(value, 'sql', (x: { a: number; b: number; c: number }, _depth: number) => { + const res = Object.values(x).length > 0 ? `{${x.a},${x.b},${x.c}}` : '{}'; + return `"${res}"`; + }); + } else throw new Error('unknown line type'); + + return wrapWith(res, "'"); + }, + defaultFromIntrospect: function(value: string): string { + return value; + }, + defaultArrayFromIntrospect: function(value: string): string { + return value; + }, + toTs: function( + type: string, + value: string | null, + ): { options?: Record; default: string } { + if (!value) return { default: '' }; + + if (/^'\{\d+,\d+,\d+\}'$/.test(value)) { + return { default: trimChar(value, "'").replace('{', '[').replace('}', ']'), options: {} }; + } + + return { default: `sql\`${value}\``, options: {} }; + }, + toArrayTs: function(type: string, value: string | null): { options?: Record; default: string } { + if (!value) return { default: '' }; + + let isDrizzleSql: boolean = false; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + const def = stringifyArray(res, 'ts', (v) => { + if (!/^\{\d+,\d+,\d+\}$/.test(v)) isDrizzleSql = true; + return v.replace('{', '[').replace('}', ']'); + }); + + return { + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const GeometryPoint: SqlType = { + is: (type: string) => /^\s*geometry\(point(?:,\d+)?\)(?:\[\s*\])*\s*$/i.test(type), + drizzleImport: () => 'geometry', + defaultFromDrizzle: (value, mode, config) => { + if (!value) return ''; + + const srid: number | undefined = config ? Number(config) : undefined; + let sridPrefix = srid ? `SRID=${srid};` : ''; + if (mode === 'tuple') { + const v: number[] = value as number[]; + return v.length > 0 ? `'${sridPrefix}POINT(${v[0]} ${v[1]})'` : ''; + } + + if (mode === 'object') { + const v: { x: number; y: number } = value as { x: number; y: number }; + return Object.values(v).length > 0 + ? `'${sridPrefix}POINT(${v.x} ${v.y})'` + : ''; + } + + throw new Error('unknown geometry type'); + }, + defaultArrayFromDrizzle: function( + value: any[], + dimensions: number, + mode, + config, + ): string { + // Parse to ARRAY[] + let res; + const srid: number | undefined = config ? Number(config) : undefined; + let sridPrefix = srid ? `SRID=${srid};` : ''; + if (mode === 'tuple') { + res = stringifyTuplesArray(value, 'geometry-sql', (x: number[]) => { + const res = `${sridPrefix}POINT(${x[0]} ${x[1]})`; + return `'${res}'`; + }); + } else if (mode === 'object') { + res = stringifyArray(value, 'geometry-sql', (x: { x: number; y: number }, _depth: number) => { + const res = `${sridPrefix}POINT(${x.x} ${x.y})`; + return `'${res}'`; + }); + } else throw new Error('unknown geometry type'); + + return res; + }, + defaultFromIntrospect: function(value: string): Column['default'] { + let def: string; + + try { + const { srid, point } = parseEWKB(trimChar(value, "'")); + let sridPrefix = srid ? `SRID=${srid};` : ''; + def = `'${sridPrefix}POINT(${point[0]} ${point[1]})'`; + } catch { + def = value; + } + + return def; + }, + defaultArrayFromIntrospect: function(value: string): Column['default'] { + // If {} array - parse to ARRAY[] + + /** + * Potential values here are: + * DEFAULT {'POINT(10 10)'} -> '{010100000000000000000024400000000000002440}'::geometry(Point,435)[] + * DEFAULT ARRAY['POINT(10 10)'] -> ARRAY['POINT(10 10)'::text] + * DEFAULT ARRAY['POINT(10 10)']::geometry(point) -> ARRAY['010100000000000000000024400000000000002440'::geometry(Point)] + * DEFAULT ARRAY['POINT(10 10)'::text]::geometry(point) -> ARRAY[('POINT(10 10)'::text)::geometry(Point)] + */ + let def = value; + + if (def === "'{}'") return def; + + try { + if (value.startsWith("'{") && value.endsWith("}'")) { + const parsed = parseArray(trimChar(value, "'")); + + def = stringifyArray(parsed, 'geometry-sql', (v) => { + try { + const { srid, point } = parseEWKB(v); + let sridPrefix = srid ? `SRID=${srid};` : ''; + return `'${sridPrefix}POINT(${point[0]} ${point[1]})'`; + } catch { + return v; + } + }); + } else { + const parsed = parseExpressionArray(value); + def = stringifyArray(parsed, 'geometry-sql', (v) => { + v = trimDefaultValueSuffix(trimDefaultValueSuffix(v).replace(/^\((.*)\)$/, '$1')); + try { + const { srid, point } = parseEWKB(trimChar(v, "'")); + let sridPrefix = srid ? `SRID=${srid};` : ''; + return `'${sridPrefix}POINT(${point[0]} ${point[1]})'`; + } catch { + return v; + } + }); + } + } catch {} + + return def; + }, + toTs: function(type: string, value: string | null): { options?: Record; default: string } { + if (!value) return { default: '' }; + + const options: { srid?: number; type: 'point' } = { type: 'point' }; + + const sridOption = splitSqlType(type).options?.split(',')[1]; + if (sridOption) options.srid = Number(sridOption); + + if (!value.includes('POINT(')) { + return { default: `sql\`${value}\``, options }; + } + + const sridInDef = value.startsWith("'SRID=") ? Number(value.split('SRID=')[1].split(';')[0]) : undefined; + if (!sridOption && sridInDef) { + return { default: `sql\`${value}\``, options }; + } + + const [res1, res2] = value.split('POINT(')[1].split(')')[0].split(' '); + + return { default: `[${res1},${res2}]`, options }; + }, + toArrayTs: function(type: string, value: string | null): { options?: Record; default: string } { + if (!value) return { default: '' }; + + const options: { srid?: number; type: 'point' } = { type: 'point' }; + const sridOption = splitSqlType(type).options?.split(',')[1]; + if (sridOption) options.srid = Number(sridOption); + + if (!value) return { default: '', options }; + + if (value === "'{}'") return { default: '[]', options }; + + let isDrizzleSql; + const srids: number[] = []; + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseExpressionArray(trimmed); + + const def = stringifyArray(res, 'ts', (v) => { + if (v.includes('SRID=')) { + srids.push(Number(v.split('SRID=')[1].split(';')[0])); + } + const [res1, res2] = value.split('POINT(')[1].split(')')[0].split(' '); + if (!value.includes('POINT(')) isDrizzleSql = true; + + return `[${res1}, ${res2}]`; + }); + + if (!isDrizzleSql) isDrizzleSql = srids.some((it) => it !== srids[0]); + // if there is no srid in type and user defines srids in default + // we need to return point with srids + if (!isDrizzleSql && !sridOption && srids.length > 0) isDrizzleSql = true; + + return { + options, + default: isDrizzleSql ? `sql\`${value}\`` : def, + }; + } catch { + return { options, default: `sql\`${value}\`` }; + } + }, +}; + +export const Enum: SqlType = { + is: (_type: string) => { + throw Error('Mocked'); + }, + drizzleImport: () => 'pgEnum', + defaultFromDrizzle: (value) => { + if (!value) return ''; + const escaped = (value as string).replaceAll("'", "''"); + return `'${escaped}'`; + }, + defaultArrayFromDrizzle: (value) => { + const res = stringifyArray(value, 'sql', (v) => { + if (typeof v !== 'string') throw new Error(); + const escaped = escapeForSqlDefault(v, 'pg-arr'); + if (v.includes('\\') || v.includes('"') || v.includes(',')) { + return `"${escaped}"`; + } + return escaped; + }); + return `'${res}'`; + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + const [length] = parseParams(type); + if (length) options['length'] = Number(length); + if (!value) return { options, default: '' }; + const escaped = escapeForTsLiteral( + trimChar(value, "'").replaceAll("''", "'"), + ); + return { options, default: escaped }; + }, + toArrayTs: (type, value) => { + if (!value) return { default: '' }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + const escaped = escapeForTsLiteral( + unescapeFromSqlDefault(trimChar(v, "'")), + ); + return escaped; + }), + }; + } catch { + return { default: `sql\`${value}\`` }; + } + }, +}; + +export const Serial: SqlType = { + is: (type: string) => /^(?:serial)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'serial', + defaultFromDrizzle: (value) => { + throw new Error(`Unexpected default for serial type: ${value}`); + }, + defaultArrayFromDrizzle: (v) => { + throw new Error(`Unexpected default for serial type: ${v}`); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: function(value: string): string { + return value; + }, + toTs: () => { + return { default: '' }; + }, + toArrayTs: () => { + return { default: '' }; + }, +}; + +export const BigSerial: SqlType = { + is: (type: string) => /^(?:bigserial)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'bigserial', + defaultFromDrizzle: Serial.defaultFromDrizzle, + defaultArrayFromDrizzle: Serial.defaultArrayFromDrizzle, + defaultFromIntrospect: Serial.defaultFromIntrospect, + defaultArrayFromIntrospect: Serial.defaultArrayFromIntrospect, + toTs: () => { + return { options: { mode: 'number' }, default: '' }; + }, + toArrayTs: () => { + return { options: { mode: 'number' }, default: '' }; + }, +}; +export const SmallSerial: SqlType = { + is: (type: string) => /^(?:smallserial)(?:[\s(].*)?$/i.test(type), + drizzleImport: () => 'smallserial', + defaultFromDrizzle: Serial.defaultFromDrizzle, + defaultArrayFromDrizzle: Serial.defaultArrayFromDrizzle, + defaultFromIntrospect: Serial.defaultFromIntrospect, + defaultArrayFromIntrospect: Serial.defaultArrayFromIntrospect, + toTs: Serial.toTs, + toArrayTs: Serial.toArrayTs, +}; + +export const Custom: SqlType = { + is: (_type: string) => { + throw Error('Mocked'); + }, + drizzleImport: () => 'customType', + defaultFromDrizzle: (value) => { + if (!value) return ''; + return String(value); + }, + defaultArrayFromDrizzle: (value) => { + return String(value); + }, + defaultFromIntrospect: (value) => { + return value; + }, + defaultArrayFromIntrospect: (value) => { + return value as string; + }, + toTs: (type, value) => { + const options: any = {}; + if (!value) return { options, default: '', customType: type }; + const escaped = escapeForTsLiteral(value); + return { default: escaped, customType: type }; + }, + toArrayTs: (type, value) => { + if (!value) return { default: '', customType: type }; + + try { + const trimmed = trimChar(trimChar(value, ['(', ')']), "'"); + const res = parseArray(trimmed); + + return { + default: stringifyArray(res, 'ts', (v) => { + const escaped = escapeForTsLiteral(v); + return escaped; + }), + customType: type, + }; + } catch { + return { default: `sql\`${value}\``, customType: type }; + } + }, +}; + +export const typeFor = (type: string, isEnum: boolean): SqlType => { + if (isEnum) return Enum; + if (SmallInt.is(type)) return SmallInt; + if (Int.is(type)) return Int; + if (BigInt.is(type)) return BigInt; + if (Numeric.is(type)) return Numeric; + if (Real.is(type)) return Real; + if (Double.is(type)) return Double; + if (Boolean.is(type)) return Boolean; + if (Char.is(type)) return Char; + if (Varchar.is(type)) return Varchar; + if (Text.is(type)) return Text; + if (Json.is(type)) return Json; + if (Jsonb.is(type)) return Jsonb; + if (Time.is(type)) return Time; + if (TimeTz.is(type)) return TimeTz; + if (Timestamp.is(type)) return Timestamp; + if (TimestampTz.is(type)) return TimestampTz; + if (Uuid.is(type)) return Uuid; + if (Interval.is(type)) return Interval; + if (Inet.is(type)) return Inet; + if (Cidr.is(type)) return Cidr; + if (MacAddr.is(type)) return MacAddr; + if (MacAddr8.is(type)) return MacAddr8; + if (Vector.is(type)) return Vector; + if (HalfVec.is(type)) return HalfVec; + if (SparseVec.is(type)) return SparseVec; + if (Bit.is(type)) return Bit; + if (Point.is(type)) return Point; + if (Line.is(type)) return Line; + if (DateType.is(type)) return DateType; + if (GeometryPoint.is(type)) return GeometryPoint; + if (Serial.is(type)) return Serial; + if (SmallSerial.is(type)) return SmallSerial; + if (BigSerial.is(type)) return BigSerial; + return Custom; +}; + +export const splitSqlType = (sqlType: string) => { + // timestamp(6) with time zone -> [timestamp, 6, with time zone] + const toMatch = sqlType.replaceAll('[]', ''); + const match = toMatch.match(/^(\w+(?:\s+\w+)*)\(([^)]*)\)(?:\s+with time zone)?$/i); + let type = match ? (match[1] + (match[3] ?? '')) : toMatch; + let options = match ? match[2].replaceAll(', ', ',') : null; + + return { type, options }; +}; + +export const vectorOps = [ + 'vector_l2_ops', + 'vector_ip_ops', + 'vector_cosine_ops', + 'vector_l1_ops', + 'bit_hamming_ops', + 'bit_jaccard_ops', + 'halfvec_l2_ops', + 'sparsevec_l2_ops', +]; + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +export function stringFromIdentityProperty(field: string | number | undefined): string | undefined { + return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : String(field); +} + +export function maxRangeForIdentityBasedOn(columnType: string) { + return columnType === 'integer' ? '2147483647' : columnType === 'bigint' ? '9223372036854775807' : '32767'; +} + +export function minRangeForIdentityBasedOn(columnType: string) { + return columnType === 'integer' ? '-2147483648' : columnType === 'bigint' ? '-9223372036854775808' : '-32768'; +} + +/* + we can't check for `nextval('${schemaPrefix}${table}_${column}_seq'::regclass)` perfect match + since table or column might be renamed, while sequence preserve name and it will trigger + subsequent ddl diffs + */ +export const isSerialExpression = (expr: string, schema: string) => { + const schemaPrefix = schema === 'public' ? '' : `${schema}.`; + return (expr.startsWith(`nextval('${schemaPrefix}`) || expr.startsWith(`nextval('"${schemaPrefix}`)) + && (expr.endsWith(`_seq'::regclass)`) || expr.endsWith(`_seq"'::regclass)`)); +}; + +export function stringFromDatabaseIdentityProperty(field: any): string | null { + return typeof field === 'string' + ? (field as string) + : typeof field === 'undefined' || field === null + ? null + : typeof field === 'bigint' + ? field.toString() + : String(field); +} + +export function buildArrayString(array: any[], sqlType: string): string { + // we check if array consists only of empty arrays down to 5th dimension + if (array.flat(5).length === 0) { + return '{}'; + } + + const values = array + .map((value) => { + if (typeof value === 'number' || typeof value === 'bigint') { + return value.toString(); + } + + if (typeof value === 'boolean') { + return value ? 't' : 'f'; + } + + if (Array.isArray(value)) { + return buildArrayString(value, sqlType); + } + + if (sqlType.startsWith('numeric')) { + return String(value); + } + + if (value instanceof Date) { + if (sqlType === 'date') { + return `${value.toISOString().split('T')[0]}`; + } else if (sqlType === 'timestamp') { + return `"${value.toISOString().replace('T', ' ').replace('Z', ' ').slice(0, 23)}"`; + } else { + return `"${value.toISOString().replace('T', ' ').replace('Z', '')}"`; + } + } + + if (typeof value === 'object') { + return `"${JSON.stringify(value).replaceAll('"', '\\"')}"`; + } + + if (typeof value === 'string') { + if (/^[a-zA-Z0-9./_':-]+$/.test(value)) return value.replaceAll("'", "''"); + return `"${value.replaceAll("'", "''").replaceAll('"', '\\"')}"`; + } + + return `"${value}"`; + }) + .join(','); + + return `{${values}}`; +} + +export type OnAction = PostgresEntities['fks']['onUpdate']; +export const parseOnType = (type: string): OnAction => { + switch (type) { + case 'a': + return 'NO ACTION'; + case 'r': + return 'RESTRICT'; + case 'n': + return 'SET NULL'; + case 'c': + return 'CASCADE'; + case 'd': + return 'SET DEFAULT'; + default: + throw new Error(`Unknown foreign key type: ${type}`); + } +}; + +export const systemNamespaceNames = ['pg_toast', 'pg_catalog', 'information_schema']; +export const isSystemNamespace = (name: string) => { + return name.startsWith('pg_toast') || name === 'pg_default' || name === 'pg_global' || name.startsWith('pg_temp_') + || systemNamespaceNames.indexOf(name) >= 0; +}; + +export const isSystemRole = (name: string) => { + return name === 'postgres' || name.startsWith('pg_'); +}; + +type DefaultMapper = (value: IN | IN[]) => Column['default']; + +export const defaultForVector: DefaultMapper<[number, number, number]> = ( + value, +) => { + const res = stringifyTuplesArray( + value, + 'sql', + (x: number[], depth: number) => { + const res = x.length > 0 ? `[${x[0]},${x[1]},${x[2]}]` : '{}'; + return depth === 0 ? res : `"${res}"`; + }, + ); + return `'${res}'`; +}; + +// TODO: check +// export const splitExpressions = (input: string | null): string[] => { +// if (!input) return []; + +// const wrapped = input.startsWith('(') && input.endsWith(')'); +// input = wrapped ? input.slice(1, input.length - 1) : input; + +// // This regex uses three alternatives: +// // 1. Quoted strings that allow escaped quotes: '([^']*(?:''[^']*)*)' +// // 2. Parenthesized expressions that support one level of nesting: +// // \((?:[^()]+|\([^()]*\))*\) +// // 3. Any character that is not a comma, quote, or parenthesis: [^,'()] +// // +// // It also trims optional whitespace before and after each token, +// // requiring that tokens are followed by a comma or the end of the string. +// // const regex = /\s*((?:'[^']*(?:''[^']*)*'|\((?:[^()]+|\([^()]*\))*\)|[^,'()])+)\s*(?:,|$)/g; +// const regex = /\s*((?:'(?:[^']|'')*'|\((?:[^()]+|\([^()]*\))*\)|[^,'()])+)\s*(?:,|$)/g; +// const result: string[] = []; +// let match: RegExpExecArray | null; + +// while ((match = regex.exec(input)) !== null) { +// result.push(match[1].trim()); +// } + +// return result; +// }; + +export const wrapRecord = (it: Record) => { + return { + bool: (key: string) => { + if (key in it) { + if (it[key] === 'true') { + return true; + } + if (it[key] === 'false') { + return false; + } + + throw new Error(`Invalid options boolean value for ${key}: ${it[key]}`); + } + return null; + }, + num: (key: string) => { + if (key in it) { + const value = Number(it[key]); + if (isNaN(value)) { + throw new Error(`Invalid options number value for ${key}: ${it[key]}`); + } + return value; + } + return null; + }, + str: (key: string) => { + if (key in it) { + return it[key]; + } + return null; + }, + literal: (key: string, allowed: T[]): T | null => { + if (!(key in it)) return null; + const value = it[key]; + + if (allowed.includes(value as T)) { + return value as T; + } + throw new Error(`Invalid options literal value for ${key}: ${it[key]}`); + }, + }; +}; + +/* + CHECK (((email)::text <> 'test@gmail.com'::text)) + Where (email) is column in table +*/ +export const parseCheckDefinition = (value: string): string => { + return value.replace(/^CHECK\s*\(\(/, '').replace(/\)\)\s*$/, ''); +}; + +export const parseViewDefinition = (value: string | null | undefined): string | null => { + if (!value) return null; + return value.replace(/\s+/g, ' ').replace(';', '').trim(); +}; + +export const defaultNameForIdentitySequence = (table: string, column: string) => { + return `${table}_${column}_seq`; +}; + +export const defaultNameForPK = (table: string) => { + return `${table}_pkey`; +}; + +export const defaultNameForFK = (table: string, columns: string[], tableTo: string, columnsTo: string[]) => { + const desired = `${table}_${columns.join('_')}_${tableTo}_${columnsTo.join('_')}_fkey`; + const res = desired.length > 63 + ? table.length < 63 - 18 // _{hash(12)}_fkey + ? `${table}_${hash(desired)}_fkey` + : `${hash(desired)}_fkey` // 1/~3e21 collision chance within single schema, it's fine + : desired; + return res; +}; + +export const defaultNameForUnique = (table: string, ...columns: string[]) => { + return `${table}_${columns.join('_')}_key`; +}; + +export const defaultNameForIndex = (table: string, columns: string[]) => { + return `${table}_${columns.join('_')}_idx`; +}; + +export const trimDefaultValueSuffix = (value: string) => { + let res = value.endsWith('[]') ? value.slice(0, -2) : value; + res = res.replace(/(::["\w.\s]+(?:\([^)]*\))?(?:\swith(?:out)?\stime\szone)?(?:\[\])?)+$/gi, ''); + return res; +}; + +export const defaultForColumn = ( + type: string, + def: string | boolean | number | null | undefined, + dimensions: number, + isEnum: boolean, +): Column['default'] => { + if ( + def === null + || def === undefined + || type === 'serial' + || type === 'smallserial' + || type === 'bigserial' + ) { + return null; + } + + if (typeof def === 'boolean') { + return String(def); + } + + if (typeof def === 'number') { + return String(def); + } + + let value = trimDefaultValueSuffix(def); + const grammarType = typeFor(type, isEnum); + if (dimensions > 0) return grammarType.defaultArrayFromIntrospect(value); + return grammarType.defaultFromIntrospect(String(value)); +}; + +export const defaultToSQL = ( + it: Pick, +) => { + if (!it.default) return ''; + + const { type: columnType, dimensions, typeSchema } = it; + const value = it.default; + + if (typeSchema) { + const schemaPrefix = typeSchema && typeSchema !== 'public' ? `"${typeSchema}".` : ''; + return `${value}::${schemaPrefix}"${columnType}"${dimensions > 0 ? '[]' : ''}`; + } + + const suffix = dimensions > 0 ? `::${columnType}[]` : ''; + + const defaultValue = it.default ?? ''; + return `${defaultValue}${suffix}`; +}; + +export const isDefaultAction = (action: string) => { + return action.toLowerCase() === 'no action'; +}; + +export const isSerialType = (type: string) => { + return /^(?:serial|bigserial|smallserial)$/i.test(type); +}; + +// map all to utc with saving precision +function formatTimestampTz(date: string) { + if (!isTimestamp(date)) return date; + + // Convert to Temporal.Instant + const instant = Temporal.Instant.from(date); + + const iso = instant.toString({ timeZone: 'UTC' }); + + // const fractionalDigits = iso.split('.')[1]!.length; + + // // decide whether to limit precision + // const formattedPrecision = fractionalDigits > precision + // // @ts-expect-error + // ? instant.toString({ fractionalSecondDigits: precision }) + // : iso; + + return iso; +} +function formatTime(date: string) { + if (!isTime(date)) return date; + + // Convert to Temporal.Instant + const instant = Temporal.Instant.from(`1970-01-01 ${date}`); + + const iso = instant.toString({ timeZone: 'UTC' }); + + // const fractionalDigits = iso.split('.')[1]!.length; + + // // decide whether to limit precision + // const formattedPrecision = fractionalDigits > precision + // // @ts-expect-error + // ? instant.toString({ fractionalSecondDigits: precision }) + // : iso; + + return iso; +} +export const defaultsCommutative = ( + diffDef: DiffEntities['columns']['default'], + type: string, + dimensions: number, +): boolean => { + if (!diffDef) return false; + + let from = diffDef.from; + let to = diffDef.to; + + if (from === to) return true; + if (from === `(${to})`) return true; + if (to === `(${from})`) return true; + + if (type.startsWith('timestamp') && type.includes('with time zone')) { + if (from && to) { + from = trimChar(from, "'"); + to = trimChar(to, "'"); + + if (dimensions > 0) { + try { + const fromArray = stringifyArray(parseArray(from), 'sql', (v) => { + return `"${formatTimestampTz(v)}"`; + }); + const toArray = stringifyArray(parseArray(to), 'sql', (v) => { + return `"${formatTimestampTz(v)}"`; + }); + + if (toArray === fromArray) return true; + } catch {} + + return false; + } + + if (formatTimestampTz(to) === formatTimestampTz(from)) return true; + } + + return false; + } + + if (type.startsWith('time') && type.includes('with time zone')) { + if (from && to) { + from = trimChar(from, "'"); + to = trimChar(to, "'"); + + if (dimensions > 0) { + try { + const fromArray = stringifyArray(parseArray(from), 'sql', (v) => { + return `"${formatTime(v)}"`; + }); + const toArray = stringifyArray(parseArray(to), 'sql', (v) => { + return `"${formatTime(v)}"`; + }); + + if (toArray === fromArray) return true; + } catch {} + + return false; + } + + if (formatTime(to) === formatTime(from)) return true; + } + + return false; + } + + // if define '[4.0]', psql will store it as '[4]' + if (type.startsWith('vector')) { + if (from?.replaceAll('.0', '') === to) return true; + } + + return false; +}; + +export const defaults = { + /* + By default, PostgreSQL uses the cluster’s default tablespace (which is named 'pg_default') + + This operation requires an exclusive lock on the materialized view (it rewrites the data file), + and you must have CREATE privilege on the target tablespace. + If you have indexes on the materialized view, note that moving the base table does not automatically move its indexes. + Each index is a separate object and retains its original tablespace​. + + You should move indexes individually, for example: + sql`ALTER INDEX my_matview_idx1 SET TABLESPACE pg_default`; + sql`ALTER INDEX my_matview_idx2 SET TABLESPACE pg_default`; + */ + tablespace: 'pg_default', + + /* + The table access method (the storage engine format) is chosen when the materialized view is created, + using the optional USING clause. + If no method is specified, it uses the default access method (typically the regular heap storage)​ + + sql` + CREATE MATERIALIZED VIEW my_matview + USING heap -- storage access method; "heap" is the default + AS SELECT ...; + ` + + Starting with PostgreSQL 15, you can alter a materialized view’s access method in-place. + PostgreSQL 15 introduced support for ALTER MATERIALIZED VIEW ... SET ACCESS METHOD new_method + */ + accessMethod: 'heap', + + /* + By default, NULL values are treated as distinct entries. + Specifying NULLS NOT DISTINCT on unique indexes / constraints will cause NULL to be treated as not distinct, + or in other words, equivalently. + + https://www.postgresql.org/about/featurematrix/detail/392/ + */ + nullsNotDistinct: false, + + identity: { + startWith: '1', + increment: '1', + min: '1', + maxFor: (type: string) => { + if (type === 'smallint') return '32767'; + if (type === 'integer') return '2147483647'; + if (type === 'bigint') return '9223372036854775807'; + throw new Error(`Unknow identity column type: ${type}`); + }, + cache: 1, + cycle: false, + }, + + index: { + method: 'btree', + }, + + types: { + geometry: { + defSrid: 0, + }, + }, +} as const; diff --git a/drizzle-kit/src/dialects/postgres/introspect.ts b/drizzle-kit/src/dialects/postgres/introspect.ts new file mode 100644 index 0000000000..2fccb713ea --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/introspect.ts @@ -0,0 +1,1249 @@ +import camelcase from 'camelcase'; +import type { IntrospectStage, IntrospectStatus } from '../../cli/views'; +import { type DB, splitExpressions, trimChar } from '../../utils'; +import type { EntityFilter } from '../pull-utils'; +import type { + CheckConstraint, + Enum, + ForeignKey, + Index, + InterimColumn, + InterimIndex, + InterimSchema, + Policy, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, + ViewColumn, +} from './ddl'; +import { + defaultForColumn, + isSerialExpression, + isSystemNamespace, + parseOnType, + parseViewDefinition, + stringFromDatabaseIdentityProperty as parseIdentityProperty, + wrapRecord, +} from './grammar'; + +// TODO: tables/schema/entities -> filter: (entity: {type: ... , metadata: ... }) => boolean; +// TODO: since we by default only introspect public + +// * use === for oid comparisons to prevent issues with different number types (string vs number) (pg converts oid to number automatically - pgsql cli returns as string) + +export const fromDatabase = async ( + db: DB, + filter: EntityFilter = () => true, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, + queryCallback: ( + id: string, + rows: Record[], + error: Error | null, + ) => void = () => {}, +): Promise => { + const schemas: Schema[] = []; + const enums: Enum[] = []; + const tables: PostgresEntities['tables'][] = []; + const columns: InterimColumn[] = []; + const indexes: InterimIndex[] = []; + const pks: PrimaryKey[] = []; + const fks: ForeignKey[] = []; + const uniques: UniqueConstraint[] = []; + const checks: CheckConstraint[] = []; + const sequences: Sequence[] = []; + const roles: Role[] = []; + const privileges: Privilege[] = []; + const policies: Policy[] = []; + const views: View[] = []; + const viewColumns: ViewColumn[] = []; + + // type OP = { + // oid: number | string; + // name: string; + // default: boolean; + // }; + + type Namespace = { + oid: number | string; + name: string; + }; + + // ! Use `pg_catalog` for system tables, functions and operators (Prevent security vulnerabilities - overwriting system tables, functions and operators) + // ! Do not use `::regnamespace::text` to get schema name, because it does not work with schemas that have uppercase letters (e.g. MySchema -> "MySchema") + + // TODO: potential improvements + // use pg_catalog.has_table_privilege(pg_class.oid, 'SELECT') for tables + // --- default access method + // SHOW default_table_access_method; + // SELECT current_setting('default_table_access_method') AS default_am; + + const accessMethodsQuery = db.query<{ oid: number | string; name: string }>( + `SELECT oid, amname as name FROM pg_catalog.pg_am WHERE amtype OPERATOR(pg_catalog.=) 't' ORDER BY pg_catalog.lower(amname);`, + ).then((rows) => { + queryCallback('accessMethods', rows, null); + return rows; + }).catch((err) => { + queryCallback('accessMethods', [], err); + throw err; + }); + + const tablespacesQuery = db.query<{ + oid: number | string; + name: string; + }>( + `SELECT oid, spcname as "name" FROM pg_catalog.pg_tablespace ORDER BY pg_catalog.lower(spcname)`, + ).then((rows) => { + queryCallback('tablespaces', rows, null); + return rows; + }).catch((err) => { + queryCallback('tablespaces', [], err); + throw err; + }); + + const namespacesQuery = db.query( + 'SELECT oid, nspname as name FROM pg_catalog.pg_namespace ORDER BY pg_catalog.lower(nspname)', + ) + .then((rows) => { + queryCallback('namespaces', rows, null); + return rows; + }).catch((err) => { + queryCallback('namespaces', [], err); + throw err; + }); + + const defaultsQuery = db.query<{ + tableId: number | string; + ordinality: number; + expression: string; + }>(` + SELECT + adrelid AS "tableId", + adnum AS "ordinality", + pg_catalog.pg_get_expr(adbin, adrelid) AS "expression" + FROM + pg_catalog.pg_attrdef; + `).then((rows) => { + queryCallback('defaults', rows, null); + return rows; + }).catch((err) => { + queryCallback('defaults', [], err); + throw err; + }); + + const [ams, tablespaces, namespaces, defaultsList] = await Promise.all([ + accessMethodsQuery, + tablespacesQuery, + namespacesQuery, + defaultsQuery, + ]); + + const { other: filteredNamespaces } = namespaces.reduce<{ system: Namespace[]; other: Namespace[] }>( + (acc, it) => { + if (isSystemNamespace(it.name)) { + acc.system.push(it); + } else { + acc.other.push(it); + } + return acc; + }, + { system: [], other: [] }, + ); + + const filteredNamespacesStringForSQL = filteredNamespaces.map((ns) => `'${ns.name}'`).join(','); + + schemas.push(...filteredNamespaces.map((it) => ({ entityType: 'schemas', name: it.name }))); + + type TableListItem = { + oid: number | string; + schema: string; + name: string; + /* r - table, p - partitioned table, v - view, m - materialized view */ + kind: 'r' | 'p' | 'v' | 'm'; + accessMethod: number | string; + options: string[] | null; + rlsEnabled: boolean; + tablespaceid: number | string; + definition: string | null; + }; + const tablesList = filteredNamespacesStringForSQL + ? await db + .query(` + SELECT + pg_class.oid, + nspname as "schema", + relname AS "name", + relkind AS "kind", + relam as "accessMethod", + reloptions::text[] as "options", + reltablespace as "tablespaceid", + relrowsecurity AS "rlsEnabled", + CASE + WHEN relkind OPERATOR(pg_catalog.=) 'v' OR relkind OPERATOR(pg_catalog.=) 'm' + THEN pg_catalog.pg_get_viewdef(pg_class.oid, true) + ELSE null + END as "definition" + FROM + pg_catalog.pg_class + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) relnamespace + WHERE + relkind IN ('r', 'p', 'v', 'm') + AND nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_catalog.lower(nspname), pg_catalog.lower(relname); + `).then((rows) => { + queryCallback('tables', rows, null); + return rows; + }).catch((err) => { + queryCallback('tables', [], err); + throw err; + }) + : [] as TableListItem[]; + + const viewsList = tablesList.filter((it) => { + it.schema = trimChar(it.schema, '"'); // when camel case name e.x. mySchema -> it gets wrapped to "mySchema" + return it.kind === 'v' || it.kind === 'm'; + }); + + const filteredTables = tablesList.filter((it) => { + it.schema = trimChar(it.schema, '"'); // when camel case name e.x. mySchema -> it gets wrapped to "mySchema" + return it.kind === 'r' || it.kind === 'p'; + }); + + const filteredTableIds = filteredTables.map((it) => it.oid); + const viewsIds = viewsList.map((it) => it.oid); + const filteredViewsAndTableIds = [...filteredTableIds, ...viewsIds]; + + const filterByTableIds = filteredTableIds.length > 0 ? `(${filteredTableIds.join(',')})` : ''; + const filterByTableAndViewIds = filteredViewsAndTableIds.length > 0 ? `(${filteredViewsAndTableIds.join(',')})` : ''; + + for (const table of filteredTables) { + tables.push({ + entityType: 'tables', + schema: trimChar(table.schema, "'"), + name: table.name, + isRlsEnabled: table.rlsEnabled, + }); + } + + const dependQuery = db.query<{ + oid: number | string; + tableId: number | string; + ordinality: number | string; + + /* + a - An “auto” dependency means the dependent object can be dropped separately, + and will be automatically removed if the referenced object is dropped—regardless of CASCADE or RESTRICT. + Example: A named constraint on a table is auto-dependent on the table, so it vanishes when the table is dropped + + i - An “internal” dependency marks objects that were created as part of building another object. + Directly dropping the dependent is disallowed—you must drop the referenced object instead. + Dropping the referenced object always cascades to the dependent + Example: A trigger enforcing a foreign-key constraint is internally dependent on its pg_constraint entry + */ + deptype: 'a' | 'i'; + }>( + `SELECT + objid as oid, + refobjid as "tableId", + refobjsubid as "ordinality", + deptype + FROM + pg_catalog.pg_depend + WHERE ${filterByTableIds ? ` refobjid IN ${filterByTableIds}` : 'false'};`, + ).then((rows) => { + queryCallback('depend', rows, null); + return rows; + }).catch((err) => { + queryCallback('depend', [], err); + throw err; + }); + + type EnumListItem = { + oid: number | string; + name: string; + schema: string; + arrayTypeId: number | string; + ordinality: number; + value: string; + }; + const enumsQuery = filteredNamespacesStringForSQL + ? db + .query(`SELECT + pg_type.oid as "oid", + typname as "name", + nspname as "schema", + pg_type.typarray as "arrayTypeId", + pg_enum.enumsortorder AS "ordinality", + pg_enum.enumlabel AS "value" + FROM + pg_catalog.pg_type + JOIN pg_catalog.pg_enum ON pg_enum.enumtypid OPERATOR(pg_catalog.=) pg_type.oid + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) pg_type.typnamespace + WHERE + pg_type.typtype OPERATOR(pg_catalog.=) 'e' + AND nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_type.oid, pg_enum.enumsortorder + `).then((rows) => { + queryCallback('enums', rows, null); + return rows; + }).catch((err) => { + queryCallback('enums', [], err); + throw err; + }) + : [] as EnumListItem[]; + + // fetch for serials, adrelid = tableid + const serialsQuery = db + .query<{ + oid: number | string; + tableId: number | string; + ordinality: number; + expression: string; + }>(`SELECT + oid, + adrelid as "tableId", + adnum as "ordinality", + pg_catalog.pg_get_expr(adbin, adrelid) as "expression" + FROM + pg_catalog.pg_attrdef + WHERE ${filterByTableIds ? ` adrelid IN ${filterByTableIds}` : 'false'} + `).then((rows) => { + queryCallback('serials', rows, null); + return rows; + }).catch((err) => { + queryCallback('serials', [], err); + throw err; + }); + + type SequenceListItem = { + schema: string; + oid: number | string; + name: string; + startWith: string; + minValue: string; + maxValue: string; + incrementBy: string; + cycle: boolean; + cacheSize: number; + }; + const sequencesQuery = filteredNamespacesStringForSQL + ? db.query(`SELECT + nspname as "schema", + relname as "name", + seqrelid as "oid", + seqstart as "startWith", + seqmin as "minValue", + seqmax as "maxValue", + seqincrement as "incrementBy", + seqcycle as "cycle", + seqcache as "cacheSize" + FROM pg_catalog.pg_sequence + JOIN pg_catalog.pg_class ON pg_sequence.seqrelid OPERATOR(pg_catalog.=) pg_class.oid + JOIN pg_catalog.pg_namespace ON pg_namespace.oid OPERATOR(pg_catalog.=) pg_class.relnamespace + WHERE nspname IN (${filteredNamespacesStringForSQL}) + ORDER BY pg_catalog.lower(nspname), pg_catalog.lower(relname); + `).then((rows) => { + queryCallback('sequences', rows, null); + return rows; + }).catch((err) => { + queryCallback('sequences', [], err); + throw err; + }) + : [] as SequenceListItem[]; + + // I'm not yet aware of how we handle policies down the pipeline for push, + // and since postgres does not have any default policies, we can safely fetch all of them for now + // and filter them out in runtime, simplifying filterings + const policiesQuery = db.query< + { + schema: string; + table: string; + name: string; + as: Policy['as']; + to: string | string[]; + for: Policy['for']; + using: string | undefined | null; + withCheck: string | undefined | null; + } + >(`SELECT + schemaname as "schema", + tablename as "table", + policyname as "name", + permissive as "as", + roles as "to", + cmd as "for", + qual as "using", + with_check as "withCheck" + FROM pg_catalog.pg_policies + ORDER BY + pg_catalog.lower(schemaname), + pg_catalog.lower(tablename), + pg_catalog.lower(policyname); + `).then((rows) => { + queryCallback('policies', rows, null); + return rows; + }).catch((err) => { + queryCallback('policies', [], err); + throw err; + }); + + const rolesQuery = db.query< + { + rolname: string; + rolsuper: boolean; + rolinherit: boolean; + rolcreaterole: boolean; + rolcreatedb: boolean; + rolcanlogin: boolean; + rolreplication: boolean; + rolconnlimit: number; + rolvaliduntil: string | null; + rolbypassrls: boolean; + } + >( + `SELECT + rolname, + rolsuper, + rolinherit, + rolcreaterole, + rolcreatedb, + rolcanlogin, + rolreplication, + rolconnlimit, + rolvaliduntil, + rolbypassrls + FROM pg_catalog.pg_roles + ORDER BY pg_catalog.lower(rolname);`, + ).then((rows) => { + queryCallback('roles', rows, null); + return rows; + }).catch((error) => { + queryCallback('roles', [], error); + throw error; + }); + + type PrivilegeListItem = { + grantor: string; + grantee: string; + schema: string; + table: string; + type: 'SELECT' | 'INSERT' | 'UPDATE' | 'DELETE' | 'TRUNCATE' | 'REFERENCES' | 'TRIGGER'; + isGrantable: boolean; + }; + const privilegesQuery = filteredNamespacesStringForSQL + ? db.query(` + SELECT + grantor, + grantee, + table_schema AS "schema", + table_name AS "table", + privilege_type AS "type", + CASE is_grantable WHEN 'YES' THEN true ELSE false END AS "isGrantable" + FROM information_schema.role_table_grants + WHERE table_schema IN (${filteredNamespacesStringForSQL}) + ORDER BY + pg_catalog.lower(table_schema), + pg_catalog.lower(table_name), + pg_catalog.lower(grantee); + `).then((rows) => { + queryCallback('privileges', rows, null); + return rows; + }).catch((error) => { + queryCallback('privileges', [], error); + throw error; + }) + : [] as PrivilegeListItem[]; + + const constraintsQuery = db.query<{ + oid: number | string; + schemaId: number | string; + tableId: number | string; + name: string; + type: 'p' | 'u' | 'f' | 'c'; // p - primary key, u - unique, f - foreign key, c - check + definition: string; + indexId: number | string; + columnsOrdinals: number[]; + tableToId: number | string; + columnsToOrdinals: number[]; + onUpdate: 'a' | 'd' | 'r' | 'c' | 'n'; + onDelete: 'a' | 'd' | 'r' | 'c' | 'n'; + }>(` + SELECT + oid, + connamespace AS "schemaId", + conrelid AS "tableId", + conname AS "name", + contype AS "type", + pg_catalog.pg_get_constraintdef(oid) AS "definition", + conindid AS "indexId", + conkey AS "columnsOrdinals", + confrelid AS "tableToId", + confkey AS "columnsToOrdinals", + confupdtype AS "onUpdate", + confdeltype AS "onDelete" + FROM + pg_catalog.pg_constraint + WHERE ${filterByTableIds ? ` conrelid IN ${filterByTableIds}` : 'false'} + ORDER BY conrelid, contype, pg_catalog.lower(conname); + `).then((rows) => { + queryCallback('constraints', rows, null); + return rows; + }).catch((err) => { + queryCallback('constraints', [], err); + throw err; + }); + + // for serials match with pg_attrdef via attrelid(tableid)+adnum(ordinal position), for enums with pg_enum above + const columnsQuery = db.query<{ + tableId: number | string; + kind: 'r' | 'p' | 'v' | 'm'; + name: string; + ordinality: number; + notNull: boolean; + type: string; + dimensions: number; + typeId: number | string; + /* s - stored */ + generatedType: 's' | ''; + /* + 'a' for GENERATED ALWAYS + 'd' for GENERATED BY DEFAULT + */ + identityType: 'a' | 'd' | ''; + metadata: { + seqId: string | null; + generation: string | null; + start: string | null; + increment: string | null; + max: string | null; + min: string | null; + cycle: string; + generated: 'ALWAYS' | 'BY DEFAULT'; + expression: string | null; + } | null; + }>(`SELECT + attrelid AS "tableId", + relkind AS "kind", + attname AS "name", + attnum AS "ordinality", + attnotnull AS "notNull", + attndims as "dimensions", + atttypid as "typeId", + attgenerated as "generatedType", + attidentity as "identityType", + pg_catalog.format_type(atttypid, atttypmod) as "type", + CASE + WHEN attidentity IN ('a', 'd') or attgenerated OPERATOR(pg_catalog.=) 's' THEN ( + SELECT + pg_catalog.row_to_json(c.*) + FROM + ( + SELECT + pg_catalog.pg_get_serial_sequence('"' OPERATOR(pg_catalog.||) "table_schema" OPERATOR(pg_catalog.||) '"."' OPERATOR(pg_catalog.||) "table_name" OPERATOR(pg_catalog.||) '"', "attname")::regclass::oid as "seqId", + "identity_generation" AS generation, + "identity_start" AS "start", + "identity_increment" AS "increment", + "identity_maximum" AS "max", + "identity_minimum" AS "min", + "identity_cycle" AS "cycle", + "generation_expression" AS "expression" + FROM + information_schema.columns c + WHERE + c.column_name OPERATOR(pg_catalog.=) attname + AND c.table_schema OPERATOR(pg_catalog.=) nspname + AND c.table_name OPERATOR(pg_catalog.=) cls.relname + ) c + ) + ELSE NULL + END AS "metadata" + FROM + pg_catalog.pg_attribute attr + JOIN pg_catalog.pg_class cls ON cls.oid OPERATOR(pg_catalog.=) attr.attrelid + JOIN pg_catalog.pg_namespace nsp ON nsp.oid OPERATOR(pg_catalog.=) cls.relnamespace + WHERE + ${filterByTableAndViewIds ? ` attrelid IN ${filterByTableAndViewIds}` : 'false'} + AND attnum OPERATOR(pg_catalog.>) 0 + AND attisdropped OPERATOR(pg_catalog.=) FALSE + ORDER BY attnum; + `).then((rows) => { + queryCallback('columns', rows, null); + return rows; + }).catch((err) => { + queryCallback('columns', [], err); + throw err; + }); + + const [ + dependList, + enumsList, + serialsList, + sequencesList, + policiesList, + rolesList, + privilegesList, + constraintsList, + columnsList, + ] = await Promise + .all([ + dependQuery, + enumsQuery, + serialsQuery, + sequencesQuery, + policiesQuery, + rolesQuery, + privilegesQuery, + constraintsQuery, + columnsQuery, + ]); + + const groupedEnums = enumsList.reduce((acc, it) => { + if (!(it.oid in acc)) { + acc[it.oid] = { + oid: it.oid, + schema: it.schema, + name: it.name, + values: [it.value], + }; + } else { + acc[it.oid].values.push(it.value); + } + return acc; + }, {} as Record); + + const groupedArrEnums = enumsList.reduce((acc, it) => { + if (!(it.arrayTypeId in acc)) { + acc[it.arrayTypeId] = { + oid: it.oid, + schema: it.schema, + name: it.name, + values: [it.value], + }; + } else { + acc[it.arrayTypeId].values.push(it.value); + } + return acc; + }, {} as Record); + + for (const it of Object.values(groupedEnums)) { + enums.push({ + entityType: 'enums', + schema: it.schema, + name: it.name, + values: it.values, + }); + } + + let columnsCount = 0; + let indexesCount = 0; + let foreignKeysCount = 0; + let tableCount = 0; + let checksCount = 0; + let viewsCount = 0; + + for (const seq of sequencesList) { + const depend = dependList.find((it) => it.oid === seq.oid); + + if (depend && (depend.deptype === 'a' || depend.deptype === 'i')) { + // TODO: add type field to sequence in DDL + // skip fo sequences or identity columns + // console.log('skip for auto created', seq.name, depend.deptype); + continue; + } + + sequences.push({ + entityType: 'sequences', + schema: seq.schema, + name: seq.name, + startWith: parseIdentityProperty(seq.startWith), + minValue: parseIdentityProperty(seq.minValue), + maxValue: parseIdentityProperty(seq.maxValue), + incrementBy: parseIdentityProperty(seq.incrementBy), + cycle: seq.cycle, + cacheSize: Number(parseIdentityProperty(seq.cacheSize) ?? 1), + }); + } + + progressCallback('enums', Object.keys(groupedEnums).length, 'done'); + + for (const dbRole of rolesList) { + roles.push({ + entityType: 'roles', + name: dbRole.rolname, + superuser: dbRole.rolsuper, + inherit: dbRole.rolinherit, + createRole: dbRole.rolcreaterole, + createDb: dbRole.rolcreatedb, + canLogin: dbRole.rolcanlogin, + replication: dbRole.rolreplication, + connLimit: dbRole.rolconnlimit, + password: null, + validUntil: dbRole.rolvaliduntil, + bypassRls: dbRole.rolbypassrls, + }); + } + + for (const privilege of privilegesList) { + privileges.push({ + entityType: 'privileges', + // TODO: remove name and implement custom pk + name: `${privilege.grantor}_${privilege.grantee}_${privilege.schema}_${privilege.table}_${privilege.type}`, + grantor: privilege.grantor, + grantee: privilege.grantee, + schema: privilege.schema, + table: privilege.table, + type: privilege.type, + isGrantable: privilege.isGrantable, + }); + } + + for (const it of policiesList) { + policies.push({ + entityType: 'policies', + schema: it.schema, + table: it.table, + name: it.name, + as: it.as, + for: it.for, + roles: typeof it.to === 'string' ? it.to.slice(1, -1).split(',') : it.to, + using: it.using ?? null, + withCheck: it.withCheck ?? null, + }); + } + + progressCallback('policies', policiesList.length, 'done'); + + type DBColumn = (typeof columnsList)[number]; + + // supply serials + for (const column of columnsList.filter((x) => x.kind === 'r' || x.kind === 'p')) { + const type = column.type; + + if (!(type === 'smallint' || type === 'bigint' || type === 'integer')) { + continue; + } + + const expr = serialsList.find( + (it) => it.tableId === column.tableId && it.ordinality === column.ordinality, + ); + + if (expr) { + const table = tablesList.find((it) => it.oid === column.tableId)!; + + const isSerial = isSerialExpression(expr.expression, table.schema); + column.type = isSerial ? type === 'bigint' ? 'bigserial' : type === 'integer' ? 'serial' : 'smallserial' : type; + } + } + + for (const column of columnsList.filter((x) => x.kind === 'r' || x.kind === 'p')) { + const table = tablesList.find((it) => it.oid === column.tableId)!; + + // supply enums + const enumType = column.typeId in groupedEnums + ? groupedEnums[column.typeId] + : column.typeId in groupedArrEnums + ? groupedArrEnums[column.typeId] + : null; + + let columnTypeMapped = enumType ? enumType.name : column.type.replaceAll('[]', ''); + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace(' with time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char') + .replace('geometry(Point', 'geometry(point'); + + columnTypeMapped = trimChar(columnTypeMapped, '"'); + + const columnDefault = defaultsList.find( + (it) => it.tableId === column.tableId && it.ordinality === column.ordinality, + ); + + const defaultValue = defaultForColumn( + columnTypeMapped, + columnDefault?.expression, + column.dimensions, + Boolean(enumType), + ); + + const unique = constraintsList.find((it) => { + return it.type === 'u' && it.tableId === column.tableId && it.columnsOrdinals.length === 1 + && it.columnsOrdinals.includes(column.ordinality); + }) ?? null; + + const pk = constraintsList.find((it) => { + return it.type === 'p' && it.tableId === column.tableId && it.columnsOrdinals.length === 1 + && it.columnsOrdinals.includes(column.ordinality); + }) ?? null; + + const metadata = column.metadata; + if (column.generatedType === 's' && (!metadata || !metadata.expression)) { + throw new Error( + `Generated ${table.schema}.${table.name}.${column.name} columns missing expression: \n${ + JSON.stringify(column.metadata) + }`, + ); + } + + if (column.identityType !== '' && !metadata) { + throw new Error( + `Identity ${table.schema}.${table.name}.${column.name} columns missing metadata: \n${ + JSON.stringify(column.metadata) + }`, + ); + } + + const sequence = metadata?.seqId ? sequencesList.find((it) => it.oid === Number(metadata.seqId)) ?? null : null; + + columns.push({ + entityType: 'columns', + schema: table.schema, + table: table.name, + name: column.name, + type: columnTypeMapped, + typeSchema: enumType ? enumType.schema ?? 'public' : null, + dimensions: column.dimensions, + default: column.generatedType === 's' ? null : defaultValue, + unique: !!unique, + uniqueName: unique ? unique.name : null, + uniqueNullsNotDistinct: unique?.definition.includes('NULLS NOT DISTINCT') ?? false, + notNull: column.notNull, + pk: pk !== null, + pkName: pk !== null ? pk.name : null, + generated: column.generatedType === 's' ? { type: 'stored', as: metadata!.expression! } : null, + identity: column.identityType !== '' + ? { + type: column.identityType === 'a' ? 'always' : 'byDefault', + name: sequence?.name ?? '', + increment: parseIdentityProperty(metadata?.increment), + minValue: parseIdentityProperty(metadata?.min), + maxValue: parseIdentityProperty(metadata?.max), + startWith: parseIdentityProperty(metadata?.start), + cycle: metadata?.cycle === 'YES', + cache: Number(parseIdentityProperty(sequence?.cacheSize ?? 1)), + } + : null, + }); + } + + for (const unique of constraintsList.filter((it) => it.type === 'u')) { + const table = tablesList.find((it) => it.oid === unique.tableId)!; + const schema = namespaces.find((it) => it.oid === unique.schemaId)!; + + const columns = unique.columnsOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === unique.tableId && column.ordinality === it)!; + return column.name; + }); + + uniques.push({ + entityType: 'uniques', + schema: schema.name, + table: table.name, + name: unique.name, + nameExplicit: true, + columns, + nullsNotDistinct: unique.definition.includes('NULLS NOT DISTINCT'), + }); + } + + for (const pk of constraintsList.filter((it) => it.type === 'p')) { + const table = tablesList.find((it) => it.oid === pk.tableId)!; + const schema = namespaces.find((it) => it.oid === pk.schemaId)!; + + const columns = pk.columnsOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === pk.tableId && column.ordinality === it)!; + return column.name; + }); + + pks.push({ + entityType: 'pks', + schema: schema.name, + table: table.name, + name: pk.name, + columns, + nameExplicit: true, + }); + } + + for (const fk of constraintsList.filter((it) => it.type === 'f')) { + const table = tablesList.find((it) => it.oid === fk.tableId)!; + const schema = namespaces.find((it) => it.oid === fk.schemaId)!; + const tableTo = tablesList.find((it) => it.oid === fk.tableToId)!; + + const columns = fk.columnsOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === fk.tableId && column.ordinality === it)!; + return column.name; + }); + + const columnsTo = fk.columnsToOrdinals.map((it) => { + const column = columnsList.find((column) => column.tableId === fk.tableToId && column.ordinality === it)!; + return column.name; + }); + + fks.push({ + entityType: 'fks', + schema: schema.name, + table: table.name, + name: fk.name, + nameExplicit: true, + columns, + tableTo: tableTo.name, + schemaTo: tableTo.schema, + columnsTo, + onUpdate: parseOnType(fk.onUpdate), + onDelete: parseOnType(fk.onDelete), + }); + } + + for (const check of constraintsList.filter((it) => it.type === 'c')) { + const table = tablesList.find((it) => it.oid === check.tableId)!; + const schema = namespaces.find((it) => it.oid === check.schemaId)!; + + checks.push({ + entityType: 'checks', + schema: schema.name, + table: table.name, + name: check.name, + value: check.definition, + }); + } + + const idxs = await db.query<{ + oid: number | string; + schema: string; + name: string; + accessMethod: string; + with?: string[]; + metadata: { + tableId: number; + expression: string | null; + where: string; + columnOrdinals: number[]; + opclasses: { oid: number | string; name: string; default: boolean }[]; + options: number[]; + isUnique: boolean; + isPrimary: boolean; + }; + }>(` + SELECT + pg_class.oid, + nspname as "schema", + relname AS "name", + am.amname AS "accessMethod", + reloptions AS "with", + pg_catalog.row_to_json(metadata.*) as "metadata" + FROM + pg_catalog.pg_class + JOIN pg_catalog.pg_am am ON am.oid OPERATOR(pg_catalog.=) pg_class.relam + JOIN pg_catalog.pg_namespace nsp ON nsp.oid OPERATOR(pg_catalog.=) pg_class.relnamespace + JOIN LATERAL ( + SELECT + pg_catalog.pg_get_expr(indexprs, indrelid) AS "expression", + pg_catalog.pg_get_expr(indpred, indrelid) AS "where", + indrelid::int AS "tableId", + indkey::int[] as "columnOrdinals", + indoption::int[] as "options", + indisunique as "isUnique", + indisprimary as "isPrimary", + array( + SELECT + pg_catalog.json_build_object( + 'oid', opclass.oid, + 'name', pg_am.amname, + 'default', pg_opclass.opcdefault + ) + FROM + pg_catalog.unnest(indclass) WITH ORDINALITY AS opclass(oid, ordinality) + JOIN pg_catalog.pg_opclass ON opclass.oid OPERATOR(pg_catalog.=) pg_opclass.oid + JOIN pg_catalog.pg_am ON pg_opclass.opcmethod OPERATOR(pg_catalog.=) pg_am.oid + ORDER BY opclass.ordinality + ) as "opclasses" + FROM + pg_catalog.pg_index + WHERE + pg_index.indexrelid OPERATOR(pg_catalog.=) pg_class.oid + ) metadata ON TRUE + WHERE + relkind OPERATOR(pg_catalog.=) 'i' + AND ${filterByTableIds ? `metadata."tableId" IN ${filterByTableIds}` : 'false'} + ORDER BY pg_catalog.lower(nspname), pg_catalog.lower(relname); + `).then((rows) => { + queryCallback('indexes', rows, null); + return rows; + }).catch((err) => { + queryCallback('indexes', [], err); + throw err; + }); + + for (const idx of idxs) { + const { metadata } = idx; + + // filter for drizzle only? + const forUnique = metadata.isUnique && constraintsList.some((x) => x.type === 'u' && x.indexId === idx.oid); + const forPK = metadata.isPrimary && constraintsList.some((x) => x.type === 'p' && x.indexId === idx.oid); + + const expr = splitExpressions(metadata.expression); + + const table = tablesList.find((it) => it.oid === idx.metadata.tableId)!; + + const nonColumnsCount = metadata.columnOrdinals.reduce((acc, it) => { + if (it === 0) acc += 1; + return acc; + }, 0); + + if (expr.length !== nonColumnsCount) { + throw new Error( + `expression split doesn't match non-columns count: [${ + metadata.columnOrdinals.join( + ', ', + ) + }] '${metadata.expression}':${expr.length}:${nonColumnsCount}`, + ); + } + + const opts = metadata.options.map((it) => { + return { + descending: (it & 1) === 1, + nullsFirst: (it & 2) === 2, + }; + }); + + const res = [] as ( + & ( + | { type: 'expression'; value: string } + | { type: 'column'; value: DBColumn } + ) + & { options: (typeof opts)[number]; opclass: { name: string; default: boolean } } + )[]; + + let k = 0; + for (let i = 0; i < metadata.columnOrdinals.length; i++) { + const ordinal = metadata.columnOrdinals[i]; + if (ordinal === 0) { + res.push({ + type: 'expression', + value: expr[k], + options: opts[i], + opclass: metadata.opclasses[i], + }); + k += 1; + } else { + const column = columnsList.find((column) => { + return column.tableId === metadata.tableId && column.ordinality === ordinal; + }); + if (!column) throw new Error(`missing column: ${metadata.tableId}:${ordinal}`); + + // ! options and opclass can be undefined when index have "INCLUDE" columns (columns from "INCLUDE" don't have options and opclass) + const options = opts[i] as typeof opts[number] | undefined; + const opclass = metadata.opclasses[i] as { name: string; default: boolean } | undefined; + if (options && opclass) { + res.push({ + type: 'column', + value: column, + options: opts[i], + opclass: metadata.opclasses[i], + }); + } + } + } + + const columns = res.map((it) => { + return { + asc: !it.options.descending, + nullsFirst: it.options.nullsFirst, + opclass: it.opclass.default ? null : { + name: it.opclass.name, + default: it.opclass.default, + }, + isExpression: it.type === 'expression', + value: it.type === 'expression' ? it.value : it.value.name, // column name + } satisfies Index['columns'][number]; + }); + + indexes.push({ + entityType: 'indexes', + schema: idx.schema, + table: table.name, + name: idx.name, + nameExplicit: true, + method: idx.accessMethod, + isUnique: metadata.isUnique, + with: idx.with?.join(', ') ?? '', + where: idx.metadata.where, + columns: columns, + concurrently: false, + forUnique, + forPK, + }); + } + + progressCallback('columns', columnsCount, 'fetching'); + progressCallback('checks', checksCount, 'fetching'); + progressCallback('indexes', indexesCount, 'fetching'); + progressCallback('tables', tableCount, 'done'); + + for (const it of columnsList.filter((x) => x.kind === 'm' || x.kind === 'v')) { + const view = viewsList.find((x) => x.oid === it.tableId)!; + + const typeDimensions = it.type.split('[]').length - 1; + const enumType = it.typeId in groupedEnums + ? groupedEnums[it.typeId] + : it.typeId in groupedArrEnums + ? groupedArrEnums[it.typeId] + : null; + + let columnTypeMapped = enumType ? enumType.name : it.type.replace('[]', ''); + columnTypeMapped = trimChar(columnTypeMapped, '"'); + if (columnTypeMapped.startsWith('numeric(')) { + columnTypeMapped = columnTypeMapped.replace(',', ', '); + } + + columnTypeMapped = columnTypeMapped + .replace('character varying', 'varchar') + .replace(' without time zone', '') + // .replace("timestamp without time zone", "timestamp") + .replace('character', 'char') + .replace('geometry(Point)', 'geometry(point)'); + + columnTypeMapped += '[]'.repeat(it.dimensions); + + viewColumns.push({ + schema: view.schema, + view: view.name, + name: it.name, + type: columnTypeMapped, + typeDimensions, + notNull: it.notNull, + dimensions: it.dimensions, + typeSchema: enumType ? enumType.schema : null, + }); + } + + for (const view of viewsList) { + tableCount += 1; + + const accessMethod = view.accessMethod === 0 ? null : ams.find((it) => it.oid === view.accessMethod); + const tablespace = view.tablespaceid === 0 ? null : tablespaces.find((it) => it.oid === view.tablespaceid)!.name; + + const definition = parseViewDefinition(view.definition); + const withOpts = wrapRecord( + view.options?.reduce((acc, it) => { + const opt = it.split('='); + if (opt.length !== 2) { + throw new Error(`Unexpected view option: ${it}`); + } + + const key = camelcase(opt[0].trim()); + const value = opt[1].trim(); + acc[key] = value; + return acc; + }, {} as Record) ?? {}, + ); + + const opts = { + checkOption: withOpts.literal('checkOption', ['local', 'cascaded']), + securityBarrier: withOpts.bool('securityBarrier'), + securityInvoker: withOpts.bool('securityInvoker'), + fillfactor: withOpts.num('fillfactor'), + toastTupleTarget: withOpts.num('toastTupleTarget'), + parallelWorkers: withOpts.num('parallelWorkers'), + autovacuumEnabled: withOpts.bool('autovacuumEnabled'), + vacuumIndexCleanup: withOpts.literal('vacuumIndexCleanup', ['auto', 'on', 'off']), + vacuumTruncate: withOpts.bool('vacuumTruncate'), + autovacuumVacuumThreshold: withOpts.num('autovacuumVacuumThreshold'), + autovacuumVacuumScaleFactor: withOpts.num('autovacuumVacuumScaleFactor'), + autovacuumVacuumCostDelay: withOpts.num('autovacuumVacuumCostDelay'), + autovacuumVacuumCostLimit: withOpts.num('autovacuumVacuumCostLimit'), + autovacuumFreezeMinAge: withOpts.num('autovacuumFreezeMinAge'), + autovacuumFreezeMaxAge: withOpts.num('autovacuumFreezeMaxAge'), + autovacuumFreezeTableAge: withOpts.num('autovacuumFreezeTableAge'), + autovacuumMultixactFreezeMinAge: withOpts.num('autovacuumMultixactFreezeMinAge'), + autovacuumMultixactFreezeMaxAge: withOpts.num('autovacuumMultixactFreezeMaxAge'), + autovacuumMultixactFreezeTableAge: withOpts.num('autovacuumMultixactFreezeTableAge'), + logAutovacuumMinDuration: withOpts.num('logAutovacuumMinDuration'), + userCatalogTable: withOpts.bool('userCatalogTable'), + }; + + const hasNonNullOpt = Object.values(opts).some((x) => x !== null); + views.push({ + entityType: 'views', + schema: view.schema, + name: view.name, + definition, + with: hasNonNullOpt ? opts : null, + materialized: view.kind === 'm', + tablespace, + using: accessMethod?.name ?? null, + withNoData: null, + }); + } + + // TODO: update counts! + progressCallback('columns', columnsCount, 'done'); + progressCallback('indexes', indexesCount, 'done'); + progressCallback('fks', foreignKeysCount, 'done'); + progressCallback('checks', checksCount, 'done'); + progressCallback('views', viewsCount, 'done'); + + const resultSchemas = schemas.filter((x) => filter({ type: 'schema', name: x.name })); + const resultTables = tables.filter((x) => filter({ type: 'table', schema: x.schema, name: x.name })); + const resultEnums = enums.filter((x) => resultSchemas.some((s) => s.name === x.schema)); + const resultColumns = columns.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultIndexes = indexes.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultPKs = pks.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultFKs = fks.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultUniques = uniques.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultChecks = checks.filter((x) => resultTables.some((t) => t.schema === x.schema && t.name === x.table)); + const resultSequences = sequences.filter((x) => resultSchemas.some((t) => t.name === x.schema)); + // TODO: drizzle link + const resultRoles = roles.filter((x) => filter({ type: 'role', name: x.name })); + const resultViews = views.filter((x) => filter({ type: 'table', schema: x.schema, name: x.name })); + const resultViewColumns = viewColumns.filter((x) => + resultViews.some((v) => v.schema === x.schema && v.name === x.view) + ); + + return { + schemas: resultSchemas, + tables: resultTables, + enums: resultEnums, + columns: resultColumns, + indexes: resultIndexes, + pks: resultPKs, + fks: resultFKs, + uniques: resultUniques, + checks: resultChecks, + sequences: resultSequences, + roles: resultRoles, + privileges, + policies, + views: resultViews, + viewColumns: resultViewColumns, + } satisfies InterimSchema; +}; + +export const fromDatabaseForDrizzle = async ( + db: DB, + filter: EntityFilter, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, +) => { + const res = await fromDatabase(db, filter, progressCallback); + res.schemas = res.schemas.filter((it) => it.name !== 'public'); + res.indexes = res.indexes.filter((it) => !it.forPK && !it.forUnique); + res.privileges = []; + + return res; +}; diff --git a/drizzle-kit/src/dialects/postgres/serializer.ts b/drizzle-kit/src/dialects/postgres/serializer.ts new file mode 100644 index 0000000000..fe5ee11c11 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/serializer.ts @@ -0,0 +1,78 @@ +import type { CasingType } from '../../cli/validations/common'; +import { postgresSchemaError, postgresSchemaWarning } from '../../cli/views'; +import { prepareFilenames } from '../../utils/utils-node'; +import type { PostgresDDL } from './ddl'; +import { createDDL, interimToDDL } from './ddl'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from './drizzle'; +import type { PostgresSnapshot } from './snapshot'; +import { drySnapshot, snapshotValidator } from './snapshot'; + +export const prepareSnapshot = async ( + snapshots: string[], + schemaPath: string | string[], + casing: CasingType | undefined, +): Promise< + { + ddlPrev: PostgresDDL; + ddlCur: PostgresDDL; + snapshot: PostgresSnapshot; + snapshotPrev: PostgresSnapshot; + custom: PostgresSnapshot; + } +> => { + const { readFileSync } = await import('fs'); + const { randomUUID } = await import('crypto'); + const prevSnapshot = snapshots.length === 0 + ? drySnapshot + : snapshotValidator.strict(JSON.parse(readFileSync(snapshots[snapshots.length - 1]).toString())); + + const ddlPrev = createDDL(); + for (const entry of prevSnapshot.ddl) { + ddlPrev.entities.push(entry); + } + const filenames = prepareFilenames(schemaPath); + + const res = await prepareFromSchemaFiles(filenames); + + // TODO: do we wan't to export everything or ignore .existing and respect entity filters in config + const { schema, errors, warnings } = fromDrizzleSchema(res, casing, () => true); + + if (warnings.length > 0) { + console.log(warnings.map((it) => postgresSchemaWarning(it)).join('\n\n')); + } + + if (errors.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + const { ddl: ddlCur, errors: errors2 } = interimToDDL(schema); + + if (errors2.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + const id = randomUUID(); + const prevIds = [prevSnapshot.id]; + + const snapshot = { + version: '8', + dialect: 'postgres', + id, + prevIds, + ddl: ddlCur.entities.list(), + renames: [], + } satisfies PostgresSnapshot; + + const { id: _ignoredId, prevIds: _ignoredPrevIds, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: PostgresSnapshot = { + id, + prevIds, + ...prevRest, + }; + + return { ddlPrev, ddlCur, snapshot, snapshotPrev: prevSnapshot, custom }; +}; diff --git a/drizzle-kit/src/dialects/postgres/snapshot.ts b/drizzle-kit/src/dialects/postgres/snapshot.ts new file mode 100644 index 0000000000..c163d17239 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/snapshot.ts @@ -0,0 +1,563 @@ +import { randomUUID } from 'crypto'; +import type { TypeOf } from 'zod'; +import { any, array as zodArray, boolean, enum as enumType, literal, number, object, record, string } from 'zod'; +import { originUUID } from '../../utils'; +import { array, validator } from '../simpleValidator'; +import type { PostgresDDL, PostgresEntity } from './ddl'; +import { createDDL } from './ddl'; + +const indexV2 = object({ + name: string(), + columns: record( + string(), + object({ + name: string(), + }), + ), + isUnique: boolean(), +}).strict(); + +const columnV2 = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + references: string().optional(), +}).strict(); + +const tableV2 = object({ + name: string(), + columns: record(string(), columnV2), + indexes: record(string(), indexV2), +}).strict(); + +const enumSchemaV1 = object({ + name: string(), + values: record(string(), string()), +}).strict(); + +const enumSchema = object({ + name: string(), + schema: string(), + values: string().array(), +}).strict(); + +export const pgSchemaV2 = object({ + version: literal('2'), + tables: record(string(), tableV2), + enums: record(string(), enumSchemaV1), +}).strict(); + +// ------- V1 -------- +const references = object({ + foreignKeyName: string(), + table: string(), + column: string(), + onDelete: string().optional(), + onUpdate: string().optional(), +}).strict(); + +const columnV1 = object({ + name: string(), + type: string(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + references: references.optional(), +}).strict(); + +const tableV1 = object({ + name: string(), + columns: record(string(), columnV1), + indexes: record(string(), indexV2), +}).strict(); + +export const pgSchemaV1 = object({ + version: literal('1'), + tables: record(string(), tableV1), + enums: record(string(), enumSchemaV1), +}).strict(); + +const indexColumn = object({ + expression: string(), + isExpression: boolean(), + asc: boolean(), + nulls: string().optional(), + opclass: string().optional(), +}); + +export type IndexColumnType = TypeOf; + +const index = object({ + name: string(), + columns: indexColumn.array(), + isUnique: boolean(), + with: record(string(), any()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const indexV4 = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + with: record(string(), string()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const indexV5 = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + with: record(string(), string()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const indexV6 = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + with: record(string(), string()).optional(), + method: string().default('btree'), + where: string().optional(), + concurrently: boolean().default(false), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + schemaTo: string().optional(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +export const sequenceSchema = object({ + name: string(), + increment: string().optional(), + minValue: string().optional(), + maxValue: string().optional(), + startWith: string().optional(), + cache: string().optional(), + cycle: boolean().optional(), + schema: string(), +}).strict(); + +export const identitySchema = sequenceSchema.omit({ schema: true }).merge( + object({ type: enumType(['always', 'byDefault']) }), +); + +export const roleSchema = object({ + name: string(), + createDb: boolean().optional(), + createRole: boolean().optional(), + inherit: boolean().optional(), +}).strict(); + +export const sequenceSquashed = object({ + name: string(), + schema: string(), + values: string(), +}).strict(); + +const columnV7 = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + generated: object({ + type: literal('stored'), + as: string(), + }).optional(), + identity: identitySchema.optional(), +}).strict(); + +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + +const columnSquashed = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean(), + notNull: boolean(), + default: any().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), + generated: object({ + type: literal('stored'), + as: string(), + }).optional(), + identity: string().optional(), +}).strict(); + +const tableV3 = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), + nullsNotDistinct: boolean(), +}).strict(); + +export const policy = object({ + name: string(), + as: enumType(['PERMISSIVE', 'RESTRICTIVE']).optional(), + for: enumType(['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE']).optional(), + to: string().array().optional(), + using: string().optional(), + withCheck: string().optional(), + on: string().optional(), + schema: string().optional(), +}).strict(); + +export const policySquashed = object({ + name: string(), + values: string(), +}).strict(); + +const viewWithOption = object({ + checkOption: enumType(['local', 'cascaded']).optional(), + securityBarrier: boolean().optional(), + securityInvoker: boolean().optional(), +}).strict(); + +const matViewWithOption = object({ + fillfactor: number().optional(), + toastTupleTarget: number().optional(), + parallelWorkers: number().optional(), + autovacuumEnabled: boolean().optional(), + vacuumIndexCleanup: enumType(['auto', 'off', 'on']).optional(), + vacuumTruncate: boolean().optional(), + autovacuumVacuumThreshold: number().optional(), + autovacuumVacuumScaleFactor: number().optional(), + autovacuumVacuumCostDelay: number().optional(), + autovacuumVacuumCostLimit: number().optional(), + autovacuumFreezeMinAge: number().optional(), + autovacuumFreezeMaxAge: number().optional(), + autovacuumFreezeTableAge: number().optional(), + autovacuumMultixactFreezeMinAge: number().optional(), + autovacuumMultixactFreezeMaxAge: number().optional(), + autovacuumMultixactFreezeTableAge: number().optional(), + logAutovacuumMinDuration: number().optional(), + userCatalogTable: boolean().optional(), +}).strict(); + +export const mergedViewWithOption = viewWithOption.merge(matViewWithOption).strict(); + +export const view = object({ + name: string(), + schema: string(), + columns: record(string(), column), + definition: string().optional(), + materialized: boolean(), + with: mergedViewWithOption.optional(), + isExisting: boolean(), + withNoData: boolean().optional(), + using: string().optional(), + tablespace: string().optional(), +}).strict(); + +const tableV4 = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), indexV4), + foreignKeys: record(string(), fk), +}).strict(); + +const tableV5 = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), indexV5), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +const tableV6 = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), indexV6), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +const tableV7 = object({ + name: string(), + schema: string(), + columns: record(string(), columnV7), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +const table = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), + policies: record(string(), policy).default({}), + checkConstraints: record(string(), checkConstraint).default({}), + isRLSEnabled: boolean().default(false).optional(), +}).strict(); + +const schemaHash = object({ + id: string(), + prevIds: zodArray(string()), +}); + +const schemaHashV7 = object({ + id: string(), + prevId: string(), +}); + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ + isArray: boolean().optional(), + dimensions: number().optional(), + rawType: string().optional(), + isDefaultAnExpression: boolean().optional(), + }).optional(), + ), + }).optional(), + ), +}).optional(); + +export const pgSchemaInternalV3 = object({ + version: literal('3'), + dialect: literal('pg'), + tables: record(string(), tableV3), + enums: record(string(), enumSchemaV1), +}).strict(); + +export const pgSchemaInternalV4 = object({ + version: literal('4'), + dialect: literal('pg'), + tables: record(string(), tableV4), + enums: record(string(), enumSchemaV1), + schemas: record(string(), string()), +}).strict(); + +// "table" -> "schema.table" for schema proper support +export const pgSchemaInternalV5 = object({ + version: literal('5'), + dialect: literal('pg'), + tables: record(string(), tableV5), + enums: record(string(), enumSchemaV1), + schemas: record(string(), string()), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const pgSchemaInternalV6 = object({ + version: literal('6'), + dialect: literal('postgresql'), + tables: record(string(), tableV6), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const pgSchemaExternal = object({ + version: literal('5'), + dialect: literal('pg'), + tables: zodArray(table), + enums: zodArray(enumSchemaV1), + schemas: zodArray(object({ name: string() })), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), +}).strict(); + +export const pgSchemaInternalV7 = object({ + version: literal('7'), + dialect: literal('postgresql'), + tables: record(string(), tableV7), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + sequences: record(string(), sequenceSchema), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const pgSchemaInternal = object({ + version: literal('7'), + dialect: literal('postgresql'), + tables: record(string(), table), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + views: record(string(), view).default({}), + sequences: record(string(), sequenceSchema).default({}), + roles: record(string(), roleSchema).default({}), + policies: record(string(), policy).default({}), + _meta: object({ + schemas: record(string(), string()), + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +const tableSquashed = object({ + name: string(), + schema: string(), + columns: record(string(), columnSquashed), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()), + policies: record(string(), string()), + checkConstraints: record(string(), string()), + isRLSEnabled: boolean().default(false), +}).strict(); + +const tableSquashedV4 = object({ + name: string(), + schema: string(), + columns: record(string(), column), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), +}).strict(); + +export const pgSchemaSquashedV4 = object({ + version: literal('4'), + dialect: literal('pg'), + tables: record(string(), tableSquashedV4), + enums: record(string(), enumSchemaV1), + schemas: record(string(), string()), +}).strict(); + +export const pgSchemaSquashedV6 = object({ + version: literal('6'), + dialect: literal('postgresql'), + tables: record(string(), tableSquashed), + enums: record(string(), enumSchema), + schemas: record(string(), string()), +}).strict(); + +export const pgSchemaSquashed = object({ + version: literal('7'), + dialect: literal('postgresql'), + tables: record(string(), tableSquashed), + enums: record(string(), enumSchema), + schemas: record(string(), string()), + views: record(string(), view), + sequences: record(string(), sequenceSquashed), + roles: record(string(), roleSchema).default({}), + policies: record(string(), policySquashed).default({}), +}).strict(); + +export const pgSchemaV3 = pgSchemaInternalV3.merge(schemaHash); +export const pgSchemaV4 = pgSchemaInternalV4.merge(schemaHash); +export const pgSchemaV5 = pgSchemaInternalV5.merge(schemaHash); +export const pgSchemaV6 = pgSchemaInternalV6.merge(schemaHash); +export const pgSchemaV7 = pgSchemaInternal.merge(schemaHashV7); +export const pgSchema = pgSchemaInternal.merge(schemaHash); + +export type PgSchemaV1 = TypeOf; +export type PgSchemaV2 = TypeOf; +export type PgSchemaV3 = TypeOf; +export type PgSchemaV4 = TypeOf; +export type PgSchemaV5 = TypeOf; +export type PgSchemaV6 = TypeOf; +export type PgSchemaV7 = TypeOf; +export type PgSchema = TypeOf; + +export type Index = TypeOf; +export type TableV5 = TypeOf; +export type Column = TypeOf; + +export const toJsonSnapshot = (ddl: PostgresDDL, prevIds: string[], renames: string[]): PostgresSnapshot => { + return { dialect: 'postgres', id: randomUUID(), prevIds, version: '8', ddl: ddl.entities.list(), renames }; +}; + +const ddl = createDDL(); +export const snapshotValidator = validator({ + version: ['8'], + dialect: ['postgres'], + id: 'string', + prevIds: array((_) => true), + ddl: array((it) => { + const res = ddl.entities.validate(it); + if (!res) { + console.log(it); + } + return res; + }), + renames: array((_) => true), +}); + +export type PostgresSnapshot = typeof snapshotValidator.shape; + +export const drySnapshot = snapshotValidator.strict( + { + version: '8', + dialect: 'postgres', + id: originUUID, + prevIds: [], + ddl: [], + renames: [], + } satisfies PostgresSnapshot, +); diff --git a/drizzle-kit/src/dialects/postgres/statements.ts b/drizzle-kit/src/dialects/postgres/statements.ts new file mode 100644 index 0000000000..14e1254be5 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/statements.ts @@ -0,0 +1,472 @@ +import type { Simplify } from '../../utils'; +import type { + CheckConstraint, + Column, + DiffEntities, + Enum, + ForeignKey, + Index, + Policy, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + Table, + UniqueConstraint, + View, +} from './ddl'; + +export interface JsonCreateTable { + type: 'create_table'; + table: Table; +} + +export interface JsonDropTable { + type: 'drop_table'; + table: Table; + key: string; +} + +export interface JsonRenameTable { + type: 'rename_table'; + schema: string; + from: string; + to: string; +} + +export interface JsonCreateEnum { + type: 'create_enum'; + enum: Enum; +} + +export interface JsonDropEnum { + type: 'drop_enum'; + enum: Enum; +} + +export interface JsonMoveEnum { + type: 'move_enum'; + from: { name: string; schema: string | null }; + to: { name: string; schema: string | null }; +} + +export interface JsonRenameEnum { + type: 'rename_enum'; + schema: string; + from: string; + to: string; +} + +export interface JsonRecreateEnum { + type: 'recreate_enum'; + to: Enum; + columns: Column[]; + from: Enum; +} + +export interface JsonAlterEnum { + type: 'alter_enum'; + to: Enum; + from: Enum; + diff: { + type: 'same' | 'removed' | 'added'; + value: string; + beforeValue?: string; + }[]; +} + +export interface JsonCreateRole { + type: 'create_role'; + role: Role; +} + +export interface JsonDropRole { + type: 'drop_role'; + role: Role; +} +export interface JsonRenameRole { + type: 'rename_role'; + from: Role; + to: Role; +} + +export interface JsonAlterRole { + type: 'alter_role'; + diff: DiffEntities['roles']; + role: Role; +} + +export interface JsonGrantPrivilege { + type: 'grant_privilege'; + privilege: Privilege; +} + +export interface JsonRevokePrivilege { + type: 'revoke_privilege'; + privilege: Privilege; +} + +export interface JsonRegrantPrivilege { + type: 'regrant_privilege'; + privilege: Privilege; + diff: DiffEntities['privileges']; +} + +export interface JsonDropValueFromEnum { + type: 'alter_type_drop_value'; + deletedValues: string[]; + enum: Enum; + columns: Column[]; +} + +export interface JsonCreateSequence { + type: 'create_sequence'; + sequence: Sequence; +} + +export interface JsonDropSequence { + type: 'drop_sequence'; + sequence: Sequence; +} + +export interface JsonMoveSequence { + type: 'move_sequence'; + from: { name: string; schema: string | null }; + to: { name: string; schema: string | null }; +} + +export interface JsonRenameSequence { + type: 'rename_sequence'; + from: Sequence; + to: Sequence; +} + +export interface JsonAlterSequence { + type: 'alter_sequence'; + diff: DiffEntities['sequences']; + sequence: Sequence; +} + +export interface JsonDropColumn { + type: 'drop_column'; + column: Column; +} + +export interface JsonAddColumn { + type: 'add_column'; + column: Column; + isPK: boolean; + isCompositePK: boolean; +} + +export interface JsonCreatePolicy { + type: 'create_policy'; + policy: Policy; +} + +export interface JsonDropPolicy { + type: 'drop_policy'; + policy: Policy; +} + +export interface JsonRenamePolicy { + type: 'rename_policy'; + from: Policy; + to: Policy; +} + +export interface JsonAlterRLS { + type: 'alter_rls'; + schema: string; + name: string; + isRlsEnabled: boolean; +} + +export interface JsonAlterPolicy { + type: 'alter_policy'; + diff: DiffEntities['policies']; + policy: Policy; +} +export interface JsonRecreatePolicy { + type: 'recreate_policy'; + policy: Policy; + diff: DiffEntities['policies']; +} + +export interface JsonCreateIndex { + type: 'create_index'; + index: Index; +} + +export interface JsonRecreateIndex { + type: 'recreate_index'; + index: Index; + diff: DiffEntities['indexes']; +} + +export interface JsonCreateFK { + type: 'create_fk'; + fk: ForeignKey; +} + +export interface JsonDropFK { + type: 'drop_fk'; + fk: ForeignKey; +} + +export interface JsonRecreateFK { + type: 'recreate_fk'; + fk: ForeignKey; + diff: DiffEntities['fks']; +} + +export interface JsonCreateUnique { + type: 'add_unique'; + unique: UniqueConstraint; +} + +export interface JsonDeleteUnique { + type: 'drop_unique'; + unique: UniqueConstraint; +} + +export interface JsonAlterUnique { + type: 'alter_unique'; + diff: DiffEntities['uniques']; +} + +export interface JsonAddCheck { + type: 'add_check'; + check: CheckConstraint; +} + +export interface JsonDropCheck { + type: 'drop_check'; + check: CheckConstraint; +} + +export interface JsonAlterCheck { + type: 'alter_check'; + diff: DiffEntities['checks']; +} + +export interface JsonAddPrimaryKey { + type: 'add_pk'; + pk: PrimaryKey; +} + +export interface JsonDropPrimaryKey { + type: 'drop_pk'; + pk: PrimaryKey; +} + +export interface JsonRenameConstraint { + type: 'rename_constraint'; + schema: string; + table: string; + from: string; + to: string; +} + +export interface JsonAlterPrimaryKey { + type: 'alter_pk'; + pk: PrimaryKey; + diff: DiffEntities['pks']; +} + +export interface JsonMoveTable { + type: 'move_table'; + name: string; + from: string; + to: string; +} + +export interface JsonAlterTableRemoveFromSchema { + type: 'remove_from_schema'; + table: string; + schema: string; +} + +export interface JsonAlterTableSetNewSchema { + type: 'set_new_schema'; + table: string; + from: string; + to: string; +} + +export interface JsonDropIndex { + type: 'drop_index'; + index: Index; +} + +export interface JsonRenameIndex { + type: 'rename_index'; + schema: string; + from: string; + to: string; +} + +export interface JsonRenameColumn { + type: 'rename_column'; + from: Column; + to: Column; +} + +export interface JsonAlterColumn { + type: 'alter_column'; + to: Column; + wasEnum: boolean; + isEnum: boolean; + diff: DiffEntities['columns']; +} + +export interface JsonRecreateColumn { + type: 'recreate_column'; + diff: DiffEntities['columns']; + isPK: boolean; +} + +export interface JsonAlterColumnSetPrimaryKey { + type: 'alter_column_set_pk'; + table: string; + schema: string; + column: string; +} + +export interface JsonAlterColumnChangeGenerated { + type: 'alter_column_change_generated'; + column: Column; +} +export interface JsonAlterColumnChangeIdentity { + type: 'alter_column_change_identity'; + column: Column; +} + +export interface JsonCreateSchema { + type: 'create_schema'; + name: string; +} + +export interface JsonDropSchema { + type: 'drop_schema'; + name: string; +} + +export interface JsonRenameSchema { + type: 'rename_schema'; + from: Schema; + to: Schema; +} + +export interface JsonCreateView { + type: 'create_view'; + view: View; +} + +export interface JsonDropView { + type: 'drop_view'; + view: View; +} + +export interface JsonRenameView { + type: 'rename_view'; + from: View; + to: View; +} + +export interface JsonMoveView { + type: 'move_view'; + fromSchema: string; + toSchema: string; + view: View; +} + +export interface JsonAlterView { + type: 'alter_view'; + diff: DiffEntities['views']; + view: View; +} + +export interface JsonRecreateView { + type: 'recreate_view'; + from: View; + to: View; +} + +export type JsonStatement = + | JsonCreateTable + | JsonDropTable + | JsonRenameTable + | JsonRenameColumn + | JsonAlterColumn + | JsonRecreateColumn + | JsonMoveView + | JsonAlterView + | JsonRecreateView + | JsonCreateEnum + | JsonDropEnum + | JsonMoveEnum + | JsonRenameEnum + | JsonRecreateEnum + | JsonAlterEnum + | JsonDropColumn + | JsonAddColumn + | JsonCreateIndex + | JsonDropIndex + | JsonRenameIndex + | JsonAddPrimaryKey + | JsonDropPrimaryKey + | JsonRenameConstraint + | JsonAlterPrimaryKey + | JsonCreateFK + | JsonDropFK + | JsonRecreateFK + | JsonCreateUnique + | JsonDeleteUnique + | JsonAlterUnique + | JsonDropCheck + | JsonAddCheck + | JsonCreateSchema + | JsonDropSchema + | JsonRenameSchema + | JsonMoveTable + | JsonAlterTableRemoveFromSchema + | JsonAlterTableSetNewSchema + | JsonAlterSequence + | JsonDropSequence + | JsonCreateSequence + | JsonMoveSequence + | JsonRenameSequence + | JsonDropPolicy + | JsonCreatePolicy + | JsonAlterPolicy + | JsonRecreatePolicy + | JsonRenamePolicy + | JsonAlterRLS + | JsonRenameRole + | JsonCreateRole + | JsonDropRole + | JsonAlterRole + | JsonGrantPrivilege + | JsonRevokePrivilege + | JsonRegrantPrivilege + | JsonCreateView + | JsonDropView + | JsonRenameView + | JsonAlterCheck + | JsonDropValueFromEnum + | JsonRecreateIndex; + +export const prepareStatement = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + args: Omit, +): Simplify => { + return { + type, + ...args, + } as TStatement; +}; diff --git a/drizzle-kit/src/dialects/postgres/typescript.ts b/drizzle-kit/src/dialects/postgres/typescript.ts new file mode 100644 index 0000000000..d1909b3054 --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/typescript.ts @@ -0,0 +1,1042 @@ +import { getTableName, is } from 'drizzle-orm'; +import type { Relation, Relations } from 'drizzle-orm/_relations'; +import { createTableRelationsHelpers, extractTablesRelationalConfig, Many, One } from 'drizzle-orm/_relations'; +import type { AnyPgTable } from 'drizzle-orm/pg-core'; +import '../../@types/utils'; +import { toCamelCase } from 'drizzle-orm/casing'; +import type { Casing } from '../../cli/validations/common'; +import { assertUnreachable, trimChar } from '../../utils'; +import { escapeForTsLiteral, inspect } from '../utils'; +import type { + CheckConstraint, + Column, + ForeignKey, + Index, + Policy, + PostgresDDL, + PrimaryKey, + UniqueConstraint, + ViewColumn, +} from './ddl'; +import { tableFromDDL } from './ddl'; +import { defaultNameForIdentitySequence, defaults, typeFor } from './grammar'; + +// TODO: omit defaults opclass... improvement +const imports = [ + 'smallint', + 'integer', + 'bigint', + 'numeric', + 'decimal', + 'real', + 'doublePrecision', + 'boolean', + 'char', + 'varchar', + 'text', + 'json', + 'jsonb', + + 'serial', + 'smallserial', + 'bigserial', + + 'time', + 'timestamp', + 'date', + 'interval', + 'cidr', + 'inet', + 'macaddr', + 'macaddr8', + 'bigint', + 'uuid', + 'vector', + 'halfvec', + 'sparsevec', + 'point', + 'line', + 'geometry', + 'bit', + 'pgEnum', + 'gelEnum', + 'customType', +] as const; +export type Import = typeof imports[number]; + +const pgImportsList = new Set([ + 'pgTable', + 'gelTable', + ...imports, +]); + +const objToStatement2 = (json: { [s: string]: unknown }) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const relations = new Set(); + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const withCasing = (value: string, casing: Casing) => { + if (casing === 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(toCamelCase(value)); + } + + assertUnreachable(casing); +}; + +const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { + if (casing === 'preserve') { + return ''; + } + if (casing === 'camel') { + return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; + } + + assertUnreachable(casing); +}; + +export const relationsToTypeScriptForStudio = ( + schema: Record>>, + relations: Record>>>, +) => { + const relationalSchema: Record = { + ...Object.fromEntries( + Object.entries(schema) + .map(([key, val]) => { + // have unique keys across schemas + const mappedTableEntries = Object.entries(val).map((tableEntry) => { + return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; + }); + + return mappedTableEntries; + }) + .flat(), + ), + ...relations, + }; + + const relationsConfig = extractTablesRelationalConfig(relationalSchema, createTableRelationsHelpers); + + let result = ''; + + function findColumnKey(table: AnyPgTable, columnName: string) { + for (const tableEntry of Object.entries(table)) { + const key = tableEntry[0]; + const value = tableEntry[1]; + + if (value.name === columnName) { + return key; + } + } + } + + Object.values(relationsConfig.tables).forEach((table) => { + const tableName = table.tsName.split('.')[1]; + const relations = table.relations; + let hasRelations = false; + let relationsObjAsStr = ''; + let hasOne = false; + let hasMany = false; + + Object.values(relations).forEach((relation) => { + hasRelations = true; + + if (is(relation, Many)) { + hasMany = true; + relationsObjAsStr += `\t\t${relation.fieldName}: many(${ + relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] + }${typeof relation.relationName !== 'undefined' ? `, { relationName: "${relation.relationName}"}` : ''}),`; + } + + if (is(relation, One)) { + hasOne = true; + relationsObjAsStr += `\t\t${relation.fieldName}: one(${ + relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] + }, { fields: [${ + relation.config?.fields.map( + (c) => + `${relationsConfig.tableNamesMap[getTableName(relation.sourceTable)].split('.')[1]}.${ + findColumnKey(relation.sourceTable, c.name) + }`, + ) + }], references: [${ + relation.config?.references.map( + (c) => + `${relationsConfig.tableNamesMap[getTableName(relation.referencedTable)].split('.')[1]}.${ + findColumnKey(relation.referencedTable, c.name) + }`, + ) + }]${typeof relation.relationName !== 'undefined' ? `, relationName: "${relation.relationName}"` : ''}}),`; + } + }); + + if (hasRelations) { + result += `export const ${tableName}Relation = relations(${tableName}, ({${hasOne ? 'one' : ''}${ + hasOne && hasMany ? ', ' : '' + }${hasMany ? 'many' : ''}}) => ({ + ${relationsObjAsStr} + }));\n`; + } + }); + + return result; +}; + +function generateIdentityParams(column: Column) { + if (column.identity === null) return ''; + const identity = column.identity; + + const tuples = []; + if (identity.name !== defaultNameForIdentitySequence(column.table, column.name)) { + tuples.push(['name', `"${identity.name}"`]); + } + + if (identity.startWith && defaults.identity.startWith !== identity.startWith) { + tuples.push(['startWith', identity.startWith]); + } + if (identity.increment && defaults.identity.increment !== identity.increment) { + tuples.push(['increment', identity.increment]); + } + if (identity.minValue && defaults.identity.min !== identity.minValue) tuples.push(['minValue', identity.minValue]); + if (identity.maxValue && defaults.identity.maxFor(column.type) !== identity.maxValue) { + tuples.push(['maxValue', identity.maxValue]); + } + if (identity.cache && defaults.identity.cache !== identity.cache) tuples.push(['cache', identity.cache]); + if (identity.cycle) tuples.push(['cycle', identity.cycle]); + + const params = tuples.length > 0 ? `{ ${tuples.map((x) => `${x[0]}: ${x[1]}`).join(' ,')} }` : ''; + + if (identity?.type === 'always') { + return `.generatedAlwaysAsIdentity(${params})`; + } + return `.generatedByDefaultAsIdentity(${params})`; +} + +export const paramNameFor = (name: string, schema?: string | null) => { + const schemaSuffix = schema && schema !== 'public' ? `In${schema.capitalise()}` : ''; + return `${name}${schemaSuffix}`; +}; + +// prev: schemaToTypeScript +export const ddlToTypeScript = ( + ddl: PostgresDDL, + columnsForViews: ViewColumn[], + casing: Casing, + mode: 'pg' | 'gel', +) => { + const tableFn = `${mode}Table`; + for (const fk of ddl.fks.list()) { + relations.add(`${fk.table}-${fk.tableTo}`); + } + + const schemas = Object.fromEntries( + ddl.schemas.list().filter((it) => it.name !== 'public').map((it) => { + return [it.name, withCasing(it.name, casing)]; + }), + ); + + const enumTypes = new Set(ddl.enums.list().map((x) => `${x.schema}.${x.name}`)); + + const imports = new Set(); + const vcs = columnsForViews.map((it) => ({ entityType: 'viewColumns' as const, ...it })); + const entities = [...ddl.entities.list(), ...vcs]; + for (const x of entities) { + if (x.entityType === 'schemas' && x.name !== 'public') imports.add('pgSchema'); + if (x.entityType === 'enums' && x.schema === 'public') imports.add('pgEnum'); + if (x.entityType === 'tables') imports.add(tableFn); + + if (x.entityType === 'indexes') { + if (x.isUnique) imports.add('uniqueIndex'); + else imports.add('index'); + } + + if (x.entityType === 'fks') { + imports.add('foreignKey'); + + if (isCyclic(x) && !isSelf(x)) imports.add('type AnyPgColumn'); + } + if (x.entityType === 'pks') imports.add('primaryKey'); + if (x.entityType === 'uniques') imports.add('unique'); + if (x.entityType === 'checks') imports.add('check'); + if (x.entityType === 'views' && x.schema === 'public') { + if (x.materialized) imports.add('pgMaterializedView'); + else imports.add('pgView'); + } + + if (x.entityType === 'columns' || x.entityType === 'viewColumns') { + let patched = x.type.replace('[]', ''); + const isEnum = Boolean(x.typeSchema); + const grammarType = typeFor(patched, isEnum); + imports.add(grammarType.drizzleImport()); + if (pgImportsList.has(patched)) imports.add(patched); + } + + if (x.entityType === 'sequences' && x.schema === 'public') imports.add('pgSequence'); + if (x.entityType === 'enums' && x.schema === 'public') imports.add('pgEnum'); + if (x.entityType === 'policies') imports.add('pgPolicy'); + if (x.entityType === 'roles') imports.add('pgRole'); + } + + const enumStatements = ddl.enums.list().map((it) => { + const enumSchema = schemas[it.schema]; + // const func = schema || schema === "public" ? "pgTable" : schema; + const paramName = paramNameFor(it.name, enumSchema); + + const func = enumSchema ? `${enumSchema}.enum` : 'pgEnum'; + + const values = Object.values(it.values) + .map((it) => escapeForTsLiteral(it)) + .join(', '); + return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", [${values}])\n`; + }) + .join('') + .concat('\n'); + + const sequencesStatements = ddl.sequences.list().map((it) => { + const seqSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, seqSchema); + + const func = seqSchema ? `${seqSchema}.sequence` : 'pgSequence'; + + let params = ''; + if (it.startWith) params += `startWith: "${it.startWith}", `; + if (it.incrementBy) params += `increment: "${it.incrementBy}", `; + if (it.minValue) params += `minValue: "${it.minValue}", `; + if (it.maxValue) params += `maxValue: "${it.maxValue}", `; + if (it.cacheSize) params += `cache: "${it.cacheSize}", `; + + if (it.cycle) params += `cycle: true, `; + else params += `cycle: false, `; + + params = params ? `, { ${trimChar(params.trim(), ',')} }` : ''; + + return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}"${params})\n`; + }) + .join('') + .concat(''); + + const schemaStatements = Object.entries(schemas).map((it) => { + return `export const ${it[1]} = pgSchema("${it[0]}");\n`; + }).join(''); + + const rolesNameToTsKey: Record = {}; + const rolesStatements = ddl.roles.list().map((it) => { + const identifier = withCasing(it.name, casing); + rolesNameToTsKey[it.name] = identifier; + const params = { + ...(it.createDb ? { createDb: true } : {}), + ...(it.createRole ? { createRole: true } : {}), + ...(it.inherit ? {} : { inherit: false }), + }; + const paramsString = inspect(params); + const comma = paramsString ? ', ' : ''; + + return `export const ${identifier} = pgRole("${it.name}"${comma}${paramsString});\n`; + }) + .join(''); + + const tableStatements = ddl.tables.list().map((it) => { + const tableSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, tableSchema); + const table = tableFromDDL(it, ddl); + const columns = ddl.columns.list({ schema: table.schema, table: table.name }); + const fks = ddl.fks.list({ schema: table.schema, table: table.name }); + + let func = tableSchema ? `${tableSchema}.table` : tableFn; + func += table.isRlsEnabled ? '.withRLS' : ''; + + let statement = `export const ${withCasing(paramName, casing)} = ${func}("${table.name}", {\n`; + statement += createTableColumns( + columns, + table.pk, + fks, + schemas, + casing, + ); + statement += '}'; + + // more than 2 fields or self reference or cyclic + // Andrii: I switched this one off until we will get custom names in .references() + const filteredFKs = table.fks.filter((it) => { + return it.columns.length > 1 || isSelf(it); + }); + + const hasCallback = table.indexes.length > 0 + || filteredFKs.length > 0 + || table.policies.length > 0 + || (table.pk && table.pk.columns.length > 1) + || table.uniques.length > 0 + || table.checks.length > 0; + + if (hasCallback) { + statement += ', '; + statement += '(table) => [\n'; + // TODO: or pk has non-default name + statement += table.pk && table.pk.columns.length > 1 ? createTablePK(table.pk, casing) : ''; + statement += createTableFKs(filteredFKs, schemas, casing); + statement += createTableIndexes(table.name, table.indexes, casing); + statement += createTableUniques(table.uniques, casing); + statement += createTablePolicies(table.policies, casing, rolesNameToTsKey); + statement += createTableChecks(table.checks, casing); + statement += ']'; + } + statement += ');'; + return statement; + }); + + const viewsStatements = Object.values(ddl.views.list()) + .map((it) => { + const viewSchema = schemas[it.schema]; + const paramName = paramNameFor(it.name, viewSchema); + + // TODO: casing? + const func = it.schema !== 'public' + ? (it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view`) + : it.materialized + ? 'pgMaterializedView' + : 'pgView'; + + const withOption = Object.fromEntries(Object.entries(it.with ?? {}).filter((x) => x[1] !== null)); + const as = `sql\`${it.definition}\``; + const tablespace = it.tablespace ?? ''; + + const viewColumns = columnsForViews.filter((x) => x.schema === it.schema && x.view === it.name); + const columns = createViewColumns( + viewColumns, + enumTypes, + casing, + ); + + let statement = `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", {${columns}})`; + statement += tablespace ? `.tablespace("${tablespace}")` : ''; + statement += Object.keys(withOption).length > 0 ? `.with(${JSON.stringify(withOption)})` : ''; + statement += `.as(${as});`; + + return statement; + }) + .join('\n\n'); + + const uniquePgImports = [...imports]; + + const importsTs = `import { ${ + uniquePgImports.join( + ', ', + ) + } } from "drizzle-orm/pg-core" +import { sql } from "drizzle-orm"\n\n`; + + let decalrations = schemaStatements; + decalrations += rolesStatements; + decalrations += enumStatements; + decalrations += sequencesStatements; + decalrations += '\n'; + decalrations += tableStatements.join('\n\n'); + decalrations += '\n'; + decalrations += viewsStatements; + + const file = importsTs + decalrations; + + // for drizzle studio query runner + const schemaEntry = ` + { + ${ + Object.values(ddl.tables) + .map((it) => withCasing(it.name, casing)) + .join(',\n') + } + } + `; + + return { file, imports: importsTs, decalrations, schemaEntry }; +}; + +const isCyclic = (fk: ForeignKey) => { + const key = `${fk.table}-${fk.tableTo}`; + const reverse = `${fk.tableTo}-${fk.table}`; + return relations.has(key) && relations.has(reverse); +}; + +const isSelf = (fk: ForeignKey) => { + return fk.table === fk.tableTo; +}; + +const column = ( + type: string, + dimensions: number, + name: string, + enumTypes: Set, + typeSchema: string | null, + casing: Casing, + def: Column['default'], +) => { + const isEnum = Boolean(typeSchema); + const grammarType = typeFor(type, isEnum); + + const { options, default: defaultValue, customType } = dimensions > 0 + ? grammarType.toArrayTs(type, def ?? null) + : grammarType.toTs(type, def ?? null); + + const dbName = dbColumnName({ name, casing }); + const opts = inspect(options); + const comma = (dbName && opts) ? ', ' : ''; + + let columnStatement = `${withCasing(name, casing)}: ${ + isEnum ? withCasing(paramNameFor(type, typeSchema), casing) : grammarType.drizzleImport() + }${customType ? `({ dataType: () => '${customType}' })` : ''}(${dbName}${comma}${opts})`; + columnStatement += '.array()'.repeat(dimensions); + + if (defaultValue) columnStatement += `.default(${defaultValue})`; + return columnStatement; + + if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { + let out = `${withCasing(name, casing)}: ${withCasing(paramNameFor(type.replace('[]', ''), typeSchema), casing)}(${ + dbColumnName({ name, casing }) + })`; + return out; + } + + // if (lowered === 'serial') { + // return `${withCasing(name, casing)}: serial(${dbColumnName({ name, casing })})`; + // } + + // if (lowered === 'smallserial') { + // return `${withCasing(name, casing)}: smallserial(${dbColumnName({ name, casing })})`; + // } + + // if (lowered === 'bigserial') { + // return `${withCasing(name, casing)}: bigserial(${ + // dbColumnName({ name, casing, withMode: true }) + // }{ mode: "bigint" })`; + // } + + // if (lowered === 'integer') { + // let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === 'smallint') { + // let out = `${withCasing(name, casing)}: smallint(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === 'bigint') { + // let out = `// You can use { mode: "bigint" } if numbers are exceeding js number limitations\n\t`; + // const mode = def && def.type === 'bigint' ? 'bigint' : 'number'; + // out += `${withCasing(name, casing)}: bigint(${dbColumnName({ name, casing, withMode: true })}{ mode: '${mode}' })`; + // return out; + // } + + // if (lowered === 'boolean') { + // let out = `${withCasing(name, casing)}: boolean(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === 'double precision') { + // let out = `${withCasing(name, casing)}: doublePrecision(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === 'real') { + // let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === 'uuid') { + // let out = `${withCasing(name, casing)}: uuid(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === 'numeric') { + // let params: { precision?: number; scale?: number; mode?: any } = {}; + + // if (options) { + // const [p, s] = options.split(','); + // if (p) params['precision'] = Number(p); + // if (s) params['scale'] = Number(s); + // } + + // let mode = def !== null && def.type === 'bigint' + // ? 'bigint' + // : def !== null && def.type === 'string' + // ? 'number' + // : 'number'; + + // if (mode) params['mode'] = mode; + + // let out = Object.keys(params).length > 0 + // ? `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing, withMode: true })}${ + // JSON.stringify(params) + // })` + // : `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing })})`; + + // return out; + // } + + // if (lowered === 'timestamp') { + // const withTimezone = lowered.includes('with time zone'); + // // const split = lowered.split(" "); + // const precision = options + // ? Number(options) + // : null; + + // const params = timeConfig({ + // precision, + // withTimezone, + // mode: "'string'", + // }); + + // let out = params + // ? `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing, withMode: true })}${params})` + // : `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing })})`; + + // return out; + // } + + // if (lowered === 'time') { + // const withTimezone = lowered.includes('with time zone'); + + // let precision = options + // ? Number(options) + // : null; + + // const params = timeConfig({ precision, withTimezone }); + + // let out = params + // ? `${withCasing(name, casing)}: time(${dbColumnName({ name, casing, withMode: true })}${params})` + // : `${withCasing(name, casing)}: time(${dbColumnName({ name, casing })})`; + + // return out; + // } + + // if (lowered.startsWith('interval')) { + // // const withTimezone = lowered.includes("with time zone"); + // // const split = lowered.split(" "); + // // let precision = split.length >= 2 ? Number(split[1].substring(1, 2)) : null; + // // precision = precision ? precision : null; + + // const suffix = options ? `(${options})` : ''; + // const params = intervalConfig(`${lowered}${suffix}`); + // let out = options + // ? `${withCasing(name, casing)}: interval(${dbColumnName({ name, casing, withMode: true })}${params})` + // : `${withCasing(name, casing)}: interval(${dbColumnName({ name, casing })})`; + + // return out; + // } + + // if (lowered === 'date') { + // let out = `${withCasing(name, casing)}: date(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('text')) { + // let out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('jsonb')) { + // let out = `${withCasing(name, casing)}: jsonb(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('json')) { + // let out = `${withCasing(name, casing)}: json(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('inet')) { + // let out = `${withCasing(name, casing)}: inet(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('cidr')) { + // let out = `${withCasing(name, casing)}: cidr(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('macaddr8')) { + // let out = `${withCasing(name, casing)}: macaddr8(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('macaddr')) { + // let out = `${withCasing(name, casing)}: macaddr(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === 'varchar') { + // let out: string; + // if (options) { // size + // out = `${withCasing(name, casing)}: varchar(${ + // dbColumnName({ name, casing, withMode: true }) + // }{ length: ${options} })`; + // } else { + // out = `${withCasing(name, casing)}: varchar(${dbColumnName({ name, casing })})`; + // } + + // return out; + // } + + // if (lowered === ('point')) { + // let out: string = `${withCasing(name, casing)}: point(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('line')) { + // let out: string = `${withCasing(name, casing)}: line(${dbColumnName({ name, casing })})`; + // return out; + // } + + // if (lowered === ('geometry')) { + // let out: string = ''; + + // let isGeoUnknown = false; + + // if (lowered.length !== 8) { + // const geometryOptions = options ? options.split(',') : []; + // if (geometryOptions.length === 1 && geometryOptions[0] !== '') { + // out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing, withMode: true })}{ type: "${ + // geometryOptions[0] + // }" })`; + // } else if (geometryOptions.length === 2) { + // out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing, withMode: true })}{ type: "${ + // geometryOptions[0] + // }", srid: ${geometryOptions[1]} })`; + // } else { + // isGeoUnknown = true; + // } + // } else { + // out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing })})`; + // } + + // if (isGeoUnknown) { + // // TODO: + // let unknown = + // `// failed to parse geometry type because found more than 2 options inside geometry function '${type}'\n// Introspect is currently supporting only type and srid options\n`; + // unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; + // return unknown; + // } + // return out; + // } + + // if (lowered === ('vector')) { + // let out: string; + // if (options) { + // out = `${withCasing(name, casing)}: vector(${ + // dbColumnName({ name, casing, withMode: true }) + // }{ dimensions: ${options} })`; + // } else { + // out = `${withCasing(name, casing)}: vector(${dbColumnName({ name, casing })})`; + // } + + // return out; + // } + + // if (lowered === ('char')) { + // let out: string; + // if (options) { + // out = `${withCasing(name, casing)}: char(${ + // dbColumnName({ name, casing, withMode: true }) + // }{ length: ${options} })`; + // } else { + // out = `${withCasing(name, casing)}: char(${dbColumnName({ name, casing })})`; + // } + + // return out; + // } + + // if (lowered.startsWith('bit')) { + // return `${withCasing(name, casing)}: bit(${dbColumnName({ name, casing })}{ dimensions: ${options}})`; + // } + + let unknown = `// TODO: failed to parse database type '${type}'\n`; + unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; + return unknown; +}; + +const createViewColumns = ( + columns: ViewColumn[], + enumTypes: Set, + casing: Casing, +) => { + let statement = ''; + + columns.forEach((it) => { + const columnStatement = column( + it.type, + it.dimensions, + it.name, + enumTypes, + it.typeSchema, + casing, + null, + ); + statement += '\t'; + statement += columnStatement; + // Provide just this in column function + statement += '.array()'.repeat(it.dimensions + it.typeDimensions); + statement += it.notNull ? '.notNull()' : ''; + statement += ',\n'; + }); + return statement; +}; + +const createTableColumns = ( + columns: Column[], + primaryKey: PrimaryKey | null, + fks: ForeignKey[], + schemas: Record, + casing: Casing, +): string => { + let statement = ''; + + // no self refs and no cyclic + const oneColumnsFKs = Object.values(fks) + .filter((it) => { + return !isSelf(it); + }) + .filter((it) => it.columns.length === 1); + + const fkByColumnName = oneColumnsFKs.reduce((res, it) => { + const arr = res[it.columns[0]] || []; + arr.push(it); + res[it.columns[0]] = arr; + return res; + }, {} as Record); + + for (const it of columns) { + const { name, type, dimensions, default: def, identity, generated, typeSchema } = it; + const stripped = type.replaceAll('[]', ''); + const isEnum = Boolean(typeSchema); + const grammarType = typeFor(stripped, isEnum); + + const { options, default: defaultValue, customType } = dimensions > 0 + ? grammarType.toArrayTs(type, def ?? null) + : grammarType.toTs(type, def ?? null); + + const dbName = dbColumnName({ name, casing }); + const opts = inspect(options); + const comma = (dbName && opts) ? ', ' : ''; + + const pk = primaryKey && primaryKey.columns.length === 1 && primaryKey.columns[0] === it.name + ? primaryKey + : null; + + let columnStatement = `${withCasing(name, casing)}: ${ + isEnum ? withCasing(paramNameFor(type, typeSchema), casing) : grammarType.drizzleImport() + }${customType ? `({ dataType: () => '${customType}' })` : ''}(${dbName}${comma}${opts})`; + columnStatement += '.array()'.repeat(dimensions); + if (defaultValue) columnStatement += defaultValue.startsWith('.') ? defaultValue : `.default(${defaultValue})`; + if (pk) columnStatement += '.primaryKey()'; + if (it.notNull && !it.identity && !pk) columnStatement += '.notNull()'; + if (identity) columnStatement += generateIdentityParams(it); + if (generated) columnStatement += `.generatedAlwaysAs(sql\`${generated.as}\`)`; + + statement += '\t'; + statement += columnStatement; + // Provide just this in column function + + const fks = fkByColumnName[it.name]; + // Andrii: I switched it off until we will get a custom naem setting in references + if (fks) { + const fksStatement = fks + .map((it) => { + const onDelete = it.onDelete && it.onDelete !== 'NO ACTION' ? it.onDelete.toLowerCase() : null; + const onUpdate = it.onUpdate && it.onUpdate !== 'NO ACTION' ? it.onUpdate.toLowerCase() : null; + const params = { onDelete, onUpdate }; + + const typeSuffix = isCyclic(it) ? ': AnyPgColumn' : ''; + + const paramsStr = objToStatement2(params); + const tableSchema = schemas[it.schemaTo || '']; + const paramName = paramNameFor(it.tableTo, tableSchema); + if (paramsStr) { + return `.references(()${typeSuffix} => ${ + withCasing( + paramName, + casing, + ) + }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; + } + return `.references(()${typeSuffix} => ${ + withCasing( + paramName, + casing, + ) + }.${withCasing(it.columnsTo[0], casing)})`; + }) + .join(''); + statement += fksStatement; + } + + statement += ',\n'; + } + + return statement; +}; + +const createTableIndexes = (tableName: string, idxs: Index[], casing: Casing): string => { + let statement = ''; + + idxs.forEach((it) => { + // TODO: cc: @AndriiSherman we have issue when index is called as table called + // let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; + // idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; + // idxKey = withCasing(idxKey, casing); + // const indexGeneratedName = indexName( + // tableName, + // it.columns.map((it) => it.value), + // ); + + const name = it.nameExplicit ? it.name : ''; + // const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += it.isUnique ? '\tuniqueIndex(' : '\tindex('; + statement += name ? `"${name}")` : ')'; + statement += `${it.concurrently ? `.concurrently()` : ''}`; + + statement += `.using("${it.method}", ${ + it.columns + .map((it) => { + if (it.isExpression) { + return `sql\`${it.isExpression}\``; + } else { + return `table.${withCasing(it.value, casing)}${it.asc ? '.asc()' : '.desc()'}${ + it.nullsFirst ? '.nullsFirst()' : '.nullsLast()' + }${ + it.opclass && !it.opclass.default + ? `.op("${it.opclass.name}")` + : '' + }`; + } + }) + .join(', ') + })`; + statement += it.where ? `.where(sql\`${it.where}\`)` : ''; + + statement += it.with && Object.keys(it.with).length > 0 ? `.with(${it.with})` : ''; + statement += `,\n`; + }); + + return statement; +}; + +const createTablePK = (it: PrimaryKey, casing: Casing): string => { + let statement = '\tprimaryKey({ columns: ['; + statement += `${ + it.columns + .map((c) => { + return `table.${withCasing(c, casing)}`; + }) + .join(', ') + }`; + statement += `]${it.nameExplicit ? `, name: "${it.name}"` : ''}}),\n`; + return statement; +}; + +// get a map of db role name to ts key +// if to by key is in this map - no quotes, otherwise - quotes + +const createTablePolicies = ( + policies: Policy[], + casing: Casing, + rolesNameToTsKey: Record = {}, +): string => { + let statement = ''; + + policies.forEach((it) => { + const mappedItTo = it.roles.map((v) => { + return rolesNameToTsKey[v] ? withCasing(rolesNameToTsKey[v], casing) : `"${v}"`; + }); + + const tuples = []; + if (it.as === 'RESTRICTIVE') tuples.push(['as', `"${it.as.toLowerCase()}"`]); + if (it.for !== 'ALL') tuples.push(['for', `"${it.for.toLowerCase()}"`]); + if (!(mappedItTo.length === 1 && mappedItTo[0] === '"public"')) { + tuples.push([ + 'to', + `[${mappedItTo.map((x) => `${x}`).join(', ')}]`, + ]); + } + if (it.using !== null) tuples.push(['using', `sql\`${it.using}\``]); + if (it.withCheck !== null) tuples.push(['withCheck', `sql\`${it.withCheck}\``]); + const opts = tuples.length > 0 ? `, { ${tuples.map((x) => `${x[0]}: ${x[1]}`).join(', ')} }` : ''; + statement += `\n\tpgPolicy("${it.name}"${opts}),\n`; + }); + + return statement; +}; + +const createTableUniques = ( + unqs: UniqueConstraint[], + casing: Casing, +): string => { + let statement = ''; + + unqs.forEach((it) => { + statement += '\tunique('; + statement += it.nameExplicit ? `"${it.name}")` : ')'; + statement += `.on(${it.columns.map((it) => `table.${withCasing(it, casing)}`).join(', ')})`; + statement += it.nullsNotDistinct ? `.nullsNotDistinct()` : ''; + statement += `,`; + }); + + return statement; +}; + +const createTableChecks = ( + checkConstraints: CheckConstraint[], + _casing: Casing, +) => { + let statement = ''; + + checkConstraints.forEach((it) => { + statement += 'check('; + statement += `"${it.name}", `; + statement += `sql\`${it.value}\`)`; + statement += `,`; + }); + + return statement; +}; + +const createTableFKs = (fks: ForeignKey[], schemas: Record, casing: Casing): string => { + let statement = ''; + + fks.forEach((it) => { + const tableSchema = it.schemaTo === 'public' ? '' : schemas[it.schemaTo]; + const paramName = paramNameFor(it.tableTo, tableSchema); + + const isSelf = it.tableTo === it.table; + const tableTo = isSelf ? 'table' : `${withCasing(paramName, casing)}`; + statement += `\tforeignKey({\n`; + statement += `\t\tcolumns: [${it.columns.map((i) => `table.${withCasing(i, casing)}`).join(', ')}],\n`; + statement += `\t\tforeignColumns: [${ + it.columnsTo.map((i) => `${tableTo}.${withCasing(i, casing)}`).join(', ') + }],\n`; + statement += it.nameExplicit ? `\t\tname: "${it.name}"\n` : ''; + statement += `\t})`; + + statement += it.onUpdate && it.onUpdate !== 'NO ACTION' ? `.onUpdate("${it.onUpdate}")` : ''; + statement += it.onDelete && it.onDelete !== 'NO ACTION' ? `.onDelete("${it.onDelete}")` : ''; + statement += `,\n`; + }); + return statement; +}; diff --git a/drizzle-kit/src/dialects/postgres/versions.ts b/drizzle-kit/src/dialects/postgres/versions.ts new file mode 100644 index 0000000000..756b87934e --- /dev/null +++ b/drizzle-kit/src/dialects/postgres/versions.ts @@ -0,0 +1,451 @@ +import { createDDL, type Index } from '../../dialects/postgres/ddl'; +import { + defaultNameForIndex, + defaultNameForPK, + defaultNameForUnique, + defaults, + trimDefaultValueSuffix, +} from '../../dialects/postgres/grammar'; +import type { + Column, + Index as LegacyIndex, + PgSchema, + PgSchemaV4, + PgSchemaV5, + PgSchemaV6, + PgSchemaV7, + PostgresSnapshot, + TableV5, +} from '../../dialects/postgres/snapshot'; +import { getOrNull } from '../../dialects/utils'; + +export const upToV8 = (it: Record): { snapshot: PostgresSnapshot; hints: string[] } => { + if (Number(it.version) < 7) return upToV8(updateUpToV7(it)); + const json = it as PgSchemaV7; + + const hints = [] as string[]; + + const ddl = createDDL(); + + for (const schema of Object.values(json.schemas)) { + ddl.schemas.push({ name: schema }); + } + + for (const seq of Object.values(json.sequences)) { + ddl.sequences.push({ + schema: seq.schema!, + name: seq.name, + startWith: seq.startWith ?? null, + incrementBy: seq.increment ?? null, + minValue: seq.minValue ?? null, + maxValue: seq.maxValue ?? null, + cacheSize: seq.cache ? Number(seq.cache) : null, + cycle: seq.cycle ?? null, + }); + } + + for (const table of Object.values(json.tables)) { + const schema = table.schema || 'public'; + + const isRlsEnabled = table.isRLSEnabled || Object.keys(table.policies).length > 0 + || Object.values(json.policies).some((it) => it.on === table.name && (it.schema ?? 'public') === schema); + + ddl.tables.push({ + schema, + name: table.name, + isRlsEnabled: isRlsEnabled, + }); + + for (const column of Object.values(table.columns)) { + if (column.primaryKey) { + ddl.pks.push({ + schema, + table: table.name, + columns: [column.name], + name: defaultNameForPK(table.name), + nameExplicit: false, + }); + } + + const [baseType, dimensions] = extractBaseTypeAndDimensions(column.type); + + let fixedType = baseType.startsWith('numeric(') ? baseType.replace(', ', ',') : baseType; + + ddl.columns.push({ + schema, + table: table.name, + name: column.name, + type: fixedType, + notNull: column.notNull, + typeSchema: column.typeSchema ?? null, // TODO: if public - empty or missing? + dimensions, + generated: column.generated ?? null, + identity: column.identity + ? { + name: column.identity.name, + type: column.identity.type, + startWith: column.identity.startWith ?? null, + minValue: column.identity.minValue ?? null, + maxValue: column.identity.maxValue ?? null, + increment: column.identity.increment ?? null, + cache: column.identity.cache ? Number(column.identity.cache) : null, + cycle: column.identity.cycle ?? null, + } + : null, + default: typeof column.default === 'undefined' ? null : trimDefaultValueSuffix(String(column.default)), + }); + } + + for (const pk of Object.values(table.compositePrimaryKeys)) { + const nameExplicit = `${table.name}_${pk.columns.join('_')}_pk` !== pk.name; + if (!nameExplicit) { + hints.push(`update pk name: ${pk.name} -> ${defaultNameForPK(table.name)}`); + } + ddl.pks.push({ + schema: schema, + table: table.name, + name: pk.name, + columns: pk.columns, + nameExplicit, // TODO: ?? + }); + } + + for (const unique of Object.values(table.uniqueConstraints)) { + const nameExplicit = `${table.name}_${unique.columns.join('_')}_unique` !== unique.name; + if (!nameExplicit) { + hints.push(`update unique name: ${unique.name} -> ${defaultNameForUnique(table.name, ...unique.columns)}`); + } + + ddl.uniques.push({ + schema, + table: table.name, + columns: unique.columns, + name: unique.name, + nameExplicit: nameExplicit, + nullsNotDistinct: unique.nullsNotDistinct ?? defaults.nullsNotDistinct, + }); + } + + for (const check of Object.values(table.checkConstraints)) { + ddl.checks.push({ + schema, + table: table.name, + name: check.name, + value: check.value, + }); + } + + for (const idx of Object.values(table.indexes)) { + const columns: Index['columns'][number][] = idx.columns.map((it) => { + return { + value: it.expression, + isExpression: it.isExpression, + asc: it.asc, + nullsFirst: it.nulls ? it.nulls !== 'last' : false, + opclass: it.opclass + ? { + name: it.opclass, + default: false, + } + : null, + }; + }); + + const nameExplicit = columns.some((it) => it.isExpression === true) + || `${table.name}_${columns.map((it) => it.value).join('_')}_index` !== idx.name; + + if (!nameExplicit) { + hints.push( + `rename index name: ${idx.name} -> ${defaultNameForIndex(table.name, idx.columns.map((x) => x.expression))}`, + ); + } + + ddl.indexes.push({ + schema, + table: table.name, + name: idx.name, + columns, + isUnique: idx.isUnique, + method: idx.method, + concurrently: idx.concurrently, + where: idx.where ?? null, + with: idx.with && Object.keys(idx.with).length > 0 + ? Object.entries(idx.with).map((it) => `${it[0]}=${it[1]}`).join(',') + : '', + nameExplicit, + }); + } + + for (const fk of Object.values(table.foreignKeys)) { + const nameExplicit = + `${fk.tableFrom}_${fk.columnsFrom.join('_')}_${fk.tableTo}_${fk.columnsTo.join('_')}_fk` !== fk.name; + const name = fk.name.length < 63 ? fk.name : fk.name.slice(0, 63); + ddl.fks.push({ + schema, + name, + nameExplicit, + table: fk.tableFrom, + columns: fk.columnsFrom, + schemaTo: fk.schemaTo || 'public', + tableTo: fk.tableTo, + columnsTo: fk.columnsTo, + onDelete: fk.onDelete?.toUpperCase() as any ?? 'NO ACTION', + onUpdate: fk.onUpdate?.toUpperCase() as any ?? 'NO ACTION', + }); + } + + for (const policy of Object.values(table.policies)) { + ddl.policies.push({ + schema, + table: table.name, + name: policy.name, + as: policy.as ?? 'PERMISSIVE', + for: policy.for ?? 'ALL', + roles: policy.to ?? [], + using: policy.using ?? null, + withCheck: policy.withCheck ?? null, + }); + } + } + + for (const en of Object.values(json.enums)) { + ddl.enums.push({ schema: en.schema, name: en.name, values: en.values }); + } + + for (const role of Object.values(json.roles)) { + ddl.roles.push({ + name: role.name, + createRole: role.createRole, + createDb: role.createDb, + inherit: role.inherit, + bypassRls: null, + canLogin: null, + connLimit: null, + password: null, + replication: null, + superuser: null, + validUntil: null, + }); + } + + for (const policy of Object.values(json.policies)) { + ddl.policies.push({ + schema: policy.schema ?? 'public', + table: policy.on!, + name: policy.name, + as: policy.as ?? 'PERMISSIVE', + roles: policy.to ?? [], + for: policy.for ?? 'ALL', + using: policy.using ?? null, + withCheck: policy.withCheck ?? null, + }); + } + + for (const v of Object.values(json.views)) { + if (v.isExisting) continue; + + const opt = v.with; + ddl.views.push({ + schema: v.schema, + name: v.name, + definition: v.definition ?? null, + tablespace: v.tablespace ?? null, + withNoData: v.withNoData ?? null, + using: v.using ?? null, + with: opt + ? { + checkOption: getOrNull(opt, 'checkOption'), + securityBarrier: getOrNull(opt, 'securityBarrier'), + securityInvoker: getOrNull(opt, 'securityInvoker'), + autovacuumEnabled: getOrNull(opt, 'autovacuumEnabled'), + autovacuumFreezeMaxAge: getOrNull(opt, 'autovacuumFreezeMaxAge'), + autovacuumFreezeMinAge: getOrNull(opt, 'autovacuumFreezeMinAge'), + autovacuumFreezeTableAge: getOrNull(opt, 'autovacuumFreezeTableAge'), + autovacuumMultixactFreezeMaxAge: getOrNull(opt, 'autovacuumMultixactFreezeMaxAge'), + autovacuumMultixactFreezeMinAge: getOrNull(opt, 'autovacuumMultixactFreezeMinAge'), + autovacuumMultixactFreezeTableAge: getOrNull(opt, 'autovacuumMultixactFreezeTableAge'), + autovacuumVacuumCostDelay: getOrNull(opt, 'autovacuumVacuumCostDelay'), + autovacuumVacuumCostLimit: getOrNull(opt, 'autovacuumVacuumCostLimit'), + autovacuumVacuumScaleFactor: getOrNull(opt, 'autovacuumVacuumScaleFactor'), + autovacuumVacuumThreshold: getOrNull(opt, 'autovacuumVacuumThreshold'), + fillfactor: getOrNull(opt, 'fillfactor'), + logAutovacuumMinDuration: getOrNull(opt, 'logAutovacuumMinDuration'), + parallelWorkers: getOrNull(opt, 'parallelWorkers'), + toastTupleTarget: getOrNull(opt, 'toastTupleTarget'), + userCatalogTable: getOrNull(opt, 'userCatalogTable'), + vacuumIndexCleanup: getOrNull(opt, 'vacuumIndexCleanup'), + vacuumTruncate: getOrNull(opt, 'vacuumTruncate'), + } + : null, + materialized: v.materialized, + }); + } + + const renames = [ + ...Object.entries(json._meta.tables).map(([k, v]) => `${v}->${k}`), + ...Object.entries(json._meta.schemas).map(([k, v]) => `${v}->${k}`), + ...Object.entries(json._meta.columns).map(([k, v]) => `${v}->${k}`), + ]; + + return { + snapshot: { + id: json.id, + prevIds: [json.prevId], + version: '8', + dialect: 'postgres', + ddl: ddl.entities.list(), + renames, + }, + hints, + }; +}; + +export const extractBaseTypeAndDimensions = (it: string): [string, number] => { + const dimensionRegex = /\[[^\]]*\]/g; // matches any [something], including [] + const count = (it.match(dimensionRegex) || []).length; + const baseType = it.replace(dimensionRegex, ''); + return [baseType, count]; +}; + +// Changed index format stored in snapshot for PostgreSQL in 0.22.0 +export const updateUpToV7 = (it: Record): PgSchema => { + if (Number(it.version) < 6) return updateUpToV7(updateUpToV6(it)); + const schema = it as PgSchemaV6; + + const tables = Object.fromEntries( + Object.entries(schema.tables).map((it) => { + const table = it[1]; + const mappedIndexes = Object.fromEntries( + Object.entries(table.indexes).map((idx) => { + const { columns, ...rest } = idx[1]; + const mappedColumns = columns.map((it) => { + return { + expression: it, + isExpression: false, + asc: true, + nulls: 'last', + opClass: undefined, + }; + }); + return [idx[0], { columns: mappedColumns, with: {}, ...rest }]; + }), + ); + return [it[0], { ...table, indexes: mappedIndexes, policies: {}, isRLSEnabled: false, checkConstraints: {} }]; + }), + ); + + return { + ...schema, + version: '7', + dialect: 'postgresql', + sequences: {}, + tables: tables, + policies: {}, + views: {}, + roles: {}, + }; +}; + +export const updateUpToV6 = (it: Record): PgSchemaV6 => { + if (Number(it.version) < 5) return updateUpToV6(updateToV5(it)); + const schema = it as PgSchemaV6; + + const tables = Object.fromEntries( + Object.entries(schema.tables).map((it) => { + const table = it[1]; + const schema = table.schema || 'public'; + return [`${schema}.${table.name}`, table]; + }), + ); + const enums = Object.fromEntries( + Object.entries(schema.enums).map((it) => { + const en = it[1]; + return [ + `public.${en.name}`, + { + name: en.name, + schema: 'public', + values: Object.values(en.values), + }, + ]; + }), + ); + return { + ...schema, + version: '6', + dialect: 'postgresql', + tables: tables, + enums, + }; +}; + +// major migration with of folder structure, etc... +export const updateToV5 = (it: Record): PgSchemaV5 => { + if (Number(it.version) < 4) throw new Error('Snapshot version <4'); + const obj = it as PgSchemaV4; + + const mappedTables: Record = {}; + for (const [key, table] of Object.entries(obj.tables)) { + const mappedColumns: Record = {}; + for (const [ckey, column] of Object.entries(table.columns)) { + let newDefault: any = column.default; + let newType: string = column.type; + if (column.type.toLowerCase() === 'date') { + if (typeof column.default !== 'undefined') { + if (column.default.startsWith("'") && column.default.endsWith("'")) { + newDefault = `'${ + column.default + .substring(1, column.default.length - 1) + .split('T')[0] + }'`; + } else { + newDefault = column.default.split('T')[0]; + } + } + } else if (column.type.toLowerCase().startsWith('timestamp')) { + if (typeof column.default !== 'undefined') { + if (column.default.startsWith("'") && column.default.endsWith("'")) { + newDefault = `'${ + column.default + .substring(1, column.default.length - 1) + .replace('T', ' ') + .slice(0, 23) + }'`; + } else { + newDefault = column.default.replace('T', ' ').slice(0, 23); + } + } + newType = column.type + .toLowerCase() + .replace('timestamp (', 'timestamp('); + } else if (column.type.toLowerCase().startsWith('time')) { + newType = column.type.toLowerCase().replace('time (', 'time('); + } else if (column.type.toLowerCase().startsWith('interval')) { + newType = column.type.toLowerCase().replace(' (', '('); + } + mappedColumns[ckey] = { ...column, default: newDefault, type: newType }; + } + + mappedTables[key] = { + ...table, + columns: mappedColumns, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }; + } + + return { + version: '5', + dialect: obj.dialect, + id: obj.id, + prevIds: obj.prevIds, + tables: mappedTables, + enums: obj.enums, + schemas: obj.schemas, + _meta: { + schemas: {} as Record, + tables: {} as Record, + columns: {} as Record, + }, + }; +}; diff --git a/drizzle-kit/src/dialects/pull-utils.ts b/drizzle-kit/src/dialects/pull-utils.ts new file mode 100644 index 0000000000..b5ebb1874c --- /dev/null +++ b/drizzle-kit/src/dialects/pull-utils.ts @@ -0,0 +1,185 @@ +import { Minimatch } from 'minimatch'; +import type { EntitiesFilter, ExtensionsFilter, SchemasFilter, TablesFilter } from 'src/cli/validations/cli'; +import { assertUnreachable } from 'src/utils'; +import type { Dialect } from 'src/utils/schemaValidator'; + +export type Schema = { type: 'schema'; name: string }; +export type Table = { type: 'table'; schema: string | false; name: string }; +export type Role = { type: 'role'; name: string }; + +/* + there's a double edge sword with having narrow list here + on one hand we can filter other entities through these 3 types + + on the other hand when debugged - you see schema/table filter invocation + for all other types like enums, sequences, etc. + + I will leave this as is and in introspect I will rely on introspected schemas and tables + to filter list of dependent entities, that'd probably be the go to +*/ +export type KitEntity = Schema | Table | Role; + +export type EntityFilter = (it: KitEntity) => boolean; + +export const prepareEntityFilter = ( + dialect: Dialect, + params: { + tables: TablesFilter; + schemas: SchemasFilter; + entities: EntitiesFilter; + extensions: ExtensionsFilter; + }, + /* .existing() in drizzle schema */ + existingEntities: (Schema | Table)[], +): EntityFilter => { + const tablesConfig = typeof params.tables === 'undefined' + ? [] + : typeof params.tables === 'string' + ? [params.tables] + : params.tables; + + const schemasConfig = typeof params.schemas === 'undefined' + ? [] + : typeof params.schemas === 'string' + ? [params.schemas] + : params.schemas; + + const existingSchemas = existingEntities.filter((x) => x.type === 'schema').map((x) => x.name); + const schemasFilter = prepareSchemasFitler(schemasConfig, existingSchemas); + + const postgisTablesGlobs = ['!geography_columns', '!geometry_columns', '!spatial_ref_sys']; + for (const ext of params.extensions ?? []) { + if (ext === 'postgis') tablesConfig.push(...postgisTablesGlobs); + else assertUnreachable(ext); + } + + const existingViews = existingEntities.filter((x) => x.type === 'table').map((x) => ({ + schema: x.schema, + name: x.name, + })); + const tablesFilter = prepareTablesFilter(tablesConfig, existingViews); + + const rolesFilter = prepareRolesFilter(params.entities); + + const filter = (it: KitEntity) => { + if (it.type === 'schema') return schemasFilter(it); + if (it.type === 'table') { + if (it.schema === false) return tablesFilter(it); + return schemasFilter({ type: 'schema', name: it.schema }) && tablesFilter(it); + } + if (it.type === 'role') return rolesFilter(it); + + assertUnreachable(it); + }; + + return (it) => { + const res = filter(it); + // console.log(res, it); + return res; + }; +}; + +const prepareSchemasFitler = (globs: string[], schemasExisting: string[]) => { + const filterForExisting = (it: Schema) => { + return !schemasExisting.some((x) => it.name === x); + }; + + const matchers = globs.map((it) => { + return new Minimatch(it); + }); + + if (matchers.length === 0 && schemasExisting.length === 0) return () => true; + if (matchers.length === 0) return filterForExisting; + + return (it: Schema) => { + if (!filterForExisting(it)) return false; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate && !matcher.match(it.name)) { + flags.push(false); + } else if (matcher.match(it.name)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; +}; + +const prepareTablesFilter = (globs: string[], existingViews: { schema: string | false; name: string }[]) => { + const existingFilter = (it: Table) => { + if (it.schema === false) return existingViews.some((x) => x.name === it.name); + return !existingViews.some((x) => x.schema === it.schema && x.name === it.name); + }; + + const matchers = globs.map((it) => { + return new Minimatch(it); + }); + if (matchers.length === 0 && existingViews.length === 0) return () => true; + if (matchers.length === 0) return existingFilter; + + const filter = (it: Table) => { + if (!existingFilter(it)) return false; + + let flags: boolean[] = []; + + for (let matcher of matchers) { + if (matcher.negate && !matcher.match(it.name)) { + flags.push(false); + } else if (matcher.match(it.name)) { + flags.push(true); + } + } + + if (flags.length > 0) { + return flags.every(Boolean); + } + return false; + }; + return filter; +}; + +const prepareRolesFilter = (entities: EntitiesFilter) => { + if (!entities || !entities.roles) return () => false; + + const roles = entities.roles; + const include: string[] = typeof roles === 'object' ? roles.include ?? [] : []; + const exclude: string[] = typeof roles === 'object' ? roles.exclude ?? [] : []; + const provider = typeof roles === 'object' ? roles.provider : undefined; + + if (provider === 'supabase') { + exclude.push( + 'anon', + 'authenticator', + 'authenticated', + 'service_role', + 'supabase_auth_admin', + 'supabase_storage_admin', + 'dashboard_user', + 'supabase_admin', + ); + } + + if (provider === 'neon') { + exclude.push('authenticated', 'anonymous'); + } + + const useRoles: boolean = typeof roles === 'boolean' ? roles : include.length > 0 || exclude.length > 0; + + if (!useRoles) return () => false; + if (!include.length && !exclude.length) return () => true; + + const rolesFilter: (it: { type: 'role'; name: string }) => boolean = (it) => { + const notExcluded = !exclude.length || !exclude.includes(it.name); + const included = !include.length || include.includes(it.name); + + return notExcluded && included; + }; + + return rolesFilter; +}; diff --git a/drizzle-kit/src/dialects/simpleValidator.ts b/drizzle-kit/src/dialects/simpleValidator.ts new file mode 100644 index 0000000000..1cf157e50e --- /dev/null +++ b/drizzle-kit/src/dialects/simpleValidator.ts @@ -0,0 +1,154 @@ +import type { Simplify } from '../utils'; + +export const array = (validate: (it: unknown) => boolean) => { + return { + type: {} as T, + validate, + }; +}; + +type StringLiteral = T extends string[] ? (string extends T[number] ? never : T[number]) : never; + +type SchemaType = + | 'string' + | 'string[]' + | 'number' + | 'boolean' + | 'array' + | 'record' + | number + | string[] + | ReturnType + | { [key: string]: SchemaType }; + +type InferType = T extends 'string' ? string + : T extends 'number' ? number + : T extends 'boolean' ? boolean + : T extends 'array' ? Array + : T extends 'record' ? Record + : T extends Array ? StringLiteral + : T extends string ? T + : T extends number ? T + : T extends boolean ? T + : T extends ReturnType> ? I[] + : T extends Record ? { [K in keyof T]: InferType } | null + : never; + +type ResultShape> = Simplify< + { + [K in keyof S]: InferType; + } +>; + +type ValidationResult = { + success: boolean; + data: T | null; + errors?: string[]; +}; + +const validatorFor = (schema: Record, path: string | undefined) => { + const validators = {} as Record string | string[] | null>; + for (const [key, value] of Object.entries(schema)) { + if (value === 'string') { + validators[key] = (it: unknown) => { + return typeof it === 'string' ? null : `Field '${path}${key}' must be a string`; + }; + } else if (value === 'number') { + validators[key] = (it: unknown) => { + return typeof it === 'number' ? null : `Field '${path}${key}' must be a number`; + }; + } else if (value === 'boolean') { + validators[key] = (it: unknown) => { + return typeof it === 'boolean' ? null : `Field '${path}${key}' must be a boolean`; + }; + } else if (value === 'array') { + validators[key] = (it: unknown) => { + return Array.isArray(it) ? null : `Field '${path}${key}' must be an array`; + }; + } else if (value === 'record') { + validators[key] = (it: unknown) => { + return typeof it === 'object' ? null : `Field '${path}${key}' must be an object`; + }; + } else if (Array.isArray(value)) { + // literal ["v1", "v2"] or [10, 20] + validators[key] = (it: unknown) => { + const msg = value.length === 1 + ? `Field '${key}' must be exactly '${path}${value[0]}'` + : `Field '${key}' must be exactly either of ['${value.join(', ')}']`; + return value.some((entry) => entry === it) ? null : msg; + }; + } else if (typeof value === 'object') { + if ('type' in value && typeof value['type'] === 'object' && Object.keys(value['type']).length === 0) { + validators[key] = (it: unknown) => { + if (!Array.isArray(it)) return `Field '${path}${key}' must be an array`; + + for (let item of it) { + const res = value['validate'](item); + if (!res) return `${path}${key} array contains invalid value:\n${JSON.stringify(item, null, 2)}`; + } + + return null; + }; + } else { + const validateRecord = validatorFor(value as Record, `${key}.`); + validators[key] = (it: unknown) => { + if (it === null) return null; + return validateRecord(it as any); + }; + } + } + } + + const validate = (input: Record): string[] => { + const errors: string[] = []; + for (const [key, validate] of Object.entries(validators)) { + const value = input[key]; + if (value === undefined) { + errors.push(`Missing required field: ${path}${key}`); + continue; + } + + const res = validate(value, path); + if (!res) continue; + + if (typeof res === 'string') { + errors.push(res); + } else { + errors.push(...res); + } + } + return errors; + }; + + return validate; +}; + +export function validator>( + schema: S, +): { + shape: ResultShape; + parse: (obj: unknown) => Simplify>>; + strict: (obj: unknown) => Simplify>; +} { + const validate = validatorFor(schema, ''); + + return { + shape: {} as any, + strict: (input: unknown) => { + const errors = validate(input as any); + if (errors.length > 0) { + throw new Error('Validation failed'); + } + return input as any; + }, + parse: (input: unknown) => { + const errors = validate(input as any); + const success = errors.length === 0; + return { + success, + data: success ? input as any : null, + errors: errors.length > 0 ? errors : undefined, + }; + }, + }; +} diff --git a/drizzle-kit/src/dialects/singlestore/diff.ts b/drizzle-kit/src/dialects/singlestore/diff.ts new file mode 100644 index 0000000000..6cfa065c7e --- /dev/null +++ b/drizzle-kit/src/dialects/singlestore/diff.ts @@ -0,0 +1,51 @@ +import { mockResolver } from '../../utils/mocks'; +import type { Resolver } from '../common'; +import type { Column, MysqlDDL, Table, View } from '../mysql/ddl'; +import { ddlDiff as mysqlDdlDiff } from '../mysql/diff'; +import type { JsonStatement } from '../mysql/statements'; + +export const ddlDiffDry = async (from: MysqlDDL, to: MysqlDDL) => { + const s = new Set(); + return ddlDiff(from, to, mockResolver(s), mockResolver(s), mockResolver(s), 'default'); +}; + +export const ddlDiff = async ( + ddl1: MysqlDDL, + ddl2: MysqlDDL, + tablesResolver: Resolver
, + columnsResolver: Resolver, + viewsResolver: Resolver, + mode: 'default' | 'push', +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + groupedStatements: { jsonStatement: JsonStatement; sqlStatements: string[] }[]; + renames: string[]; +}> => { + const res = await mysqlDdlDiff(ddl1, ddl2, tablesResolver, columnsResolver, viewsResolver, mode); + + const statements: JsonStatement[] = []; + const sqlStatements: string[] = []; + + for (const it of res.groupedStatements) { + const st = it.jsonStatement; + if (st.type === 'create_index' && st.index.isUnique) continue; + if (st.type === 'alter_column') { + if (st.diff.type) continue; + if (st.diff.autoIncrement) continue; + if (st.diff.default && st.column.notNull) continue; + if (st.diff.notNull) continue; + } + if (st.type === 'create_pk' || st.type === 'drop_pk') continue; + + statements.push(it.jsonStatement); + sqlStatements.push(...it.sqlStatements); + } + + return { + statements, + sqlStatements, + groupedStatements: res.groupedStatements, + renames: res.renames, + }; +}; diff --git a/drizzle-kit/src/dialects/singlestore/drizzle.ts b/drizzle-kit/src/dialects/singlestore/drizzle.ts new file mode 100644 index 0000000000..5ae9f6e260 --- /dev/null +++ b/drizzle-kit/src/dialects/singlestore/drizzle.ts @@ -0,0 +1,215 @@ +import type { Casing } from 'drizzle-orm'; +import { is, SQL } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { AnySingleStoreColumn, AnySingleStoreTable } from 'drizzle-orm/singlestore-core'; +import { getTableConfig, SingleStoreDialect, SingleStoreTable, uniqueKeyName } from 'drizzle-orm/singlestore-core'; +import type { CasingType } from 'src/cli/validations/common'; +import { escapeSingleQuotes } from 'src/utils'; +import { safeRegister } from '../../utils/utils-node'; +import { getColumnCasing, sqlToStr } from '../drizzle'; +import type { Column, InterimSchema } from '../mysql/ddl'; +import { typeFor } from '../mysql/grammar'; + +const handleEnumType = (type: string) => { + let str = type.split('(')[1]; + str = str.substring(0, str.length - 1); + const values = str.split(',').map((v) => `'${escapeSingleQuotes(v.substring(1, v.length - 1))}'`); + return `enum(${values.join(',')})`; +}; + +export const defaultFromColumn = (column: AnySingleStoreColumn, casing?: Casing): Column['default'] => { + if (typeof column.default === 'undefined') return null; + + if (is(column.default, SQL)) { + return sqlToStr(column.default, casing); + } + + const grammarType = typeFor(column.getSQLType().toLocaleLowerCase()); + return grammarType.defaultFromDrizzle(column.default); +}; + +export const upper = (value: T | undefined): Uppercase | null => { + if (!value) return null; + return value.toUpperCase() as Uppercase; +}; + +export const fromDrizzleSchema = ( + tables: AnySingleStoreTable[], + casing: CasingType | undefined, +): InterimSchema => { + const dialect = new SingleStoreDialect({ casing }); + const result: InterimSchema = { + tables: [], + columns: [], + pks: [], + fks: [], + indexes: [], + checks: [], + views: [], + viewColumns: [], + }; + + for (const table of tables) { + const { + name: tableName, + columns, + indexes, + schema, + primaryKeys, + uniqueConstraints, + } = getTableConfig(table); + + if (schema) continue; + + result.tables.push({ + entityType: 'tables', + name: tableName, + }); + + for (const column of columns) { + const name = getColumnCasing(column, casing); + const notNull: boolean = column.notNull; + const sqlType = column.getSQLType(); + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? false + : (column as any).autoIncrement; + + const generated = column.generated + ? { + as: is(column.generated.as, SQL) + ? dialect.sqlToQuery(column.generated.as as SQL).sql + : typeof column.generated.as === 'function' + ? dialect.sqlToQuery(column.generated.as() as SQL).sql + : (column.generated.as as any), + type: column.generated.mode ?? 'stored', + } + : null; + + result.columns.push({ + entityType: 'columns', + table: tableName, + name, + type: sqlType.startsWith('enum') ? handleEnumType(sqlType) : sqlType, + notNull, + autoIncrement, + onUpdateNow: (column as any).hasOnUpdateNow ?? false, // TODO: ?? + // @ts-expect-error + // TODO update description + // 'virtual' | 'stored' for all dialects + // 'virtual' | 'persisted' for mssql + // We should remove this option from common Column and store it per dialect common + // Was discussed with Andrew + // Type erorr because of common in drizzle orm for all dialects (includes virtual' | 'stored' | 'persisted') + generated, + isPK: column.primary, + isUnique: column.isUnique, + default: defaultFromColumn(column, casing), + }); + } + + for (const pk of primaryKeys) { + const originalColumnNames = pk.columns.map((c) => c.name); + const columnNames = pk.columns.map((c: any) => getColumnCasing(c, casing)); + + let name = pk.getName(); + if (casing !== undefined) { + for (let i = 0; i < originalColumnNames.length; i++) { + name = name.replace(originalColumnNames[i], columnNames[i]); + } + } + + result.pks.push({ + entityType: 'pks', + table: tableName, + name: name, + columns: columnNames, + }); + } + + for (const unique of uniqueConstraints) { + const columns = unique.columns.map((c) => { + if (is(c, SQL)) { + const sql = dialect.sqlToQuery(c).sql; + return { value: sql, isExpression: true }; + } + return { value: getColumnCasing(c, casing), isExpression: false }; + }); + + const name = unique.name ?? uniqueKeyName(table, unique.columns.filter((c) => !is(c, SQL)).map((c) => c.name)); + + result.indexes.push({ + entityType: 'indexes', + table: tableName, + name: name, + columns: columns, + isUnique: true, + algorithm: null, + lock: null, + using: null, + nameExplicit: !!unique.name, + }); + } + + for (const index of indexes) { + const columns = index.config.columns; + const name = index.config.name; + + result.indexes.push({ + entityType: 'indexes', + table: tableName, + name, + columns: columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + return { value: sql, isExpression: true }; + } else { + return { value: `${getColumnCasing(it, casing)}`, isExpression: false }; + } + }), + algorithm: index.config.algorithm ?? null, + lock: index.config.lock ?? null, + isUnique: index.config.unique ?? false, + using: index.config.using ?? null, + nameExplicit: true, + }); + } + } + + return result; +}; + +export const prepareFromSchemaFiles = async (imports: string[]) => { + const tables: AnySingleStoreTable[] = []; + const relations: Relations[] = []; + + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + const i0: Record = require(`${it}`); + const prepared = prepareFromExports(i0); + + tables.push(...prepared.tables); + relations.push(...prepared.relations); + } + }); + + return { tables: Array.from(new Set(tables)), relations }; +}; + +export const prepareFromExports = (exports: Record) => { + const tables: AnySingleStoreTable[] = []; + const relations: Relations[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (is(t, SingleStoreTable)) { + tables.push(t); + } + + if (is(t, Relations)) { + relations.push(t); + } + }); + + return { tables, relations }; +}; diff --git a/drizzle-kit/src/dialects/singlestore/serializer.ts b/drizzle-kit/src/dialects/singlestore/serializer.ts new file mode 100644 index 0000000000..114138dc5a --- /dev/null +++ b/drizzle-kit/src/dialects/singlestore/serializer.ts @@ -0,0 +1,78 @@ +import type { CasingType } from '../../cli/validations/common'; +import { prepareFilenames } from '../../utils/utils-node'; +import { createDDL, interimToDDL, type MysqlDDL } from '../mysql/ddl'; +import { drySnapshot, type SingleStoreSnapshot, snapshotValidator } from '../singlestore/snapshot'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from './drizzle'; + +export const prepareSnapshot = async ( + snapshots: string[], + schemaPath: string | string[], + casing: CasingType | undefined, +): Promise< + { + ddlPrev: MysqlDDL; + ddlCur: MysqlDDL; + snapshot: SingleStoreSnapshot; + snapshotPrev: SingleStoreSnapshot; + custom: SingleStoreSnapshot; + } +> => { + const { readFileSync } = await import('fs'); + const { randomUUID } = await import('crypto'); + const prevSnapshot = snapshots.length === 0 + ? drySnapshot + : snapshotValidator.strict(JSON.parse(readFileSync(snapshots[snapshots.length - 1]).toString())); + + const ddlPrev = createDDL(); + for (const entry of prevSnapshot.ddl) { + ddlPrev.entities.push(entry); + } + const filenames = prepareFilenames(schemaPath); + const res = await prepareFromSchemaFiles(filenames); + + const interim = fromDrizzleSchema( + res.tables, + casing, + ); + + // TODO: errors + // if (warnings.length > 0) { + // console.log(warnings.map((it) => schemaWarning(it)).join('\n\n')); + // } + + // if (errors.length > 0) { + // console.log(errors.map((it) => schemaError(it)).join('\n')); + // process.exit(1); + // } + + const { ddl: ddlCur, errors: _errors2 } = interimToDDL(interim); + + // TODO: handle errors + // if (errors2.length > 0) { + // console.log(errors2.map((it) => schemaError(it)).join('\n')); + // process.exit(1); + // } + + const id = randomUUID(); + const prevIds = [prevSnapshot.id]; + + const snapshot = { + version: '2', + dialect: 'singlestore', + id, + prevIds, + ddl: ddlCur.entities.list(), + renames: [], + } satisfies SingleStoreSnapshot; + + const { id: _ignoredId, prevIds: _ignoredPrevIds, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: SingleStoreSnapshot = { + id, + prevIds, + ...prevRest, + }; + + return { ddlPrev, ddlCur, snapshot, snapshotPrev: prevSnapshot, custom }; +}; diff --git a/drizzle-kit/src/dialects/singlestore/snapshot.ts b/drizzle-kit/src/dialects/singlestore/snapshot.ts new file mode 100644 index 0000000000..2c2bbf3c54 --- /dev/null +++ b/drizzle-kit/src/dialects/singlestore/snapshot.ts @@ -0,0 +1,174 @@ +import { randomUUID } from 'crypto'; +import { any, array as zArray, boolean, enum as enumType, literal, object, record, string, type TypeOf } from 'zod'; +import { originUUID } from '../../utils'; +import type { MysqlDDL, MysqlEntity } from '../mysql/ddl'; +import { createDDL } from '../mysql/ddl'; +import { array, validator } from '../simpleValidator'; + +// ------- V3 -------- +const index = object({ + name: string(), + columns: string().array(), + isUnique: boolean(), + using: enumType(['btree', 'hash']).optional(), + algorithm: enumType(['default', 'inplace', 'copy']).optional(), + lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + typeSchema: string().optional(), // compatibility with postgres schema? + primaryKey: boolean(), + notNull: boolean(), + autoincrement: boolean().optional(), + default: any().optional(), + onUpdate: any().optional(), + generated: object({ + type: enumType(['stored', 'virtual']), + as: string(), + }).optional(), +}).strict(); + +const compositePK = object({ + name: string(), + columns: string().array(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), +}).strict(); + +const table = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), +}).strict(); + +// const viewMeta = object({ +// algorithm: enumType(['undefined', 'merge', 'temptable']), +// sqlSecurity: enumType(['definer', 'invoker']), +// withCheckOption: enumType(['local', 'cascaded']).optional(), +// }).strict(); + +/* export const view = object({ + name: string(), + columns: record(string(), column), + definition: string().optional(), + isExisting: boolean(), +}).strict().merge(viewMeta); +type SquasherViewMeta = Omit, 'definer'>; */ + +export const kitInternals = object({ + tables: record( + string(), + object({ + columns: record( + string(), + object({ isDefaultAnExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), + indexes: record( + string(), + object({ + columns: record( + string(), + object({ isExpression: boolean().optional() }).optional(), + ), + }).optional(), + ).optional(), +}).optional(); + +// use main dialect +const dialect = literal('singlestore'); + +const schemaHashV1 = object({ + id: string(), + prevId: string(), +}); + +const schemaHash = object({ + id: string(), + prevIds: zArray(string()), +}); + +export const schemaInternal = object({ + version: literal('1'), + dialect: dialect, + tables: record(string(), table), + /* views: record(string(), view).default({}), */ + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: kitInternals, +}).strict(); + +export const schemaV1 = schemaInternal.merge(schemaHashV1); +export const schema = schemaInternal.merge(schemaHash); + +const tableSquashed = object({ + name: string(), + schema: string().optional(), + columns: record(string(), column), + indexes: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()).default({}), +}).strict(); + +/* const viewSquashed = view.omit({ + algorithm: true, + sqlSecurity: true, + withCheckOption: true, +}).extend({ meta: string() }); */ + +export const schemaSquashed = object({ + version: literal('1'), + dialect: dialect, + tables: record(string(), tableSquashed), + /* views: record(string(), viewSquashed), */ +}).strict(); + +export type Dialect = TypeOf; +export type Column = TypeOf; +export type Table = TypeOf; +export type SingleStoreSchema = TypeOf; +export type SingleStoreSchemaInternal = TypeOf; +export type SingleStoreKitInternals = TypeOf; +export type SingleStoreSchemaSquashed = TypeOf; +export type Index = TypeOf; +export type PrimaryKey = TypeOf; +export type UniqueConstraint = TypeOf; + +export type SchemaV1 = TypeOf; + +const ddl = createDDL(); +export const snapshotValidator = validator({ + version: ['2'], + dialect: ['singlestore'], + id: 'string', + prevIds: array((_) => true), + ddl: array((it) => ddl.entities.validate(it)), + renames: array((_) => true), +}); + +export type SingleStoreSnapshot = typeof snapshotValidator.shape; + +export const toJsonSnapshot = (ddl: MysqlDDL, prevIds: string[], renames: string[]): SingleStoreSnapshot => { + return { dialect: 'singlestore', id: randomUUID(), prevIds, version: '2', ddl: ddl.entities.list(), renames }; +}; + +export const drySnapshot = snapshotValidator.strict( + { + version: '2', + dialect: 'singlestore', + id: originUUID, + prevIds: [], + ddl: [], + renames: [], + } satisfies SingleStoreSnapshot, +); diff --git a/drizzle-kit/src/dialects/sqlite/convertor.ts b/drizzle-kit/src/dialects/sqlite/convertor.ts new file mode 100644 index 0000000000..2ac0775404 --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/convertor.ts @@ -0,0 +1,265 @@ +import type { Simplify } from '../../utils'; +import type { JsonStatement } from './statements'; + +export const convertor = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + convertor: (statement: Simplify>) => string | string[], +) => { + return { + type, + can: (st: JsonStatement) => { + return st.type === type; + }, + convert: convertor, + }; +}; + +const createTable = convertor('create_table', (st) => { + const { + name: tableName, + columns, + fks: referenceData, + pk, + uniques: uniqueConstraints, + checks: checkConstraints, + } = st.table; + + let statement = ''; + statement += `CREATE TABLE \`${tableName}\` (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + /* + https://www.sqlite.org/lang_createtable.html#the_generated_always_as_clause + + According to the SQL standard, PRIMARY KEY should always imply NOT NULL. + Unfortunately, due to a bug in some early versions, this is not the case in SQLite. + Unless the column is an INTEGER PRIMARY KEY or the table is a WITHOUT ROWID table + or a STRICT table or the column is declared NOT NULL, + SQLite allows NULL values in a PRIMARY KEY column. + SQLite could be fixed to conform to the standard, but doing so + might break legacy applications. Hence, it has been decided to merely document the fact + that SQLite allows NULLs in most PRIMARY KEY columns. + */ + const isColumnPk = pk && pk.columns.length === 1 && pk.columns[0] === column.name && pk.table === column.table; + const omitNotNull = isColumnPk && column.type.toLowerCase().startsWith('int'); + + const primaryKeyStatement = isColumnPk && !pk.nameExplicit + ? ' PRIMARY KEY' + : ''; + const notNullStatement = column.notNull && !omitNotNull ? ' NOT NULL' : ''; + + const unique = uniqueConstraints.find((u) => + u.columns.length === 1 && u.columns[0] === column.name && u.table === column.table + ); + const unqiueConstraintPrefix = unique + ? unique.nameExplicit ? ` CONSTRAINT \`${unique.name}\` UNIQUE` : ' UNIQUE' + : ''; + + // in SQLite we escape single quote by doubling it, `'`->`''`, but we don't do it here + // because it is handled by drizzle orm serialization or on drizzle studio side + const defaultStatement = column.default ? ` DEFAULT ${column.default ?? ''}` : ''; + + const autoincrementStatement = column.autoincrement ? ' AUTOINCREMENT' : ''; + + const generatedStatement = column.generated + ? ` GENERATED ALWAYS AS ${column.generated.as} ${column.generated.type.toUpperCase()}` + : ''; + + statement += '\t'; + statement += + `\`${column.name}\` ${column.type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${generatedStatement}${notNullStatement}${unqiueConstraintPrefix}`; + + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (pk && (pk.columns.length > 1 || pk.nameExplicit)) { + statement += ',\n\t'; + statement += `CONSTRAINT \`${pk.name}\` PRIMARY KEY(${pk.columns.map((it) => `\`${it}\``).join(', ')})`; + } + + for (let i = 0; i < referenceData.length; i++) { + const { + name, + tableTo, + columns, + columnsTo, + onDelete, + onUpdate, + } = referenceData[i]; + + const onDeleteStatement = onDelete !== 'NO ACTION' ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate !== 'NO ACTION' ? ` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columns.map((it) => `\`${it}\``).join(','); + const toColumnsString = columnsTo.map((it) => `\`${it}\``).join(','); + + statement += ','; + statement += '\n\t'; + statement += + `CONSTRAINT \`${name}\` FOREIGN KEY (${fromColumnsString}) REFERENCES \`${tableTo}\`(${toColumnsString})${onUpdateStatement}${onDeleteStatement}`; + } + + for (const uniqueConstraint of uniqueConstraints.filter((u) => u.columns.length > 1)) { + statement += ',\n'; + statement += `\tCONSTRAINT \`${uniqueConstraint.name}\` UNIQUE(\`${uniqueConstraint.columns.join(`\`,\``)}\`)`; + } + + if ( + typeof checkConstraints !== 'undefined' + && checkConstraints.length > 0 + ) { + for (const check of checkConstraints) { + statement += ',\n'; + statement += `\tCONSTRAINT "${check.name}" CHECK(${check.value})`; + } + } + + statement += `\n`; + statement += `);`; + statement += `\n`; + return statement; +}); + +const dropTable = convertor('drop_table', (st) => { + return `DROP TABLE \`${st.tableName}\`;`; +}); + +const renameTable = convertor('rename_table', (st) => { + return `ALTER TABLE \`${st.from}\` RENAME TO \`${st.to}\`;`; +}); + +const createView = convertor('create_view', (st) => { + const { definition, name } = st.view; + return `CREATE VIEW \`${name}\` AS ${definition};`; +}); + +const dropView = convertor('drop_view', (st) => { + return `DROP VIEW \`${st.view.name}\`;`; +}); + +const alterTableAddColumn = convertor('add_column', (st) => { + const { fk, column } = st; + const { table: tableName, name, type, notNull, generated } = st.column; + + const defaultStatement = column.default !== null ? ` DEFAULT ${column.default ?? ''}` : ''; + + const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; + + const referenceStatement = `${ + fk + ? !fk.nameExplicit + ? ` REFERENCES ${fk.tableTo}(${fk.columnsTo})` + : ` CONSTRAINT \`${fk.name}\` REFERENCES ${fk.tableTo}(${fk.columnsTo})` + : '' + }`; + + const generatedStatement = generated + ? ` GENERATED ALWAYS AS ${generated.as} ${generated.type.toUpperCase()}` + : ''; + + return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${defaultStatement}${generatedStatement}${notNullStatement}${referenceStatement};`; +}); + +const alterTableRenameColumn = convertor('rename_column', (st) => { + return `ALTER TABLE \`${st.table}\` RENAME COLUMN \`${st.from}\` TO \`${st.to}\`;`; +}); + +const alterTableDropColumn = convertor('drop_column', (st) => { + return `ALTER TABLE \`${st.column.table}\` DROP COLUMN \`${st.column.name}\`;`; +}); + +const alterTableRecreateColumn = convertor('recreate_column', (st) => { + const drop = alterTableDropColumn.convert(st) as string; + const add = alterTableAddColumn.convert(st) as string; + + return [drop, add]; +}); + +const createIndex = convertor('create_index', (st) => { + const { columns, isUnique, where, name, table } = st.index; + + const idx = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + const onStatement = columns.map((it) => it.isExpression ? it.value : `\`${it.value}\``).join(','); + const whereStatement = where ? ` WHERE ${where}` : ''; + + return `CREATE ${idx} \`${name}\` ON \`${table}\` (${onStatement})${whereStatement};`; +}); + +const dropIndex = convertor('drop_index', (st) => { + return `DROP INDEX IF EXISTS \`${st.index.name}\`;`; +}); + +const recreateTable = convertor('recreate_table', (st) => { + const { name } = st.to; + const { columns: columnsFrom } = st.from; + + const columnNames = columnsFrom.filter((it) => { + const newColumn = st.to.columns.find((col) => col.name === it.name); + return !it.generated && newColumn && !newColumn.generated; + }).map((it) => `\`${it.name}\``).join(', '); + const newTableName = `__new_${name}`; + + const sqlStatements: string[] = []; + + sqlStatements.push(`PRAGMA foreign_keys=OFF;`); + + const tmpTable = { + ...st.to, + name: newTableName, + checks: st.to.checks.map((it) => ({ ...it, table: newTableName })), + }; + sqlStatements.push(createTable.convert({ table: tmpTable }) as string); + + sqlStatements.push( + `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${st.to.name}\`;`, + ); + sqlStatements.push(dropTable.convert({ tableName: name }) as string); + sqlStatements.push(renameTable.convert({ from: newTableName, to: name }) as string); + + sqlStatements.push(`PRAGMA foreign_keys=ON;`); + + return sqlStatements; +}); + +const convertors = [ + createTable, + dropTable, + renameTable, + createView, + dropView, + alterTableAddColumn, + alterTableRenameColumn, + alterTableDropColumn, + alterTableRecreateColumn, + createIndex, + dropIndex, + recreateTable, +]; + +export function fromJson(statements: JsonStatement[]) { + const grouped = statements + .map((statement) => { + const filtered = convertors.filter((it) => { + return it.can(statement); + }); + + const convertor = filtered.length === 1 ? filtered[0] : undefined; + if (!convertor) { + return null; + } + + const sqlStatements = convertor.convert(statement as any); + const statements = typeof sqlStatements === 'string' ? [sqlStatements] : sqlStatements; + return { jsonStatement: statement, sqlStatements: statements }; + }) + .filter((it) => it !== null); + + const result = { + sqlStatements: grouped.map((it) => it.sqlStatements).flat(), + groupedStatements: grouped, + }; + return result; +} diff --git a/drizzle-kit/src/dialects/sqlite/ddl.ts b/drizzle-kit/src/dialects/sqlite/ddl.ts new file mode 100644 index 0000000000..8ea1e76524 --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/ddl.ts @@ -0,0 +1,303 @@ +import type { SchemaForPull } from 'src/cli/commands/pull-common'; +import { create } from '../dialect'; +import { nameForPk, nameForUnique } from './grammar'; + +export const createDDL = () => { + return create({ + tables: {}, + columns: { + table: 'required', + type: 'string', + notNull: 'boolean', + autoincrement: 'boolean?', + default: 'string?', + generated: { + type: ['stored', 'virtual'], + as: 'string', + }, + }, + indexes: { + table: 'required', + columns: [{ + value: 'string', + isExpression: 'boolean', + }], + isUnique: 'boolean', + where: 'string?', + origin: [ + 'manual', // ='c' CREATE INDEX + 'auto', // ='u' UNIQUE auto created + ], // https://www.sqlite.org/pragma.html#pragma_index_list + }, + fks: { + table: 'required', + columns: 'string[]', + tableTo: 'string', + columnsTo: 'string[]', + onUpdate: 'string', + onDelete: 'string', + nameExplicit: 'boolean', + }, + pks: { + table: 'required', + columns: 'string[]', + nameExplicit: 'boolean', + }, + uniques: { + table: 'required', + columns: 'string[]', + nameExplicit: 'boolean', + }, + checks: { + table: 'required', + value: 'string', + }, + views: { + definition: 'string?', + isExisting: 'boolean', + error: 'string?', + }, + }); +}; + +export type SQLiteDDL = ReturnType; + +export type SqliteEntities = SQLiteDDL['_']['types']; +export type SqliteEntity = SqliteEntities[keyof SqliteEntities]; +export type SqliteDefinition = SQLiteDDL['_']['definition']; +export type SqliteDiffEntities = SQLiteDDL['_']['diffs']; + +export type DiffColumn = SqliteDiffEntities['alter']['columns']; + +export type Table = SqliteEntities['tables']; +export type Column = SqliteEntities['columns']; +export type CheckConstraint = SqliteEntities['checks']; +export type Index = SqliteEntities['indexes']; +export type IndexColumn = Index['columns'][number]; +export type ForeignKey = SqliteEntities['fks']; +export type PrimaryKey = SqliteEntities['pks']; +export type UniqueConstraint = SqliteEntities['uniques']; +export type View = SqliteEntities['views']; +export type ViewColumn = { view: string; name: string; type: string; notNull: boolean }; + +export type TableFull = { + name: string; + columns: Column[]; + indexes: Index[]; + checks: CheckConstraint[]; + uniques: UniqueConstraint[]; + pk: PrimaryKey | null; + fks: ForeignKey[]; +}; + +export const tableFromDDL = (name: string, ddl: SQLiteDDL): TableFull => { + const filter = { table: name } as const; + const columns = ddl.columns.list(filter); + const pk = ddl.pks.one(filter); + const fks = ddl.fks.list(filter); + const uniques = ddl.uniques.list(filter); + const checks = ddl.checks.list(filter); + const indexes = ddl.indexes.list(filter); + return { + name, + columns, + pk, + fks, + uniques, + checks, + indexes, + }; +}; + +export type ConflictTable = { + type: 'conflict_table'; + table: string; +}; + +export type TableNoColumns = { + type: 'table_no_columns'; + table: string; +}; + +export type ConflictView = { + type: 'conflict_view'; + view: string; +}; + +export type ConflictColumn = { + type: 'conflict_column'; + table: string; + column: string; +}; +export type ConflictIndex = { + type: 'conflict_index'; + name: string; +}; + +export type ConflictFK = { + type: 'conflict_fk'; + name: string; +}; +export type ConflictPK = { + type: 'conflict_pk'; + name: string; +}; +export type ConflictUnique = { + type: 'conflict_unique'; + name: string; +}; + +export type ConflictCheck = { + type: 'conflict_check'; + name: string; +}; + +export type SchemaError = + | ConflictTable + | ConflictView + | ConflictColumn + | ConflictPK + | ConflictFK + | ConflictUnique + | ConflictCheck + | ConflictIndex + | TableNoColumns; + +const count = (arr: T[], predicate: (it: T) => boolean) => { + let count = 0; + for (const it of arr) { + if (predicate(it)) count += 1; + } + return count; +}; + +export type InterimColumn = Column & { + pk: boolean; + pkName: string | null; +} & { isUnique: boolean; uniqueName: string | null }; +export type InterimSchema = { + tables: Table[]; + columns: InterimColumn[]; + indexes: Index[]; + checks: CheckConstraint[]; + uniques: UniqueConstraint[]; + pks: PrimaryKey[]; + fks: ForeignKey[]; + views: View[]; +}; + +export const interimToDDL = (schema: InterimSchema): { ddl: SQLiteDDL; errors: SchemaError[] } => { + const ddl = createDDL(); + const errors: SchemaError[] = []; + + for (const table of schema.tables) { + if (count(schema.columns, (it) => it.table === table.name) === 0) { + errors.push({ type: 'table_no_columns', table: table.name }); + continue; + } + const res = ddl.tables.push(table); + if (res.status === 'CONFLICT') { + errors.push({ type: 'conflict_table', table: res.data.name }); + } + } + + for (const column of schema.columns) { + const { isUnique: _1, uniqueName: _2, pk: _3, pkName: _4, ...rest } = column; + const res = ddl.columns.push(rest); + if (res.status === 'CONFLICT') { + errors.push({ type: 'conflict_column', table: column.table, column: column.name }); + } + } + + for (const fk of schema.fks) { + const res = ddl.fks.push(fk); + if (res.status === 'CONFLICT') { + errors.push({ type: 'conflict_fk', name: fk.name }); + } + } + for (const pk of schema.pks) { + const res = ddl.pks.push(pk); + if (res.status === 'CONFLICT') { + errors.push({ type: 'conflict_pk', name: pk.name }); + } + } + + for (const column of schema.columns.filter((it) => it.pk)) { + const name = column.pkName !== null ? column.pkName : nameForPk(column.table); + const exists = ddl.pks.one({ table: column.table }) !== null; + if (exists) continue; + + ddl.pks.push({ + table: column.table, + name, + nameExplicit: column.pkName !== null, + columns: [column.name], + }); + } + + for (const index of schema.indexes) { + const { status } = ddl.indexes.push(index, ['name']); // indexes have to have unique names across all schema + if (status === 'CONFLICT') { + errors.push({ type: 'conflict_index', name: index.name }); + } + } + + for (const unique of schema.uniques) { + const res = ddl.uniques.push(unique); + if (res.status === 'CONFLICT') { + errors.push({ type: 'conflict_unique', name: unique.name }); + } + } + + for (const it of schema.columns.filter((it) => it.isUnique)) { + const u = { + entityType: 'uniques', + name: it.uniqueName ?? nameForUnique(it.table, [it.name]), + columns: [it.name], + table: it.table, + nameExplicit: !!it.uniqueName, + } satisfies UniqueConstraint; + + const res = ddl.uniques.push(u); + if (res.status === 'CONFLICT') { + errors.push({ type: 'conflict_unique', name: u.name }); + } + } + + for (const check of schema.checks) { + const res = ddl.checks.push(check); + if (res.status === 'CONFLICT') { + errors.push({ type: 'conflict_check', name: res.data.name }); + } + } + + for (const view of schema.views) { + const res = ddl.views.push(view); + if (res.status === 'CONFLICT') { + errors.push({ type: 'conflict_view', view: view.name }); + } + } + + return { ddl, errors }; +}; + +export function sqliteToRelationsPull(schema: SQLiteDDL): SchemaForPull { + return Object.values(schema.tables.list()).map((table) => { + const rawTable = tableFromDDL(table.name, schema); + return { + foreignKeys: rawTable.fks, + uniques: [ + ...Object.values(rawTable.uniques).map((unq) => ({ + columns: unq.columns, + })), + ...Object.values(rawTable.indexes).map((idx) => ({ + columns: idx.columns.map((idxc) => { + if (!idxc.isExpression && idx.isUnique) { + return idxc.value; + } + }).filter((item) => item !== undefined), + })), + ], + }; + }); +} diff --git a/drizzle-kit/src/dialects/sqlite/diff.ts b/drizzle-kit/src/dialects/sqlite/diff.ts new file mode 100644 index 0000000000..897f29cd55 --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/diff.ts @@ -0,0 +1,431 @@ +import { mockResolver } from 'src/utils/mocks'; +import { prepareMigrationRenames } from '../../utils'; +import type { Resolver } from '../common'; +import { diff } from '../dialect'; +import { groupDiffs, preserveEntityNames } from '../utils'; +import { fromJson } from './convertor'; +import type { Column, IndexColumn, SQLiteDDL, SqliteEntities } from './ddl'; +import { tableFromDDL } from './ddl'; +import type { JsonCreateViewStatement, JsonDropViewStatement, JsonStatement } from './statements'; +import { prepareAddColumns, prepareRecreateColumn, prepareStatement } from './statements'; + +export const ddlDiffDry = async (left: SQLiteDDL, right: SQLiteDDL, mode: 'push' | 'default') => { + const empty = new Set(); + return ddlDiff(left, right, mockResolver(empty), mockResolver(empty), mode); +}; + +export const ddlDiff = async ( + ddl1: SQLiteDDL, + ddl2: SQLiteDDL, + tablesResolver: Resolver, + columnsResolver: Resolver, + mode: 'push' | 'default', +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + groupedStatements: { + jsonStatement: JsonStatement; + sqlStatements: string[]; + }[]; + renames: string[]; + warnings: string[]; +}> => { + const tablesDiff = diff(ddl1, ddl2, 'tables'); + + const { + created: createdTables, + deleted: deletedTables, + renamedOrMoved: renamedTables, + } = await tablesResolver({ + created: tablesDiff.filter((it) => it.$diffType === 'create'), + deleted: tablesDiff.filter((it) => it.$diffType === 'drop'), + }); + + for (const renamed of renamedTables) { + ddl1.tables.update({ + set: { + name: renamed.to.name, + }, + where: { + name: renamed.from.name, + }, + }); + + ddl1.fks.update({ + set: { + tableTo: renamed.to.name, + }, + where: { + tableTo: renamed.from.name, + }, + }); + ddl2.fks.update({ + set: { + tableTo: renamed.to.name, + }, + where: { + tableTo: renamed.from.name, + }, + }); + ddl1.fks.update({ + set: { + table: renamed.to.name, + }, + where: { + table: renamed.from.name, + }, + }); + + ddl1.entities.update({ + set: { + table: renamed.to.name, + }, + where: { + table: renamed.from.name, + }, + }); + } + + const columnsDiff = diff(ddl1, ddl2, 'columns').filter((it) => + !createdTables.some((table) => table.name === it.table) + ); // filter out columns for newly created tables + + const groupedByTable = groupDiffs(columnsDiff); + + const columnRenames = [] as { from: Column; to: Column }[]; + const columnsToCreate = [] as Column[]; + const columnsToDelete = [] as Column[]; + + for (let it of groupedByTable) { + const { renamedOrMoved: renamed, created, deleted } = await columnsResolver({ + deleted: it.deleted, + created: it.inserted, + }); + + columnsToCreate.push(...created); + columnsToDelete.push(...deleted); + columnRenames.push(...renamed); + } + + for (const rename of columnRenames) { + ddl1.columns.update({ + set: { + name: rename.to.name, + }, + where: { + table: rename.from.table, + name: rename.from.name, + }, + }); + + // DDL2 updates are needed for Drizzle Studio + const update1 = { + set: { + columns: (it: IndexColumn) => { + if (!it.isExpression && it.value === rename.from.name) { + it.value = rename.to.name; + } + return it; + }, + }, + where: { + table: rename.from.table, + }, + } as const; + + ddl1.indexes.update(update1); + ddl2.indexes.update(update1); + + const update2 = { + set: { + columns: (it: string) => it === rename.from.name ? rename.to.name : it, + }, + where: { + table: rename.from.table, + }, + } as const; + ddl1.fks.update(update2); + ddl2.fks.update(update2); + + const update3 = { + set: { + columnsTo: (it: string) => it === rename.from.name ? rename.to.name : it, + }, + where: { + tableTo: rename.from.table, + }, + } as const; + ddl1.fks.update(update3); + ddl2.fks.update(update3); + + const update4 = { + set: { + columns: (it: string) => it === rename.from.name ? rename.to.name : it, + }, + where: { + table: rename.from.table, + }, + }; + ddl1.pks.update(update4); + ddl2.pks.update(update4); + + const update5 = { + set: { + columns: (it: string) => it === rename.from.name ? rename.to.name : it, + }, + where: { + table: rename.from.table, + }, + }; + ddl1.uniques.update(update5); + ddl2.uniques.update(update5); + + const update6 = { + set: { + value: rename.to.name, + }, + where: { + table: rename.from.table, + value: rename.from.name, + }, + } as const; + ddl1.checks.update(update6); + ddl2.checks.update(update6); + } + + const createdFilteredColumns = columnsToCreate.filter((it) => !it.generated || it.generated.type === 'virtual'); + + preserveEntityNames(ddl1.uniques, ddl2.uniques, mode); + preserveEntityNames(ddl1.pks, ddl2.pks, mode); + preserveEntityNames(ddl1.fks, ddl2.fks, mode); + + const pksDiff = diff(ddl1, ddl2, 'pks').filter((it) => !deletedTables.some((table) => table.name === it.table)); + const uniquesDiff = diff(ddl1, ddl2, 'uniques').filter((it) => + !deletedTables.some((table) => table.name === it.table) + ); + const indexesDiff = diff(ddl1, ddl2, 'indexes'); + const checksDiff = diff(ddl1, ddl2, 'checks'); + const fksDiff = diff(ddl1, ddl2, 'fks') + // it is possible to `ADD COLUMN t integer REFERENCE ...` + .filter((it) => + it.columns.length > 0 + && !createdFilteredColumns.some((column) => column.table === it.table && column.name === it.columns[0]) + ) + // filter deleted tables + .filter((it) => !deletedTables.some((table) => table.name === it.table)); + + const indexesByTable = groupDiffs(indexesDiff); + + // ignore created/dropped views with isExisting, we can't rename views in SQLite + const viewsDiff = diff(ddl1, ddl2, 'views').filter((it) => !it.isExisting); + + const createdViews = viewsDiff.filter((it) => it.$diffType === 'create'); + const deletedViews = viewsDiff.filter((it) => it.$diffType === 'drop'); + + const updates = diff.alters(ddl1, ddl2); + + const uniquesAlters = updates.filter((it) => it.entityType === 'uniques').filter((it) => { + if (it.nameExplicit) { + delete it.nameExplicit; + } + + return ddl2.uniques.hasDiff(it); + }); + + const pksAlters = updates.filter((it) => it.entityType === 'pks').filter((it) => { + if (it.nameExplicit) { + delete it.nameExplicit; + } + + return ddl2.pks.hasDiff(it); + }); + + const fksAlters = updates.filter((it) => it.entityType === 'fks').filter((it) => { + if (it.nameExplicit) { + delete it.nameExplicit; + } + + return ddl2.fks.hasDiff(it); + }); + + const checksAlters = updates.filter((it) => it.entityType === 'checks'); + + const alteredColumnsBecameGenerated = updates.filter((it) => it.entityType === 'columns').filter((it) => + it.generated?.to?.type === 'stored' + ); + const newStoredColumns = columnsToCreate.filter((it) => it.generated && it.generated.type === 'stored'); + + const setOfTablesToRecereate = new Set( + [ + ...checksDiff, + ...uniquesDiff, + ...pksDiff, + ...fksDiff, + ...indexesDiff.filter((it) => it.isUnique && it.origin === 'auto'), // we can't drop/create auto generated unique indexes;, + ...alteredColumnsBecameGenerated, // "It is not possible to ALTER TABLE ADD COLUMN a STORED column. https://www.sqlite.org/gencol.html" + ...newStoredColumns, // "It is not possible to ALTER TABLE ADD COLUMN a STORED column. https://www.sqlite.org/gencol.html" + ].map((it) => it.table), + ); + + for (const it of createdTables) { + setOfTablesToRecereate.delete(it.name); + } + for (const it of deletedTables) { + setOfTablesToRecereate.delete(it.name); + } + + for (const it of updates) { + if ( + it.entityType === 'columns' + && (it.type || it.default || it.notNull || it.autoincrement) + ) { + setOfTablesToRecereate.add(it.table); + } + if (pksAlters.length > 0 && it.entityType === 'pks') setOfTablesToRecereate.add(it.table); + if (fksAlters.length > 0 && it.entityType === 'fks') setOfTablesToRecereate.add(it.table); + if (uniquesAlters.length > 0 && it.entityType === 'uniques') setOfTablesToRecereate.add(it.table); + if (checksAlters.length > 0 && it.entityType === 'checks') setOfTablesToRecereate.add(it.table); + } + + const tablesToRecreate = Array.from(setOfTablesToRecereate); + + // TODO: handle + // const viewsToRecreateBecauseOfTables = tablesToRecreate.map((it) => { + // return ddl2.views.one({}); + // }); + + const jsonRecreateTables = tablesToRecreate.map((it) => { + return prepareStatement('recreate_table', { + to: tableFromDDL(it, ddl2), + from: tableFromDDL(it, ddl1), + alteredColumnsBecameGenerated: alteredColumnsBecameGenerated.filter((acbg) => acbg.table === it), + newStoredColumns: newStoredColumns.filter((column) => column.table === it), + checkDiffs: checksDiff.filter((checkDiff) => checkDiff.table === it), + checksAlters: checksAlters.filter((checkAlter) => checkAlter.table === it), + columnAlters: updates.filter((it) => it.entityType === 'columns').filter((column) => column.table === it), + fksAlters: fksAlters.filter((fkAlters) => fkAlters.table === it), + fksDiff: fksDiff.filter((fkDiff) => fkDiff.table === it), + indexesDiff: indexesDiff.filter((indexDiff) => indexDiff.table === it), + pksAlters: pksAlters.filter((pkAlters) => pkAlters.table === it), + pksDiff: pksDiff.filter((pkDiff) => pkDiff.table === it), + uniquesAlters: uniquesAlters.filter((uniqueAlters) => uniqueAlters.table === it), + uniquesDiff: uniquesDiff.filter((uniqueDiff) => uniqueDiff.table === it), + }); + }); + + const jsonTableAlternations = updates.filter((it) => it.entityType === 'columns') + .filter( + (it) => !setOfTablesToRecereate.has(it.table), + ).map((it) => + prepareRecreateColumn( + it, + ddl2.columns.one({ table: it.table, name: it.name })!, + ddl2.fks.one({ table: it.table }), + ) + ); + + const jsonCreateTables = createdTables.map((it) => { + return prepareStatement('create_table', { table: tableFromDDL(it.name, ddl2) }); + }); + + // create indexes for created and recreated tables too + const jsonCreateIndexes = [...jsonRecreateTables] + .map((it) => it.to.indexes) + .concat(indexesByTable.filter((it) => !setOfTablesToRecereate.has(it.table)).map((it) => it.inserted)) + .map((it) => it.map((index) => prepareStatement('create_index', { index }))) + .flat(); + + const jsonDropIndexes = indexesByTable.map((it) => + it.deleted.map((index) => prepareStatement('drop_index', { index })) + ).flat(); + const jsonDropTables = deletedTables.map((it) => prepareStatement('drop_table', { tableName: it.name })); + const jsonRenameTables = renamedTables.map((it) => + prepareStatement('rename_table', { from: it.from.name, to: it.to.name }) + ); + + const jsonRenameColumnsStatements = columnRenames.map((it) => + prepareStatement('rename_column', { table: it.from.table, from: it.from.name, to: it.to.name }) + ); + + // we need to add column for table, which is going to be recreated to match columns during recreation + const columnDeletes = columnsToDelete.filter((it) => !setOfTablesToRecereate.has(it.table)); + + const jsonDropColumnsStatemets = columnDeletes.filter((x) => { + return !jsonDropTables.some((t) => t.tableName === x.table); + }).map((it) => prepareStatement('drop_column', { column: it })); + + const warnings: string[] = []; + for (const _ of newStoredColumns) { + warnings.push( + `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`, + ); + } + + const groupedNewColumns = Object.values(createdFilteredColumns.reduce((acc, prev) => { + const entry = prev.table in acc ? acc[prev.table] : { table: prev.table, columns: [] }; + acc[prev.table] = entry; + entry.columns.push(prev); + return acc; + }, {} as Record)); + + const jsonAddColumnsStatemets = groupedNewColumns + .map((it) => prepareAddColumns(it.columns, ddl2.fks.list({ table: it.table }))) + .flat(); + + const createViews: JsonCreateViewStatement[] = []; + const dropViews: JsonDropViewStatement[] = []; + + createViews.push(...createdViews.map((it) => prepareStatement('create_view', { view: it }))); + dropViews.push(...deletedViews.map((it) => prepareStatement('drop_view', { view: it }))); + + for (const view of updates.filter((it) => it.entityType === 'views')) { + if (view.isExisting || (view.definition && mode !== 'push')) { + const entity = ddl2.views.one({ name: view.name })!; + dropViews.push(prepareStatement('drop_view', { view: entity })); + createViews.push(prepareStatement('create_view', { view: entity })); + } + } + + // TODO: + // [x] create table with unique column + // [ ] create table with unique column unique index (will create 2 indexes) + // [ ] create table with non-unique column and unique index + // [x] drop 'c' unique index ok + // [x] drop 'u' unique index ok, recreate table + // [x] drizzle generate does not have 'u' unique indexes and should not create ones never + // [ ] drizzle push should respect 'u' indexes(commutativity), never auto create indexes from 'unique' of a column + + const jsonStatements: JsonStatement[] = []; + jsonStatements.push(...jsonCreateTables); + jsonStatements.push(...jsonRenameTables); // rename tables before tables recreate + jsonStatements.push(...jsonRenameColumnsStatements); // rename columns before tables recreate + jsonStatements.push(...jsonAddColumnsStatemets); + + jsonStatements.push(...jsonTableAlternations); + + jsonStatements.push(...jsonRecreateTables); + jsonStatements.push(...jsonDropIndexes); + jsonStatements.push(...jsonCreateIndexes); + + jsonStatements.push(...jsonDropTables); + + jsonStatements.push(...jsonDropColumnsStatemets); + + jsonStatements.push(...dropViews); + jsonStatements.push(...createViews); + + const { sqlStatements, groupedStatements } = fromJson(jsonStatements); + + const renames = prepareMigrationRenames([ + ...renamedTables, + ...columnRenames, + ]); + + return { + statements: jsonStatements, + sqlStatements, + groupedStatements, + renames, + warnings, + }; +}; diff --git a/drizzle-kit/src/dialects/sqlite/drizzle.ts b/drizzle-kit/src/dialects/sqlite/drizzle.ts new file mode 100644 index 0000000000..a60ff3da63 --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/drizzle.ts @@ -0,0 +1,267 @@ +import { getTableName, is, SQL } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { AnySQLiteColumn, AnySQLiteTable } from 'drizzle-orm/sqlite-core'; +import { + getTableConfig, + getViewConfig, + SQLiteBaseInteger, + SQLiteSyncDialect, + SQLiteTable, + SQLiteTimestamp, + SQLiteView, +} from 'drizzle-orm/sqlite-core'; +import { safeRegister } from 'src/utils/utils-node'; +import type { CasingType } from '../../cli/validations/common'; +import { getColumnCasing, sqlToStr } from '../drizzle'; +import type { + CheckConstraint, + Column, + ForeignKey, + Index, + InterimColumn, + InterimSchema, + PrimaryKey, + Table, + UniqueConstraint, + View, +} from './ddl'; +import { Int, nameForForeignKey, nameForPk, nameForUnique, typeFor } from './grammar'; + +export const fromDrizzleSchema = ( + dTables: AnySQLiteTable[], + dViews: SQLiteView[], + casing: CasingType | undefined, +): InterimSchema => { + const dialect = new SQLiteSyncDialect({ casing }); + const tableConfigs = dTables.map((it) => ({ table: it, config: getTableConfig(it) })); + const tables: Table[] = tableConfigs.map((it) => { + return { + entityType: 'tables', + name: it.config.name, + } satisfies Table; + }); + + const columns = tableConfigs.map((it) => { + return it.config.columns.map((column) => { + const name = getColumnCasing(column, casing); + const primaryKey: boolean = column.primary; + const generated = column.generated; + + const generatedObj: { + as: string; + type: 'virtual' | 'stored'; + } | null = generated + ? { + as: is(generated.as, SQL) + ? `(${dialect.sqlToQuery(generated.as as SQL, 'indexes').sql})` + : typeof generated.as === 'function' + ? `(${dialect.sqlToQuery(generated.as() as SQL, 'indexes').sql})` + : `(${generated.as as any})`, + + // 'virtual' | 'stored' for for all dialects + // 'virtual' | 'persisted' for mssql + // We should remove this option from common Column and store it per dialect common + // Was discussed with Andrew + // Type error because of common in drizzle orm for all dialects (includes virtual' | 'stored' | 'persisted') + type: generated.mode === 'stored' ? 'stored' : 'virtual', + } + : null; + + const defalutValue = defaultFromColumn(column, casing); + + const hasUniqueIndex = Boolean(it.config.indexes.find((item) => { + const i = item.config; + const column = i.columns.length === 1 ? i.columns[0] : null; + return column && !is(column, SQL) && getColumnCasing(column, casing) === name; + })); + + return { + entityType: 'columns', + table: it.config.name, + name, + type: column.getSQLType(), + default: defalutValue, + notNull: column.notNull && !primaryKey, + pk: primaryKey, + pkName: null, + autoincrement: is(column, SQLiteBaseInteger) + ? column.autoIncrement + : false, + generated: generatedObj, + isUnique: !hasUniqueIndex && column.isUnique, + uniqueName: column.uniqueName ?? null, + } satisfies InterimColumn; + }); + }).flat(); + + const pks = tableConfigs.map((it) => { + return it.config.primaryKeys.map((pk) => { + const columnNames = pk.columns.map((c) => getColumnCasing(c, casing)); + return { + entityType: 'pks', + name: pk.name ?? nameForPk(getTableConfig(pk.table).name), + table: it.config.name, + columns: columnNames, + nameExplicit: pk.isNameExplicit, + } satisfies PrimaryKey; + }); + }).flat(); + + const fks = tableConfigs.map((it) => { + return it.config.foreignKeys.map((fk) => { + const tableFrom = it.config.name; + const onDelete = fk.onDelete ?? 'NO ACTION'; + const onUpdate = fk.onUpdate ?? 'NO ACTION'; + const reference = fk.reference(); + + const referenceFT = reference.foreignTable; + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + const tableTo = getTableName(referenceFT); // TODO: casing? + + const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); + const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); + + const name = fk.isNameExplicit() + ? fk.getName() + : nameForForeignKey({ table: tableFrom, columns: columnsFrom, tableTo, columnsTo }); + return { + entityType: 'fks', + table: it.config.name, + name, + tableTo, + columns: columnsFrom, + columnsTo, + onDelete, + onUpdate, + nameExplicit: fk.isNameExplicit(), + } satisfies ForeignKey; + }); + }).flat(); + + const indexes = tableConfigs.map((it) => { + return it.config.indexes.map((index) => { + const columns = index.config.columns; + const name = index.config.name; + + let indexColumns = columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + return { value: sql, isExpression: true }; + } + return { value: getColumnCasing(it, casing), isExpression: false }; + }); + + let where: string | undefined; + if (index.config.where !== undefined) { + if (is(index.config.where, SQL)) { + where = dialect.sqlToQuery(index.config.where).sql; + } + } + return { + entityType: 'indexes', + table: it.config.name, + name, + columns: indexColumns, + isUnique: index.config.unique ?? false, + where: where ?? null, + origin: 'manual', // created by user https://www.sqlite.org/pragma.html#pragma_index_list + } satisfies Index; + }); + }).flat(); + + const uniques = tableConfigs.map((it) => { + return it.config.uniqueConstraints.map((unique) => { + const columnNames = unique.columns.map((c) => getColumnCasing(c, casing)); + const name = unique.isNameExplicit ? unique.name : nameForUnique(it.config.name, columnNames); + return { + entityType: 'uniques', + table: it.config.name, + name: name, + columns: columnNames, + nameExplicit: unique.isNameExplicit, + } satisfies UniqueConstraint; + }); + }).flat(); + + const checks = tableConfigs.map((it) => { + return it.config.checks.map((check) => { + // TODO: dialect.sqlToQuery(check.value).sql returns "users"."age" > 21, as opposed to "age" > 21 for checks, which is wrong + const value = dialect.sqlToQuery(check.value, /* should fix */ 'indexes').sql.replace(`"${it.config.name}".`, ''); + return { + entityType: 'checks', + table: it.config.name, + name: check.name, + value: value, + } satisfies CheckConstraint; + }); + }).flat(); + + const views = dViews.map((it) => { + const { name: viewName, isExisting, query } = getViewConfig(it); + + return { + entityType: 'views', + name: viewName, + isExisting, + definition: isExisting ? null : dialect.sqlToQuery(query!).sql, + error: null, + } satisfies View; + }); + + return { tables, columns, indexes, uniques, fks, pks, checks, views }; +}; + +export const fromExports = (exports: Record) => { + const tables: AnySQLiteTable[] = []; + const views: SQLiteView[] = []; + const relations: Relations[] = []; + + const i0values = Object.values(exports); + i0values.forEach((t) => { + if (is(t, SQLiteTable)) { + tables.push(t); + } + + if (is(t, SQLiteView)) { + views.push(t); + } + + if (is(t, Relations)) { + relations.push(t); + } + }); + + return { tables, views, relations }; +}; + +export const prepareFromSchemaFiles = async (imports: string[]) => { + const tables: AnySQLiteTable[] = []; + const views: SQLiteView[] = []; + const relations: Relations[] = []; + + await safeRegister(async () => { + for (let i = 0; i < imports.length; i++) { + const it = imports[i]; + + const i0: Record = require(`${it}`); + const prepared = fromExports(i0); + + tables.push(...prepared.tables); + views.push(...prepared.views); + relations.push(...prepared.relations); + } + }); + + return { tables: Array.from(new Set(tables)), views, relations }; +}; + +export const defaultFromColumn = ( + column: AnySQLiteColumn, + casing: CasingType | undefined, +): Column['default'] => { + const def = column.default; + if (typeof def === 'undefined') return null; // '', 0, false, etc. + if (is(def, SQL)) return sqlToStr(def, casing); + if (is(column, SQLiteTimestamp)) return Int.defaultFromDrizzle(def, column.mode); + return typeFor(column.getSQLType()).defaultFromDrizzle(def); +}; diff --git a/drizzle-kit/src/dialects/sqlite/grammar.ts b/drizzle-kit/src/dialects/sqlite/grammar.ts new file mode 100644 index 0000000000..be45abf258 --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/grammar.ts @@ -0,0 +1,573 @@ +import { trimChar } from '../../utils'; +import { parse, stringify } from '../../utils/when-json-met-bigint'; +import type { Column, ForeignKey } from './ddl'; +import type { Import } from './typescript'; + +const namedCheckPattern = /CONSTRAINT\s+["'`[]?(\w+)["'`\]]?\s+CHECK\s*\((.*)\)/gi; +const unnamedCheckPattern = /CHECK\s+\((.*)\)/gi; +const viewAsStatementRegex = new RegExp(`\\bAS\\b\\s+(WITH.+|SELECT.+)$`, 'is'); // 'i' for case-insensitive, 's' for dotall mode + +export const nameForForeignKey = (fk: Pick) => { + return `fk_${fk.table}_${fk.columns.join('_')}_${fk.tableTo}_${fk.columnsTo.join('_')}_fk`; +}; + +export const nameForUnique = (table: string, columns: string[]) => { + return `${table}_${columns.join('_')}_unique`; +}; + +export const nameForPk = (table: string) => { + return `${table}_pk`; +}; + +export interface SqlType { + is(type: string): boolean; + drizzleImport(): Import; + defaultFromDrizzle(value: unknown, mode?: MODE): Column['default']; + defaultFromIntrospect(value: string): Column['default']; + toTs(value: Column['default']): { def: string; options?: Record } | string; +} + +const intAffinities = [ + 'int', + 'integer', + 'tiniint', + 'smallint', + 'mediumint', + 'bigint', + 'unsigned big int', + 'int2', + 'int8', +]; + +export const Int: SqlType<'timestamp' | 'timestamp_ms'> = { + is(type) { + return intAffinities.indexOf(type.toLowerCase()) >= 0; + }, + drizzleImport: () => 'integer', + defaultFromDrizzle: (value, mode) => { + if (typeof value === 'boolean') { + return value ? '1' : '0'; + } + + if (typeof value === 'bigint') { + return `'${value.toString()}'`; + } + + if (value instanceof Date) { + const v = mode === 'timestamp' ? value.getTime() / 1000 : value.getTime(); + return v.toFixed(0); + } + + return String(value); + }, + defaultFromIntrospect: (value) => { + const it = trimChar(value, "'"); + const check = Number(it); + if (Number.isNaN(check)) return value; // unknown + if (check >= Number.MIN_SAFE_INTEGER && check <= Number.MAX_SAFE_INTEGER) return it; + return it; // bigint + }, + toTs: (value) => { + if (!value) return ''; + const check = Number(value); + + if (Number.isNaN(check)) return `sql\`${value}\``; // unknown + if (check >= Number.MIN_SAFE_INTEGER && check <= Number.MAX_SAFE_INTEGER) return value; + return `${value}n`; // bigint + }, +}; + +const realAffinities = [ + 'real', + 'double', + 'double precision', + 'float', +]; + +export const Real: SqlType = { + is: function(type: string): boolean { + return realAffinities.indexOf(type.toLowerCase()) >= 0; + }, + drizzleImport: function(): Import { + return 'real'; + }, + defaultFromDrizzle: function(value: unknown): Column['default'] { + return String(value); + }, + defaultFromIntrospect: function(value: string): Column['default'] { + return value; + }, + toTs: function(value: Column['default']): string { + return value ?? ''; + }, +}; + +const numericAffinities = [ + 'numeric', + 'decimal', + 'boolean', + 'date', + 'datetime', +]; +export const Numeric: SqlType = { + is: function(type: string): boolean { + const lowered = type.toLowerCase(); + + return numericAffinities.indexOf(lowered) >= 0 + || lowered.startsWith('numeric(') + || lowered.startsWith('decimal('); + }, + drizzleImport: function(): Import { + return 'numeric'; + }, + defaultFromDrizzle: function(value: unknown, _mode?: unknown): Column['default'] { + if (typeof value === 'string') return `'${value}'`; + if (typeof value === 'bigint') return `'${value.toString()}'`; + if (typeof value === 'number') return `${value.toString()}`; + throw new Error(`unexpected: ${value} ${typeof value}`); + }, + defaultFromIntrospect: function(value: string): Column['default'] { + return value; + }, + toTs: function(value: Column['default']) { + if (!value) return ''; + const check = Number(value); + + if (Number.isNaN(check)) return value; // unknown + if (check >= Number.MIN_SAFE_INTEGER && check <= Number.MAX_SAFE_INTEGER) { + return { def: value, options: { mode: 'number' } }; + } + return { def: `${value}n`, options: { mode: 'bigint' } }; // bigint + }, +}; + +const textAffinities = [ + 'text', + 'character', + 'varchar', + 'varying character', + 'nchar', + 'native character', + 'nvarchar', + 'clob', +]; + +export const Text: SqlType = { + is: function(type: string): boolean { + const lowered = type.toLowerCase(); + return textAffinities.indexOf(lowered) >= 0 + || lowered.startsWith('character(') + || lowered.startsWith('varchar(') + || lowered.startsWith('varying character(') + || lowered.startsWith('nchar(') + || lowered.startsWith('native character(') + || lowered.startsWith('nvarchar('); + }, + drizzleImport: function(): Import { + return 'text'; + }, + defaultFromDrizzle: function(value: unknown, _mode?: unknown): Column['default'] { + let result: string; + if (typeof value === 'string') result = value.replaceAll('\\', '\\\\').replaceAll("'", "''"); + else if (typeof value === 'object' || Array.isArray(value)) { + result = stringify(value, (_, value) => { + if (typeof value !== 'string') return value; + return value.replaceAll("'", "''"); + }); + } else { + throw new Error(`unexpected default: ${value}`); + } + return `'${result}'`; + }, + defaultFromIntrospect: function(value: string): Column['default'] { + return value; + }, + toTs: function(value: Column['default']) { + if (value === null) return ''; + if (!value.startsWith("'")) return `sql\`${value}\``; // CURRENT_TIMESTAMP + + try { + const parsed = parse(trimChar(value, "'"), (_, v) => { + if (typeof v === 'string') { + return v.replaceAll("''", "'"); + } + return v; + }); + + return { + def: stringify(parsed, undefined, undefined, true)!, + options: { mode: 'json' }, + }; + } catch {} + + const escaped = trimChar(value, "'").replaceAll("''", "'").replaceAll('"', '\\"'); + return `"${escaped}"`; + }, +}; + +export const Blob: SqlType = { + is: function(type: string): boolean { + const lowered = type.toLowerCase(); + return lowered === 'blob' || lowered.startsWith('blob'); + }, + drizzleImport: function(): Import { + return 'blob'; + }, + defaultFromDrizzle: function(value: unknown): Column['default'] { + if (typeof value === 'bigint') return `'${value.toString()}'`; + if (typeof Buffer !== 'undefined' && typeof Buffer.isBuffer === 'function' && Buffer.isBuffer(value)) { + return `X'${value.toString('hex').toUpperCase()}'`; + } + if (Array.isArray(value) || typeof value === 'object') { + return Text.defaultFromDrizzle(value); + } + throw new Error('unexpected'); + }, + defaultFromIntrospect: function(value: string) { + return value; + }, + toTs: function(value) { + if (value === null) return ''; + + if (typeof Buffer !== 'undefined' && value.startsWith("X'")) { + const parsed = Buffer.from(value.slice(2, value.length - 1), 'hex').toString('utf-8'); + const escaped = parsed.replaceAll('\\', '\\\\').replaceAll('"', '\\"'); + return `Buffer.from("${escaped}")`; + } + + try { + const trimmed = trimChar(value, "'"); + const num = Number(trimmed); + if (!Number.isNaN(num)) { + if (num >= Number.MIN_SAFE_INTEGER && num <= Number.MAX_SAFE_INTEGER) { + return String(num); + } else { + return `${trimmed}n`; + } + } + } catch {} + + return Text.toTs(value); + }, +}; + +export const typeFor = (sqlType: string): SqlType => { + if (Int.is(sqlType)) return Int; + if (Real.is(sqlType)) return Real; + if (Numeric.is(sqlType)) return Numeric; + if (Text.is(sqlType)) return Text; + if (Blob.is(sqlType)) return Blob; + + // If no specific type matches, default to Numeric + return Numeric; +}; + +export function sqlTypeFrom(sqlType: string): string { + const lowered = sqlType.toLowerCase(); + if ( + [ + 'int', + // 'integer', redundant + // 'integer auto_increment', redundant + 'tinyint', + 'smallint', + 'mediumint', + 'bigint', + 'unsigned big int', + // 'int2', redundant + // 'int8', redundant + ].some((it) => lowered.startsWith(it)) + ) { + return 'integer'; + } + + if ( + [ + 'character', + 'varchar', + 'varying character', + 'national varying character', + 'nchar', + 'native character', + 'nvarchar', + 'text', + 'clob', + ].some((it) => lowered.startsWith(it)) + ) { + const match = lowered.match(/\d+/); + + if (match) { + return `text(${match[0]})`; + } + + return 'text'; + } + + if (lowered.startsWith('blob')) { + return 'blob'; + } + + if ( + ['real', 'double', 'double precision', 'float'].some((it) => lowered.startsWith(it)) + ) { + return 'real'; + } + + return 'numeric'; +} + +export const parseDefault = (type: string, it: string): Column['default'] => { + if (it === null) return null; + const grammarType = typeFor(type); + + if (grammarType) return grammarType.defaultFromIntrospect(it); + + const trimmed = trimChar(it, "'"); + + if (/^-?\d+(?:\.\d+)?(?:[eE][+-]?\d+)?$/.test(trimmed)) { + const n = Number(it); + + if (n >= Number.MIN_SAFE_INTEGER && n <= Number.MAX_SAFE_INTEGER) { + return trimmed; + } + return `'${trimmed}'`; + } + + // TODO: handle where and need tests?? + if (['CURRENT_TIME', 'CURRENT_DATE', 'CURRENT_TIMESTAMP'].includes(it)) { + return `(${it})`; + } + return `(${it})`; +}; + +export const parseTableSQL = (sql: string) => { + const namedChecks = [...sql.matchAll(namedCheckPattern)].map((it) => { + const [_, name, value] = it; + return { name, value: value.trim() }; + }); + const unnamedChecks = [...sql.matchAll(unnamedCheckPattern)].map((it) => { + const [_, value] = it; + return { name: null, value: value.trim() }; + }).filter((it) => !namedChecks.some((x) => x.value === it.value)); + + return { + checks: [...namedChecks, ...unnamedChecks], + }; +}; + +export const parseViewSQL = (sql: string) => { + const match = sql.match(viewAsStatementRegex); + return match ? match[1] : null; +}; + +export interface Generated { + as: string; + type: 'stored' | 'virtual'; +} + +export function extractGeneratedColumns(input: string): Record { + const columns: Record = {}; + const regex = /["'`[]?(\w+)["'`\]]?\s+(\w+)\s+GENERATED\s+ALWAYS\s+AS\s*\(/gi; + + let match: RegExpExecArray | null; + while ((match = regex.exec(input)) !== null) { + const columnName = match[1]; + let startIndex = regex.lastIndex - 1; // position of '(' + let depth = 1; + let endIndex = startIndex + 1; + + // Find matching closing parenthesis + while (endIndex < input.length && depth > 0) { + const char = input[endIndex]; + if (char === '(') depth++; + else if (char === ')') depth--; + endIndex++; + } + + const expression = input.slice(startIndex, endIndex).trim(); + + // Find STORED/VIRTUAL type after the expression + const afterExpr = input.slice(endIndex); + const typeMatch = afterExpr.match(/\b(STORED|VIRTUAL)\b/i); + const type = typeMatch ? typeMatch[1].toLowerCase() as Generated['type'] : 'virtual'; + + columns[columnName] = { + as: expression, + type, + }; + } + return columns; +} + +export const omitSystemTables = () => { + // ['__drizzle_migrations', `'\\_cf\\_%'`, `'\\_litestream\\_%'`, `'libsql\\_%'`, `'sqlite\\_%'`]; + return true; +}; + +interface IParseResult { + uniques: { name: string | null; columns: string[] }[]; + pk: { name: string | null; columns: string[] }; +} + +/** + * Parses a SQLite DDL string to find primary key and unique constraints + * Handles quoted with [], ``, "", or no quotes + */ +export function parseSqliteDdl(ddl: string): IParseResult { + const result: IParseResult = { + pk: { name: null, columns: [] }, + uniques: [], + }; + + const cleanIdentifier = (identifier: string): string => { + return identifier.trim().replace(/^(?:\[|`|")/, '').replace(/(?:\]|`|")$/, ''); + }; + + const parseColumns = (columnsStr: string): string[] => { + return columnsStr.split(',').map((c) => cleanIdentifier(c)); + }; + + const normalizedDdl = ddl.replace(/(\r\n|\n|\r)/gm, ' ').replace(/\s+/g, ' '); + const bodyMatch = normalizedDdl.match(/CREATE\s+TABLE.*?\((.*)\)/i); + if (!bodyMatch) { + return result; // Not a valid CREATE TABLE statement + } + let tableBody = bodyMatch[1]; + + const ident = '(?:\\[[^\\]]+\\]|`[^`]+`|"[^"]+"|[\\w_]+)'; + + // find table level UNIQUE constraints + const uniqueConstraintRegex = new RegExp(`CONSTRAINT\\s+(${ident})\\s+UNIQUE\\s*\\(([^)]+)\\)`, 'gi'); + tableBody = tableBody.replace(uniqueConstraintRegex, (match, name, columns) => { + result.uniques.push({ name: cleanIdentifier(name), columns: parseColumns(columns) }); + return ''; // remove the matched constraint from the string + }); + + // find table level PRIMARY KEY constraint + const pkConstraintRegex = new RegExp(`CONSTRAINT\\s+(${ident})\\s+PRIMARY\\s+KEY\\s*\\(([^)]+)\\)`, 'i'); + tableBody = tableBody.replace(pkConstraintRegex, (match, name, columns) => { + result.pk = { name: cleanIdentifier(name), columns: parseColumns(columns) }; + return ''; // remove the matched constraint from the string + }); + + // split the remaining body into individual definition parts + const definitions = tableBody.split(',').filter((def) => def.trim() !== ''); + + const inlineConstraintNameRegex = new RegExp(`CONSTRAINT\\s+(${ident})`, 'i'); + for (const def of definitions) { + const trimmedDef = def.trim(); + + // find inline PRIMARY KEY + const inlinePkRegex = new RegExp(`^(${ident})\\s+.*\\bPRIMARY\\s+KEY\\b`, 'i'); + const pkMatch = trimmedDef.match(inlinePkRegex); + if (pkMatch) { + const pkColumn = cleanIdentifier(pkMatch[1]); + // check for an inline constraint name -> `id INT CONSTRAINT pk_id PRIMARY KEY` + const pkNameMatch = trimmedDef.match(inlineConstraintNameRegex); + result.pk = { name: pkNameMatch ? cleanIdentifier(pkNameMatch[1]) : null, columns: [pkColumn] }; + } + + // find inline UNIQUE + const inlineUniqueRegex = new RegExp(`^(${ident})\\s+.*\\bUNIQUE\\b`, 'i'); + const uniqueMatch = trimmedDef.match(inlineUniqueRegex); + if (uniqueMatch) { + const uqColumn = cleanIdentifier(uniqueMatch[1]); + const alreadyExists = result.uniques.some((u) => u.columns.length === 1 && u.columns[0] === uqColumn); + const uqNameMatch = trimmedDef.match(inlineConstraintNameRegex); + const uqName = uqNameMatch ? cleanIdentifier(uqNameMatch[1]) : null; + if (!alreadyExists) { + result.uniques.push({ name: uqName, columns: [uqColumn] }); + } + } + } + + return result; +} + +interface IFkConstraint { + name: string | null; + fromTable: string; // The table where the FK is defined + toTable: string; // The table being referenced + fromColumns: string[]; // Columns in the current table + toColumns: string[]; // Columns in the referenced table +} +/** + * Parses a SQLite DDL string to find all foreign key constraints + */ +export function parseSqliteFks(ddl: string): IFkConstraint[] { + const results: IFkConstraint[] = []; + + const cleanIdentifier = (identifier: string): string => { + return identifier.trim().replace(/^(?:\[|`|")/, '').replace(/(?:\]|`|")$/, ''); + }; + + const parseColumns = (columnsStr: string): string[] => { + return columnsStr.split(',').map((c) => cleanIdentifier(c)); + }; + + const normalizedDdl = ddl.replace(/(\r\n|\n|\r)/gm, ' ').replace(/\s+/g, ' '); + + // find the name of the table being created (the "from" table) + const ident = '(?:\\[[^\\]]+\\]|`[^`]+`|"[^"]+"|[\\w_]+)'; + const fromTableMatch = normalizedDdl.match( + new RegExp(`CREATE\\s+TABLE\\s+(?:IF\\s+NOT\\s+EXISTS\\s+)?(${ident})`, 'i'), + ); + if (!fromTableMatch) { + return results; // Not a valid CREATE TABLE statement + } + const fromTable = cleanIdentifier(fromTableMatch[1]); + + const bodyMatch = normalizedDdl.match(/\((.*)\)/i); + if (!bodyMatch) { + return results; + } + let tableBody = bodyMatch[1]; + + // find and remove all table level FOREIGN KEY constraints + const tableFkRegex = new RegExp( + `(?:CONSTRAINT\\s+(${ident})\\s+)?FOREIGN\\s+KEY\\s*\\(([^)]+)\\)\\s+REFERENCES\\s+(${ident})(?:\\s*\\(([^)]+)\\))?`, + 'gi', + ); + + tableBody = tableBody.replace(tableFkRegex, (match, name, fromCols, refTable, toCols) => { + results.push({ + name: name ? cleanIdentifier(name) : null, + fromTable: fromTable, + toTable: cleanIdentifier(refTable), + fromColumns: parseColumns(fromCols), + toColumns: toCols ? parseColumns(toCols) : [], + }); + return ''; // Remove from DDL body + }); + + // find inline REFERENCES on the cleaned string + const definitions = tableBody.split(',').filter((def) => def.trim() !== ''); + + for (const def of definitions) { + const trimmedDef = def.trim(); + + const inlineFkRegex = new RegExp( + `^(${ident}).*?\\s+REFERENCES\\s+(${ident})(?:\\s*\\(([^)]+)\\))?`, + 'i', + ); + const inlineMatch = trimmedDef.match(inlineFkRegex); + + if (inlineMatch) { + const fromColumn = cleanIdentifier(inlineMatch[1]); + const toTable = cleanIdentifier(inlineMatch[2]); + const toColumn = inlineMatch[3] ? cleanIdentifier(inlineMatch[3]) : null; + + const nameMatch = trimmedDef.match(new RegExp(`CONSTRAINT\\s+(${ident})`, 'i')); + + results.push({ + name: nameMatch ? cleanIdentifier(nameMatch[1]) : null, + fromTable: fromTable, + toTable: toTable, + fromColumns: [fromColumn], + toColumns: toColumn ? [toColumn] : [], + }); + } + } + + return results; +} diff --git a/drizzle-kit/src/dialects/sqlite/introspect.ts b/drizzle-kit/src/dialects/sqlite/introspect.ts new file mode 100644 index 0000000000..9d56355cf8 --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/introspect.ts @@ -0,0 +1,647 @@ +import type { IntrospectStage, IntrospectStatus } from '../../cli/views'; +import { areStringArraysEqual, type DB } from '../../utils'; +import type { EntityFilter } from '../pull-utils'; +import type { + CheckConstraint, + Column, + ForeignKey, + Index, + InterimColumn, + PrimaryKey, + SqliteEntities, + UniqueConstraint, + View, + ViewColumn, +} from './ddl'; +import type { Generated } from './grammar'; +import { + extractGeneratedColumns, + nameForForeignKey, + nameForPk, + nameForUnique, + parseDefault, + parseSqliteDdl, + parseSqliteFks, + parseTableSQL, + parseViewSQL, + sqlTypeFrom, +} from './grammar'; + +export const fromDatabaseForDrizzle = async ( + db: DB, + filter: EntityFilter = () => true, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, +) => { + const res = await fromDatabase(db, filter, progressCallback); + res.indexes = res.indexes.filter((it) => it.origin !== 'auto'); + + return res; +}; + +export const fromDatabase = async ( + db: DB, + filter: EntityFilter, + progressCallback: ( + stage: IntrospectStage, + count: number, + status: IntrospectStatus, + ) => void = () => {}, + queryCallback: ( + id: string, + rows: Record[], + error: Error | null, + ) => void = () => {}, +) => { + // TODO: fetch tables and views list with system filter from grammar + const dbTableColumns = await db.query<{ + table: string; + name: string; + columnType: string; + notNull: number; + defaultValue: string; + pk: number; + hidden: number; + sql: string; + type: 'table' | 'view'; + }>( + `SELECT + m.name as "table", + p.name as "name", + p.type as "columnType", + p."notnull" as "notNull", + p.dflt_value as "defaultValue", + p.pk as pk, + p.hidden as hidden, + m.sql, + m.type as type + FROM sqlite_master AS m + JOIN pragma_table_xinfo(m.name) AS p + WHERE + m.type = 'table' + and m.tbl_name != '__drizzle_migrations' + and m.tbl_name NOT LIKE '\\_cf\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE '\\_litestream\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'libsql\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'sqlite\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'd1\\_%' ESCAPE '\\' + ORDER BY p.cid + ; + `, + ).then((columns) => { + queryCallback('columns', columns, null); + return columns.filter((it) => filter({ type: 'table', schema: false, name: it.table })); + }).catch((error) => { + queryCallback('columns', [], error); + throw error; + }); + + const views = await db.query<{ + name: string; + sql: string; + }>( + `SELECT + m.name as "name", + m.sql + FROM sqlite_master AS m + WHERE + m.type = 'view' + and m.tbl_name != '__drizzle_migrations' + and m.tbl_name NOT LIKE '\\_cf\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE '\\_litestream\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'libsql\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'sqlite\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'd1\\_%' ESCAPE '\\' + ORDER BY m.name COLLATE NOCASE + ;`, + ).then((views) => { + queryCallback('views', views, null); + return views.filter((it) => filter({ type: 'table', schema: false, name: it.name })).map((it): View => { + const definition = parseViewSQL(it.sql); + + if (!definition) { + throw new Error(`Could not process view ${it.name}:\n${it.sql}`); + } + + return { + entityType: 'views', + name: it.name, + definition, + isExisting: false, + error: null, + }; + }); + }).catch((error) => { + queryCallback('views', [], error); + throw error; + }); + + let dbViewColumns: { + table: string; + name: string; + columnType: string; + notNull: number; + defaultValue: string; + pk: number; + hidden: number; + }[] = []; + try { + dbViewColumns = await db.query<{ + table: string; + name: string; + columnType: string; + notNull: number; + defaultValue: string; + pk: number; + hidden: number; + sql: string; + type: 'view'; + }>( + `SELECT + m.name as "table", + p.name as "name", + p.type as "columnType", + p."notnull" as "notNull", + p.dflt_value as "defaultValue", + p.pk as pk, + p.hidden as hidden, + m.sql, + m.type as type + FROM sqlite_master AS m + JOIN pragma_table_xinfo(m.name) AS p + WHERE + m.type = 'view' + and m.tbl_name != '__drizzle_migrations' + and m.tbl_name NOT LIKE '\\_cf\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE '\\_litestream\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'libsql\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'sqlite\\_%' ESCAPE '\\' + and m.tbl_name NOT LIKE 'd1\\_%' ESCAPE '\\' + ORDER BY m.name COLLATE NOCASE, p.cid + ; + `, + ).then((columns) => { + queryCallback('viewColumns', columns, null); + return columns.filter((it) => filter({ type: 'table', schema: false, name: it.table })); + }).catch((error) => { + queryCallback('viewColumns', [], error); + throw error; + }); + } catch { + for (const view of views) { + try { + const viewColumns = await db.query<{ + table: string; + name: string; + columnType: string; + notNull: number; + defaultValue: string; + pk: number; + hidden: number; + }>( + `SELECT + '${view.name}' as "table", + p.name as "name", + p.type as "columnType", + p."notnull" as "notNull", + p.dflt_value as "defaultValue", + p.pk as pk, + p.hidden as hidden + FROM pragma_table_xinfo(${view.name}) AS p + ORDER BY p.name COLLATE NOCASE, p.cid + ; + `, + ).then((columns) => { + queryCallback(`viewColumns:${view.name}`, columns, null); + return columns; + }).catch((error) => { + queryCallback(`viewColumns:${view.name}`, [], error); + throw error; + }); + dbViewColumns.push(...viewColumns); + } catch (error) { + const errorMessage = (error as Error).message; + const viewIndex = views.findIndex((v) => v.name === view.name); + views[viewIndex] = { + ...views[viewIndex], + error: errorMessage, + }; + } + } + } + + const dbTablesWithSequences = await db.query<{ + name: string; + }>( + `SELECT * FROM sqlite_master WHERE name != 'sqlite_sequence' + and name != 'sqlite_stat1' + and name != '_litestream_seq' + and name != '_litestream_lock' + and tbl_name != '_cf_KV' + and sql GLOB '*[ *' || CHAR(9) || CHAR(10) || CHAR(13) || ']AUTOINCREMENT[^'']*';`, + ).then((tables) => { + queryCallback('tablesWithSequences', tables, null); + return tables.filter((it) => filter({ type: 'table', schema: false, name: it.name })); + }).catch((error) => { + queryCallback('tablesWithSequences', [], error); + throw error; + }); + + const dbIndexes = await db.query<{ + table: string; + sql: string; + name: string; + column: string; + isUnique: number; + origin: string; // u=auto c=manual pk + seq: string; + cid: number; + }>(` + SELECT + m.tbl_name as "table", + m.sql, + il.name as "name", + ii.name as "column", + il.[unique] as "isUnique", + il.origin, + il.seq, + ii.cid + FROM sqlite_master AS m, + pragma_index_list(m.name) AS il, + pragma_index_info(il.name) AS ii + WHERE + m.type = 'table' + and m.tbl_name != '_cf_KV' + ORDER BY m.name COLLATE NOCASE; + `).then((indexes) => { + queryCallback('indexes', indexes, null); + return indexes.filter((it) => filter({ type: 'table', schema: false, name: it.table })); + }).catch((error) => { + queryCallback('indexes', [], error); + throw error; + }); + + let columnsCount = 0; + let tablesCount = new Set(); + let indexesCount = 0; + let foreignKeysCount = 0; + let checksCount = 0; + let viewsCount = 0; + + type DBIndex = typeof dbIndexes[number]; + // append primaryKeys by table + + const tableToParsedFks = dbTableColumns.reduce((acc, it) => { + if (!(it.table in acc)) { + acc[it.table] = parseSqliteFks(it.sql); + } + return acc; + }, {} as { + [tname: string]: { + name: string | null; + toTable: string; + fromTable: string; + fromColumns: string[]; + toColumns: string[]; + }[]; + }); + + const tableToPk = dbTableColumns.reduce((acc, it) => { + const isPrimary = it.pk !== 0; + if (isPrimary) { + if (it.table in acc) { + acc[it.table].push(it.name); + } else { + acc[it.table] = [it.name]; + } + } + return acc; + }, {} as { [tname: string]: string[] }); + + const tableToGenerated = dbTableColumns.reduce((acc, it) => { + if (it.hidden !== 2 && it.hidden !== 3) return acc; + acc[it.table] = extractGeneratedColumns(it.sql); + return acc; + }, {} as Record>); + + const tableToIndexColumns = dbIndexes.reduce( + (acc, it) => { + const whereIdx = it.sql.toLowerCase().indexOf(' where '); + const where = whereIdx < 0 ? null : it.sql.slice(whereIdx + 7); + const column = { value: it.column, isExpression: it.cid === -2 }; + if (it.table in acc) { + if (it.name in acc[it.table]) { + const idx = acc[it.table][it.name]; + idx.columns.push(column); + } else { + const idx = { index: it, columns: [column], where }; + acc[it.table][it.name] = idx; + } + } else { + const idx = { index: it, columns: [column], where }; + acc[it.table] = { [it.name]: idx }; + } + return acc; + }, + {} as Record< + string, + Record + >, + ); + + const tablesToSQL = dbTableColumns.reduce((acc, it) => { + if (it.table in acc) return acc; + + acc[it.table] = it.sql; + return acc; + }, {} as Record) || {}; + + const tables: SqliteEntities['tables'][] = [ + ...new Set(dbTableColumns.filter((it) => it.type === 'table').map((it) => it.table)), + ].map((it) => ({ + entityType: 'tables', + name: it, + })); + + const pks: PrimaryKey[] = []; + for (const [key, value] of Object.entries(tableToPk)) { + const tableSql = tablesToSQL[key]; + const parsed = parseSqliteDdl(tableSql); + + if (value.length === 1) continue; + + pks.push({ + entityType: 'pks', + table: key, + name: parsed.pk.name ?? nameForPk(key), + columns: value, + nameExplicit: false, + }); + } + + const columns: InterimColumn[] = []; + for (const column of dbTableColumns.filter((it) => it.type === 'table')) { + columnsCount += 1; + + progressCallback('columns', columnsCount, 'fetching'); + + tablesCount.add(column.table); + + progressCallback('tables', tablesCount.size, 'fetching'); + + const name = column.name; + const notNull = column.notNull === 1; // 'YES', 'NO' + const type = sqlTypeFrom(column.columnType); // varchar(256) + const isPrimary = column.pk !== 0; + + const columnDefault: Column['default'] = parseDefault(column.columnType, column.defaultValue); + const autoincrement = isPrimary && dbTablesWithSequences.some((it) => it.name === column.table); + const pk = tableToPk[column.table]; + const primaryKey = isPrimary && pk && pk.length === 1; + const generated = tableToGenerated[column.table]?.[column.name] || null; + + const tableIndexes = Object.values(tableToIndexColumns[column.table] || {}); + + const unique = primaryKey + ? null // if pk, no UNIQUE + : tableIndexes.filter((it) => { + const idx = it.index; + + // we can only safely define UNIQUE column when there is automatically(origin=u) created unique index on the column(only 1) + return idx.origin === 'u' && idx.isUnique && it.columns.length === 1 && idx.table === column.table + && idx.column === column.name; + }).map((it) => { + const parsed = parseSqliteDdl(it.index.sql); + + const constraint = parsed.uniques.find((parsedUnique) => + areStringArraysEqual(it.columns.map((indexCol) => indexCol.value), parsedUnique.columns) + ); + if (!constraint) return null; + + return { name: constraint.name }; + })[0] || null; + + const pkName = !primaryKey + ? null // if pk, no UNIQUE + : tableIndexes.filter((it) => { + const idx = it.index; + + // we can only safely define PRIMARY KEY column when there is automatically(origin=pk) created unique index on the column(only 1) + return idx.origin === 'pk' && idx.isUnique && it.columns.length === 1 && idx.table === column.table + && idx.column === column.name; + }).map((it) => { + const parsed = parseSqliteDdl(it.index.sql); + if (parsed.pk.columns.length > 1) return; + + const constraint = areStringArraysEqual(parsed.pk.columns, [name]) ? parsed.pk : null; + if (!constraint) return { name: null }; + + return { name: constraint.name }; + })[0] || null; + + columns.push({ + entityType: 'columns', + table: column.table, + default: columnDefault, + autoincrement, + name, + pk: primaryKey, + pkName: pkName?.name ?? nameForPk(column.table), + type, + notNull, + generated, + isUnique: !!unique, + uniqueName: unique?.name ?? null, + }); + } + + progressCallback('columns', columnsCount, 'done'); + progressCallback('tables', tablesCount.size, 'done'); + + const dbFKs = await db.query<{ + tableFrom: string; + tableTo: string; + from: string; + to: string; + onUpdate: string; + sql: string; + onDelete: string; + seq: number; + id: number; + }>( + `SELECT + m.name as "tableFrom", + f.id as "id", + f."table" as "tableTo", + f."from", + f."to", + m."sql" as sql, + f."on_update" as "onUpdate", + f."on_delete" as "onDelete", + f.seq as "seq" + FROM sqlite_master m, pragma_foreign_key_list(m.name) as f + WHERE m.tbl_name != '_cf_KV';`, + ).then((fks) => { + queryCallback('fks', fks, null); + return fks.filter((it) => filter({ type: 'table', schema: false, name: it.tableFrom })); + }).catch((error) => { + queryCallback('fks', [], error); + throw error; + }); + type DBFK = typeof dbFKs[number]; + + const fksToColumns = dbFKs.reduce((acc, it) => { + const key = `${it.tableFrom}:${it.id}`; + if (key in acc) { + acc[key].columnsFrom.push(it.from); + acc[key].columnsTo.push(it.to); + } else { + acc[key] = { + fk: it, + columnsFrom: [it.from], + columnsTo: [it.to], + }; + } + return acc; + }, {} as Record); + + const fks: ForeignKey[] = []; + for (const fk of dbFKs) { + foreignKeysCount += 1; + progressCallback('fks', foreignKeysCount, 'fetching'); + + const { columnsFrom, columnsTo } = fksToColumns[`${fk.tableFrom}:${fk.id}`]!; + + // can be undefined if fk references to non-existing table + const parsedFk = tableToParsedFks[fk.tableFrom] as typeof tableToParsedFks[string] | undefined; + const constraint = parsedFk?.find((it) => + areStringArraysEqual(it.fromColumns, columnsFrom) && areStringArraysEqual(it.toColumns, columnsTo) + && (it.toTable === fk.tableTo) && (it.fromTable === fk.tableFrom) + ); + let name: string; + if (!constraint) { + name = nameForForeignKey({ table: fk.tableFrom, columns: columnsFrom, tableTo: fk.tableTo, columnsTo }); + } else {name = constraint.name + ?? nameForForeignKey({ table: fk.tableFrom, columns: columnsFrom, tableTo: fk.tableTo, columnsTo });} + + fks.push({ + entityType: 'fks', + table: fk.tableFrom, + name, + tableTo: fk.tableTo, + columns: columnsFrom, + columnsTo, + nameExplicit: true, + onDelete: fk.onDelete ?? 'NO ACTION', + onUpdate: fk.onUpdate ?? 'NO ACTION', + }); + } + + progressCallback('fks', foreignKeysCount, 'done'); + + const indexes: Index[] = []; + for (const [table, index] of Object.entries(tableToIndexColumns)) { + const values = Object.values(index); + for (const { index, columns, where } of values) { + indexesCount += 1; + progressCallback('indexes', indexesCount, 'fetching'); + + const origin = index.origin === 'u' || index.origin === 'pk' ? 'auto' : index.origin === 'c' ? 'manual' : null; + if (!origin) throw new Error(`Index with unexpected origin: ${index.origin}`); + + indexes.push({ + entityType: 'indexes', + table, + name: index.name, + isUnique: index.isUnique === 1, + origin, + where, + columns, + }); + } + } + progressCallback('indexes', indexesCount, 'done'); + progressCallback('enums', 0, 'done'); + + const viewsToColumns = dbViewColumns.reduce((acc, it) => { + const column: ViewColumn = { + view: it.table, + name: it.name, + type: sqlTypeFrom(it.columnType), + notNull: it.notNull === 1, + }; + if (it.table in acc) { + acc[it.table].push(column); + } else { + acc[it.table] = [column]; + } + return acc; + }, {} as Record); + + viewsCount = Object.keys(viewsToColumns).length; + progressCallback('views', viewsCount, 'fetching'); + + progressCallback('views', viewsCount, 'done'); + + let checkCounter = 0; + const checkConstraints: Record = {}; + + const checks: CheckConstraint[] = []; + for (const [table, sql] of Object.entries(tablesToSQL)) { + const res = parseTableSQL(sql); + for (const it of res.checks) { + const { name, value } = it; + + let checkName = name ? name : `${table}_check_${++checkCounter}`; + checks.push({ entityType: 'checks', table, name: checkName, value: value.trim() }); + } + + checksCount += Object.values(checkConstraints).length; + progressCallback('checks', checksCount, 'fetching'); + } + + progressCallback('checks', checksCount, 'done'); + + const uniques: UniqueConstraint[] = []; + for (const [table, item] of Object.entries(tableToIndexColumns)) { + for (const { columns, index } of Object.values(item).filter((it) => it.index.isUnique)) { + if (columns.length === 1) continue; + if (columns.some((it) => it.isExpression)) { + throw new Error(`unexpected unique index '${index.name}' with expression value: ${index.sql}`); + } + + const origin = index.origin === 'u' || index.origin === 'pk' ? 'auto' : index.origin === 'c' ? 'manual' : null; + if (!origin) throw new Error(`Index with unexpected origin: ${index.origin}`); + + const parsed = parseSqliteDdl(index.sql); + + const constraint = parsed.uniques.find((parsedUnique) => + areStringArraysEqual(columns.map((it) => it.value), parsedUnique.columns) + ); + if (!constraint) continue; + + uniques.push({ + entityType: 'uniques', + table, + name: constraint.name ?? nameForUnique(table, columns.map((it) => it.value)), + nameExplicit: true, + columns: columns.map((it) => it.value), + }); + } + } + + return { + tables, + columns, + pks, + fks, + indexes, + checks, + uniques, + views, + viewsToColumns, + }; +}; diff --git a/drizzle-kit/src/dialects/sqlite/serializer.ts b/drizzle-kit/src/dialects/sqlite/serializer.ts new file mode 100644 index 0000000000..cbfb2dbc9d --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/serializer.ts @@ -0,0 +1,67 @@ +import type { CasingType } from 'src/cli/validations/common'; +import { sqliteSchemaError } from '../../cli/views'; +import { prepareFilenames } from '../../utils/utils-node'; +import type { SQLiteDDL } from './ddl'; +import { createDDL, interimToDDL } from './ddl'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from './drizzle'; +import type { SqliteSnapshot } from './snapshot'; +import { drySqliteSnapshot, snapshotValidator } from './snapshot'; + +export const prepareSqliteSnapshot = async ( + snapshots: string[], + schemaPath: string | string[], + casing: CasingType | undefined, +): Promise< + { + ddlPrev: SQLiteDDL; + ddlCur: SQLiteDDL; + snapshot: SqliteSnapshot; + snapshotPrev: SqliteSnapshot; + custom: SqliteSnapshot; + } +> => { + const { readFileSync } = await import('fs'); + const { randomUUID } = await import('crypto'); + const prevSnapshot = snapshots.length === 0 + ? drySqliteSnapshot + : snapshotValidator.strict(JSON.parse(readFileSync(snapshots[snapshots.length - 1]).toString())); + + const ddlPrev = createDDL(); + for (const entry of prevSnapshot.ddl) { + ddlPrev.entities.push(entry); + } + const filenames = prepareFilenames(schemaPath); + + const { tables, views } = await prepareFromSchemaFiles(filenames); + const interim = fromDrizzleSchema(tables, views, casing); + + const { ddl: ddlCur, errors } = interimToDDL(interim); + + if (errors.length > 0) { + console.log(errors.map((it) => sqliteSchemaError(it)).join('\n\n')); + process.exit(); + } + + const id = randomUUID(); + const prevIds = [prevSnapshot.id]; + + const snapshot = { + version: '7', + dialect: 'sqlite', + id, + prevIds, + ddl: ddlCur.entities.list(), + renames: [], + } satisfies SqliteSnapshot; + + const { id: _ignoredId, prevIds: _ignoredPrevIds, ...prevRest } = prevSnapshot; + + // that's for custom migrations, when we need new IDs, but old snapshot + const custom: SqliteSnapshot = { + id, + prevIds, + ...prevRest, + }; + + return { ddlPrev, ddlCur, snapshot, snapshotPrev: prevSnapshot, custom }; +}; diff --git a/drizzle-kit/src/dialects/sqlite/snapshot.ts b/drizzle-kit/src/dialects/sqlite/snapshot.ts new file mode 100644 index 0000000000..d666c9fd8f --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/snapshot.ts @@ -0,0 +1,168 @@ +import { any, array as zArray, boolean, enum as enumType, literal, object, record, string, type TypeOf } from 'zod'; +import { originUUID } from '../../utils'; +import { array, validator } from '../simpleValidator'; +import type { SQLiteDDL, SqliteEntity } from './ddl'; +import { createDDL } from './ddl'; + +// ------- V3 -------- +const index = object({ + name: string(), + columns: string().array(), + where: string().optional(), + isUnique: boolean(), +}).strict(); + +const fk = object({ + name: string(), + tableFrom: string(), + columnsFrom: string().array(), + tableTo: string(), + columnsTo: string().array(), + onUpdate: string().optional(), + onDelete: string().optional(), +}).strict(); + +const compositePK = object({ + columns: string().array(), + name: string(), +}).strict(); + +const column = object({ + name: string(), + type: string(), + typeSchema: string().optional(), // compatibility with Postgres schema? + primaryKey: boolean(), + notNull: boolean(), + autoincrement: boolean().optional(), + default: string().optional(), + generated: object({ + type: enumType(['stored', 'virtual']), + as: string(), + }).optional(), +}).strict(); + +const uniqueConstraint = object({ + name: string(), + columns: string().array(), +}).strict(); + +const checkConstraint = object({ + name: string(), + value: string(), +}).strict(); + +const table = object({ + name: string(), + columns: record(string(), column), + indexes: record(string(), index), + foreignKeys: record(string(), fk), + compositePrimaryKeys: record(string(), compositePK), + uniqueConstraints: record(string(), uniqueConstraint).default({}), + checkConstraints: record(string(), checkConstraint).default({}), +}).strict(); + +export const view = object({ + name: string(), + columns: record(string(), column), + definition: string().optional(), + isExisting: boolean(), +}).strict(); + +// use main dialect +const dialect = enumType(['sqlite']); + +const schemaHash = object({ + id: string(), + prevIds: zArray(string()), +}).strict(); + +const schemaHashV5 = object({ + id: string(), + prevId: string(), +}).strict(); + +export const schemaInternalV5 = object({ + version: literal('5'), + dialect: dialect, + tables: record(string(), table), + enums: object({}), + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), +}).strict(); + +const latestVersion = literal('7'); +export const schemaInternalV6 = object({ + version: literal('6'), + dialect: dialect, + tables: record(string(), table), + views: record(string(), view).default({}), + enums: object({}), + _meta: object({ + tables: record(string(), string()), + columns: record(string(), string()), + }), + internal: any(), +}).strict(); + +export const schemaV5 = schemaInternalV5.merge(schemaHashV5).strict(); +export const schemaV6 = schemaInternalV6.merge(schemaHashV5).strict(); +export const schema = schemaInternalV6.merge(schemaHash).strict(); +export type SQLiteSchemaV6 = TypeOf; +export type SQLiteSchema = TypeOf; + +export type Dialect = TypeOf; + +const tableSquashed = object({ + name: string(), + schema: string().optional(), + columns: record(string(), column), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()), + uniqueConstraints: record(string(), string()).default({}), + checkConstraints: record(string(), string()).default({}), +}).strict(); + +export const schemaSquashed = object({ + version: latestVersion, + dialect: dialect, + tables: record(string(), tableSquashed), + views: record(string(), view), + enums: record( + string(), + object({ + name: string(), + schema: string(), + values: string().array(), + }).strict(), + ), +}).strict(); + +export const sqliteSchemaV5 = schemaV5; +export const sqliteSchemaV6 = schemaV6; + +export const toJsonSnapshot = (ddl: SQLiteDDL, id: string, prevIds: string[], renames: string[]): SqliteSnapshot => { + return { dialect: 'sqlite', id, prevIds, version: '7', ddl: ddl.entities.list(), renames }; +}; + +const ddl = createDDL(); +export const snapshotValidator = validator({ + version: ['7'], + dialect: ['sqlite'], + id: 'string', + prevIds: array((_) => true), + ddl: array((it) => ddl.entities.validate(it)), + renames: array((_) => true), +}); + +export type SqliteSnapshot = typeof snapshotValidator.shape; +export const drySqliteSnapshot = snapshotValidator.strict({ + version: '7', + dialect: 'sqlite', + id: originUUID, + prevIds: [], + ddl: [], + renames: [], +}); diff --git a/drizzle-kit/src/dialects/sqlite/statements.ts b/drizzle-kit/src/dialects/sqlite/statements.ts new file mode 100644 index 0000000000..754ade5a65 --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/statements.ts @@ -0,0 +1,151 @@ +import type { DiffEntities } from '../cockroach/ddl'; +import type { Column, DiffColumn, ForeignKey, Index, SQLiteDDL, TableFull, View } from './ddl'; + +export interface JsonCreateTableStatement { + type: 'create_table'; + table: TableFull; +} + +export interface JsonRecreateTableStatement { + type: 'recreate_table'; + to: TableFull; + from: TableFull; + checkDiffs: SQLiteDDL['_']['diffs']['createdrop']['checks'][]; + uniquesDiff: SQLiteDDL['_']['diffs']['createdrop']['uniques'][]; + pksDiff: SQLiteDDL['_']['diffs']['createdrop']['pks'][]; + fksDiff: SQLiteDDL['_']['diffs']['createdrop']['fks'][]; + indexesDiff: SQLiteDDL['_']['diffs']['createdrop']['indexes'][]; + + alteredColumnsBecameGenerated: SQLiteDDL['_']['diffs']['alter']['columns'][]; + newStoredColumns: Column[]; + + columnAlters: SQLiteDDL['_']['diffs']['alter']['columns'][]; + pksAlters: SQLiteDDL['_']['diffs']['alter']['pks'][]; + fksAlters: SQLiteDDL['_']['diffs']['alter']['fks'][]; + uniquesAlters: SQLiteDDL['_']['diffs']['alter']['uniques'][]; + checksAlters: SQLiteDDL['_']['diffs']['alter']['checks'][]; +} + +export interface JsonDropTableStatement { + type: 'drop_table'; + tableName: string; +} + +export interface JsonRenameTableStatement { + type: 'rename_table'; + from: string; + to: string; +} + +export interface JsonDropColumnStatement { + type: 'drop_column'; + column: Column; +} + +export interface JsonAddColumnStatement { + type: 'add_column'; + column: Column; + fk: ForeignKey | null; +} + +export interface JsonCreateIndexStatement { + type: 'create_index'; + index: Index; +} + +export interface JsonDropIndexStatement { + type: 'drop_index'; + index: Index; +} + +export interface JsonRenameColumnStatement { + type: 'rename_column'; + table: string; + from: string; + to: string; +} + +export interface JsonRecreateColumnStatement { + type: 'recreate_column'; + column: Column; + diffGenerated: DiffEntities['columns']['generated']; + fk: ForeignKey | null; +} + +export type JsonCreateViewStatement = { + type: 'create_view'; + view: View; +}; + +export interface JsonDropViewStatement { + type: 'drop_view'; + view: View; +} + +export interface JsonRenameViewStatement { + type: 'rename_view'; + from: View; + to: View; +} + +export type JsonStatement = + | JsonRecreateTableStatement + | JsonRecreateColumnStatement + | JsonRenameColumnStatement + | JsonRecreateColumnStatement + | JsonDropTableStatement + | JsonRenameTableStatement + | JsonDropColumnStatement + | JsonCreateIndexStatement + | JsonDropIndexStatement + | JsonCreateTableStatement + | JsonAddColumnStatement + | JsonDropViewStatement + | JsonRenameViewStatement + | JsonCreateViewStatement; + +export const prepareStatement = < + TType extends JsonStatement['type'], + TStatement extends Extract, +>( + type: TType, + args: Omit, +): TStatement => { + return { + type, + ...args, + } as TStatement; +}; + +export const prepareAddColumns = ( + columns: Column[], + fks: ForeignKey[], +): JsonAddColumnStatement[] => { + return columns.map((it) => { + const fk = fks.find((t) => t.columns.length === 1 && t.columns[0] === it.name && t.table === it.table) || null; + return { + type: 'add_column', + column: it, + fk, + } satisfies JsonAddColumnStatement; + }); +}; + +export const prepareRecreateColumn = ( + diffColumn: DiffColumn, + column: Column, + fk: ForeignKey | null, +): JsonRecreateColumnStatement => { + // there're no other updates of entities, apart from name changes/some deletions+creations + // which doesn't trigger recreate + if (diffColumn.generated) { + return { + type: 'recreate_column', + diffGenerated: diffColumn.generated, + column: column, + fk: fk, + }; + } + + throw new Error('unexpected'); +}; diff --git a/drizzle-kit/src/dialects/sqlite/typescript.ts b/drizzle-kit/src/dialects/sqlite/typescript.ts new file mode 100644 index 0000000000..ea3234fc1f --- /dev/null +++ b/drizzle-kit/src/dialects/sqlite/typescript.ts @@ -0,0 +1,441 @@ +/* eslint-disable @typescript-eslint/no-unsafe-argument */ +import { toCamelCase } from 'drizzle-orm/casing'; +import '../../@types/utils'; +import type { Casing } from '../../cli/validations/common'; +import { assertUnreachable } from '../../utils'; +import type { + CheckConstraint, + Column, + ForeignKey, + Index, + PrimaryKey, + SQLiteDDL, + UniqueConstraint, + View, + ViewColumn, +} from './ddl'; +import { typeFor } from './grammar'; + +export const imports = ['integer', 'real', 'text', 'numeric', 'blob'] as const; +export type Import = typeof imports[number]; +const sqliteImports = new Set([ + 'sqliteTable', + ...imports, +]); + +const objToStatement2 = (json: any) => { + json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); + + const keys = Object.keys(json); + if (keys.length === 0) return; + + let statement = '{ '; + statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys + statement += ' }'; + return statement; +}; + +const relations = new Set(); + +const escapeColumnKey = (value: string) => { + if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { + return `"${value}"`; + } + return value; +}; + +const withCasing = (value: string, casing?: Casing) => { + if (casing === 'preserve') { + return escapeColumnKey(value); + } + if (casing === 'camel') { + return escapeColumnKey(value.camelCase()); + } + + return value; +}; + +const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { + if (casing === 'preserve') { + return ''; + } + if (casing === 'camel') { + return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; + } + + assertUnreachable(casing); +}; + +export const ddlToTypeScript = ( + schema: SQLiteDDL, + casing: Casing, + viewColumns: Record, + _type: 'sqlite' | 'libsql', +) => { + for (const fk of schema.fks.list()) { + const relation = `${fk.table}-${fk.tableTo}`; + relations.add(relation); + } + + const imports = new Set(); + + const columnTypes = new Set([]); + for (const it of schema.entities.list()) { + if (it.entityType === 'indexes') imports.add(it.isUnique ? 'uniqueIndex' : 'index'); + if (it.entityType === 'pks' && it.columns.length > 1) imports.add('primaryKey'); + if (it.entityType === 'uniques' && it.columns.length > 1) imports.add('unique'); + if (it.entityType === 'checks') imports.add('check'); + if (it.entityType === 'columns') columnTypes.add(it.type); + if (it.entityType === 'views') imports.add('sqliteView'); + if (it.entityType === 'tables') imports.add('sqliteTable'); + if (it.entityType === 'fks') { + imports.add('foreignKey'); + if (it.columns.length > 1 || isCyclic(it) || isSelf(it)) imports.add('AnySQLiteColumn'); + } + } + + for (const it of Array.from(columnTypes.values())) { + imports.add(typeFor(it).drizzleImport()); + } + + for (const it of Object.values(viewColumns).flat()) { + if (sqliteImports.has(it.type)) imports.add(it.type); + } + + const tableStatements = [] as string[]; + for (const table of schema.tables.list()) { + const columns = schema.columns.list({ table: table.name }); + const fks = schema.fks.list({ table: table.name }); + const pk = schema.pks.one({ table: table.name }); + const indexes = schema.indexes.list({ table: table.name }); + const uniqies = schema.uniques.list({ table: table.name }); + const checks = schema.checks.list({ table: table.name }); + + let statement = `export const ${withCasing(table.name, casing)} = sqliteTable("${table.name}", {\n`; + + statement += createTableColumns(columns, fks, pk, casing); + statement += '}'; + + // more than 2 fields or self reference or cyclic + const filteredFKs = fks.filter((it) => { + return it.columns.length > 1 || isSelf(it) || isCyclic(it); + }); + + if ( + indexes.length > 0 + || filteredFKs.length > 0 + || pk && pk.columns.length > 1 + || uniqies.length > 0 + || checks.length > 0 + ) { + statement += ',\n(table) => ['; + statement += createTableIndexes(table.name, indexes, casing); + statement += createTableFKs(Object.values(filteredFKs), casing); + statement += pk && pk.columns.length > 1 ? createTablePK(pk, casing) : ''; + statement += createTableUniques(uniqies, casing); + statement += createTableChecks(checks, casing); + statement += ']'; + } + statement += ');'; + + tableStatements.push(statement); + } + + const viewsStatements = schema.views.list().map((view) => { + let statement = `export const ${withCasing(view.name, casing)} = sqliteView("${view.name}", {\n`; + const columns = viewColumns[view.name] || []; + statement += createViewColumns(view, columns, casing); + statement += '})'; + statement += `.as(sql\`${view.definition?.replaceAll('`', '\\`')}\`);`; + + return statement; + }); + + const importsTs = `import { ${ + [...imports].join(', ') + } } from "drizzle-orm/sqlite-core"\nimport { sql } from "drizzle-orm"\n\n`; + + let decalrations = tableStatements.join('\n\n'); + decalrations += '\n\n'; + decalrations += viewsStatements.join('\n\n'); + + const file = importsTs + decalrations; + + // for drizzle studio query runner + const schemaEntry = ` + { + ${ + Object.values(schema.tables) + .map((it) => withCasing(it.name, casing)) + .join(',') + } + } + `; + + return { file, imports: importsTs, decalrations, schemaEntry }; +}; + +const isCyclic = (fk: ForeignKey) => { + const key = `${fk.table}-${fk.tableTo}`; + const reverse = `${fk.tableTo}-${fk.table}`; + return relations.has(key) && relations.has(reverse); +}; + +const isSelf = (fk: ForeignKey) => { + return fk.table === fk.tableTo; +}; + +const tryJson = (it: string) => { + try { + return JSON.parse(it); + } catch { + return null; + } +}; + +const mapColumnDefault = (it: NonNullable) => { + if ( + typeof it === 'string' + && it.startsWith('(') + && it.endsWith(')') + ) { + return `sql\`${it}\``; + } + // If default value is NULL as string it will come back from db as "'NULL'" and not just "NULL" + if (it === 'NULL') { + return `sql\`NULL\``; + } + + if (typeof it === 'string') { + const json = tryJson(it); + if (json) { + return objToStatement2(json); + } + + return it.replaceAll('"', '\\"').replaceAll("''", "'"); + } + + return it; +}; + +const column = ( + type: string, + name: string, + defaultValue: Column['default'], + casing: Casing, +) => { + let lowered = type; + + const grammarType = typeFor(type); + if (grammarType) { + const drizzleType = grammarType.drizzleImport(); + const res = grammarType.toTs(defaultValue); + const { def, options } = typeof res === 'string' ? { def: res } : res; + const defaultStatement = def ? `.default(${def})` : ''; + const opts = options ? `${JSON.stringify(options)}` : ''; + return `${withCasing(name, casing)}: ${drizzleType}(${dbColumnName({ name, casing })}${opts})${defaultStatement}`; + } + + // TODO: ?? + if (lowered.startsWith('text')) { + const match = lowered.match(/\d+/); + let out: string; + if (match) { + out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing, withMode: true })}{ length: ${ + match[0] + } })`; + } else { + out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`; + } + + out += defaultValue ? `.default("${mapColumnDefault(defaultValue)}")` : ''; + return out; + } + + return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; +}; + +const createTableColumns = ( + columns: Column[], + fks: ForeignKey[], + pk: PrimaryKey | null, + casing: Casing, +): string => { + let statement = ''; + for (const it of columns) { + const isPrimary = pk && pk.columns.length === 1 && pk.columns[0] === it.name; + + statement += '\t'; + statement += column(it.type, it.name, it.default, casing); + statement += isPrimary ? `.primaryKey(${it.autoincrement ? '{ autoIncrement: true }' : ''})` : ''; + statement += it.notNull && !isPrimary ? '.notNull()' : ''; + statement += it.generated + ? `.generatedAlwaysAs(sql\`${ + it.generated.as.replace(/`/g, '\\`').slice(1, -1) + }\`, { mode: "${it.generated.type}" })` + : ''; + + const references = fks.filter((fk) => fk.columns.length === 1 && fk.columns[0] === it.name); + + for (const fk of references) { + statement += `.references(() => ${withCasing(fk.tableTo, casing)}.${withCasing(fk.columnsTo[0], casing)})`; + + const onDelete = fk.onDelete && fk.onDelete !== 'no action' ? fk.onDelete : null; + const onUpdate = fk.onUpdate && fk.onUpdate !== 'no action' ? fk.onUpdate : null; + const params = { onDelete, onUpdate }; + + const typeSuffix = isCyclic(fk) ? ': AnySQLiteColumn' : ''; + + const paramsStr = objToStatement2(params); + if (paramsStr) { + statement += `.references(()${typeSuffix} => ${withCasing(fk.tableTo, casing)}.${ + withCasing(fk.columnsTo[0], casing) + }, ${paramsStr} )`; + } else { + statement += `.references(()${typeSuffix} => ${ + withCasing( + fk.tableTo, + casing, + ) + }.${withCasing(fk.columnsTo[0], casing)})`; + } + } + statement += ',\n'; + } + + return statement; +}; + +const createViewColumns = (view: View, columns: ViewColumn[], casing: Casing) => { + let statement = ''; + + for (const it of columns) { + const key = withCasing(it.name, casing); + statement += `${key}: ${it.type}()`; + statement += it.notNull ? '.notNull()' : ''; + statement += ',\n'; + } + + return statement; +}; + +const createTableIndexes = ( + tableName: string, + idxs: Index[], + casing: Casing, +): string => { + let statement = ''; + + for (const it of idxs) { + let idxKey = it.name.startsWith(tableName) && it.name !== tableName + ? it.name.slice(tableName.length + 1) + : it.name; + idxKey = idxKey.endsWith('_index') + ? idxKey.slice(0, -'_index'.length) + '_idx' + : idxKey; + idxKey = withCasing(idxKey, casing); + + const columnNames = it.columns.filter((c) => !c.isExpression).map((c) => c.value); + const indexGeneratedName = `${tableName}_${columnNames.join('_')}_index`; + const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; + + statement += `\t\t${idxKey}: `; + statement += it.isUnique ? 'uniqueIndex(' : 'index('; + statement += `${escapedIndexName})`; + statement += `.on(${ + it.columns + .map((it) => `table.${withCasing(it.value, casing)}`) + .join(', ') + }),`; + statement += `\n`; + } + + return statement; +}; + +const createTableUniques = ( + unqs: UniqueConstraint[], + casing: Casing, +): string => { + let statement = ''; + + unqs.forEach((it) => { + const idxKey = withCasing(it.name, casing); + + statement += `\t\t${idxKey}: `; + statement += 'unique('; + statement += `"${it.name}")`; + statement += `.on(${ + it.columns + .map((it) => `table.${withCasing(it, casing)}`) + .join(', ') + }),`; + statement += `\n`; + }); + + return statement; +}; + +const createTableChecks = ( + checks: CheckConstraint[], + _casing: Casing, +): string => { + let statement = ''; + + checks.forEach((it) => { + statement += 'check('; + statement += `"${it.name}", `; + statement += `sql\`${it.value}\`)`; + statement += `,\n`; + }); + + return statement; +}; + +const createTablePK = (pk: PrimaryKey, casing: Casing): string => { + let statement = 'primaryKey({ columns: ['; + statement += `${ + pk.columns.map((c) => { + return `table.${withCasing(c, casing)}`; + }).join(', ') + }]`; + + statement += `${pk.name ? `, name: "${pk.name}"` : ''}}`; + statement += ')'; + statement += `\n`; + return statement; +}; + +const createTableFKs = (fks: ForeignKey[], casing: Casing): string => { + let statement = ''; + + fks.forEach((it) => { + const isSelf = it.tableTo === it.table; + const tableTo = isSelf ? 'table' : `${withCasing(it.tableTo, casing)}`; + statement += `\t\t${withCasing(it.name, casing)}: foreignKey(() => ({\n`; + statement += `\t\t\tcolumns: [${ + it.columns + .map((i) => `table.${withCasing(i, casing)}`) + .join(', ') + }],\n`; + statement += `\t\t\tforeignColumns: [${ + it.columnsTo + .map((i) => `${tableTo}.${withCasing(i, casing)}`) + .join(', ') + }],\n`; + statement += `\t\t\tname: "${it.name}"\n`; + statement += `\t\t}))`; + + statement += it.onUpdate && it.onUpdate !== 'no action' + ? `.onUpdate("${it.onUpdate}")` + : ''; + + statement += it.onDelete && it.onDelete !== 'no action' + ? `.onDelete("${it.onDelete}")` + : ''; + + statement += `,\n`; + }); + + return statement; +}; diff --git a/drizzle-kit/src/dialects/utils.ts b/drizzle-kit/src/dialects/utils.ts new file mode 100644 index 0000000000..b2cb9e38c1 --- /dev/null +++ b/drizzle-kit/src/dialects/utils.ts @@ -0,0 +1,173 @@ +import { type Simplify, trimChar } from '../utils'; +import type { CockroachDDL } from './cockroach/ddl'; +import type { MssqlDDL } from './mssql/ddl'; +import type { MysqlDDL } from './mysql/ddl'; +import type { PostgresDDL } from './postgres/ddl'; +import type { SQLiteDDL } from './sqlite/ddl'; + +export type Named = { + name: string; +}; + +export type NamedWithSchema = { + name: string; + schema: string; +}; + +export type ModifiedItems = { + schema?: string; + table: string; + items: T[]; +}; + +export type RenamedItems = { + schema?: string; + table: string; + renames: { from: T; to: T }[]; +}; + +type NullIfUndefined = T extends undefined ? null : T; + +export const getOrNull = , TKey extends keyof T>( + it: T | null, + key: TKey, +): NullIfUndefined | null => { + if (it === null) return null; + return (it?.[key] ?? null) as any; +}; + +export type GroupedRow< + TStatement extends { $diffType: 'create' | 'drop' | 'alter'; schema?: string | null; table?: string | null }, +> = + & { + inserted: TStatement[]; + deleted: TStatement[]; + updated: TStatement[]; + } + & { + [K in 'schema' | 'table' as null extends TStatement[K] ? never : K]: TStatement[K]; + }; + +export const groupDiffs = < + T extends { $diffType: 'create' | 'drop' | 'alter'; schema?: string | null; table?: string | null }, +>( + arr: T[], +): Simplify>[] => { + if (arr.length === 0) return []; + if (!arr[0].table && !arr[0].schema) throw new Error('No schema or table in item'); + + const res: GroupedRow[] = []; + for (let i = 0; i < arr.length; i++) { + const stmnt = arr[i]; + + const idx = res.findIndex((it) => + ('schema' in it ? stmnt.schema === it['schema'] : true) && ('table' in it ? stmnt.table === it.table : true) + ); + + let item: GroupedRow; + + if (idx < 0) { + const sch = 'schema' in stmnt ? { schema: stmnt.schema } : {}; + const tbl = 'table' in stmnt ? { table: stmnt.table } : {}; + item = { + ...sch, + ...tbl, + deleted: [], + inserted: [], + updated: [], + } as any; + res.push(item); + } else { + item = res[idx]; + } + + if (stmnt.$diffType === 'drop') { + item.deleted.push(stmnt); + } else if (stmnt.$diffType === 'create') { + item.inserted.push(stmnt); + } else { + item.updated.push(stmnt); + } + } + return res; +}; + +export const numberForTs = (value: string): { mode: 'number' | 'bigint'; value: string } => { + const check = Number(value); + if (Number.isNaN(check)) return { mode: 'number', value: `sql\`${trimChar(escapeForTsLiteral(value), '"')}\`` }; + + if (check >= Number.MIN_SAFE_INTEGER && check <= Number.MAX_SAFE_INTEGER) return { mode: 'number', value: value }; + return { mode: 'bigint', value: `${value}n` }; +}; + +// numeric precision can be bigger than 9 as it was before here +export const parseParams = (type: string) => { + return type.match(/\(((?:\d+(?:\s*,\s*\d+)*)|max)\)/i)?.[1].split(',').map((x) => x.trim()) ?? []; +}; + +export const escapeForSqlDefault = (input: string, mode: 'default' | 'pg-arr' = 'default') => { + let value = input.replace(/\\/g, '\\\\').replace(/'/g, "''"); + if (mode === 'pg-arr') value = value.replaceAll('"', '\\"'); + return value; +}; + +export const unescapeFromSqlDefault = (input: string, mode: 'default' | 'arr' = 'default') => { + let res = input.replace(/\\"/g, '"').replace(/\\\\/g, '\\'); + + if (mode === 'arr') return res; + return res.replace(/''/g, "'"); +}; + +export const escapeForTsLiteral = (input: string) => { + return JSON.stringify(input); +}; + +export function inspect(it: any): string { + if (!it) return ''; + + const keys = Object.keys(it); + if (keys.length === 0) return ''; + + const pairs = keys.map((key) => { + const formattedKey = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/.test(key) + ? key + : `'${key}'`; + + const value = it[key]; + const formattedValue = typeof value === 'string' ? `'${value}'` : String(value); + + return `${formattedKey}: ${formattedValue}`; + }); + + return `{ ${pairs.join(', ')} }`; +} + +export const preserveEntityNames = < + C extends + | PostgresDDL['uniques' | 'fks' | 'pks' | 'indexes'] + | MysqlDDL['indexes' | 'fks'] + | MssqlDDL['uniques' | 'fks' | 'pks' | 'defaults'] + | CockroachDDL['fks' | 'pks' | 'indexes'] + | SQLiteDDL['uniques' | 'pks' | 'fks'], +>( + collection1: C, + collection2: C, + mode: 'push' | 'default', +) => { + const items = collection1.list().filter((x) => mode === 'push' || !x.nameExplicit); + for (const left of items) { + const { entityType: _1, name: _2, nameExplicit: _3, ...filter } = left; + + const match = collection2.list({ ...filter, nameExplicit: false } as any); + + if (match.length !== 1 || match[0].name === left.name) continue; + + collection2.update({ + set: { name: left.name }, + where: { + ...filter, + nameExplicit: false, + } as any, + }); + } +}; diff --git a/drizzle-kit/src/ext/api-mysql.ts b/drizzle-kit/src/ext/api-mysql.ts new file mode 100644 index 0000000000..44128a51dd --- /dev/null +++ b/drizzle-kit/src/ext/api-mysql.ts @@ -0,0 +1,55 @@ +import type { Relations } from 'drizzle-orm/_relations'; +import type { AnyMySqlTable } from 'drizzle-orm/mysql-core'; +import type { CasingType } from 'src/cli/validations/common'; +import type { MysqlCredentials } from 'src/cli/validations/mysql'; + +export const startStudioServer = async ( + imports: Record, + credentials: MysqlCredentials, + options?: { + host?: string; + port?: number; + casing?: CasingType; + key?: string; + cert?: string; + }, +) => { + const { is } = await import('drizzle-orm'); + const { MySqlTable, getTableConfig } = await import('drizzle-orm/mysql-core'); + const { Relations } = await import('drizzle-orm/_relations'); + const { drizzleForMySQL, prepareServer } = await import('../cli/commands/studio'); + + const mysqlSchema: Record> = {}; + const relations: Record = {}; + + Object.entries(imports).forEach(([k, t]) => { + if (is(t, MySqlTable)) { + const schema = getTableConfig(t).schema || 'public'; + mysqlSchema[schema] = mysqlSchema[schema] || {}; + mysqlSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + + const setup = await drizzleForMySQL(credentials, mysqlSchema, relations, [], options?.casing); + const server = await prepareServer(setup); + + const host = options?.host || '127.0.0.1'; + const port = options?.port || 4983; + server.start({ + host, + port, + key: options?.key, + cert: options?.cert, + cb: (err) => { + if (err) { + console.error(err); + } else { + console.log(`Studio is running at ${options?.key ? 'https' : 'http'}://${host}:${port}`); + } + }, + }); +}; diff --git a/drizzle-kit/src/ext/api-postgres.ts b/drizzle-kit/src/ext/api-postgres.ts new file mode 100644 index 0000000000..5f7efd54c0 --- /dev/null +++ b/drizzle-kit/src/ext/api-postgres.ts @@ -0,0 +1,245 @@ +import type { PGlite } from '@electric-sql/pglite'; +import type { Relations } from 'drizzle-orm/_relations'; +import type { AnyPgTable, PgDatabase } from 'drizzle-orm/pg-core'; +import type { EntitiesFilterConfig } from 'src/cli/validations/cli'; +import { upToV8 } from 'src/dialects/postgres/versions'; +import type { CasingType } from '../cli/validations/common'; +import type { PostgresCredentials } from '../cli/validations/postgres'; +import type { + CheckConstraint, + Column, + Enum, + ForeignKey, + Index, + Policy, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, +} from '../dialects/postgres/ddl'; +import { createDDL, interimToDDL } from '../dialects/postgres/ddl'; +import type { PostgresSnapshot } from '../dialects/postgres/snapshot'; +import { originUUID } from '../utils'; +import type { DB } from '../utils'; + +export const generateDrizzleJson = async ( + imports: Record, + prevId?: string, + schemaFilters?: string[], + casing?: CasingType, +): Promise => { + const { prepareEntityFilter } = await import('src/dialects/pull-utils'); + const { postgresSchemaError, postgresSchemaWarning } = await import('../cli/views'); + const { toJsonSnapshot } = await import('../dialects/postgres/snapshot'); + const { fromDrizzleSchema, fromExports } = await import('../dialects/postgres/drizzle'); + const { extractPostgresExisting } = await import('../dialects/drizzle'); + const prepared = fromExports(imports); + + const existing = extractPostgresExisting(prepared.schemas, prepared.views, prepared.matViews); + + const filter = prepareEntityFilter('postgresql', { + schemas: schemaFilters ?? [], + tables: [], + entities: undefined, + extensions: [], + }, existing); + + // TODO: do we wan't to export everything or ignore .existing and respect entity filters in config + const { schema: interim, errors, warnings } = fromDrizzleSchema(prepared, casing, filter); + + const { ddl, errors: err2 } = interimToDDL(interim); + if (warnings.length > 0) { + console.log(warnings.map((it) => postgresSchemaWarning(it)).join('\n\n')); + } + + if (errors.length > 0) { + console.log(errors.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + if (err2.length > 0) { + console.log(err2.map((it) => postgresSchemaError(it)).join('\n')); + process.exit(1); + } + + return toJsonSnapshot(ddl, prevId ? [prevId] : [originUUID], []); +}; + +export const generateMigration = async ( + prev: PostgresSnapshot, + cur: PostgresSnapshot, +) => { + const { resolver } = await import('../cli/prompts'); + const { ddlDiff } = await import('../dialects/postgres/diff'); + const from = createDDL(); + const to = createDDL(); + + for (const it of prev.ddl) { + from.entities.push(it); + } + for (const it of cur.ddl) { + to.entities.push(it); + } + + const { sqlStatements } = await ddlDiff( + from, + to, + resolver('schema'), + resolver('enum'), + resolver('sequence'), + resolver('policy'), + resolver('role'), + resolver('privilege'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('unique'), + resolver('index'), + resolver('check'), + resolver('primary key'), + resolver('foreign key'), + 'default', + ); + + return sqlStatements; +}; + +export const pushSchema = async ( + imports: Record, + drizzleInstance: PgDatabase, + casing?: CasingType, + entitiesConfig?: EntitiesFilterConfig, +) => { + const { prepareEntityFilter } = await import('src/dialects/pull-utils'); + const { resolver } = await import('../cli/prompts'); + const { fromDatabaseForDrizzle } = await import('src/dialects/postgres/introspect'); + const { fromDrizzleSchema, fromExports } = await import('../dialects/postgres/drizzle'); + const { suggestions } = await import('../cli/commands/push-postgres'); + const { extractPostgresExisting } = await import('../dialects/drizzle'); + const { ddlDiff } = await import('../dialects/postgres/diff'); + const { sql } = await import('drizzle-orm'); + + const db: DB = { + query: async (query: string, _params?: any[]) => { + const res = await drizzleInstance.execute(sql.raw(query)); + return res.rows; + }, + }; + const prepared = fromExports(imports); + + const filterConfig = entitiesConfig ?? { + tables: [], + schemas: [], + extensions: [], + entities: undefined, + } satisfies EntitiesFilterConfig; + const existing = extractPostgresExisting(prepared.schemas, prepared.views, prepared.matViews); + const filter = prepareEntityFilter('postgresql', filterConfig, existing); + + const prev = await fromDatabaseForDrizzle(db, filter); + + // TODO: filter? + // TODO: do we wan't to export everything or ignore .existing and respect entity filters in config + const { schema: cur } = fromDrizzleSchema(prepared, casing, filter); + + const { ddl: from, errors: _err1 } = interimToDDL(prev); + const { ddl: to, errors: _err2 } = interimToDDL(cur); + + // TODO: handle errors, for now don't throw + + const { sqlStatements, statements } = await ddlDiff( + from, + to, + resolver('schema'), + resolver('enum'), + resolver('sequence'), + resolver('policy'), + resolver('role'), + resolver('privilege'), + resolver('table'), + resolver('column'), + resolver('view'), + resolver('unique'), + resolver('index'), + resolver('check'), + resolver('primary key'), + resolver('foreign key'), + 'push', + ); + + const hints = await suggestions(db, statements); + + return { + sqlStatements, + hints, + apply: async () => { + const losses = hints.map((x) => x.statement).filter((x) => typeof x !== 'undefined'); + for (const st of losses) { + await db.query(st); + } + for (const st of sqlStatements) { + await db.query(st); + } + }, + }; +}; + +export const startStudioServer = async ( + imports: Record, + credentials: PostgresCredentials | { + driver: 'pglite'; + client: PGlite; + }, + options?: { + host?: string; + port?: number; + casing?: CasingType; + key?: string; + cert?: string; + }, +) => { + const { is } = await import('drizzle-orm'); + const { PgTable, getTableConfig } = await import('drizzle-orm/pg-core'); + const { Relations } = await import('drizzle-orm/_relations'); + const { drizzleForPostgres, prepareServer } = await import('../cli/commands/studio'); + + const pgSchema: Record> = {}; + const relations: Record = {}; + + Object.entries(imports).forEach(([k, t]) => { + if (is(t, PgTable)) { + const schema = getTableConfig(t).schema || 'public'; + pgSchema[schema] = pgSchema[schema] || {}; + pgSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + + const setup = await drizzleForPostgres(credentials, pgSchema, relations, [], options?.casing); + const server = await prepareServer(setup); + + const host = options?.host || '127.0.0.1'; + const port = options?.port || 4983; + server.start({ + host, + port, + key: options?.key, + cert: options?.cert, + cb: (err) => { + if (err) { + console.error(err); + } else { + console.log(`Studio is running at ${options?.key ? 'https' : 'http'}://${host}:${port}`); + } + }, + }); +}; + +export const up = upToV8; diff --git a/drizzle-kit/src/ext/api-singlestore.ts b/drizzle-kit/src/ext/api-singlestore.ts new file mode 100644 index 0000000000..f9b470f7b7 --- /dev/null +++ b/drizzle-kit/src/ext/api-singlestore.ts @@ -0,0 +1,55 @@ +import type { Relations } from 'drizzle-orm/_relations'; +import type { AnySingleStoreTable } from 'drizzle-orm/singlestore-core'; +import type { CasingType } from 'src/cli/validations/common'; +import type { SingleStoreCredentials } from 'src/cli/validations/singlestore'; + +export const startStudioServer = async ( + imports: Record, + credentials: SingleStoreCredentials, + options?: { + host?: string; + port?: number; + casing?: CasingType; + key?: string; + cert?: string; + }, +) => { + const { is } = await import('drizzle-orm'); + const { SingleStoreTable, getTableConfig } = await import('drizzle-orm/singlestore-core'); + const { Relations } = await import('drizzle-orm/_relations'); + const { drizzleForSingleStore, prepareServer } = await import('../cli/commands/studio'); + + const singleStoreSchema: Record> = {}; + const relations: Record = {}; + + Object.entries(imports).forEach(([k, t]) => { + if (is(t, SingleStoreTable)) { + const schema = getTableConfig(t).schema || 'public'; + singleStoreSchema[schema] = singleStoreSchema[schema] || {}; + singleStoreSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + + const setup = await drizzleForSingleStore(credentials, singleStoreSchema, relations, [], options?.casing); + const server = await prepareServer(setup); + + const host = options?.host || '127.0.0.1'; + const port = options?.port || 4983; + server.start({ + host, + port, + key: options?.key, + cert: options?.cert, + cb: (err) => { + if (err) { + console.error(err); + } else { + console.log(`Studio is running at ${options?.key ? 'https' : 'http'}://${host}:${port}`); + } + }, + }); +}; diff --git a/drizzle-kit/src/ext/api-sqlite.ts b/drizzle-kit/src/ext/api-sqlite.ts new file mode 100644 index 0000000000..2224bffb75 --- /dev/null +++ b/drizzle-kit/src/ext/api-sqlite.ts @@ -0,0 +1,55 @@ +import type { Relations } from 'drizzle-orm/_relations'; +import type { AnySQLiteTable } from 'drizzle-orm/sqlite-core'; +import type { CasingType } from 'src/cli/validations/common'; +import type { SqliteCredentials } from 'src/cli/validations/sqlite'; + +export const startStudioServer = async ( + imports: Record, + credentials: SqliteCredentials, + options?: { + host?: string; + port?: number; + casing?: CasingType; + key?: string; + cert?: string; + }, +) => { + const { is } = await import('drizzle-orm'); + const { SQLiteTable } = await import('drizzle-orm/sqlite-core'); + const { Relations } = await import('drizzle-orm/_relations'); + const { drizzleForSQLite, prepareServer } = await import('../cli/commands/studio'); + + const sqliteSchema: Record> = {}; + const relations: Record = {}; + + Object.entries(imports).forEach(([k, t]) => { + if (is(t, SQLiteTable)) { + const schema = 'public'; // sqlite does not have schemas + sqliteSchema[schema] = sqliteSchema[schema] || {}; + sqliteSchema[schema][k] = t; + } + + if (is(t, Relations)) { + relations[k] = t; + } + }); + + const setup = await drizzleForSQLite(credentials, sqliteSchema, relations, [], options?.casing); + const server = await prepareServer(setup); + + const host = options?.host || '127.0.0.1'; + const port = options?.port || 4983; + server.start({ + host, + port, + key: options?.key, + cert: options?.cert, + cb: (err) => { + if (err) { + console.error(err); + } else { + console.log(`Studio is running at ${options?.key ? 'https' : 'http'}://${host}:${port}`); + } + }, + }); +}; diff --git a/drizzle-kit/src/ext/api.ts b/drizzle-kit/src/ext/api.ts new file mode 100644 index 0000000000..52dfb8dfb1 --- /dev/null +++ b/drizzle-kit/src/ext/api.ts @@ -0,0 +1,345 @@ +// import { LibSQLDatabase } from 'drizzle-orm/libsql'; +// import type { MySql2Database } from 'drizzle-orm/mysql2'; +// import { PgDatabase } from 'drizzle-orm/pg-core'; +// import { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; +// import { introspect as postgresIntrospect } from '../cli/commands/pull-postgres'; +// import { sqliteIntrospect } from '../cli/commands/pull-sqlite'; +// import { suggestions } from '../cli/commands/push-postgres'; +// import { updateUpToV6 as upPgV6, updateUpToV7 as upPgV7 } from '../cli/commands/up-postgres'; +// import { resolver } from '../cli/prompts'; +// import type { CasingType } from '../cli/validations/common'; +// import { ProgressView, schemaError, schemaWarning } from '../cli/views'; +// import { fromDrizzleSchema, fromExports } from '../dialects/postgres/drizzle'; +// import { PostgresSnapshot, toJsonSnapshot } from '../dialects/postgres/snapshot'; +// import type { Config } from '../index'; +// import { originUUID } from '../utils'; +// import type { DB, SQLiteDB } from '../utils'; +// import { getTablesFilterByExtensions } from './extensions/getTablesFilterByExtensions'; + +// import * as postgres from './api-postgres'; + +// SQLite + +// TODO commented this because of build error +// export const generateSQLiteDrizzleJson = async ( +// imports: Record, +// prevId?: string, +// casing?: CasingType, +// ): Promise => { +// const { prepareFromExports } = await import('./dialects/sqlite/imports'); + +// const prepared = prepareFromExports(imports); + +// const id = randomUUID(); + +// const snapshot = fromDrizzleSchema(prepared.tables, prepared.views, casing); + +// return { +// ...snapshot, +// id, +// prevId: prevId ?? originUUID, +// }; +// }; + +// export const generateSQLiteMigration = async ( +// prev: DrizzleSQLiteSnapshotJSON, +// cur: DrizzleSQLiteSnapshotJSON, +// ) => { +// const { applySqliteSnapshotsDiff } = await import('./dialects/sqlite/diff'); + +// const validatedPrev = sqliteSchema.parse(prev); +// const validatedCur = sqliteSchema.parse(cur); + +// const squashedPrev = squashSqliteScheme(validatedPrev); +// const squashedCur = squashSqliteScheme(validatedCur); + +// const { sqlStatements } = await applySqliteSnapshotsDiff( +// squashedPrev, +// squashedCur, +// tablesResolver, +// columnsResolver, +// sqliteViewsResolver, +// validatedPrev, +// validatedCur, +// ); + +// return sqlStatements; +// }; + +// export const pushSQLiteSchema = async ( +// imports: Record, +// drizzleInstance: LibSQLDatabase, +// ) => { +// const { applySqliteSnapshotsDiff } = await import('./dialects/sqlite/diff'); +// const { sql } = await import('drizzle-orm'); + +// const db: SQLiteDB = { +// query: async (query: string, params?: any[]) => { +// const res = drizzleInstance.all(sql.raw(query)); +// return res; +// }, +// run: async (query: string) => { +// return Promise.resolve(drizzleInstance.run(sql.raw(query))).then( +// () => {}, +// ); +// }, +// }; + +// const cur = await generateSQLiteDrizzleJson(imports); +// const progress = new ProgressView( +// 'Pulling schema from database...', +// 'Pulling schema from database...', +// ); + +// const { schema: prev } = await sqliteIntrospect(db, [], progress); + +// const validatedPrev = sqliteSchema.parse(prev); +// const validatedCur = sqliteSchema.parse(cur); + +// const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); +// const squashedCur = squashSqliteScheme(validatedCur, 'push'); + +// const { statements, _meta } = await applySqliteSnapshotsDiff( +// squashedPrev, +// squashedCur, +// tablesResolver, +// columnsResolver, +// sqliteViewsResolver, +// validatedPrev, +// validatedCur, +// 'push', +// ); + +// const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( +// db, +// statements, +// squashedPrev, +// squashedCur, +// _meta!, +// ); + +// return { +// hasDataLoss: shouldAskForApprove, +// warnings: infoToPrint, +// statementsToExecute, +// apply: async () => { +// for (const dStmnt of statementsToExecute) { +// await db.query(dStmnt); +// } +// }, +// }; +// }; + +// MySQL +// TODO commented this because of build error +// export const generateMySQLDrizzleJson = async ( +// imports: Record, +// prevId?: string, +// casing?: CasingType, +// ): Promise => { +// const { prepareFromExports } = await import('./serializer/mysqlImports'); + +// const prepared = prepareFromExports(imports); + +// const id = randomUUID(); + +// const snapshot = generateMySqlSnapshot(prepared.tables, prepared.views, casing); + +// return { +// ...snapshot, +// id, +// prevId: prevId ?? originUUID, +// }; +// }; + +// export const generateMySQLMigration = async ( +// prev: DrizzleMySQLSnapshotJSON, +// cur: DrizzleMySQLSnapshotJSON, +// ) => { +// const { ddlDiff: applyMysqlSnapshotsDiff } = await import('./dialects/mysql/mysql'); + +// const validatedPrev = mysqlSchema.parse(prev); +// const validatedCur = mysqlSchema.parse(cur); + +// const squashedPrev = squashMysqlScheme(validatedPrev); +// const squashedCur = squashMysqlScheme(validatedCur); + +// const { sqlStatements } = await applyMysqlSnapshotsDiff( +// squashedPrev, +// squashedCur, +// tablesResolver, +// columnsResolver, +// mySqlViewsResolver, +// uniqueResolver, +// validatedPrev, +// validatedCur, +// ); + +// return sqlStatements; +// }; + +// export const pushMySQLSchema = async ( +// imports: Record, +// drizzleInstance: MySql2Database, +// databaseName: string, +// ) => { +// const { ddlDiff: applyMysqlSnapshotsDiff } = await import('./dialects/mysql/mysql'); +// const { logSuggestionsAndReturn } = await import( +// './cli/commands/mysqlPushUtils' +// ); +// const { mysqlPushIntrospect } = await import( +// './cli/commands/pull-mysql' +// ); +// const { sql } = await import('drizzle-orm'); + +// const db: DB = { +// query: async (query: string, params?: any[]) => { +// const res = await drizzleInstance.execute(sql.raw(query)); +// return res[0] as unknown as any[]; +// }, +// }; +// const cur = await generateMySQLDrizzleJson(imports); +// const { schema: prev } = await mysqlPushIntrospect(db, databaseName, []); + +// const validatedPrev = mysqlSchema.parse(prev); +// const validatedCur = mysqlSchema.parse(cur); + +// const squashedPrev = squashMysqlScheme(validatedPrev); +// const squashedCur = squashMysqlScheme(validatedCur); + +// const { statements } = await applyMysqlSnapshotsDiff( +// squashedPrev, +// squashedCur, +// tablesResolver, +// columnsResolver, +// mySqlViewsResolver, +// uniqueResolver, +// validatedPrev, +// validatedCur, +// 'push', +// ); + +// const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( +// db, +// statements, +// validatedCur, +// ); + +// return { +// hasDataLoss: shouldAskForApprove, +// warnings: infoToPrint, +// statementsToExecute, +// apply: async () => { +// for (const dStmnt of statementsToExecute) { +// await db.query(dStmnt); +// } +// }, +// }; +// }; + +// SingleStore +// TODO commented this because of build error +// export const generateSingleStoreDrizzleJson = async ( +// imports: Record, +// prevId?: string, +// casing?: CasingType, +// ): Promise => { +// const { prepareFromExports } = await import('./serializer/singlestoreImports'); + +// const prepared = prepareFromExports(imports); + +// const id = randomUUID(); + +// const snapshot = generateSingleStoreSnapshot(prepared.tables, /* prepared.views, */ casing); + +// return { +// ...snapshot, +// id, +// prevId: prevId ?? originUUID, +// }; +// }; + +// export const generateSingleStoreMigration = async ( +// prev: DrizzleSingleStoreSnapshotJSON, +// cur: DrizzleSingleStoreSnapshotJSON, +// ) => { +// const { applySingleStoreSnapshotsDiff } = await import('./snapshot-differ/singlestore'); + +// const validatedPrev = singlestoreSchema.parse(prev); +// const validatedCur = singlestoreSchema.parse(cur); + +// const squashedPrev = squashSingleStoreScheme(validatedPrev); +// const squashedCur = squashSingleStoreScheme(validatedCur); + +// const { sqlStatements } = await applySingleStoreSnapshotsDiff( +// squashedPrev, +// squashedCur, +// tablesResolver, +// columnsResolver, +// /* singleStoreViewsResolver, */ +// validatedPrev, +// validatedCur, +// 'push', +// ); + +// return sqlStatements; +// }; + +// export const pushSingleStoreSchema = async ( +// imports: Record, +// drizzleInstance: SingleStoreDriverDatabase, +// databaseName: string, +// ) => { +// const { applySingleStoreSnapshotsDiff } = await import('./snapshot-differ/singlestore'); +// const { logSuggestionsAndReturn } = await import( +// './cli/commands/singlestorePushUtils' +// ); +// const { singlestorePushIntrospect } = await import( +// './cli/commands/pull-singlestore' +// ); +// const { sql } = await import('drizzle-orm'); + +// const db: DB = { +// query: async (query: string) => { +// const res = await drizzleInstance.execute(sql.raw(query)); +// return res[0] as unknown as any[]; +// }, +// }; +// const cur = await generateSingleStoreDrizzleJson(imports); +// const { schema: prev } = await singlestorePushIntrospect(db, databaseName, []); + +// const validatedPrev = singlestoreSchema.parse(prev); +// const validatedCur = singlestoreSchema.parse(cur); + +// const squashedPrev = squashSingleStoreScheme(validatedPrev); +// const squashedCur = squashSingleStoreScheme(validatedCur); + +// const { statements } = await applySingleStoreSnapshotsDiff( +// squashedPrev, +// squashedCur, +// tablesResolver, +// columnsResolver, +// /* singleStoreViewsResolver, */ +// validatedPrev, +// validatedCur, +// 'push', +// ); + +// const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( +// db, +// statements, +// validatedCur, +// validatedPrev, +// ); + +// return { +// hasDataLoss: shouldAskForApprove, +// warnings: infoToPrint, +// statementsToExecute, +// apply: async () => { +// for (const dStmnt of statementsToExecute) { +// await db.query(dStmnt); +// } +// }, +// }; +// }; diff --git a/drizzle-kit/src/ext/mover-mysql.ts b/drizzle-kit/src/ext/mover-mysql.ts new file mode 100644 index 0000000000..3185f85a84 --- /dev/null +++ b/drizzle-kit/src/ext/mover-mysql.ts @@ -0,0 +1,18 @@ +export { + type CheckConstraint, + type Column, + createDDL, + type ForeignKey, + type Index, + type InterimColumn, + type MysqlDDL, + type PrimaryKey, + type Table, + type View, +} from '../dialects/mysql/ddl'; + +import { ddlDiffDry as ddd } from '../dialects/mysql/diff'; +import { fromDatabase as fd } from '../dialects/mysql/introspect'; + +export const ddlDiffDry = ddd; +export const fromDatabase = fd; diff --git a/drizzle-kit/src/ext/mover-postgres.ts b/drizzle-kit/src/ext/mover-postgres.ts new file mode 100644 index 0000000000..2fe1e2aba4 --- /dev/null +++ b/drizzle-kit/src/ext/mover-postgres.ts @@ -0,0 +1,29 @@ +export { + type CheckConstraint, + type Column, + createDDL, + type Enum, + type ForeignKey, + type Identity, + type Index, + type InterimSchema, + type Policy, + type PostgresDDL, + type PostgresEntity, + type PrimaryKey, + type Role, + type Schema, + type Sequence, + type UniqueConstraint, + type View, +} from '../dialects/postgres/ddl'; + +import { ddlDiffDry as ddd } from '../dialects/postgres/diff'; +import { fromDatabase as fd, fromDatabaseForDrizzle as fdfd } from '../dialects/postgres/introspect'; + +export const ddlDiffDry = ddd; +export const fromDatabase = fd; +export const fromDatabaseForDrizzle = fdfd; + +import type { PostgresEntities } from '../dialects/postgres/ddl'; +export type Table = PostgresEntities['tables']; diff --git a/drizzle-kit/src/ext/studio-mysql.ts b/drizzle-kit/src/ext/studio-mysql.ts new file mode 100644 index 0000000000..1117880540 --- /dev/null +++ b/drizzle-kit/src/ext/studio-mysql.ts @@ -0,0 +1,143 @@ +import { fromDatabase as fd } from 'src/dialects/mysql/introspect'; +import type { + CheckConstraint, + Column, + ForeignKey, + Index, + InterimColumn, + InterimSchema, + MysqlEntities, + PrimaryKey, + View, + ViewColumn, +} from '../dialects/mysql/ddl'; +import { interimToDDL } from '../dialects/mysql/ddl'; +import { ddlDiff } from '../dialects/mysql/diff'; +import { mockResolver } from '../utils/mocks'; + +export type Interim = Omit; + +export type InterimTable = { + name: string; + columns: Interim[]; + indexes: Interim[]; + checks: Interim[]; + pks: Interim[]; + fks: Interim[]; +}; + +export type InterimView = { + name: string; + columns: Interim[]; + definition: string; + algorithm: 'undefined' | 'merge' | 'temptable'; + sqlSecurity: 'definer' | 'invoker'; + withCheckOption: 'local' | 'cascaded' | null; +}; + +export type InterimStudioSchema = { + tables: InterimTable[]; + views: InterimView[]; +}; + +const fromInterims = ({ + tables, + views, +}: InterimStudioSchema): InterimSchema => { + const tbls: MysqlEntities['tables'][] = tables.map((it) => ({ + entityType: 'tables', + name: it.name, + })); + const columns: InterimColumn[] = tables + .map((table) => { + return table.columns.map((it) => { + return { + entityType: 'columns', + ...it, + } satisfies InterimColumn; + }); + }) + .flat(1); + + const indexes: Index[] = tables + .map((table) => { + return table.indexes.map((it) => { + return { entityType: 'indexes', ...it } satisfies Index; + }); + }) + .flat(1); + + const checks: CheckConstraint[] = tables + .map((table) => { + return table.checks.map((it) => { + return { entityType: 'checks', ...it } satisfies CheckConstraint; + }); + }) + .flat(1); + const fks: ForeignKey[] = tables + .map((table) => { + return table.fks.map((it) => { + return { entityType: 'fks', ...it } satisfies ForeignKey; + }); + }) + .flat(1); + const pks: PrimaryKey[] = tables + .map((table) => { + return table.pks.map((it) => { + return { entityType: 'pks', ...it } satisfies PrimaryKey; + }); + }) + .flat(1); + + const vws: View[] = views.map(({ columns: _, ...it }) => { + return { + entityType: 'views', + algorithm: it.algorithm, + definition: it.definition, + name: it.name, + sqlSecurity: it.sqlSecurity, + withCheckOption: it.withCheckOption, + }; + }); + const viewColumns: ViewColumn[] = views + .map((table) => { + return table.columns.map((it) => { + return { + view: table.name, + ...it, + } satisfies ViewColumn; + }); + }) + .flat(1); + + return { + tables: tbls, + columns: columns, + pks, + fks, + checks, + indexes, + views: vws, + viewColumns, + }; +}; + +export const diffMySql = async (from: InterimStudioSchema, to: InterimStudioSchema, renamesArr: string[]) => { + const { ddl: ddl1 } = interimToDDL(fromInterims(from)); + const { ddl: ddl2 } = interimToDDL(fromInterims(to)); + + const renames = new Set(renamesArr); + + const { sqlStatements, groupedStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'default', + ); + + return { sqlStatements, groupedStatements, statements }; +}; + +export const fromDatabase = fd; diff --git a/drizzle-kit/src/ext/studio-postgres.ts b/drizzle-kit/src/ext/studio-postgres.ts new file mode 100644 index 0000000000..b3a776aeaa --- /dev/null +++ b/drizzle-kit/src/ext/studio-postgres.ts @@ -0,0 +1,199 @@ +import { fromDatabase as afd } from 'src/dialects/postgres/aws-introspect'; +import { fromDatabase as dfd } from 'src/dialects/postgres/duckdb-introspect'; +import { fromDatabase as fd } from 'src/dialects/postgres/introspect'; +import type { + CheckConstraint, + Column, + Enum, + ForeignKey, + InterimColumn, + InterimIndex, + InterimSchema, + Policy, + PostgresEntities, + PrimaryKey, + Privilege, + Role, + Schema, + Sequence, + UniqueConstraint, + View, + ViewColumn, +} from '../dialects/postgres/ddl'; +import { interimToDDL } from '../dialects/postgres/ddl'; +import { ddlDiff } from '../dialects/postgres/diff'; +import { mockResolver } from '../utils/mocks'; + +export type Interim = Omit; + +export type InterimTable = { + schema: string; + name: string; + columns: Interim[]; + indexes: Interim[]; + checks: Interim[]; + uniques: Interim[]; + pks: Interim[]; + fks: Interim[]; + isRlsEnabled: boolean; +}; + +export type InterimView = { + schema: string; + name: string; + materialized: boolean; + columns: Interim[]; + definition: string | null; +}; + +export type InterimStudioSchema = { + schemas: Schema[]; + tables: InterimTable[]; + views: InterimView[]; + enums: Enum[]; + sequences: Sequence[]; + roles: Role[]; + privileges: Privilege[]; + policies: Policy[]; +}; + +const fromInterims = ({ + schemas, + tables, + enums, + policies, + roles, + privileges, + sequences, + views, +}: InterimStudioSchema): InterimSchema => { + const tbls: PostgresEntities['tables'][] = tables.map((it) => ({ + entityType: 'tables', + name: it.name, + schema: it.schema, + isRlsEnabled: it.isRlsEnabled, + })); + const columns: InterimColumn[] = tables + .map((table) => { + return table.columns.map((it) => { + return { + entityType: 'columns', + ...it, + } satisfies InterimColumn; + }); + }) + .flat(1); + + const indexes: InterimIndex[] = tables + .map((table) => { + return table.indexes.map((it) => { + return { entityType: 'indexes', ...it } satisfies InterimIndex; + }); + }) + .flat(1); + + const checks: CheckConstraint[] = tables + .map((table) => { + return table.checks.map((it) => { + return { entityType: 'checks', ...it } satisfies CheckConstraint; + }); + }) + .flat(1); + const uniques: UniqueConstraint[] = tables + .map((table) => { + return table.uniques.map((it) => { + return { entityType: 'uniques', ...it } satisfies UniqueConstraint; + }); + }) + .flat(1); + const fks: ForeignKey[] = tables + .map((table) => { + return table.fks.map((it) => { + return { entityType: 'fks', ...it } satisfies ForeignKey; + }); + }) + .flat(1); + const pks: PrimaryKey[] = tables + .map((table) => { + return table.pks.map((it) => { + return { entityType: 'pks', ...it } satisfies PrimaryKey; + }); + }) + .flat(1); + + const vws: View[] = views.map(({ columns: _, ...it }) => { + return { + entityType: 'views', + tablespace: it.schema, + using: null, + with: null, + withNoData: null, + definition: it.definition, + materialized: it.materialized, + name: it.name, + schema: it.schema, + }; + }); + const viewColumns: ViewColumn[] = views + .map((table) => { + return table.columns.map((it) => { + return { + view: table.name, + typeDimensions: 0, // never user in studio + ...it, + } satisfies ViewColumn; + }); + }) + .flat(1); + + return { + schemas, + tables: tbls, + columns: columns, + pks, + fks, + checks, + uniques, + indexes, + views: vws, + viewColumns, + enums, + sequences, + roles, + privileges, + policies, + }; +}; + +export const diffPostgresql = async (from: InterimStudioSchema, to: InterimStudioSchema, renamesArr: string[]) => { + const { ddl: ddl1 } = interimToDDL(fromInterims(from)); + const { ddl: ddl2 } = interimToDDL(fromInterims(to)); + + const renames = new Set(renamesArr); + + const { sqlStatements, groupedStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), // views + mockResolver(renames), // uniques + mockResolver(renames), // indexes + mockResolver(renames), // checks + mockResolver(renames), // pks + 'default', + ); + + return { sqlStatements, groupedStatements, statements }; +}; + +export const fromDatabase = fd; +export const fromAwsDatabase = afd; +export const fromDuckDbDatabase = dfd; diff --git a/drizzle-kit/src/ext/studio-sqlite.ts b/drizzle-kit/src/ext/studio-sqlite.ts new file mode 100644 index 0000000000..1941b5ff0d --- /dev/null +++ b/drizzle-kit/src/ext/studio-sqlite.ts @@ -0,0 +1,118 @@ +import { fromDatabase as fd } from 'src/dialects/sqlite/introspect'; +import type { + CheckConstraint, + Column, + ForeignKey, + Index, + InterimColumn, + InterimSchema, + PrimaryKey, + SqliteEntities, + UniqueConstraint, + View, +} from '../dialects/sqlite/ddl'; +import { interimToDDL } from '../dialects/sqlite/ddl'; +import { ddlDiff } from '../dialects/sqlite/diff'; +import { mockResolver } from '../utils/mocks'; + +export type Interim = Omit; + +export type InterimTable = { + name: string; + columns: Interim[]; + indexes: Interim[]; + checks: Interim[]; + uniques: Interim[]; + pks: Interim[]; + fks: Interim[]; +}; + +export type InterimView = { + name: string; + columns: Interim[]; + definition: string | null; +}; + +export type InterimStudioSchema = { + tables: InterimTable[]; + views: InterimView[]; +}; + +const fromInterims = (tables: InterimTable[], views: InterimView[]): InterimSchema => { + const tbls: SqliteEntities['tables'][] = tables.map((it) => ({ + entityType: 'tables', + name: it.name, + })); + const columns: InterimColumn[] = tables.map((table) => { + return table.columns.map((it) => { + return { + entityType: 'columns', + ...it, + } satisfies InterimColumn; + }); + }).flat(1); + + const indexes: Index[] = tables.map((table) => { + return table.indexes.filter((it) => it.origin === 'manual').map((it) => { + return { entityType: 'indexes', ...it } satisfies Index; + }); + }).flat(1); + + const checks: CheckConstraint[] = tables.map((table) => { + return table.checks.map((it) => { + return { entityType: 'checks', ...it } satisfies CheckConstraint; + }); + }).flat(1); + const uniques: UniqueConstraint[] = tables.map((table) => { + return table.uniques.map((it) => { + return { entityType: 'uniques', ...it } satisfies UniqueConstraint; + }); + }).flat(1); + const fks: ForeignKey[] = tables.map((table) => { + return table.fks.map((it) => { + return { entityType: 'fks', ...it } satisfies ForeignKey; + }); + }).flat(1); + const pks: PrimaryKey[] = tables.map((table) => { + return table.pks.map((it) => { + return { entityType: 'pks', ...it } satisfies PrimaryKey; + }); + }).flat(1); + + const vws: View[] = views.map((it) => { + return { entityType: 'views', isExisting: false, error: null, definition: it.definition, name: it.name }; + }); + + return { + tables: tbls, + columns: columns, + pks, + fks, + checks, + uniques, + indexes, + views: vws, + }; +}; + +export const diffSqlite = async ( + from: InterimStudioSchema, + to: InterimStudioSchema, + renamesArr: string[], +) => { + const renames = new Set(renamesArr); + const { ddl: ddl1 } = interimToDDL(fromInterims(from.tables, from.views)); + const { ddl: ddl2 } = interimToDDL(fromInterims(to.tables, to.views)); + + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + 'default', + ); + + return { sqlStatements, statements, groupedStatements }; +}; + +export const fromDatabase = fd; diff --git a/drizzle-kit/src/extensions/getTablesFilterByExtensions.ts b/drizzle-kit/src/extensions/getTablesFilterByExtensions.ts deleted file mode 100644 index 80321fc6a4..0000000000 --- a/drizzle-kit/src/extensions/getTablesFilterByExtensions.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { Config } from '../index'; - -export const getTablesFilterByExtensions = ({ - extensionsFilters, - dialect, -}: Pick): string[] => { - if (extensionsFilters) { - if ( - extensionsFilters.includes('postgis') - && dialect === 'postgresql' - ) { - return ['!geography_columns', '!geometry_columns', '!spatial_ref_sys']; - } - } - return []; -}; diff --git a/drizzle-kit/src/index.ts b/drizzle-kit/src/index.ts index 498f61260c..56b2bf4ff0 100644 --- a/drizzle-kit/src/index.ts +++ b/drizzle-kit/src/index.ts @@ -1,6 +1,6 @@ -import { ConnectionOptions } from 'tls'; +import type { ConnectionOptions } from 'tls'; import type { Driver, Prefix } from './cli/validations/common'; -import type { Dialect } from './schemaValidator'; +import type { Dialect } from './utils/schemaValidator'; // import {SslOptions} from 'mysql2' type SslOptions = { @@ -256,6 +256,46 @@ export type Config = }) ); } + // TODO update? + | { + dialect: Verify; + dbCredentials: + | { + port: number; + user: string; + password: string; + database: string; + server: string; + options?: { + encrypt?: boolean; + trustServerCertificate?: boolean; + }; + } + | { + url: string; + }; + } + | { + dialect: Verify; + dbCredentials: + | ({ + host: string; + port?: number; + user?: string; + password?: string; + database: string; + ssl?: + | boolean + | 'require' + | 'allow' + | 'prefer' + | 'verify-full' + | ConnectionOptions; + } & {}) + | { + url: string; + }; + } ); /** diff --git a/drizzle-kit/src/introspect-gel.ts b/drizzle-kit/src/introspect-gel.ts deleted file mode 100644 index a7b9493c13..0000000000 --- a/drizzle-kit/src/introspect-gel.ts +++ /dev/null @@ -1,1091 +0,0 @@ -import { getTableName, is } from 'drizzle-orm'; -import { - createTableRelationsHelpers, - extractTablesRelationalConfig, - Many, - One, - Relation, - Relations, -} from 'drizzle-orm/_relations'; -import { AnyGelTable } from 'drizzle-orm/gel-core'; -import './@types/utils'; -import { toCamelCase } from 'drizzle-orm/casing'; -import { Casing } from './cli/validations/common'; -import { assertUnreachable } from './global'; -import { - CheckConstraint, - Column, - ForeignKey, - GelKitInternals, - GelSchemaInternal, - Index, - Policy, - PrimaryKey, - UniqueConstraint, -} from './serializer/gelSchema'; -import { indexName } from './serializer/gelSerializer'; -import { unescapeSingleQuotes } from './utils'; - -const gelImportsList = new Set([ - 'gelTable', - 'smallint', - 'integer', - 'bigint', - 'bigintT', - 'boolean', - 'bytes', - 'dateDuration', - 'decimal', - 'doublePrecision', - 'duration', - 'json', - 'localDate', - 'localTime', - 'real', - 'relDuration', - 'text', - 'timestamp', - 'timestamptz', - 'uuid', - 'time', -]); - -const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { - if (isExpression) { - return `sql\`${defaultValue}\``; - } - - return defaultValue; -}; - -const relations = new Set(); - -const escapeColumnKey = (value: string) => { - if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { - return `"${value}"`; - } - return value; -}; - -const withCasing = (value: string, casing: Casing) => { - if (casing === 'preserve') { - return escapeColumnKey(value); - } - if (casing === 'camel') { - return escapeColumnKey(value.camelCase()); - } - - assertUnreachable(casing); -}; - -const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { - if (casing === 'preserve') { - return ''; - } - if (casing === 'camel') { - return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; - } - - assertUnreachable(casing); -}; - -// export const relationsToTypeScriptForStudio = ( -// schema: Record>>, -// relations: Record>>>, -// ) => { -// const relationalSchema: Record = { -// ...Object.fromEntries( -// Object.entries(schema) -// .map(([key, val]) => { -// // have unique keys across schemas -// const mappedTableEntries = Object.entries(val).map((tableEntry) => { -// return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; -// }); - -// return mappedTableEntries; -// }) -// .flat(), -// ), -// ...relations, -// }; - -// const relationsConfig = extractTablesRelationalConfig(relationalSchema, createTableRelationsHelpers); - -// let result = ''; - -// function findColumnKey(table: AnyGelTable, columnName: string) { -// for (const tableEntry of Object.entries(table)) { -// const key = tableEntry[0]; -// const value = tableEntry[1]; - -// if (value.name === columnName) { -// return key; -// } -// } -// } - -// Object.values(relationsConfig.tables).forEach((table) => { -// const tableName = table.tsName.split('.')[1]; -// const relations = table.relations; -// let hasRelations = false; -// let relationsObjAsStr = ''; -// let hasOne = false; -// let hasMany = false; - -// Object.values(relations).forEach((relation) => { -// hasRelations = true; - -// if (is(relation, Many)) { -// hasMany = true; -// relationsObjAsStr += `\t\t${relation.fieldName}: many(${ -// relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] -// }${typeof relation.relationName !== 'undefined' ? `, { relationName: "${relation.relationName}"}` : ''}),`; -// } - -// if (is(relation, One)) { -// hasOne = true; -// relationsObjAsStr += `\t\t${relation.fieldName}: one(${ -// relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] -// }, { fields: [${ -// relation.config?.fields.map( -// (c) => -// `${relationsConfig.tableNamesMap[getTableName(relation.sourceTable)].split('.')[1]}.${ -// findColumnKey(relation.sourceTable, c.name) -// }`, -// ) -// }], references: [${ -// relation.config?.references.map( -// (c) => -// `${relationsConfig.tableNamesMap[getTableName(relation.referencedTable)].split('.')[1]}.${ -// findColumnKey(relation.referencedTable, c.name) -// }`, -// ) -// }]${typeof relation.relationName !== 'undefined' ? `, relationName: "${relation.relationName}"` : ''}}),`; -// } -// }); - -// if (hasRelations) { -// result += `export const ${tableName}Relation = relations(${tableName}, ({${hasOne ? 'one' : ''}${ -// hasOne && hasMany ? ', ' : '' -// }${hasMany ? 'many' : ''}}) => ({ -// ${relationsObjAsStr} -// }));\n`; -// } -// }); - -// return result; -// }; - -function generateIdentityParams(identity: Column['identity']) { - let paramsObj = `{ name: "${identity!.name}"`; - if (identity?.startWith) { - paramsObj += `, startWith: ${identity.startWith}`; - } - if (identity?.increment) { - paramsObj += `, increment: ${identity.increment}`; - } - if (identity?.minValue) { - paramsObj += `, minValue: ${identity.minValue}`; - } - if (identity?.maxValue) { - paramsObj += `, maxValue: ${identity.maxValue}`; - } - if (identity?.cache) { - paramsObj += `, cache: ${identity.cache}`; - } - if (identity?.cycle) { - paramsObj += `, cycle: true`; - } - paramsObj += ' }'; - if (identity?.type === 'always') { - return `.generatedAlwaysAsIdentity(${paramsObj})`; - } - return `.generatedByDefaultAsIdentity(${paramsObj})`; -} - -export const paramNameFor = (name: string, schema?: string) => { - const schemaSuffix = schema && schema !== 'public' ? `In${schema.capitalise()}` : ''; - return `${name}${schemaSuffix}`; -}; - -export const schemaToTypeScript = (schema: GelSchemaInternal, casing: Casing) => { - // collectFKs - Object.values(schema.tables).forEach((table) => { - Object.values(table.foreignKeys).forEach((fk) => { - const relation = `${fk.tableFrom}-${fk.tableTo}`; - relations.add(relation); - }); - }); - - const schemas = Object.fromEntries( - Object.entries(schema.schemas).map((it) => { - return [it[0], withCasing(it[1].replace('::', ''), casing)]; - }), - ); - - // const enumTypes = Object.values(schema.enums).reduce((acc, cur) => { - // acc.add(`${cur.schema}.${cur.name}`); - // return acc; - // }, new Set()); - - const imports = Object.values(schema.tables).reduce( - (res, it) => { - const idxImports = Object.values(it.indexes).map((idx) => (idx.isUnique ? 'uniqueIndex' : 'index')); - const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); - if (Object.values(it.foreignKeys).some((it) => isCyclic(it) && !isSelf(it))) { - res.gel.push('type AnyGelColumn'); - } - const pkImports = Object.values(it.compositePrimaryKeys).map((it) => 'primaryKey'); - const uniqueImports = Object.values(it.uniqueConstraints).map((it) => 'unique'); - - const checkImports = Object.values(it.checkConstraints).map( - (it) => 'check', - ); - - const policiesImports = Object.values(it.policies).map( - (it) => 'gelPolicy', - ); - - if (it.schema && it.schema !== 'public' && it.schema !== '') { - res.gel.push('gelSchema'); - } - - res.gel.push(...idxImports); - res.gel.push(...fkImpots); - res.gel.push(...pkImports); - res.gel.push(...uniqueImports); - res.gel.push(...policiesImports); - res.gel.push(...checkImports); - - const columnImports = Object.values(it.columns) - .map((col) => { - let patched: string = col.type?.replace('[]', '') ?? ''; - patched = patched.startsWith('time without time zone') ? 'localTime' : patched; - - patched = patched === 'double precision' ? 'doublePrecision' : patched; - patched = patched.startsWith('edgedbt.bigint_t') ? 'bigintT' : patched; - - patched = patched.startsWith('jsonb') ? 'json' : patched; - patched = patched.startsWith('edgedbt.timestamptz_t') ? 'timestamptz' : patched; - patched = patched.startsWith('edgedbt.timestamp_t') ? 'timestamp' : patched; - - patched = patched.startsWith('edgedbt.relative_duration_t') ? 'relDuration' : patched; - patched = patched.startsWith('bytea') ? 'bytes' : patched; - - patched = patched.startsWith('numeric') ? 'decimal' : patched; - - patched = patched.startsWith('edgedbt.duration_t') ? 'duration' : patched; - patched = patched.startsWith('edgedbt.date_t') ? 'localDate' : patched; - patched = patched.startsWith('edgedbt.date_duration_t') ? 'dateDuration' : patched; - - return patched; - }) - .filter((type) => { - return gelImportsList.has(type); - }); - - res.gel.push(...columnImports); - return res; - }, - { gel: [] as string[] }, - ); - - // Object.values(schema.sequences).forEach((it) => { - // if (it.schema && it.schema !== 'public' && it.schema !== '') { - // imports.gel.push('gelSchema'); - // } else if (it.schema === 'public') { - // imports.gel.push('gelSequence'); - // } - // }); - - // Object.values(schema.enums).forEach((it) => { - // if (it.schema && it.schema !== 'public' && it.schema !== '') { - // imports.gel.push('gelSchema'); - // } else if (it.schema === 'public') { - // imports.gel.push('gelEnum'); - // } - // }); - - if (Object.keys(schema.roles).length > 0) { - imports.gel.push('gelRole'); - } - - // const enumStatements = Object.values(schema.enums) - // .map((it) => { - // const enumSchema = schemas[it.schema]; - // // const func = schema || schema === "public" ? "gelTable" : schema; - // const paramName = paramNameFor(it.name, enumSchema); - - // const func = enumSchema ? `${enumSchema}.enum` : 'gelEnum'; - - // const values = Object.values(it.values) - // .map((it) => `'${unescapeSingleQuotes(it, false)}'`) - // .join(', '); - // return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", [${values}])\n`; - // }) - // .join('') - // .concat('\n'); - - // const sequencesStatements = Object.values(schema.sequences) - // .map((it) => { - // const seqSchema = schemas[it.schema]; - // const paramName = paramNameFor(it.name, seqSchema); - - // const func = seqSchema ? `${seqSchema}.sequence` : 'gelSequence'; - - // let params = ''; - - // if (it.startWith) { - // params += `, startWith: "${it.startWith}"`; - // } - // if (it.increment) { - // params += `, increment: "${it.increment}"`; - // } - // if (it.minValue) { - // params += `, minValue: "${it.minValue}"`; - // } - // if (it.maxValue) { - // params += `, maxValue: "${it.maxValue}"`; - // } - // if (it.cache) { - // params += `, cache: "${it.cache}"`; - // } - // if (it.cycle) { - // params += `, cycle: true`; - // } else { - // params += `, cycle: false`; - // } - - // return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}"${ - // params ? `, { ${params.trimChar(',')} }` : '' - // })\n`; - // }) - // .join('') - // .concat(''); - - const schemaStatements = Object.entries(schemas) - .filter((it) => it[0] !== 'public') - .map((it) => { - return `export const ${it[1].replace('::', '').camelCase()} = gelSchema("${it[0]}");\n`; - }) - .join(''); - - const rolesNameToTsKey: Record = {}; - - const rolesStatements = Object.entries(schema.roles) - .map((it) => { - const fields = it[1]; - rolesNameToTsKey[fields.name] = it[0]; - return `export const ${withCasing(it[0], casing)} = gelRole("${fields.name}", ${ - !fields.createDb && !fields.createRole && fields.inherit - ? '' - : `${ - `, { ${fields.createDb ? `createDb: true,` : ''}${fields.createRole ? ` createRole: true,` : ''}${ - !fields.inherit ? ` inherit: false ` : '' - }`.trimChar(',') - }}` - } );\n`; - }) - .join(''); - - const tableStatements = Object.values(schema.tables).map((table) => { - const tableSchema = schemas[table.schema]; - const paramName = paramNameFor(table.name, tableSchema); - - const func = tableSchema ? `${tableSchema}.table` : 'gelTable'; - let statement = `export const ${withCasing(paramName, casing)} = ${func}("${table.name}", {\n`; - statement += createTableColumns( - table.name, - Object.values(table.columns), - Object.values(table.foreignKeys), - // enumTypes, - new Set(), - schemas, - casing, - schema.internal, - ); - statement += '}'; - - // more than 2 fields or self reference or cyclic - // Andrii: I switched this one off until we will get custom names in .references() - // const filteredFKs = Object.values(table.foreignKeys).filter((it) => { - // return it.columnsFrom.length > 1 || isSelf(it); - // }); - - if ( - Object.keys(table.indexes).length > 0 - || Object.values(table.foreignKeys).length > 0 - || Object.values(table.policies).length > 0 - || Object.keys(table.compositePrimaryKeys).length > 0 - || Object.keys(table.uniqueConstraints).length > 0 - || Object.keys(table.checkConstraints).length > 0 - ) { - statement += ', '; - statement += '(table) => ['; - statement += createTableIndexes(table.name, Object.values(table.indexes), casing); - statement += createTableFKs(Object.values(table.foreignKeys), schemas, casing); - statement += createTablePKs( - Object.values(table.compositePrimaryKeys), - casing, - ); - statement += createTableUniques( - Object.values(table.uniqueConstraints), - casing, - ); - statement += createTablePolicies( - Object.values(table.policies), - casing, - rolesNameToTsKey, - ); - statement += createTableChecks( - Object.values(table.checkConstraints), - casing, - ); - statement += '\n]'; - } - - statement += ');'; - return statement; - }); - - // const viewsStatements = Object.values(schema.views) - // .map((it) => { - // const viewSchema = schemas[it.schema]; - - // const paramName = paramNameFor(it.name, viewSchema); - - // const func = viewSchema - // ? (it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view`) - // : it.materialized - // ? 'gelMaterializedView' - // : 'gelView'; - - // const withOption = it.with ?? ''; - - // const as = `sql\`${it.definition}\``; - - // const tablespace = it.tablespace ?? ''; - - // const columns = createTableColumns( - // '', - // Object.values(it.columns), - // [], - // enumTypes, - // schemas, - // casing, - // schema.internal, - // ); - - // let statement = `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", {${columns}})`; - // statement += tablespace ? `.tablespace("${tablespace}")` : ''; - // statement += withOption ? `.with(${JSON.stringify(withOption)})` : ''; - // statement += `.as(${as});`; - - // return statement; - // }) - // .join('\n\n'); - - const uniqueGelImports = ['gelTable', ...new Set(imports.gel)]; - - const importsTs = `import { ${ - uniqueGelImports.join( - ', ', - ) - } } from "drizzle-orm/gel-core" -import { sql } from "drizzle-orm"\n\n`; - - let decalrations = schemaStatements; - decalrations += rolesStatements; - // decalrations += enumStatements; - // decalrations += sequencesStatements; - decalrations += '\n'; - decalrations += tableStatements.join('\n\n'); - decalrations += '\n'; - // decalrations += viewsStatements; - - const file = importsTs + decalrations; - - // for drizzle studio query runner - const schemaEntry = ` - { - ${ - Object.values(schema.tables) - .map((it) => withCasing(it.name, casing)) - .join(',\n') - } - } - `; - - return { file, imports: importsTs, decalrations, schemaEntry }; -}; - -const isCyclic = (fk: ForeignKey) => { - const key = `${fk.tableFrom}-${fk.tableTo}`; - const reverse = `${fk.tableTo}-${fk.tableFrom}`; - return relations.has(key) && relations.has(reverse); -}; - -const isSelf = (fk: ForeignKey) => { - return fk.tableFrom === fk.tableTo; -}; - -const buildArrayDefault = (defaultValue: string, typeName: string): string => { - if ( - typeof defaultValue === 'string' - && !(defaultValue.startsWith('_nullif_array_nulls(ARRAY[') || defaultValue.startsWith('ARRAY[')) - ) { - return `sql\`${defaultValue}\``; - } - - const regex = /ARRAY\[(.*)\]/; - const match = defaultValue.match(regex); - - if (!match) { - return `sql\`${defaultValue}\``; - } - - defaultValue = match[1]; - return `sql\`[${defaultValue}]\``; -}; - -const mapDefault = ( - tableName: string, - type: string, - name: string, - enumTypes: Set, - typeSchema: string, - defaultValue?: any, - internals?: GelKitInternals, -) => { - const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; - const isArray = internals?.tables[tableName]?.columns[name]?.isArray ?? false; - const lowered = type.toLowerCase().replace('[]', ''); - - if (name === 'id') { - return `.default(sql\`uuid_generate_v4()\`)`; - } - - if (isArray) { - return typeof defaultValue !== 'undefined' ? `.default(${buildArrayDefault(defaultValue, lowered)})` : ''; - } - - if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { - return typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(unescapeSingleQuotes(defaultValue, true), isExpression)})` - : ''; - } - - if (lowered.startsWith('integer')) { - return typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue.replaceAll('(', '').replaceAll(')', ''), isExpression)})` - : ''; - } - - if (lowered.startsWith('smallint')) { - return typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue.replaceAll('(', '').replaceAll(')', ''), isExpression)})` - : ''; - } - - if (lowered.startsWith('bigint')) { - return typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue.replaceAll('(', '').replaceAll(')', ''), isExpression)})` - : ''; - } - - if (lowered.startsWith('edgedbt.bigint_t')) { - return typeof defaultValue !== 'undefined' - ? `.default(BigInt(${mapColumnDefault(defaultValue.replaceAll('(', '').replaceAll(')', ''), isExpression)}))` - : ''; - } - - if (lowered.startsWith('boolean')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('double precision')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('edgedbt.date_duration_t')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, true)})` : ''; - } - - if (lowered.startsWith('real')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('uuid')) { - const res = defaultValue === 'gen_random_uuid()' - ? '.defaultRandom()' - : defaultValue - ? `.default(sql\`${defaultValue}\`)` - : ''; - - return res; - } - - if (lowered.startsWith('numeric')) { - defaultValue = defaultValue - ? (defaultValue.startsWith(`'`) && defaultValue.endsWith(`'`) - ? defaultValue.substring(1, defaultValue.length - 1) - : defaultValue) - : undefined; - return defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; - } - - if (lowered.startsWith('edgedbt.timestamptz_t')) { - return defaultValue === 'now()' - ? '.defaultNow()' - : /^'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{2}(:\d{2})?)?'$/.test(defaultValue) // Matches 'YYYY-MM-DD HH:MI:SS', 'YYYY-MM-DD HH:MI:SS.FFFFFF', 'YYYY-MM-DD HH:MI:SS+TZ', 'YYYY-MM-DD HH:MI:SS.FFFFFF+TZ' and 'YYYY-MM-DD HH:MI:SS+HH:MI' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : defaultValue - ? `.default(sql\`${defaultValue}\`)` - : ''; - } - - if (lowered.startsWith('time without time zone')) { - return defaultValue === 'now()' - ? '.defaultNow()' - : /^'\d{2}:\d{2}(:\d{2})?(\.\d+)?'$/.test(defaultValue) // Matches 'HH:MI', 'HH:MI:SS' and 'HH:MI:SS.FFFFFF' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : defaultValue - ? `.default(sql\`${defaultValue}\`)` - : ''; - } - - if (lowered.startsWith('edgedbt.duration_t')) { - return defaultValue ? `.default(${mapColumnDefault(defaultValue, true)})` : ''; - } - - if (lowered === 'edgedbt.date_t') { - return defaultValue === 'now()' - ? '.defaultNow()' - : /^'\d{4}-\d{2}-\d{2}'$/.test(defaultValue) // Matches 'YYYY-MM-DD' - ? `.default(${defaultValue})` - : defaultValue - ? `.default(sql\`${defaultValue}\`)` - : ''; - } - - if (lowered.startsWith('edgedbt.relative_duration_t')) { - return defaultValue ? `.default(${mapColumnDefault(defaultValue, true)})` : ''; - } - - if (lowered.startsWith('text')) { - return typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(unescapeSingleQuotes(defaultValue, true), isExpression)})` - : ''; - } - - if (lowered.startsWith('json')) { - const def = typeof defaultValue !== 'undefined' - ? defaultValue - : null; - - return defaultValue ? `.default(sql\`${def}\`)` : ''; - } - - if (lowered.startsWith('bytea')) { - return typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue, true)})` - : ''; - } - - return ''; -}; - -const column = ( - tableName: string, - type: string, - name: string, - enumTypes: Set, - typeSchema: string, - casing: Casing, - defaultValue?: any, - internals?: GelKitInternals, -) => { - const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; - const lowered = type.toLowerCase().replace('[]', ''); - - if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { - let out = `${withCasing(name, casing)}: ${withCasing(paramNameFor(type.replace('[]', ''), typeSchema), casing)}(${ - dbColumnName({ name, casing }) - })`; - return out; - } - - if (lowered.startsWith('integer')) { - let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('smallint')) { - let out = `${withCasing(name, casing)}: smallint(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('bigint')) { - let out = `${withCasing(name, casing)}: bigint(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('edgedbt.bigint_t')) { - let out = `${withCasing(name, casing)}: bigintT(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('boolean')) { - let out = `${withCasing(name, casing)}: boolean(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('double precision')) { - let out = `${withCasing(name, casing)}: doublePrecision(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('edgedbt.date_duration_t')) { - let out = `${withCasing(name, casing)}: dateDuration(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('real')) { - let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('uuid')) { - let out = `${withCasing(name, casing)}: uuid(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('numeric')) { - let out = `${withCasing(name, casing)}: decimal(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('edgedbt.timestamptz_t')) { - let out = `${withCasing(name, casing)}: timestamptz(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('edgedbt.timestamp_t')) { - let out = `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('edgedbt.date_t')) { - let out = `${withCasing(name, casing)}: localDate(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('edgedbt.duration_t')) { - let out = `${withCasing(name, casing)}: duration(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('edgedbt.relative_duration_t')) { - let out = `${withCasing(name, casing)}: relDuration(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('text')) { - let out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('jsonb')) { - let out = `${withCasing(name, casing)}: json(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('time without time zone')) { - let out = `${withCasing(name, casing)}: localTime(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('bytea')) { - let out = `${withCasing(name, casing)}: bytes(${dbColumnName({ name, casing })})`; - return out; - } - - let unknown = `// TODO: failed to parse database type '${type}'\n`; - unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; - return unknown; -}; - -const dimensionsInArray = (size?: number): string => { - let res = ''; - if (typeof size === 'undefined') return res; - for (let i = 0; i < size; i++) { - res += '.array()'; - } - return res; -}; - -const createTableColumns = ( - tableName: string, - columns: Column[], - fks: ForeignKey[], - enumTypes: Set, - schemas: Record, - casing: Casing, - internals: GelKitInternals, -): string => { - let statement = ''; - - // no self refs and no cyclic - const oneColumnsFKs = Object.values(fks) - .filter((it) => { - return !isSelf(it); - }) - .filter((it) => it.columnsFrom.length === 1); - - const fkByColumnName = oneColumnsFKs.reduce((res, it) => { - const arr = res[it.columnsFrom[0]] || []; - arr.push(it); - res[it.columnsFrom[0]] = arr; - return res; - }, {} as Record); - - columns.forEach((it) => { - const columnStatement = column( - tableName, - it.type, - it.name, - enumTypes, - it.typeSchema ?? 'public', - casing, - it.default, - internals, - ); - statement += '\t'; - statement += columnStatement; - // Provide just this in column function - if (internals?.tables[tableName]?.columns[it.name]?.isArray) { - statement += dimensionsInArray(internals?.tables[tableName]?.columns[it.name]?.dimensions); - } - statement += mapDefault(tableName, it.type, it.name, enumTypes, it.typeSchema ?? 'public', it.default, internals); - statement += it.primaryKey ? '.primaryKey()' : ''; - statement += it.notNull && !it.identity ? '.notNull()' : ''; - - statement += it.identity ? generateIdentityParams(it.identity) : ''; - - statement += it.generated ? `.generatedAlwaysAs(sql\`${it.generated.as}\`)` : ''; - - // const fks = fkByColumnName[it.name]; - // Andrii: I switched it off until we will get a custom naem setting in references - // if (fks) { - // const fksStatement = fks - // .map((it) => { - // const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; - // const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; - // const params = { onDelete, onUpdate }; - - // const typeSuffix = isCyclic(it) ? ': AnyGelColumn' : ''; - - // const paramsStr = objToStatement2(params); - // const tableSchema = schemas[it.schemaTo || '']; - // const paramName = paramNameFor(it.tableTo, tableSchema); - // if (paramsStr) { - // return `.references(()${typeSuffix} => ${ - // withCasing( - // paramName, - // casing, - // ) - // }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; - // } - // return `.references(()${typeSuffix} => ${ - // withCasing( - // paramName, - // casing, - // ) - // }.${withCasing(it.columnsTo[0], casing)})`; - // }) - // .join(''); - // statement += fksStatement; - // } - - statement += ',\n'; - }); - - return statement; -}; - -const createTableIndexes = (tableName: string, idxs: Index[], casing: Casing): string => { - let statement = ''; - - idxs.forEach((it) => { - // we have issue when index is called as table called - let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; - idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; - - idxKey = withCasing(idxKey, casing); - - const indexGeneratedName = indexName( - tableName, - it.columns.map((it) => it.expression), - ); - const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; - - statement += `\n\t`; - statement += it.isUnique ? 'uniqueIndex(' : 'index('; - statement += `${escapedIndexName})`; - statement += `${it.concurrently ? `.concurrently()` : ''}`; - - statement += `.using("${it.method}", ${ - it.columns - .map((it) => { - if (it.isExpression) { - return `sql\`${it.expression}\``; - } else { - return `table.${withCasing(it.expression, casing)}${it.asc ? '.asc()' : '.desc()'}${ - it.nulls === 'first' ? '.nullsFirst()' : '.nullsLast()' - }${ - it.opclass - ? `.op("${it.opclass}")` - : '' - }`; - } - }) - .join(', ') - })`; - statement += it.where ? `.where(sql\`${it.where}\`)` : ''; - - function reverseLogic(mappedWith: Record): string { - let reversedString = '{'; - for (const key in mappedWith) { - if (mappedWith.hasOwnProperty(key)) { - reversedString += `${key}: "${mappedWith[key]}",`; - } - } - reversedString = reversedString.length > 1 ? reversedString.slice(0, reversedString.length - 1) : reversedString; - return `${reversedString}}`; - } - - statement += it.with && Object.keys(it.with).length > 0 ? `.with(${reverseLogic(it.with)})` : ''; - statement += `,`; - }); - - return statement; -}; - -const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { - let statement = ''; - - pks.forEach((it) => { - statement += `\n\t`; - statement += 'primaryKey({ columns: ['; - statement += `${ - it.columns - .map((c) => { - return `table.${withCasing(c, casing)}`; - }) - .join(', ') - }]${it.name ? `, name: "${it.name}"` : ''}}`; - statement += ')'; - statement += `,`; - }); - - return statement; -}; - -// get a map of db role name to ts key -// if to by key is in this map - no quotes, otherwise - quotes - -const createTablePolicies = ( - policies: Policy[], - casing: Casing, - rolesNameToTsKey: Record = {}, -): string => { - let statement = ''; - - policies.forEach((it) => { - const idxKey = withCasing(it.name, casing); - - const mappedItTo = it.to?.map((v) => { - return rolesNameToTsKey[v] ? withCasing(rolesNameToTsKey[v], casing) : `"${v}"`; - }); - - statement += `\n\t`; - statement += 'gelPolicy('; - statement += `"${it.name}", { `; - statement += `as: "${it.as?.toLowerCase()}", for: "${it.for?.toLowerCase()}", to: [${mappedItTo?.join(', ')}]${ - it.using ? `, using: sql\`${it.using}\`` : '' - }${it.withCheck ? `, withCheck: sql\`${it.withCheck}\` ` : ''}`; - statement += ` }),`; - }); - - return statement; -}; - -const createTableUniques = ( - unqs: UniqueConstraint[], - casing: Casing, -): string => { - let statement = ''; - - unqs.forEach((it) => { - statement += `\n\t`; - statement += 'unique('; - statement += `"${it.name}")`; - statement += `.on(${it.columns.map((it) => `table.${withCasing(it, casing)}`).join(', ')})`; - statement += it.nullsNotDistinct ? `.nullsNotDistinct()` : ''; - statement += `,`; - }); - - return statement; -}; - -const createTableChecks = ( - checkConstraints: CheckConstraint[], - casing: Casing, -) => { - let statement = ''; - - checkConstraints.forEach((it) => { - statement += `\n\t`; - statement += 'check('; - statement += `"${it.name}", `; - statement += `sql\`${it.value}\`)`; - statement += `,`; - }); - - return statement; -}; - -const createTableFKs = (fks: ForeignKey[], schemas: Record, casing: Casing): string => { - let statement = ''; - - fks.forEach((it) => { - const tableSchema = schemas[it.schemaTo || '']; - const paramName = paramNameFor(it.tableTo, tableSchema); - - const isSelf = it.tableTo === it.tableFrom; - const tableTo = isSelf ? 'table' : `${withCasing(paramName, casing)}`; - statement += `\n\t`; - statement += `foreignKey({\n`; - statement += `\t\t\tcolumns: [${it.columnsFrom.map((i) => `table.${withCasing(i, casing)}`).join(', ')}],\n`; - statement += `\t\t\tforeignColumns: [${ - it.columnsTo.map((i) => `${tableTo}.${withCasing(i, casing)}`).join(', ') - }],\n`; - statement += `\t\t\tname: "${it.name}"\n`; - statement += `\t\t})`; - - statement += it.onUpdate && it.onUpdate !== 'no action' ? `.onUpdate("${it.onUpdate}")` : ''; - - statement += it.onDelete && it.onDelete !== 'no action' ? `.onDelete("${it.onDelete}")` : ''; - - statement += `,`; - }); - - return statement; -}; diff --git a/drizzle-kit/src/introspect-mysql.ts b/drizzle-kit/src/introspect-mysql.ts deleted file mode 100644 index 8f1ddfd065..0000000000 --- a/drizzle-kit/src/introspect-mysql.ts +++ /dev/null @@ -1,1030 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unsafe-argument */ -import { toCamelCase } from 'drizzle-orm/casing'; -import './@types/utils'; -import type { Casing } from './cli/validations/common'; -import { assertUnreachable } from './global'; -import { - CheckConstraint, - Column, - ForeignKey, - Index, - MySqlSchema, - MySqlSchemaInternal, - PrimaryKey, - UniqueConstraint, -} from './serializer/mysqlSchema'; -import { unescapeSingleQuotes } from './utils'; - -const mysqlImportsList = new Set([ - 'mysqlTable', - 'mysqlEnum', - 'bigint', - 'binary', - 'boolean', - 'char', - 'date', - 'datetime', - 'decimal', - 'double', - 'float', - 'int', - 'json', - 'mediumint', - 'real', - 'serial', - 'smallint', - 'text', - 'tinytext', - 'mediumtext', - 'longtext', - 'time', - 'timestamp', - 'tinyint', - 'varbinary', - 'varchar', - 'year', - 'enum', -]); - -const objToStatement = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `"${it}": "${json[it]}"`).join(', '); - statement += ' }'; - return statement; -}; - -const objToStatement2 = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys - statement += ' }'; - return statement; -}; - -const timeConfig = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); - statement += ' }'; - return statement; -}; - -const binaryConfig = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); - statement += ' }'; - return statement; -}; - -const importsPatch = { - 'double precision': 'doublePrecision', - 'timestamp without time zone': 'timestamp', -} as Record; - -const relations = new Set(); - -const escapeColumnKey = (value: string) => { - if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { - return `"${value}"`; - } - return value; -}; - -const prepareCasing = (casing?: Casing) => (value: string) => { - if (casing === 'preserve') { - return escapeColumnKey(value); - } - if (casing === 'camel') { - return escapeColumnKey(value.camelCase()); - } - - assertUnreachable(casing); -}; - -const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { - if (casing === 'preserve') { - return ''; - } - if (casing === 'camel') { - return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; - } - - assertUnreachable(casing); -}; - -export const schemaToTypeScript = ( - schema: MySqlSchemaInternal, - casing: Casing, -) => { - const withCasing = prepareCasing(casing); - // collectFKs - Object.values(schema.tables).forEach((table) => { - Object.values(table.foreignKeys).forEach((fk) => { - const relation = `${fk.tableFrom}-${fk.tableTo}`; - relations.add(relation); - }); - }); - - const imports = Object.values(schema.tables).reduce( - (res, it) => { - const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); - const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); - const pkImports = Object.values(it.compositePrimaryKeys).map( - (it) => 'primaryKey', - ); - const uniqueImports = Object.values(it.uniqueConstraints).map( - (it) => 'unique', - ); - const checkImports = Object.values(it.checkConstraint).map( - (it) => 'check', - ); - - res.mysql.push(...idxImports); - res.mysql.push(...fkImpots); - res.mysql.push(...pkImports); - res.mysql.push(...uniqueImports); - res.mysql.push(...checkImports); - - const columnImports = Object.values(it.columns) - .map((col) => { - let patched = importsPatch[col.type] ?? col.type; - patched = patched.startsWith('varchar(') ? 'varchar' : patched; - patched = patched.startsWith('char(') ? 'char' : patched; - patched = patched.startsWith('binary(') ? 'binary' : patched; - patched = patched.startsWith('decimal(') ? 'decimal' : patched; - patched = patched.startsWith('smallint(') ? 'smallint' : patched; - patched = patched.startsWith('enum(') ? 'mysqlEnum' : patched; - patched = patched.startsWith('datetime(') ? 'datetime' : patched; - patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; - patched = patched.startsWith('int(') ? 'int' : patched; - patched = patched.startsWith('double(') ? 'double' : patched; - patched = patched.startsWith('float(') ? 'float' : patched; - patched = patched.startsWith('int unsigned') ? 'int' : patched; - patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; - patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; - patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; - patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; - return patched; - }) - .filter((type) => { - return mysqlImportsList.has(type); - }); - - res.mysql.push(...columnImports); - return res; - }, - { mysql: [] as string[] }, - ); - - Object.values(schema.views).forEach((it) => { - imports.mysql.push('mysqlView'); - - const columnImports = Object.values(it.columns) - .map((col) => { - let patched = importsPatch[col.type] ?? col.type; - patched = patched.startsWith('varchar(') ? 'varchar' : patched; - patched = patched.startsWith('char(') ? 'char' : patched; - patched = patched.startsWith('binary(') ? 'binary' : patched; - patched = patched.startsWith('decimal(') ? 'decimal' : patched; - patched = patched.startsWith('smallint(') ? 'smallint' : patched; - patched = patched.startsWith('enum(') ? 'mysqlEnum' : patched; - patched = patched.startsWith('datetime(') ? 'datetime' : patched; - patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; - patched = patched.startsWith('int(') ? 'int' : patched; - patched = patched.startsWith('double(') ? 'double' : patched; - patched = patched.startsWith('float(') ? 'float' : patched; - patched = patched.startsWith('int unsigned') ? 'int' : patched; - patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; - patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; - patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; - patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; - return patched; - }) - .filter((type) => { - return mysqlImportsList.has(type); - }); - - imports.mysql.push(...columnImports); - }); - - const tableStatements = Object.values(schema.tables).map((table) => { - const func = 'mysqlTable'; - let statement = ''; - if (imports.mysql.includes(withCasing(table.name))) { - statement = `// Table name is in conflict with ${ - withCasing( - table.name, - ) - } import.\n// Please change to any other name, that is not in imports list\n`; - } - statement += `export const ${withCasing(table.name)} = ${func}("${table.name}", {\n`; - statement += createTableColumns( - Object.values(table.columns), - Object.values(table.foreignKeys), - withCasing, - casing, - table.name, - schema, - ); - statement += '}'; - - // more than 2 fields or self reference or cyclic - const filteredFKs = Object.values(table.foreignKeys).filter((it) => { - return it.columnsFrom.length > 1 || isSelf(it); - }); - - if ( - Object.keys(table.indexes).length > 0 - || filteredFKs.length > 0 - || Object.keys(table.compositePrimaryKeys).length > 0 - || Object.keys(table.uniqueConstraints).length > 0 - || Object.keys(table.checkConstraint).length > 0 - ) { - statement += ',\n'; - statement += '(table) => ['; - statement += createTableIndexes( - table.name, - Object.values(table.indexes), - withCasing, - ); - statement += createTableFKs(Object.values(filteredFKs), withCasing); - statement += createTablePKs( - Object.values(table.compositePrimaryKeys), - withCasing, - ); - statement += createTableUniques( - Object.values(table.uniqueConstraints), - withCasing, - ); - statement += createTableChecks( - Object.values(table.checkConstraint), - withCasing, - ); - statement += '\n]'; - } - - statement += ');'; - return statement; - }); - - const viewsStatements = Object.values(schema.views).map((view) => { - const { columns, name, algorithm, definition, sqlSecurity, withCheckOption } = view; - const func = 'mysqlView'; - let statement = ''; - - if (imports.mysql.includes(withCasing(name))) { - statement = `// Table name is in conflict with ${ - withCasing( - view.name, - ) - } import.\n// Please change to any other name, that is not in imports list\n`; - } - statement += `export const ${withCasing(name)} = ${func}("${name}", {\n`; - statement += createTableColumns( - Object.values(columns), - [], - withCasing, - casing, - name, - schema, - ); - statement += '})'; - - statement += algorithm ? `.algorithm("${algorithm}")` : ''; - statement += sqlSecurity ? `.sqlSecurity("${sqlSecurity}")` : ''; - statement += withCheckOption ? `.withCheckOption("${withCheckOption}")` : ''; - statement += `.as(sql\`${definition?.replaceAll('`', '\\`')}\`);`; - - return statement; - }); - - const uniqueMySqlImports = [ - 'mysqlTable', - 'mysqlSchema', - 'AnyMySqlColumn', - ...new Set(imports.mysql), - ]; - const importsTs = `import { ${ - uniqueMySqlImports.join( - ', ', - ) - } } from "drizzle-orm/mysql-core"\nimport { sql } from "drizzle-orm"\n\n`; - - let decalrations = ''; - decalrations += tableStatements.join('\n\n'); - decalrations += '\n'; - decalrations += viewsStatements.join('\n\n'); - - const file = importsTs + decalrations; - - const schemaEntry = ` - { - ${ - Object.values(schema.tables) - .map((it) => withCasing(it.name)) - .join(',') - } - } - `; - - return { - file, // backward compatible, print to file - imports: importsTs, - decalrations, - schemaEntry, - }; -}; - -const isCyclic = (fk: ForeignKey) => { - const key = `${fk.tableFrom}-${fk.tableTo}`; - const reverse = `${fk.tableTo}-${fk.tableFrom}`; - return relations.has(key) && relations.has(reverse); -}; - -const isSelf = (fk: ForeignKey) => { - return fk.tableFrom === fk.tableTo; -}; - -const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { - if (isExpression) { - return `sql\`${defaultValue}\``; - } - - return defaultValue; -}; - -const mapColumnDefaultForJson = (defaultValue: any) => { - if ( - typeof defaultValue === 'string' - && defaultValue.startsWith("('") - && defaultValue.endsWith("')") - ) { - return defaultValue.substring(2, defaultValue.length - 2); - } - - return defaultValue; -}; - -const column = ( - type: string, - name: string, - casing: (value: string) => string, - rawCasing: Casing, - defaultValue?: any, - autoincrement?: boolean, - onUpdate?: boolean, - isExpression?: boolean, -) => { - let lowered = type; - if (!type.startsWith('enum(')) { - lowered = type.toLowerCase(); - } - - if (lowered === 'serial') { - return `${casing(name)}: serial(${dbColumnName({ name, casing: rawCasing })})`; - } - - if (lowered.startsWith('int')) { - const isUnsigned = lowered.startsWith('int unsigned'); - const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); - let out = `${casing(name)}: int(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; - out += autoincrement ? `.autoincrement()` : ''; - out += typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('tinyint')) { - const isUnsigned = lowered.startsWith('tinyint unsigned'); - const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); - // let out = `${name.camelCase()}: tinyint("${name}")`; - let out: string = `${casing(name)}: tinyint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; - out += autoincrement ? `.autoincrement()` : ''; - out += typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('smallint')) { - const isUnsigned = lowered.startsWith('smallint unsigned'); - const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); - let out = `${casing(name)}: smallint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; - out += autoincrement ? `.autoincrement()` : ''; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('mediumint')) { - const isUnsigned = lowered.startsWith('mediumint unsigned'); - const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); - let out = `${casing(name)}: mediumint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; - out += autoincrement ? `.autoincrement()` : ''; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('bigint')) { - const isUnsigned = lowered.startsWith('bigint unsigned'); - let out = `${casing(name)}: bigint(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: "number"${ - isUnsigned ? ', unsigned: true' : '' - } })`; - out += autoincrement ? `.autoincrement()` : ''; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered === 'boolean') { - let out = `${casing(name)}: boolean(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('double')) { - let params: - | { precision?: string; scale?: string; unsigned?: boolean } - | undefined; - - if (lowered.length > (lowered.includes('unsigned') ? 15 : 6)) { - const [precision, scale] = lowered - .slice(7, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) - .split(','); - params = { precision, scale }; - } - - if (lowered.includes('unsigned')) { - params = { ...(params ?? {}), unsigned: true }; - } - - const timeConfigParams = params ? timeConfig(params) : undefined; - - let out = params - ? `${casing(name)}: double(${ - dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) - }${timeConfig(params)})` - : `${casing(name)}: double(${dbColumnName({ name, casing: rawCasing })})`; - - // let out = `${name.camelCase()}: double("${name}")`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('float')) { - let params: - | { precision?: string; scale?: string; unsigned?: boolean } - | undefined; - - if (lowered.length > (lowered.includes('unsigned') ? 14 : 5)) { - const [precision, scale] = lowered - .slice(6, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) - .split(','); - params = { precision, scale }; - } - - if (lowered.includes('unsigned')) { - params = { ...(params ?? {}), unsigned: true }; - } - - let out = `${casing(name)}: float(${dbColumnName({ name, casing: rawCasing })}${params ? timeConfig(params) : ''})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered === 'real') { - let out = `${casing(name)}: real(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('timestamp')) { - const keyLength = 'timestamp'.length + 1; - let fsp = lowered.length > keyLength - ? Number(lowered.substring(keyLength, lowered.length - 1)) - : null; - fsp = fsp ? fsp : null; - - const params = timeConfig({ fsp, mode: "'string'" }); - - let out = params - ? `${casing(name)}: timestamp(${ - dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) - }${params})` - : `${casing(name)}: timestamp(${dbColumnName({ name, casing: rawCasing })})`; - - // mysql has only CURRENT_TIMESTAMP, as I found from docs. But will leave now() for just a case - defaultValue = defaultValue === 'now()' || defaultValue === '(CURRENT_TIMESTAMP)' - ? '.defaultNow()' - : defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - - let onUpdateNow = onUpdate ? '.onUpdateNow()' : ''; - out += onUpdateNow; - - return out; - } - - if (lowered.startsWith('time')) { - const keyLength = 'time'.length + 1; - let fsp = lowered.length > keyLength - ? Number(lowered.substring(keyLength, lowered.length - 1)) - : null; - fsp = fsp ? fsp : null; - - const params = timeConfig({ fsp }); - - let out = params - ? `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` - : `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing })})`; - - defaultValue = defaultValue === 'now()' - ? '.defaultNow()' - : defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered === 'date') { - let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t${ - casing( - name, - ) - }: date(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string' })`; - - defaultValue = defaultValue === 'now()' - ? '.defaultNow()' - : defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - // in mysql text can't have default value. Will leave it in case smth ;) - if (lowered === 'text') { - let out = `${casing(name)}: text(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - // in mysql text can't have default value. Will leave it in case smth ;) - if (lowered === 'tinytext') { - let out = `${casing(name)}: tinytext(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - // in mysql text can't have default value. Will leave it in case smth ;) - if (lowered === 'mediumtext') { - let out = `${casing(name)}: mediumtext(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - // in mysql text can't have default value. Will leave it in case smth ;) - if (lowered === 'longtext') { - let out = `${casing(name)}: longtext(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered === 'year') { - let out = `${casing(name)}: year(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - // in mysql json can't have default value. Will leave it in case smth ;) - if (lowered === 'json') { - let out = `${casing(name)}: json(${dbColumnName({ name, casing: rawCasing })})`; - - out += defaultValue - ? `.default(${mapColumnDefaultForJson(defaultValue)})` - : ''; - - return out; - } - - if (lowered.startsWith('varchar')) { - let out: string = `${ - casing( - name, - ) - }: varchar(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ - lowered.substring( - 'varchar'.length + 1, - lowered.length - 1, - ) - } })`; - - const mappedDefaultValue = mapColumnDefault(defaultValue, isExpression); - out += defaultValue - ? `.default(${isExpression ? mappedDefaultValue : unescapeSingleQuotes(mappedDefaultValue, true)})` - : ''; - return out; - } - - if (lowered.startsWith('char')) { - let out: string = `${ - casing( - name, - ) - }: char(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ - lowered.substring( - 'char'.length + 1, - lowered.length - 1, - ) - } })`; - - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('datetime')) { - let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t`; - - const fsp = lowered.startsWith('datetime(') - ? lowered.substring('datetime'.length + 1, lowered.length - 1) - : undefined; - - out = fsp - ? `${ - casing( - name, - ) - }: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string', fsp: ${ - lowered.substring( - 'datetime'.length + 1, - lowered.length - 1, - ) - } })` - : `${casing(name)}: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string'})`; - - defaultValue = defaultValue === 'now()' - ? '.defaultNow()' - : defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered.startsWith('decimal')) { - let params: - | { precision?: string; scale?: string; unsigned?: boolean } - | undefined; - - if (lowered.length > (lowered.includes('unsigned') ? 16 : 7)) { - const [precision, scale] = lowered - .slice(8, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) - .split(','); - params = { precision, scale }; - } - - if (lowered.includes('unsigned')) { - params = { ...(params ?? {}), unsigned: true }; - } - - const timeConfigParams = params ? timeConfig(params) : undefined; - - let out = params - ? `${casing(name)}: decimal(${ - dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) - }${timeConfigParams})` - : `${casing(name)}: decimal(${dbColumnName({ name, casing: rawCasing })})`; - - defaultValue = typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered.startsWith('binary')) { - const keyLength = 'binary'.length + 1; - let length = lowered.length > keyLength - ? Number(lowered.substring(keyLength, lowered.length - 1)) - : null; - length = length ? length : null; - - const params = binaryConfig({ length }); - - let out = params - ? `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` - : `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing })})`; - - defaultValue = defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered.startsWith('enum')) { - const values = lowered - .substring('enum'.length + 1, lowered.length - 1) - .split(',') - .map((v) => unescapeSingleQuotes(v, true)) - .join(','); - let out = `${casing(name)}: mysqlEnum(${dbColumnName({ name, casing: rawCasing, withMode: true })}[${values}])`; - const mappedDefaultValue = mapColumnDefault(defaultValue, isExpression); - out += defaultValue - ? `.default(${isExpression ? mappedDefaultValue : unescapeSingleQuotes(mappedDefaultValue, true)})` - : ''; - return out; - } - - if (lowered.startsWith('varbinary')) { - const keyLength = 'varbinary'.length + 1; - let length = lowered.length > keyLength - ? Number(lowered.substring(keyLength, lowered.length - 1)) - : null; - length = length ? length : null; - - const params = binaryConfig({ length }); - - let out = params - ? `${casing(name)}: varbinary(${ - dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) - }${params})` - : `${casing(name)}: varbinary(${dbColumnName({ name, casing: rawCasing })})`; - - defaultValue = defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - console.log('uknown', type); - return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; -}; - -const createTableColumns = ( - columns: Column[], - fks: ForeignKey[], - casing: (val: string) => string, - rawCasing: Casing, - tableName: string, - schema: MySqlSchemaInternal, -): string => { - let statement = ''; - - // no self refs and no cyclic - const oneColumnsFKs = Object.values(fks) - .filter((it) => { - return !isSelf(it); - }) - .filter((it) => it.columnsFrom.length === 1); - - const fkByColumnName = oneColumnsFKs.reduce((res, it) => { - const arr = res[it.columnsFrom[0]] || []; - arr.push(it); - res[it.columnsFrom[0]] = arr; - return res; - }, {} as Record); - - columns.forEach((it) => { - statement += '\t'; - statement += column( - it.type, - it.name, - casing, - rawCasing, - it.default, - it.autoincrement, - it.onUpdate, - schema.internal?.tables![tableName]?.columns[it.name] - ?.isDefaultAnExpression ?? false, - ); - statement += it.primaryKey ? '.primaryKey()' : ''; - statement += it.notNull ? '.notNull()' : ''; - - statement += it.generated - ? `.generatedAlwaysAs(sql\`${ - it.generated.as.replace( - /`/g, - '\\`', - ) - }\`, { mode: "${it.generated.type}" })` - : ''; - - const fks = fkByColumnName[it.name]; - if (fks) { - const fksStatement = fks - .map((it) => { - const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; - const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; - const params = { onDelete, onUpdate }; - - const typeSuffix = isCyclic(it) ? ': AnyMySqlColumn' : ''; - - const paramsStr = objToStatement2(params); - if (paramsStr) { - return `.references(()${typeSuffix} => ${ - casing( - it.tableTo, - ) - }.${casing(it.columnsTo[0])}, ${paramsStr} )`; - } - return `.references(()${typeSuffix} => ${casing(it.tableTo)}.${ - casing( - it.columnsTo[0], - ) - })`; - }) - .join(''); - statement += fksStatement; - } - - statement += ',\n'; - }); - - return statement; -}; - -const createTableIndexes = ( - tableName: string, - idxs: Index[], - casing: (value: string) => string, -): string => { - let statement = ''; - - idxs.forEach((it) => { - let idxKey = it.name.startsWith(tableName) && it.name !== tableName - ? it.name.slice(tableName.length + 1) - : it.name; - idxKey = idxKey.endsWith('_index') - ? idxKey.slice(0, -'_index'.length) + '_idx' - : idxKey; - - idxKey = casing(idxKey); - - statement += `\n\t`; - statement += it.isUnique ? 'uniqueIndex(' : 'index('; - statement += `"${it.name}")`; - statement += `.on(${ - it.columns - .map((it) => `table.${casing(it)}`) - .join(', ') - }),`; - }); - - return statement; -}; - -const createTableUniques = ( - unqs: UniqueConstraint[], - casing: (value: string) => string, -): string => { - let statement = ''; - - unqs.forEach((it) => { - const idxKey = casing(it.name); - - statement += `\n\t`; - statement += 'unique('; - statement += `"${it.name}")`; - statement += `.on(${ - it.columns - .map((it) => `table.${casing(it)}`) - .join(', ') - }),`; - }); - - return statement; -}; - -const createTableChecks = ( - checks: CheckConstraint[], - casing: (value: string) => string, -): string => { - let statement = ''; - - checks.forEach((it) => { - statement += `\n\t`; - statement += 'check('; - statement += `"${it.name}", `; - statement += `sql\`${it.value.replace(/`/g, '\\`')}\`)`; - statement += `,`; - }); - - return statement; -}; - -const createTablePKs = ( - pks: PrimaryKey[], - casing: (value: string) => string, -): string => { - let statement = ''; - - pks.forEach((it) => { - let idxKey = casing(it.name); - - statement += `\n\t`; - statement += 'primaryKey({ columns: ['; - statement += `${ - it.columns - .map((c) => { - return `table.${casing(c)}`; - }) - .join(', ') - }]${it.name ? `, name: "${it.name}"` : ''}}`; - statement += '),'; - }); - - return statement; -}; - -const createTableFKs = ( - fks: ForeignKey[], - casing: (value: string) => string, -): string => { - let statement = ''; - - fks.forEach((it) => { - const isSelf = it.tableTo === it.tableFrom; - const tableTo = isSelf ? 'table' : `${casing(it.tableTo)}`; - statement += `\n\t`; - statement += `foreignKey({\n`; - statement += `\t\t\tcolumns: [${ - it.columnsFrom - .map((i) => `table.${casing(i)}`) - .join(', ') - }],\n`; - statement += `\t\t\tforeignColumns: [${ - it.columnsTo - .map((i) => `${tableTo}.${casing(i)}`) - .join(', ') - }],\n`; - statement += `\t\t\tname: "${it.name}"\n`; - statement += `\t\t})`; - - statement += it.onUpdate && it.onUpdate !== 'no action' - ? `.onUpdate("${it.onUpdate}")` - : ''; - - statement += it.onDelete && it.onDelete !== 'no action' - ? `.onDelete("${it.onDelete}")` - : ''; - - statement += `,`; - }); - - return statement; -}; diff --git a/drizzle-kit/src/introspect-pg.ts b/drizzle-kit/src/introspect-pg.ts deleted file mode 100644 index cf98b6a9a4..0000000000 --- a/drizzle-kit/src/introspect-pg.ts +++ /dev/null @@ -1,1379 +0,0 @@ -import { getTableName, is } from 'drizzle-orm'; -import { - createTableRelationsHelpers, - extractTablesRelationalConfig, - Many, - One, - Relation, - Relations, -} from 'drizzle-orm/_relations'; -import { AnyPgTable } from 'drizzle-orm/pg-core'; -import './@types/utils'; -import { toCamelCase } from 'drizzle-orm/casing'; -import { Casing } from './cli/validations/common'; -import { assertUnreachable } from './global'; -import { - CheckConstraint, - Column, - ForeignKey, - Index, - PgKitInternals, - PgSchemaInternal, - Policy, - PrimaryKey, - UniqueConstraint, -} from './serializer/pgSchema'; -import { indexName } from './serializer/pgSerializer'; -import { unescapeSingleQuotes } from './utils'; - -const pgImportsList = new Set([ - 'pgTable', - 'pgEnum', - 'smallint', - 'integer', - 'bigint', - 'boolean', - 'text', - 'varchar', - 'char', - 'serial', - 'smallserial', - 'bigserial', - 'decimal', - 'numeric', - 'real', - 'json', - 'jsonb', - 'time', - 'timestamp', - 'date', - 'interval', - 'cidr', - 'inet', - 'bytea', - 'macaddr', - 'macaddr8', - 'bigint', - 'doublePrecision', - 'uuid', - 'vector', - 'point', - 'line', - 'geometry', -]); - -const objToStatement2 = (json: { [s: string]: unknown }) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys - statement += ' }'; - return statement; -}; - -const timeConfig = (json: { [s: string]: unknown }) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); - statement += ' }'; - return statement; -}; - -const possibleIntervals = [ - 'year', - 'month', - 'day', - 'hour', - 'minute', - 'second', - 'year to month', - 'day to hour', - 'day to minute', - 'day to second', - 'hour to minute', - 'hour to second', - 'minute to second', -]; - -const intervalStrToObj = (str: string) => { - if (str.startsWith('interval(')) { - return { - precision: Number(str.substring('interval('.length, str.length - 1)), - }; - } - const splitted = str.split(' '); - if (splitted.length === 1) { - return {}; - } - const rest = splitted.slice(1, splitted.length).join(' '); - if (possibleIntervals.includes(rest)) { - return { fields: `"${rest}"` }; - } - - for (const s of possibleIntervals) { - if (rest.startsWith(`${s}(`)) { - return { - fields: `"${s}"`, - precision: Number(rest.substring(s.length + 1, rest.length - 1)), - }; - } - } - return {}; -}; - -const intervalConfig = (str: string) => { - const json = intervalStrToObj(str); - // json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it: keyof typeof json) => `${it}: ${json[it]}`).join(', '); - statement += ' }'; - return statement; -}; - -const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { - if (isExpression) { - return `sql\`${defaultValue}\``; - } - - return defaultValue; -}; - -const importsPatch = { - 'double precision': 'doublePrecision', - 'timestamp without time zone': 'timestamp', - 'timestamp with time zone': 'timestamp', - 'time without time zone': 'time', - 'time with time zone': 'time', -} as Record; - -const relations = new Set(); - -const escapeColumnKey = (value: string) => { - if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { - return `"${value}"`; - } - return value; -}; - -const withCasing = (value: string, casing: Casing) => { - if (casing === 'preserve') { - return escapeColumnKey(value); - } - if (casing === 'camel') { - return escapeColumnKey(toCamelCase(value)); - } - - assertUnreachable(casing); -}; - -const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { - if (casing === 'preserve') { - return ''; - } - if (casing === 'camel') { - return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; - } - - assertUnreachable(casing); -}; - -export const relationsToTypeScriptForStudio = ( - schema: Record>>, - relations: Record>>>, -) => { - const relationalSchema: Record = { - ...Object.fromEntries( - Object.entries(schema) - .map(([key, val]) => { - // have unique keys across schemas - const mappedTableEntries = Object.entries(val).map((tableEntry) => { - return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; - }); - - return mappedTableEntries; - }) - .flat(), - ), - ...relations, - }; - - const relationsConfig = extractTablesRelationalConfig(relationalSchema, createTableRelationsHelpers); - - let result = ''; - - function findColumnKey(table: AnyPgTable, columnName: string) { - for (const tableEntry of Object.entries(table)) { - const key = tableEntry[0]; - const value = tableEntry[1]; - - if (value.name === columnName) { - return key; - } - } - } - - Object.values(relationsConfig.tables).forEach((table) => { - const tableName = table.tsName.split('.')[1]; - const relations = table.relations; - let hasRelations = false; - let relationsObjAsStr = ''; - let hasOne = false; - let hasMany = false; - - Object.values(relations).forEach((relation) => { - hasRelations = true; - - if (is(relation, Many)) { - hasMany = true; - relationsObjAsStr += `\t\t${relation.fieldName}: many(${ - relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] - }${typeof relation.relationName !== 'undefined' ? `, { relationName: "${relation.relationName}"}` : ''}),`; - } - - if (is(relation, One)) { - hasOne = true; - relationsObjAsStr += `\t\t${relation.fieldName}: one(${ - relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] - }, { fields: [${ - relation.config?.fields.map( - (c) => - `${relationsConfig.tableNamesMap[getTableName(relation.sourceTable)].split('.')[1]}.${ - findColumnKey(relation.sourceTable as AnyPgTable, c.name) - }`, - ) - }], references: [${ - relation.config?.references.map( - (c) => - `${relationsConfig.tableNamesMap[getTableName(relation.referencedTable)].split('.')[1]}.${ - findColumnKey(relation.referencedTable as AnyPgTable, c.name) - }`, - ) - }]${typeof relation.relationName !== 'undefined' ? `, relationName: "${relation.relationName}"` : ''}}),`; - } - }); - - if (hasRelations) { - result += `export const ${tableName}Relation = relations(${tableName}, ({${hasOne ? 'one' : ''}${ - hasOne && hasMany ? ', ' : '' - }${hasMany ? 'many' : ''}}) => ({ - ${relationsObjAsStr} - }));\n`; - } - }); - - return result; -}; - -function generateIdentityParams(identity: Column['identity']) { - let paramsObj = `{ name: "${identity!.name}"`; - if (identity?.startWith) { - paramsObj += `, startWith: ${identity.startWith}`; - } - if (identity?.increment) { - paramsObj += `, increment: ${identity.increment}`; - } - if (identity?.minValue) { - paramsObj += `, minValue: ${identity.minValue}`; - } - if (identity?.maxValue) { - paramsObj += `, maxValue: ${identity.maxValue}`; - } - if (identity?.cache) { - paramsObj += `, cache: ${identity.cache}`; - } - if (identity?.cycle) { - paramsObj += `, cycle: true`; - } - paramsObj += ' }'; - if (identity?.type === 'always') { - return `.generatedAlwaysAsIdentity(${paramsObj})`; - } - return `.generatedByDefaultAsIdentity(${paramsObj})`; -} - -export const paramNameFor = (name: string, schema?: string) => { - const schemaSuffix = schema && schema !== 'public' ? `In${schema.capitalise()}` : ''; - return `${name}${schemaSuffix}`; -}; - -export const schemaToTypeScript = (schema: PgSchemaInternal, casing: Casing) => { - // collectFKs - Object.values(schema.tables).forEach((table) => { - Object.values(table.foreignKeys).forEach((fk) => { - const relation = `${fk.tableFrom}-${fk.tableTo}`; - relations.add(relation); - }); - }); - - const schemas = Object.fromEntries( - Object.entries(schema.schemas).map((it) => { - return [it[0], withCasing(it[1], casing)]; - }), - ); - - const enumTypes = Object.values(schema.enums).reduce((acc, cur) => { - acc.add(`${cur.schema}.${cur.name}`); - return acc; - }, new Set()); - - const imports = Object.values(schema.tables).reduce( - (res, it) => { - const idxImports = Object.values(it.indexes).map((idx) => (idx.isUnique ? 'uniqueIndex' : 'index')); - const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); - if (Object.values(it.foreignKeys).some((it) => isCyclic(it) && !isSelf(it))) { - res.pg.push('type AnyPgColumn'); - } - const pkImports = Object.values(it.compositePrimaryKeys).map((it) => 'primaryKey'); - const uniqueImports = Object.values(it.uniqueConstraints).map((it) => 'unique'); - - const checkImports = Object.values(it.checkConstraints).map( - (it) => 'check', - ); - - const policiesImports = Object.values(it.policies).map( - (it) => 'pgPolicy', - ); - - if (it.schema && it.schema !== 'public' && it.schema !== '') { - res.pg.push('pgSchema'); - } - - res.pg.push(...idxImports); - res.pg.push(...fkImpots); - res.pg.push(...pkImports); - res.pg.push(...uniqueImports); - res.pg.push(...policiesImports); - res.pg.push(...checkImports); - - const columnImports = Object.values(it.columns) - .map((col) => { - let patched: string = (importsPatch[col.type] || col.type).replace('[]', ''); - patched = patched === 'double precision' ? 'doublePrecision' : patched; - patched = patched.startsWith('varchar(') ? 'varchar' : patched; - patched = patched.startsWith('char(') ? 'char' : patched; - patched = patched.startsWith('numeric(') ? 'numeric' : patched; - patched = patched.startsWith('time(') ? 'time' : patched; - patched = patched.startsWith('timestamp(') ? 'timestamp' : patched; - patched = patched.startsWith('vector(') ? 'vector' : patched; - patched = patched.startsWith('geometry(') ? 'geometry' : patched; - return patched; - }) - .filter((type) => { - return pgImportsList.has(type); - }); - - res.pg.push(...columnImports); - return res; - }, - { pg: [] as string[] }, - ); - - Object.values(schema.views).forEach((it) => { - if (it.schema && it.schema !== 'public' && it.schema !== '') { - imports.pg.push('pgSchema'); - } else if (it.schema === 'public') { - it.materialized ? imports.pg.push('pgMaterializedView') : imports.pg.push('pgView'); - } - - Object.values(it.columns).forEach(() => { - const columnImports = Object.values(it.columns) - .map((col) => { - let patched: string = (importsPatch[col.type] || col.type).replace('[]', ''); - patched = patched === 'double precision' ? 'doublePrecision' : patched; - patched = patched.startsWith('varchar(') ? 'varchar' : patched; - patched = patched.startsWith('char(') ? 'char' : patched; - patched = patched.startsWith('numeric(') ? 'numeric' : patched; - patched = patched.startsWith('time(') ? 'time' : patched; - patched = patched.startsWith('timestamp(') ? 'timestamp' : patched; - patched = patched.startsWith('vector(') ? 'vector' : patched; - patched = patched.startsWith('geometry(') ? 'geometry' : patched; - return patched; - }) - .filter((type) => { - return pgImportsList.has(type); - }); - - imports.pg.push(...columnImports); - }); - }); - - Object.values(schema.sequences).forEach((it) => { - if (it.schema && it.schema !== 'public' && it.schema !== '') { - imports.pg.push('pgSchema'); - } else if (it.schema === 'public') { - imports.pg.push('pgSequence'); - } - }); - - Object.values(schema.enums).forEach((it) => { - if (it.schema && it.schema !== 'public' && it.schema !== '') { - imports.pg.push('pgSchema'); - } else if (it.schema === 'public') { - imports.pg.push('pgEnum'); - } - }); - - if (Object.keys(schema.roles).length > 0) { - imports.pg.push('pgRole'); - } - - const enumStatements = Object.values(schema.enums) - .map((it) => { - const enumSchema = schemas[it.schema]; - // const func = schema || schema === "public" ? "pgTable" : schema; - const paramName = paramNameFor(it.name, enumSchema); - - const func = enumSchema ? `${enumSchema}.enum` : 'pgEnum'; - - const values = Object.values(it.values) - .map((it) => `'${unescapeSingleQuotes(it, false)}'`) - .join(', '); - return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", [${values}])\n`; - }) - .join('') - .concat('\n'); - - const sequencesStatements = Object.values(schema.sequences) - .map((it) => { - const seqSchema = schemas[it.schema]; - const paramName = paramNameFor(it.name, seqSchema); - - const func = seqSchema ? `${seqSchema}.sequence` : 'pgSequence'; - - let params = ''; - - if (it.startWith) { - params += `, startWith: "${it.startWith}"`; - } - if (it.increment) { - params += `, increment: "${it.increment}"`; - } - if (it.minValue) { - params += `, minValue: "${it.minValue}"`; - } - if (it.maxValue) { - params += `, maxValue: "${it.maxValue}"`; - } - if (it.cache) { - params += `, cache: "${it.cache}"`; - } - if (it.cycle) { - params += `, cycle: true`; - } else { - params += `, cycle: false`; - } - - return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}"${ - params ? `, { ${params.trimChar(',')} }` : '' - })\n`; - }) - .join('') - .concat(''); - - const schemaStatements = Object.entries(schemas) - // .filter((it) => it[0] !== "public") - .map((it) => { - return `export const ${it[1]} = pgSchema("${it[0]}");\n`; - }) - .join(''); - - const rolesNameToTsKey: Record = {}; - - const rolesStatements = Object.entries(schema.roles) - .map((it) => { - const fields = it[1]; - rolesNameToTsKey[fields.name] = it[0]; - return `export const ${withCasing(it[0], casing)} = pgRole("${fields.name}", ${ - !fields.createDb && !fields.createRole && fields.inherit - ? '' - : `${ - `, { ${fields.createDb ? `createDb: true,` : ''}${fields.createRole ? ` createRole: true,` : ''}${ - !fields.inherit ? ` inherit: false ` : '' - }`.trimChar(',') - }}` - } );\n`; - }) - .join(''); - - const tableStatements = Object.values(schema.tables).map((table) => { - const tableSchema = schemas[table.schema]; - const paramName = paramNameFor(table.name, tableSchema); - - const func = tableSchema ? `${tableSchema}.table` : 'pgTable'; - let statement = `export const ${withCasing(paramName, casing)} = ${func}("${table.name}", {\n`; - statement += createTableColumns( - table.name, - Object.values(table.columns), - Object.values(table.foreignKeys), - enumTypes, - schemas, - casing, - schema.internal, - ); - statement += '}'; - - // more than 2 fields or self reference or cyclic - // Andrii: I switched this one off until we will get custom names in .references() - // const filteredFKs = Object.values(table.foreignKeys).filter((it) => { - // return it.columnsFrom.length > 1 || isSelf(it); - // }); - - if ( - Object.keys(table.indexes).length > 0 - || Object.values(table.foreignKeys).length > 0 - || Object.values(table.policies).length > 0 - || Object.keys(table.compositePrimaryKeys).length > 0 - || Object.keys(table.uniqueConstraints).length > 0 - || Object.keys(table.checkConstraints).length > 0 - ) { - statement += ', '; - statement += '(table) => ['; - statement += createTableIndexes(table.name, Object.values(table.indexes), casing); - statement += createTableFKs(Object.values(table.foreignKeys), schemas, casing); - statement += createTablePKs( - Object.values(table.compositePrimaryKeys), - casing, - ); - statement += createTableUniques( - Object.values(table.uniqueConstraints), - casing, - ); - statement += createTablePolicies( - Object.values(table.policies), - casing, - rolesNameToTsKey, - ); - statement += createTableChecks( - Object.values(table.checkConstraints), - casing, - ); - statement += '\n]'; - } - - statement += ');'; - return statement; - }); - - const viewsStatements = Object.values(schema.views) - .map((it) => { - const viewSchema = schemas[it.schema]; - - const paramName = paramNameFor(it.name, viewSchema); - - const func = viewSchema - ? (it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view`) - : it.materialized - ? 'pgMaterializedView' - : 'pgView'; - - const withOption = it.with ?? ''; - - const as = `sql\`${it.definition}\``; - - const tablespace = it.tablespace ?? ''; - - const columns = createTableColumns( - '', - Object.values(it.columns), - [], - enumTypes, - schemas, - casing, - schema.internal, - ); - - let statement = `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", {${columns}})`; - statement += tablespace ? `.tablespace("${tablespace}")` : ''; - statement += withOption ? `.with(${JSON.stringify(withOption)})` : ''; - statement += `.as(${as});`; - - return statement; - }) - .join('\n\n'); - - const uniquePgImports = ['pgTable', ...new Set(imports.pg)]; - - const importsTs = `import { ${ - uniquePgImports.join( - ', ', - ) - } } from "drizzle-orm/pg-core" -import { sql } from "drizzle-orm"\n\n`; - - let decalrations = schemaStatements; - decalrations += rolesStatements; - decalrations += enumStatements; - decalrations += sequencesStatements; - decalrations += '\n'; - decalrations += tableStatements.join('\n\n'); - decalrations += '\n'; - decalrations += viewsStatements; - - const file = importsTs + decalrations; - - // for drizzle studio query runner - const schemaEntry = ` - { - ${ - Object.values(schema.tables) - .map((it) => withCasing(it.name, casing)) - .join(',\n') - } - } - `; - - return { file, imports: importsTs, decalrations, schemaEntry }; -}; - -const isCyclic = (fk: ForeignKey) => { - const key = `${fk.tableFrom}-${fk.tableTo}`; - const reverse = `${fk.tableTo}-${fk.tableFrom}`; - return relations.has(key) && relations.has(reverse); -}; - -const isSelf = (fk: ForeignKey) => { - return fk.tableFrom === fk.tableTo; -}; - -const buildArrayDefault = (defaultValue: string, typeName: string): string => { - if (typeof defaultValue === 'string' && !(defaultValue.startsWith('{') || defaultValue.startsWith("'{"))) { - return `sql\`${defaultValue}\``; - } - defaultValue = defaultValue.substring(2, defaultValue.length - 2); - return `[${ - defaultValue - .split(/\s*,\s*/g) - .map((value) => { - // if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(typeName)) { - // return value; - // } else if (typeName === 'interval') { - // return value.replaceAll('"', "'"); - // } else if (typeName === 'boolean') { - // return value === 't' ? 'true' : 'false'; - if (typeName === 'json' || typeName === 'jsonb') { - return value.substring(1, value.length - 1).replaceAll('\\', ''); - } - return value; - // } - }) - .join(', ') - }]`; -}; - -const mapDefault = ( - tableName: string, - type: string, - name: string, - enumTypes: Set, - typeSchema: string, - defaultValue?: any, - internals?: PgKitInternals, -) => { - const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; - const isArray = internals?.tables[tableName]?.columns[name]?.isArray ?? false; - const lowered = type.toLowerCase().replace('[]', ''); - - if (isArray) { - return typeof defaultValue !== 'undefined' ? `.default(${buildArrayDefault(defaultValue, lowered)})` : ''; - } - - if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { - return typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(unescapeSingleQuotes(defaultValue, true), isExpression)})` - : ''; - } - - if (lowered.startsWith('bytea')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('integer')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('smallint')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('bigint')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('boolean')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('double precision')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('real')) { - return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered.startsWith('uuid')) { - return defaultValue === 'gen_random_uuid()' - ? '.defaultRandom()' - : defaultValue - ? `.default(sql\`${defaultValue}\`)` - : ''; - } - - if (lowered.startsWith('numeric')) { - defaultValue = defaultValue - ? (defaultValue.startsWith(`'`) && defaultValue.endsWith(`'`) - ? defaultValue.substring(1, defaultValue.length - 1) - : defaultValue) - : undefined; - return defaultValue ? `.default('${mapColumnDefault(defaultValue, isExpression)}')` : ''; - } - - if (lowered.startsWith('timestamp')) { - return defaultValue === 'now()' - ? '.defaultNow()' - : /^'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{2}(:\d{2})?)?'$/.test(defaultValue) // Matches 'YYYY-MM-DD HH:MI:SS', 'YYYY-MM-DD HH:MI:SS.FFFFFF', 'YYYY-MM-DD HH:MI:SS+TZ', 'YYYY-MM-DD HH:MI:SS.FFFFFF+TZ' and 'YYYY-MM-DD HH:MI:SS+HH:MI' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : defaultValue - ? `.default(sql\`${defaultValue}\`)` - : ''; - } - - if (lowered.startsWith('time')) { - return defaultValue === 'now()' - ? '.defaultNow()' - : /^'\d{2}:\d{2}(:\d{2})?(\.\d+)?'$/.test(defaultValue) // Matches 'HH:MI', 'HH:MI:SS' and 'HH:MI:SS.FFFFFF' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : defaultValue - ? `.default(sql\`${defaultValue}\`)` - : ''; - } - - if (lowered.startsWith('interval')) { - return defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - } - - if (lowered === 'date') { - return defaultValue === 'now()' - ? '.defaultNow()' - : /^'\d{4}-\d{2}-\d{2}'$/.test(defaultValue) // Matches 'YYYY-MM-DD' - ? `.default(${defaultValue})` - : defaultValue - ? `.default(sql\`${defaultValue}\`)` - : ''; - } - - if (lowered.startsWith('text')) { - return typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(unescapeSingleQuotes(defaultValue, true), isExpression)})` - : ''; - } - - if (lowered.startsWith('jsonb')) { - const def = typeof defaultValue !== 'undefined' - ? defaultValue.replace(/::(.*?)(?, - typeSchema: string, - casing: Casing, - defaultValue?: any, - internals?: PgKitInternals, -) => { - const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; - const lowered = type.toLowerCase().replace('[]', ''); - - if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { - let out = `${withCasing(name, casing)}: ${withCasing(paramNameFor(type.replace('[]', ''), typeSchema), casing)}(${ - dbColumnName({ name, casing }) - })`; - return out; - } - - if (lowered.startsWith('serial')) { - return `${withCasing(name, casing)}: serial(${dbColumnName({ name, casing })})`; - } - - if (lowered.startsWith('smallserial')) { - return `${withCasing(name, casing)}: smallserial(${dbColumnName({ name, casing })})`; - } - - if (lowered.startsWith('bigserial')) { - return `${withCasing(name, casing)}: bigserial(${ - dbColumnName({ name, casing, withMode: true }) - }{ mode: "bigint" })`; - } - - if (lowered.startsWith('bytea')) { - let out = `${withCasing(name, casing)}: bytea(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('integer')) { - let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('smallint')) { - let out = `${withCasing(name, casing)}: smallint(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('bigint')) { - let out = `// You can use { mode: "bigint" } if numbers are exceeding js number limitations\n\t`; - out += `${withCasing(name, casing)}: bigint(${dbColumnName({ name, casing, withMode: true })}{ mode: "number" })`; - return out; - } - - if (lowered.startsWith('boolean')) { - let out = `${withCasing(name, casing)}: boolean(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('double precision')) { - let out = `${withCasing(name, casing)}: doublePrecision(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('real')) { - let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('uuid')) { - let out = `${withCasing(name, casing)}: uuid(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('numeric')) { - let params: { precision: string | undefined; scale: string | undefined } | undefined; - - if (lowered.length > 7) { - const [precision, scale] = lowered.slice(8, lowered.length - 1).split(','); - params = { precision, scale }; - } - - let out = params - ? `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing, withMode: true })}${timeConfig(params)})` - : `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('timestamp')) { - const withTimezone = lowered.includes('with time zone'); - // const split = lowered.split(" "); - let precision = lowered.startsWith('timestamp(') - ? Number(lowered.split(' ')[0].substring('timestamp('.length, lowered.split(' ')[0].length - 1)) - : null; - precision = precision ? precision : null; - - const params = timeConfig({ - precision, - withTimezone, - mode: "'string'", - }); - - let out = params - ? `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing, withMode: true })}${params})` - : `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('time')) { - const withTimezone = lowered.includes('with time zone'); - - let precision = lowered.startsWith('time(') - ? Number(lowered.split(' ')[0].substring('time('.length, lowered.split(' ')[0].length - 1)) - : null; - precision = precision ? precision : null; - - const params = timeConfig({ precision, withTimezone }); - - let out = params - ? `${withCasing(name, casing)}: time(${dbColumnName({ name, casing, withMode: true })}${params})` - : `${withCasing(name, casing)}: time(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('interval')) { - // const withTimezone = lowered.includes("with time zone"); - // const split = lowered.split(" "); - // let precision = split.length >= 2 ? Number(split[1].substring(1, 2)) : null; - // precision = precision ? precision : null; - - const params = intervalConfig(lowered); - - let out = params - ? `${withCasing(name, casing)}: interval(${dbColumnName({ name, casing, withMode: true })}${params})` - : `${withCasing(name, casing)}: interval(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered === 'date') { - let out = `${withCasing(name, casing)}: date(${dbColumnName({ name, casing })})`; - - return out; - } - - if (lowered.startsWith('text')) { - let out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('jsonb')) { - let out = `${withCasing(name, casing)}: jsonb(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('json')) { - let out = `${withCasing(name, casing)}: json(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('inet')) { - let out = `${withCasing(name, casing)}: inet(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('cidr')) { - let out = `${withCasing(name, casing)}: cidr(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('macaddr8')) { - let out = `${withCasing(name, casing)}: macaddr8(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('macaddr')) { - let out = `${withCasing(name, casing)}: macaddr(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('varchar')) { - let out: string; - if (lowered.length !== 7) { - out = `${withCasing(name, casing)}: varchar(${dbColumnName({ name, casing, withMode: true })}{ length: ${ - lowered.substring(8, lowered.length - 1) - } })`; - } else { - out = `${withCasing(name, casing)}: varchar(${dbColumnName({ name, casing })})`; - } - - return out; - } - - if (lowered.startsWith('point')) { - let out: string = `${withCasing(name, casing)}: point(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('line')) { - let out: string = `${withCasing(name, casing)}: point(${dbColumnName({ name, casing })})`; - return out; - } - - if (lowered.startsWith('geometry')) { - let out: string = ''; - - let isGeoUnknown = false; - - if (lowered.length !== 8) { - const geometryOptions = lowered.slice(9, -1).split(','); - if (geometryOptions.length === 1 && geometryOptions[0] !== '') { - out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing, withMode: true })}{ type: "${ - geometryOptions[0] - }" })`; - } else if (geometryOptions.length === 2) { - out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing, withMode: true })}{ type: "${ - geometryOptions[0] - }", srid: ${geometryOptions[1]} })`; - } else { - isGeoUnknown = true; - } - } else { - out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing })})`; - } - - if (isGeoUnknown) { - let unknown = - `// TODO: failed to parse geometry type because found more than 2 options inside geometry function '${type}'\n// Introspect is currently supporting only type and srid options\n`; - unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; - return unknown; - } - return out; - } - - if (lowered.startsWith('vector')) { - let out: string; - if (lowered.length !== 6) { - out = `${withCasing(name, casing)}: vector(${dbColumnName({ name, casing, withMode: true })}{ dimensions: ${ - lowered.substring(7, lowered.length - 1) - } })`; - } else { - out = `${withCasing(name, casing)}: vector(${dbColumnName({ name, casing })})`; - } - - return out; - } - - if (lowered.startsWith('char')) { - let out: string; - if (lowered.length !== 4) { - out = `${withCasing(name, casing)}: char(${dbColumnName({ name, casing, withMode: true })}{ length: ${ - lowered.substring(5, lowered.length - 1) - } })`; - } else { - out = `${withCasing(name, casing)}: char(${dbColumnName({ name, casing })})`; - } - - return out; - } - - let unknown = `// TODO: failed to parse database type '${type}'\n`; - unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; - return unknown; -}; - -const dimensionsInArray = (size?: number): string => { - let res = ''; - if (typeof size === 'undefined') return res; - for (let i = 0; i < size; i++) { - res += '.array()'; - } - return res; -}; - -const createTableColumns = ( - tableName: string, - columns: Column[], - fks: ForeignKey[], - enumTypes: Set, - schemas: Record, - casing: Casing, - internals: PgKitInternals, -): string => { - let statement = ''; - - // no self refs and no cyclic - const oneColumnsFKs = Object.values(fks) - .filter((it) => { - return !isSelf(it); - }) - .filter((it) => it.columnsFrom.length === 1); - - const fkByColumnName = oneColumnsFKs.reduce((res, it) => { - const arr = res[it.columnsFrom[0]] || []; - arr.push(it); - res[it.columnsFrom[0]] = arr; - return res; - }, {} as Record); - - columns.forEach((it) => { - const columnStatement = column( - tableName, - it.type, - it.name, - enumTypes, - it.typeSchema ?? 'public', - casing, - it.default, - internals, - ); - statement += '\t'; - statement += columnStatement; - // Provide just this in column function - if (internals?.tables[tableName]?.columns[it.name]?.isArray) { - statement += dimensionsInArray(internals?.tables[tableName]?.columns[it.name]?.dimensions); - } - statement += mapDefault(tableName, it.type, it.name, enumTypes, it.typeSchema ?? 'public', it.default, internals); - statement += it.primaryKey ? '.primaryKey()' : ''; - statement += it.notNull && !it.identity ? '.notNull()' : ''; - - statement += it.identity ? generateIdentityParams(it.identity) : ''; - - statement += it.generated ? `.generatedAlwaysAs(sql\`${it.generated.as}\`)` : ''; - - // const fks = fkByColumnName[it.name]; - // Andrii: I switched it off until we will get a custom naem setting in references - // if (fks) { - // const fksStatement = fks - // .map((it) => { - // const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; - // const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; - // const params = { onDelete, onUpdate }; - - // const typeSuffix = isCyclic(it) ? ': AnyPgColumn' : ''; - - // const paramsStr = objToStatement2(params); - // const tableSchema = schemas[it.schemaTo || '']; - // const paramName = paramNameFor(it.tableTo, tableSchema); - // if (paramsStr) { - // return `.references(()${typeSuffix} => ${ - // withCasing( - // paramName, - // casing, - // ) - // }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; - // } - // return `.references(()${typeSuffix} => ${ - // withCasing( - // paramName, - // casing, - // ) - // }.${withCasing(it.columnsTo[0], casing)})`; - // }) - // .join(''); - // statement += fksStatement; - // } - - statement += ',\n'; - }); - - return statement; -}; - -const createTableIndexes = (tableName: string, idxs: Index[], casing: Casing): string => { - let statement = ''; - - idxs.forEach((it) => { - // we have issue when index is called as table called - let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; - idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; - - idxKey = withCasing(idxKey, casing); - - const indexGeneratedName = indexName( - tableName, - it.columns.map((it) => it.expression), - ); - const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; - - statement += `\n\t`; - statement += it.isUnique ? 'uniqueIndex(' : 'index('; - statement += `${escapedIndexName})`; - statement += `${it.concurrently ? `.concurrently()` : ''}`; - - statement += `.using("${it.method}", ${ - it.columns - .map((it) => { - if (it.isExpression) { - return `sql\`${it.expression}\``; - } else { - return `table.${withCasing(it.expression, casing)}${it.asc ? '.asc()' : '.desc()'}${ - it.nulls === 'first' ? '.nullsFirst()' : '.nullsLast()' - }${ - it.opclass - ? `.op("${it.opclass}")` - : '' - }`; - } - }) - .join(', ') - })`; - statement += it.where ? `.where(sql\`${it.where}\`)` : ''; - - function reverseLogic(mappedWith: Record): string { - let reversedString = '{'; - for (const key in mappedWith) { - if (mappedWith.hasOwnProperty(key)) { - reversedString += `${key}: "${mappedWith[key]}",`; - } - } - reversedString = reversedString.length > 1 ? reversedString.slice(0, reversedString.length - 1) : reversedString; - return `${reversedString}}`; - } - - statement += it.with && Object.keys(it.with).length > 0 ? `.with(${reverseLogic(it.with)})` : ''; - statement += `,`; - }); - - return statement; -}; - -const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { - let statement = ''; - - pks.forEach((it) => { - statement += `\n\t`; - statement += 'primaryKey({ columns: ['; - statement += `${ - it.columns - .map((c) => { - return `table.${withCasing(c, casing)}`; - }) - .join(', ') - }]${it.name ? `, name: "${it.name}"` : ''}}`; - statement += ')'; - statement += `,`; - }); - - return statement; -}; - -// get a map of db role name to ts key -// if to by key is in this map - no quotes, otherwise - quotes - -const createTablePolicies = ( - policies: Policy[], - casing: Casing, - rolesNameToTsKey: Record = {}, -): string => { - let statement = ''; - - policies.forEach((it) => { - const idxKey = withCasing(it.name, casing); - - const mappedItTo = it.to?.map((v) => { - return rolesNameToTsKey[v] ? withCasing(rolesNameToTsKey[v], casing) : `"${v}"`; - }); - - statement += `\n\t`; - statement += 'pgPolicy('; - statement += `"${it.name}", { `; - statement += `as: "${it.as?.toLowerCase()}", for: "${it.for?.toLowerCase()}", to: [${mappedItTo?.join(', ')}]${ - it.using ? `, using: sql\`${it.using}\`` : '' - }${it.withCheck ? `, withCheck: sql\`${it.withCheck}\` ` : ''}`; - statement += ` }),`; - }); - - return statement; -}; - -const createTableUniques = ( - unqs: UniqueConstraint[], - casing: Casing, -): string => { - let statement = ''; - - unqs.forEach((it) => { - statement += `\n\t`; - statement += 'unique('; - statement += `"${it.name}")`; - statement += `.on(${it.columns.map((it) => `table.${withCasing(it, casing)}`).join(', ')})`; - statement += it.nullsNotDistinct ? `.nullsNotDistinct()` : ''; - statement += `,`; - }); - - return statement; -}; - -const createTableChecks = ( - checkConstraints: CheckConstraint[], - casing: Casing, -) => { - let statement = ''; - - checkConstraints.forEach((it) => { - statement += `\n\t`; - statement += 'check('; - statement += `"${it.name}", `; - statement += `sql\`${it.value}\`)`; - statement += `,`; - }); - - return statement; -}; - -const createTableFKs = (fks: ForeignKey[], schemas: Record, casing: Casing): string => { - let statement = ''; - - fks.forEach((it) => { - const tableSchema = schemas[it.schemaTo || '']; - const paramName = paramNameFor(it.tableTo, tableSchema); - - const isSelf = it.tableTo === it.tableFrom; - const tableTo = isSelf ? 'table' : `${withCasing(paramName, casing)}`; - statement += `\n\t`; - statement += `foreignKey({\n`; - statement += `\t\t\tcolumns: [${it.columnsFrom.map((i) => `table.${withCasing(i, casing)}`).join(', ')}],\n`; - statement += `\t\t\tforeignColumns: [${ - it.columnsTo.map((i) => `${tableTo}.${withCasing(i, casing)}`).join(', ') - }],\n`; - statement += `\t\t\tname: "${it.name}"\n`; - statement += `\t\t})`; - - statement += it.onUpdate && it.onUpdate !== 'no action' ? `.onUpdate("${it.onUpdate}")` : ''; - - statement += it.onDelete && it.onDelete !== 'no action' ? `.onDelete("${it.onDelete}")` : ''; - - statement += `,`; - }); - - return statement; -}; diff --git a/drizzle-kit/src/introspect-singlestore.ts b/drizzle-kit/src/introspect-singlestore.ts deleted file mode 100644 index e39c0fe194..0000000000 --- a/drizzle-kit/src/introspect-singlestore.ts +++ /dev/null @@ -1,913 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unsafe-argument */ -import { toCamelCase } from 'drizzle-orm/casing'; -import './@types/utils'; -import type { Casing } from './cli/validations/common'; -import { assertUnreachable } from './global'; -import { - Column, - Index, - PrimaryKey, - SingleStoreSchema, - SingleStoreSchemaInternal, - UniqueConstraint, -} from './serializer/singlestoreSchema'; -import { indexName } from './serializer/singlestoreSerializer'; - -// time precision to fsp -// {mode: "string"} for timestamp by default - -const singlestoreImportsList = new Set([ - 'singlestoreTable', - 'singlestoreEnum', - 'bigint', - 'binary', - 'boolean', - 'char', - 'date', - 'datetime', - 'decimal', - 'double', - 'float', - 'int', - 'json', - // TODO: add new type BSON - // TODO: add new type Blob - // TODO: add new type UUID - // TODO: add new type GUID - // TODO: add new type Vector - // TODO: add new type GeoPoint - 'mediumint', - 'real', - 'serial', - 'smallint', - 'text', - 'tinytext', - 'mediumtext', - 'longtext', - 'time', - 'timestamp', - 'tinyint', - 'varbinary', - 'varchar', - 'vector', - 'year', - 'enum', -]); - -const objToStatement = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `"${it}": "${json[it]}"`).join(', '); - statement += ' }'; - return statement; -}; - -const objToStatement2 = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys - statement += ' }'; - return statement; -}; - -const timeConfig = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); - statement += ' }'; - return statement; -}; - -const binaryConfig = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); - statement += ' }'; - return statement; -}; - -const importsPatch = { - 'double precision': 'doublePrecision', - 'timestamp without time zone': 'timestamp', -} as Record; - -const escapeColumnKey = (value: string) => { - if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { - return `"${value}"`; - } - return value; -}; - -const prepareCasing = (casing?: Casing) => (value: string) => { - if (casing === 'preserve') { - return escapeColumnKey(value); - } - if (casing === 'camel') { - return escapeColumnKey(value.camelCase()); - } - - assertUnreachable(casing); -}; - -const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { - if (casing === 'preserve') { - return ''; - } - if (casing === 'camel') { - return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; - } - - assertUnreachable(casing); -}; - -export const schemaToTypeScript = ( - schema: SingleStoreSchemaInternal, - casing: Casing, -) => { - const withCasing = prepareCasing(casing); - - const imports = Object.values(schema.tables).reduce( - (res, it) => { - const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); - const pkImports = Object.values(it.compositePrimaryKeys).map( - (it) => 'primaryKey', - ); - const uniqueImports = Object.values(it.uniqueConstraints).map( - (it) => 'unique', - ); - - res.singlestore.push(...idxImports); - res.singlestore.push(...pkImports); - res.singlestore.push(...uniqueImports); - - const columnImports = Object.values(it.columns) - .map((col) => { - let patched = importsPatch[col.type] ?? col.type; - patched = patched.startsWith('varchar(') ? 'varchar' : patched; - patched = patched.startsWith('char(') ? 'char' : patched; - patched = patched.startsWith('binary(') ? 'binary' : patched; - patched = patched.startsWith('decimal(') ? 'decimal' : patched; - patched = patched.startsWith('smallint(') ? 'smallint' : patched; - patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched; - patched = patched.startsWith('datetime(') ? 'datetime' : patched; - patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; - patched = patched.startsWith('int(') ? 'int' : patched; - patched = patched.startsWith('double(') ? 'double' : patched; - patched = patched.startsWith('float(') ? 'float' : patched; - patched = patched.startsWith('int unsigned') ? 'int' : patched; - patched = patched.startsWith('tinyint(') ? 'tinyint' : patched; - patched = patched.startsWith('mediumint(') ? 'mediumint' : patched; - patched = patched.startsWith('bigint(') ? 'bigint' : patched; - patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; - patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; - patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; - patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; - return patched; - }) - .filter((type) => { - return singlestoreImportsList.has(type); - }); - - res.singlestore.push(...columnImports); - return res; - }, - { singlestore: [] as string[] }, - ); - - /* Object.values(schema.views).forEach((it) => { - imports.singlestore.push('singlestoreView'); - - const columnImports = Object.values(it.columns) - .map((col) => { - let patched = importsPatch[col.type] ?? col.type; - patched = patched.startsWith('varchar(') ? 'varchar' : patched; - patched = patched.startsWith('char(') ? 'char' : patched; - patched = patched.startsWith('binary(') ? 'binary' : patched; - patched = patched.startsWith('decimal(') ? 'decimal' : patched; - patched = patched.startsWith('smallint(') ? 'smallint' : patched; - patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched; - patched = patched.startsWith('datetime(') ? 'datetime' : patched; - patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; - patched = patched.startsWith('int(') ? 'int' : patched; - patched = patched.startsWith('double(') ? 'double' : patched; - patched = patched.startsWith('float(') ? 'float' : patched; - patched = patched.startsWith('int unsigned') ? 'int' : patched; - patched = patched.startsWith('tinyint(') ? 'tinyint' : patched; - patched = patched.startsWith('mediumint(') ? 'mediumint' : patched; - patched = patched.startsWith('bigint(') ? 'bigint' : patched; - patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; - patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; - patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; - patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; - return patched; - }) - .filter((type) => { - return singlestoreImportsList.has(type); - }); - - imports.singlestore.push(...columnImports); - }); */ - - const tableStatements = Object.values(schema.tables).map((table) => { - const func = 'singlestoreTable'; - let statement = ''; - if (imports.singlestore.includes(withCasing(table.name))) { - statement = `// Table name is in conflict with ${ - withCasing( - table.name, - ) - } import.\n// Please change to any other name, that is not in imports list\n`; - } - statement += `export const ${withCasing(table.name)} = ${func}("${table.name}", {\n`; - statement += createTableColumns( - Object.values(table.columns), - withCasing, - casing, - table.name, - schema, - ); - statement += '}'; - - if ( - Object.keys(table.indexes).length > 0 - || Object.keys(table.compositePrimaryKeys).length > 0 - || Object.keys(table.uniqueConstraints).length > 0 - ) { - statement += ',\n'; - statement += '(table) => ['; - statement += createTableIndexes( - table.name, - Object.values(table.indexes), - withCasing, - ); - statement += createTablePKs( - Object.values(table.compositePrimaryKeys), - withCasing, - ); - statement += createTableUniques( - Object.values(table.uniqueConstraints), - withCasing, - ); - statement += '\n]'; - } - - statement += ');'; - return statement; - }); - - /* const viewsStatements = Object.values(schema.views).map((view) => { - const { columns, name, algorithm, definition, sqlSecurity, withCheckOption } = view; - const func = 'singlestoreView'; - let statement = ''; - - if (imports.singlestore.includes(withCasing(name))) { - statement = `// Table name is in conflict with ${ - withCasing( - view.name, - ) - } import.\n// Please change to any other name, that is not in imports list\n`; - } - statement += `export const ${withCasing(name)} = ${func}("${name}", {\n`; - statement += createTableColumns( - Object.values(columns), - withCasing, - casing, - name, - schema, - ); - statement += '})'; - - statement += algorithm ? `.algorithm("${algorithm}")` : ''; - statement += sqlSecurity ? `.sqlSecurity("${sqlSecurity}")` : ''; - statement += withCheckOption ? `.withCheckOption("${withCheckOption}")` : ''; - statement += `.as(sql\`${definition?.replaceAll('`', '\\`')}\`);`; - - return statement; - }); */ - - const uniqueSingleStoreImports = [ - 'singlestoreTable', - 'singlestoreSchema', - 'AnySingleStoreColumn', - ...new Set(imports.singlestore), - ]; - const importsTs = `import { ${ - uniqueSingleStoreImports.join( - ', ', - ) - } } from "drizzle-orm/singlestore-core"\nimport { sql } from "drizzle-orm"\n\n`; - - let decalrations = ''; - decalrations += tableStatements.join('\n\n'); - decalrations += '\n'; - /* decalrations += viewsStatements.join('\n\n'); */ - - const file = importsTs + decalrations; - - const schemaEntry = ` - { - ${ - Object.values(schema.tables) - .map((it) => withCasing(it.name)) - .join(',') - } - } - `; - - return { - file, // backward compatible, print to file - imports: importsTs, - decalrations, - schemaEntry, - }; -}; - -const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { - if (isExpression) { - return `sql\`${defaultValue}\``; - } - - return defaultValue; -}; - -const mapColumnDefaultForJson = (defaultValue: any) => { - if ( - typeof defaultValue === 'string' - && defaultValue.startsWith("('") - && defaultValue.endsWith("')") - ) { - return defaultValue.substring(2, defaultValue.length - 2); - } - - return defaultValue; -}; - -const column = ( - type: string, - name: string, - casing: (value: string) => string, - rawCasing: Casing, - defaultValue?: any, - autoincrement?: boolean, - onUpdate?: boolean, - isExpression?: boolean, -) => { - let lowered = type; - if (!type.startsWith('enum(')) { - lowered = type.toLowerCase(); - } - - if (lowered === 'serial') { - return `${casing(name)}: serial(${dbColumnName({ name, casing: rawCasing })})`; - } - - if (lowered.startsWith('int')) { - const isUnsigned = lowered.includes('unsigned'); - const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); - let out = `${casing(name)}: int(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; - out += autoincrement ? `.autoincrement()` : ''; - out += typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('tinyint')) { - const isUnsigned = lowered.includes('unsigned'); - const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); - let out: string = `${casing(name)}: tinyint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; - out += autoincrement ? `.autoincrement()` : ''; - out += typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('smallint')) { - const isUnsigned = lowered.includes('unsigned'); - const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); - let out = `${casing(name)}: smallint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; - out += autoincrement ? `.autoincrement()` : ''; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('mediumint')) { - const isUnsigned = lowered.includes('unsigned'); - const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); - let out = `${casing(name)}: mediumint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; - out += autoincrement ? `.autoincrement()` : ''; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('bigint')) { - const isUnsigned = lowered.includes('unsigned'); - let out = `${casing(name)}: bigint(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: "number"${ - isUnsigned ? ', unsigned: true' : '' - } })`; - out += autoincrement ? `.autoincrement()` : ''; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered === 'boolean') { - let out = `${casing(name)}: boolean(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('double')) { - let params: - | { precision?: string; scale?: string; unsigned?: boolean } - | undefined; - - if (lowered.length > (lowered.includes('unsigned') ? 15 : 6)) { - const [precision, scale] = lowered - .slice(7, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) - .split(','); - params = { precision, scale }; - } - - if (lowered.includes('unsigned')) { - params = { ...(params ?? {}), unsigned: true }; - } - - const timeConfigParams = params ? timeConfig(params) : undefined; - - let out = params - ? `${casing(name)}: double(${ - dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) - }${timeConfig(params)})` - : `${casing(name)}: double(${dbColumnName({ name, casing: rawCasing })})`; - - // let out = `${name.camelCase()}: double("${name}")`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('float')) { - let params: - | { precision?: string; scale?: string; unsigned?: boolean } - | undefined; - - if (lowered.length > (lowered.includes('unsigned') ? 14 : 5)) { - const [precision, scale] = lowered - .slice(6, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) - .split(','); - params = { precision, scale }; - } - - if (lowered.includes('unsigned')) { - params = { ...(params ?? {}), unsigned: true }; - } - - let out = `${casing(name)}: float(${dbColumnName({ name, casing: rawCasing })}${params ? timeConfig(params) : ''})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered === 'real') { - let out = `${casing(name)}: real(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('timestamp')) { - const keyLength = 'timestamp'.length + 1; - let fsp = lowered.length > keyLength - ? Number(lowered.substring(keyLength, lowered.length - 1)) - : null; - fsp = fsp ? fsp : null; - - const params = timeConfig({ fsp, mode: "'string'" }); - - let out = params - ? `${casing(name)}: timestamp(${ - dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) - }${params})` - : `${casing(name)}: timestamp(${dbColumnName({ name, casing: rawCasing })})`; - - // singlestore has only CURRENT_TIMESTAMP, as I found from docs. But will leave now() for just a case - defaultValue = defaultValue === 'now()' || defaultValue === 'CURRENT_TIMESTAMP' - ? '.defaultNow()' - : defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - - let onUpdateNow = onUpdate ? '.onUpdateNow()' : ''; - out += onUpdateNow; - - return out; - } - - if (lowered.startsWith('time')) { - const keyLength = 'time'.length + 1; - let fsp = lowered.length > keyLength - ? Number(lowered.substring(keyLength, lowered.length - 1)) - : null; - fsp = fsp ? fsp : null; - - const params = timeConfig({ fsp }); - - let out = params - ? `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` - : `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing })})`; - - defaultValue = defaultValue === 'now()' - ? '.defaultNow()' - : defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered === 'date') { - let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t${ - casing( - name, - ) - }: date(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string' })`; - - defaultValue = defaultValue === 'now()' - ? '.defaultNow()' - : defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - // in singlestore text can't have default value. Will leave it in case smth ;) - if (lowered === 'text') { - let out = `${casing(name)}: text(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - // in singlestore text can't have default value. Will leave it in case smth ;) - if (lowered === 'tinytext') { - let out = `${casing(name)}: tinytext(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - // in singlestore text can't have default value. Will leave it in case smth ;) - if (lowered === 'mediumtext') { - let out = `${casing(name)}: mediumtext(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - // in singlestore text can't have default value. Will leave it in case smth ;) - if (lowered === 'longtext') { - let out = `${casing(name)}: longtext(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered === 'year') { - let out = `${casing(name)}: year(${dbColumnName({ name, casing: rawCasing })})`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - // in singlestore json can't have default value. Will leave it in case smth ;) - if (lowered === 'json') { - let out = `${casing(name)}: json(${dbColumnName({ name, casing: rawCasing })})`; - - out += defaultValue - ? `.default(${mapColumnDefaultForJson(defaultValue)})` - : ''; - - return out; - } - - if (lowered.startsWith('varchar')) { - let out: string = `${ - casing( - name, - ) - }: varchar(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ - lowered.substring( - 'varchar'.length + 1, - lowered.length - 1, - ) - } })`; - - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('char')) { - let out: string = `${ - casing( - name, - ) - }: char(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ - lowered.substring( - 'char'.length + 1, - lowered.length - 1, - ) - } })`; - - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('datetime')) { - let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t`; - - const fsp = lowered.startsWith('datetime(') - ? lowered.substring('datetime'.length + 1, lowered.length - 1) - : undefined; - - out = fsp - ? `${ - casing( - name, - ) - }: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string', fsp: ${ - lowered.substring( - 'datetime'.length + 1, - lowered.length - 1, - ) - } })` - : `${casing(name)}: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string'})`; - - defaultValue = defaultValue === 'now()' - ? '.defaultNow()' - : defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered.startsWith('decimal')) { - let params: - | { precision?: string; scale?: string; unsigned?: boolean } - | undefined; - - if (lowered.length > (lowered.includes('unsigned') ? 16 : 7)) { - const [precision, scale] = lowered - .slice(8, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) - .split(','); - params = { precision, scale }; - } - - if (lowered.includes('unsigned')) { - params = { ...(params ?? {}), unsigned: true }; - } - - const timeConfigParams = params ? timeConfig(params) : undefined; - - let out = params - ? `${casing(name)}: decimal(${ - dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) - }${timeConfigParams})` - : `${casing(name)}: decimal(${dbColumnName({ name, casing: rawCasing })})`; - - defaultValue = typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered.startsWith('binary')) { - const keyLength = 'binary'.length + 1; - let length = lowered.length > keyLength - ? Number(lowered.substring(keyLength, lowered.length - 1)) - : null; - length = length ? length : null; - - const params = binaryConfig({ length }); - - let out = params - ? `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` - : `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing })})`; - - defaultValue = defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered.startsWith('enum')) { - const values = lowered.substring('enum'.length + 1, lowered.length - 1); - let out = `${casing(name)}: singlestoreEnum(${ - dbColumnName({ name, casing: rawCasing, withMode: true }) - }[${values}])`; - out += defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - return out; - } - - if (lowered.startsWith('varbinary')) { - const keyLength = 'varbinary'.length + 1; - let length = lowered.length > keyLength - ? Number(lowered.substring(keyLength, lowered.length - 1)) - : null; - length = length ? length : null; - - const params = binaryConfig({ length }); - - let out = params - ? `${casing(name)}: varbinary(${ - dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) - }${params})` - : `${casing(name)}: varbinary(${dbColumnName({ name, casing: rawCasing })})`; - - defaultValue = defaultValue - ? `.default(${mapColumnDefault(defaultValue, isExpression)})` - : ''; - - out += defaultValue; - return out; - } - - if (lowered.startsWith('vector')) { - const [dimensions, elementType] = lowered.substring('vector'.length + 1, lowered.length - 1).split(','); - let out = `${casing(name)}: vector(${ - dbColumnName({ name, casing: rawCasing, withMode: true }) - }{ dimensions: ${dimensions}, elementType: ${elementType} })`; - - out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; - return out; - } - - console.log('uknown', type); - return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; -}; - -const createTableColumns = ( - columns: Column[], - casing: (val: string) => string, - rawCasing: Casing, - tableName: string, - schema: SingleStoreSchemaInternal, -): string => { - let statement = ''; - - columns.forEach((it) => { - statement += '\t'; - statement += column( - it.type, - it.name, - casing, - rawCasing, - it.default, - it.autoincrement, - it.onUpdate, - schema.internal?.tables![tableName]?.columns[it.name] - ?.isDefaultAnExpression ?? false, - ); - statement += it.primaryKey ? '.primaryKey()' : ''; - statement += it.notNull ? '.notNull()' : ''; - - statement += it.generated - ? `.generatedAlwaysAs(sql\`${ - it.generated.as.replace( - /`/g, - '\\`', - ) - }\`, { mode: "${it.generated.type}" })` - : ''; - - statement += ',\n'; - }); - - return statement; -}; - -const createTableIndexes = ( - tableName: string, - idxs: Index[], - casing: (value: string) => string, -): string => { - let statement = ''; - - idxs.forEach((it) => { - let idxKey = it.name.startsWith(tableName) && it.name !== tableName - ? it.name.slice(tableName.length + 1) - : it.name; - idxKey = idxKey.endsWith('_index') - ? idxKey.slice(0, -'_index'.length) + '_idx' - : idxKey; - - idxKey = casing(idxKey); - - const indexGeneratedName = indexName(tableName, it.columns); - const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; - - statement += `\n\t`; - statement += it.isUnique ? 'uniqueIndex(' : 'index('; - statement += `${escapedIndexName})`; - statement += `.on(${ - it.columns - .map((it) => `table.${casing(it)}`) - .join(', ') - }),`; - }); - - return statement; -}; - -const createTableUniques = ( - unqs: UniqueConstraint[], - casing: (value: string) => string, -): string => { - let statement = ''; - - unqs.forEach((it) => { - statement += `\n\t`; - statement += 'unique('; - statement += `"${it.name}")`; - statement += `.on(${ - it.columns - .map((it) => `table.${casing(it)}`) - .join(', ') - }),`; - }); - - return statement; -}; - -const createTablePKs = ( - pks: PrimaryKey[], - casing: (value: string) => string, -): string => { - let statement = ''; - - pks.forEach((it) => { - let idxKey = casing(it.name); - - statement += `\n\t`; - statement += 'primaryKey({ columns: ['; - statement += `${ - it.columns - .map((c) => { - return `table.${casing(c)}`; - }) - .join(', ') - }]${it.name ? `, name: "${it.name}"` : ''}}`; - statement += '),'; - }); - - return statement; -}; diff --git a/drizzle-kit/src/introspect-sqlite.ts b/drizzle-kit/src/introspect-sqlite.ts deleted file mode 100644 index d3aac6f04f..0000000000 --- a/drizzle-kit/src/introspect-sqlite.ts +++ /dev/null @@ -1,533 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unsafe-argument */ -import { toCamelCase } from 'drizzle-orm/casing'; -import './@types/utils'; -import type { Casing } from './cli/validations/common'; -import { assertUnreachable } from './global'; -import { CheckConstraint } from './serializer/mysqlSchema'; -import type { - Column, - ForeignKey, - Index, - PrimaryKey, - SQLiteSchema, - SQLiteSchemaInternal, - UniqueConstraint, -} from './serializer/sqliteSchema'; - -const sqliteImportsList = new Set([ - 'sqliteTable', - 'integer', - 'real', - 'text', - 'numeric', - 'blob', -]); - -export const indexName = (tableName: string, columns: string[]) => { - return `${tableName}_${columns.join('_')}_index`; -}; - -const objToStatement2 = (json: any) => { - json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); - - const keys = Object.keys(json); - if (keys.length === 0) return; - - let statement = '{ '; - statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys - statement += ' }'; - return statement; -}; - -const relations = new Set(); - -const escapeColumnKey = (value: string) => { - if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { - return `"${value}"`; - } - return value; -}; - -const withCasing = (value: string, casing?: Casing) => { - if (casing === 'preserve') { - return escapeColumnKey(value); - } - if (casing === 'camel') { - return escapeColumnKey(value.camelCase()); - } - - return value; -}; - -const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { - if (casing === 'preserve') { - return ''; - } - if (casing === 'camel') { - return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; - } - - assertUnreachable(casing); -}; - -export const schemaToTypeScript = ( - schema: SQLiteSchemaInternal, - casing: Casing, -) => { - // collectFKs - Object.values(schema.tables).forEach((table) => { - Object.values(table.foreignKeys).forEach((fk) => { - const relation = `${fk.tableFrom}-${fk.tableTo}`; - relations.add(relation); - }); - }); - - const imports = Object.values(schema.tables).reduce( - (res, it) => { - const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); - const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); - const pkImports = Object.values(it.compositePrimaryKeys).map( - (it) => 'primaryKey', - ); - const uniqueImports = Object.values(it.uniqueConstraints).map( - (it) => 'unique', - ); - const checkImports = Object.values(it.checkConstraints).map( - (it) => 'check', - ); - - res.sqlite.push(...idxImports); - res.sqlite.push(...fkImpots); - res.sqlite.push(...pkImports); - res.sqlite.push(...uniqueImports); - res.sqlite.push(...checkImports); - - const columnImports = Object.values(it.columns) - .map((col) => { - return col.type; - }) - .filter((type) => { - return sqliteImportsList.has(type); - }); - - res.sqlite.push(...columnImports); - return res; - }, - { sqlite: [] as string[] }, - ); - - Object.values(schema.views).forEach((it) => { - imports.sqlite.push('sqliteView'); - - const columnImports = Object.values(it.columns) - .map((col) => { - return col.type; - }) - .filter((type) => { - return sqliteImportsList.has(type); - }); - - imports.sqlite.push(...columnImports); - }); - - const tableStatements = Object.values(schema.tables).map((table) => { - const func = 'sqliteTable'; - let statement = ''; - if (imports.sqlite.includes(withCasing(table.name, casing))) { - statement = `// Table name is in conflict with ${ - withCasing( - table.name, - casing, - ) - } import.\n// Please change to any other name, that is not in imports list\n`; - } - statement += `export const ${withCasing(table.name, casing)} = ${func}("${table.name}", {\n`; - statement += createTableColumns( - Object.values(table.columns), - Object.values(table.foreignKeys), - casing, - ); - statement += '}'; - - // more than 2 fields or self reference or cyclic - const filteredFKs = Object.values(table.foreignKeys).filter((it) => { - return it.columnsFrom.length > 1 || isSelf(it); - }); - - if ( - Object.keys(table.indexes).length > 0 - || filteredFKs.length > 0 - || Object.keys(table.compositePrimaryKeys).length > 0 - || Object.keys(table.uniqueConstraints).length > 0 - || Object.keys(table.checkConstraints).length > 0 - ) { - statement += ',\n'; - statement += '(table) => ['; - statement += createTableIndexes( - table.name, - Object.values(table.indexes), - casing, - ); - statement += createTableFKs(Object.values(filteredFKs), casing); - statement += createTablePKs( - Object.values(table.compositePrimaryKeys), - casing, - ); - statement += createTableUniques( - Object.values(table.uniqueConstraints), - casing, - ); - statement += createTableChecks( - Object.values(table.checkConstraints), - casing, - ); - statement += '\n]'; - } - - statement += ');'; - return statement; - }); - - const viewsStatements = Object.values(schema.views).map((view) => { - const func = 'sqliteView'; - - let statement = ''; - if (imports.sqlite.includes(withCasing(view.name, casing))) { - statement = `// Table name is in conflict with ${ - withCasing( - view.name, - casing, - ) - } import.\n// Please change to any other name, that is not in imports list\n`; - } - statement += `export const ${withCasing(view.name, casing)} = ${func}("${view.name}", {\n`; - statement += createTableColumns( - Object.values(view.columns), - [], - casing, - ); - statement += '})'; - statement += `.as(sql\`${view.definition?.replaceAll('`', '\\`')}\`);`; - - return statement; - }); - - const uniqueSqliteImports = [ - 'sqliteTable', - 'AnySQLiteColumn', - ...new Set(imports.sqlite), - ]; - - const importsTs = `import { ${ - uniqueSqliteImports.join( - ', ', - ) - } } from "drizzle-orm/sqlite-core" - import { sql } from "drizzle-orm"\n\n`; - - let decalrations = tableStatements.join('\n\n'); - decalrations += '\n\n'; - decalrations += viewsStatements.join('\n\n'); - - const file = importsTs + decalrations; - - // for drizzle studio query runner - const schemaEntry = ` - { - ${ - Object.values(schema.tables) - .map((it) => withCasing(it.name, casing)) - .join(',') - } - } - `; - - return { file, imports: importsTs, decalrations, schemaEntry }; -}; - -const isCyclic = (fk: ForeignKey) => { - const key = `${fk.tableFrom}-${fk.tableTo}`; - const reverse = `${fk.tableTo}-${fk.tableFrom}`; - return relations.has(key) && relations.has(reverse); -}; - -const isSelf = (fk: ForeignKey) => { - return fk.tableFrom === fk.tableTo; -}; - -const mapColumnDefault = (defaultValue: any) => { - if ( - typeof defaultValue === 'string' - && defaultValue.startsWith('(') - && defaultValue.endsWith(')') - ) { - return `sql\`${defaultValue}\``; - } - // If default value is NULL as string it will come back from db as "'NULL'" and not just "NULL" - if (defaultValue === 'NULL') { - return `sql\`NULL\``; - } - - if ( - typeof defaultValue === 'string' - ) { - return defaultValue.substring(1, defaultValue.length - 1).replaceAll('"', '\\"').replaceAll("''", "'"); - } - - return defaultValue; -}; - -const column = ( - type: string, - name: string, - defaultValue?: any, - autoincrement?: boolean, - casing?: Casing, -) => { - let lowered = type; - casing = casing!; - - if (lowered === 'integer') { - let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`; - // out += autoincrement ? `.autoincrement()` : ""; - out += typeof defaultValue !== 'undefined' - ? `.default(${mapColumnDefault(defaultValue)})` - : ''; - return out; - } - - if (lowered === 'real') { - let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`; - out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; - return out; - } - - if (lowered.startsWith('text')) { - const match = lowered.match(/\d+/); - let out: string; - - if (match) { - out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing, withMode: true })}{ length: ${ - match[0] - } })`; - } else { - out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`; - } - - out += defaultValue ? `.default("${mapColumnDefault(defaultValue)}")` : ''; - return out; - } - - if (lowered === 'blob') { - let out = `${withCasing(name, casing)}: blob(${dbColumnName({ name, casing })})`; - out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; - return out; - } - - if (lowered === 'numeric') { - let out = `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing })})`; - out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; - return out; - } - - // console.log("uknown", type); - return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; -}; - -const createTableColumns = ( - columns: Column[], - fks: ForeignKey[], - casing: Casing, -): string => { - let statement = ''; - - // no self refs and no cyclic - const oneColumnsFKs = Object.values(fks) - .filter((it) => { - return !isSelf(it); - }) - .filter((it) => it.columnsFrom.length === 1); - - const fkByColumnName = oneColumnsFKs.reduce((res, it) => { - const arr = res[it.columnsFrom[0]] || []; - arr.push(it); - res[it.columnsFrom[0]] = arr; - return res; - }, {} as Record); - - columns.forEach((it) => { - statement += '\t'; - statement += column(it.type, it.name, it.default, it.autoincrement, casing); - statement += it.primaryKey - ? `.primaryKey(${it.autoincrement ? '{ autoIncrement: true }' : ''})` - : ''; - statement += it.notNull ? '.notNull()' : ''; - - statement += it.generated - ? `.generatedAlwaysAs(sql\`${ - it.generated.as - .replace(/`/g, '\\`') - .slice(1, -1) - }\`, { mode: "${it.generated.type}" })` - : ''; - - const fks = fkByColumnName[it.name]; - if (fks) { - const fksStatement = fks - .map((it) => { - const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; - const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; - const params = { onDelete, onUpdate }; - - const typeSuffix = isCyclic(it) ? ': AnySQLiteColumn' : ''; - - const paramsStr = objToStatement2(params); - if (paramsStr) { - return `.references(()${typeSuffix} => ${ - withCasing( - it.tableTo, - casing, - ) - }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; - } - return `.references(()${typeSuffix} => ${ - withCasing( - it.tableTo, - casing, - ) - }.${withCasing(it.columnsTo[0], casing)})`; - }) - .join(''); - statement += fksStatement; - } - - statement += ',\n'; - }); - - return statement; -}; - -const createTableIndexes = ( - tableName: string, - idxs: Index[], - casing: Casing, -): string => { - let statement = ''; - - idxs.forEach((it) => { - let idxKey = it.name.startsWith(tableName) && it.name !== tableName - ? it.name.slice(tableName.length + 1) - : it.name; - idxKey = idxKey.endsWith('_index') - ? idxKey.slice(0, -'_index'.length) + '_idx' - : idxKey; - - idxKey = withCasing(idxKey, casing); - - const indexGeneratedName = indexName(tableName, it.columns); - const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; - - statement += `\n\t`; - statement += it.isUnique ? 'uniqueIndex(' : 'index('; - statement += `${escapedIndexName})`; - statement += `.on(${ - it.columns - .map((it) => `table.${withCasing(it, casing)}`) - .join(', ') - }),`; - }); - - return statement; -}; - -const createTableUniques = ( - unqs: UniqueConstraint[], - casing: Casing, -): string => { - let statement = ''; - - unqs.forEach((it) => { - const idxKey = withCasing(it.name, casing); - - statement += `\n\t`; - statement += 'unique('; - statement += `"${it.name}")`; - statement += `.on(${ - it.columns - .map((it) => `table.${withCasing(it, casing)}`) - .join(', ') - }),`; - }); - - return statement; -}; -const createTableChecks = ( - checks: CheckConstraint[], - casing: Casing, -): string => { - let statement = ''; - - checks.forEach((it) => { - statement += `\n\t`; - statement += 'check('; - statement += `"${it.name}", `; - statement += `sql\`${it.value}\`)`; - statement += `,`; - }); - - return statement; -}; - -const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { - let statement = ''; - - pks.forEach((it, i) => { - statement += `\n\t`; - statement += 'primaryKey({ columns: ['; - statement += `${ - it.columns - .map((c) => { - return `table.${withCasing(c, casing)}`; - }) - .join(', ') - }]${it.name ? `, name: "${it.name}"` : ''}}`; - statement += ')'; - }); - - return statement; -}; - -const createTableFKs = (fks: ForeignKey[], casing: Casing): string => { - let statement = ''; - - fks.forEach((it) => { - const isSelf = it.tableTo === it.tableFrom; - const tableTo = isSelf ? 'table' : `${withCasing(it.tableTo, casing)}`; - statement += `\n\t`; - statement += `foreignKey(() => ({\n`; - statement += `\t\t\tcolumns: [${ - it.columnsFrom - .map((i) => `table.${withCasing(i, casing)}`) - .join(', ') - }],\n`; - statement += `\t\t\tforeignColumns: [${ - it.columnsTo - .map((i) => `${tableTo}.${withCasing(i, casing)}`) - .join(', ') - }],\n`; - statement += `\t\t\tname: "${it.name}"\n`; - statement += `\t\t}))`; - - statement += it.onUpdate && it.onUpdate !== 'no action' - ? `.onUpdate("${it.onUpdate}")` - : ''; - - statement += it.onDelete && it.onDelete !== 'no action' - ? `.onDelete("${it.onDelete}")` - : ''; - - statement += `,`; - }); - - return statement; -}; diff --git a/drizzle-kit/src/legacy/common.ts b/drizzle-kit/src/legacy/common.ts new file mode 100644 index 0000000000..0aa87d9e08 --- /dev/null +++ b/drizzle-kit/src/legacy/common.ts @@ -0,0 +1,194 @@ +import chalk from 'chalk'; +import type { UnionToIntersection } from 'hono/utils/types'; +import type { TypeOf } from 'zod'; +import { any, boolean, enum as enum_, literal, object, string, union } from 'zod'; +import { outputs } from './outputs'; +import { dialect } from './schemaValidator'; + +export type Commands = + | 'introspect' + | 'generate' + | 'check' + | 'up' + | 'drop' + | 'push' + | 'export'; + +// type Expand = T extends infer O ? { [K in keyof O]: O[K] } : never; +type IsUnion = [T] extends [UnionToIntersection] ? false : true; +type LastTupleElement = TArr extends [ + ...start: infer _, + end: infer Last, +] ? Last + : never; + +export type UniqueArrayOfUnion = Exclude< + TUnion, + TArray[number] +> extends never ? [TUnion] + : [...TArray, Exclude]; + +export const assertCollisions = < + T extends Record, + TKeys extends (keyof T)[], + TRemainingKeys extends Exclude[], + Exhaustive extends TRemainingKeys, + UNIQ extends UniqueArrayOfUnion, +>( + command: Commands, + options: T, + whitelist: Exclude, + _remainingKeys: UniqueArrayOfUnion, +): IsUnion> extends false ? 'cli' | 'config' : TKeys => { + const { config, ...rest } = options; + + let atLeastOneParam = false; + for (const key of Object.keys(rest)) { + if (whitelist.includes(key)) continue; + + atLeastOneParam = atLeastOneParam || rest[key] !== undefined; + } + + if (!config && atLeastOneParam) { + return 'cli' as any; + } + + if (!atLeastOneParam) { + return 'config' as any; + } + + // if config and cli - return error - write a reason + console.log(outputs.common.ambiguousParams(command)); + process.exit(1); +}; + +export const sqliteDriversLiterals = [ + literal('d1-http'), + literal('expo'), + literal('durable-sqlite'), +] as const; + +export const postgresqlDriversLiterals = [ + literal('aws-data-api'), + literal('pglite'), +] as const; + +export const prefixes = [ + 'index', + 'timestamp', + 'supabase', + 'unix', + 'none', +] as const; +export const prefix = enum_(prefixes); +export type Prefix = (typeof prefixes)[number]; + +{ + const _: Prefix = '' as TypeOf; +} + +export const casingTypes = ['snake_case', 'camelCase'] as const; +export const casingType = enum_(casingTypes); +export type CasingType = (typeof casingTypes)[number]; + +export const sqliteDriver = union(sqliteDriversLiterals); +export const postgresDriver = union(postgresqlDriversLiterals); +export const driver = union([sqliteDriver, postgresDriver]); + +export const configMigrations = object({ + table: string().optional(), + schema: string().optional(), + prefix: prefix.optional().default('index'), +}).optional(); + +export const configCommonSchema = object({ + dialect: dialect, + schema: union([string(), string().array()]).optional(), + out: string().optional(), + breakpoints: boolean().optional().default(true), + verbose: boolean().optional().default(false), + driver: driver.optional(), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), string().array()]).default(['public']), + migrations: configMigrations, + dbCredentials: any().optional(), + casing: casingType.optional(), + sql: boolean().default(true), +}).passthrough(); + +export const casing = union([literal('camel'), literal('preserve')]).default( + 'camel', +); + +export const introspectParams = object({ + schema: union([string(), string().array()]).optional(), + out: string().optional().default('./drizzle'), + breakpoints: boolean().default(true), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), string().array()]).default(['public']), + introspect: object({ + casing, + }).default({ casing: 'camel' }), +}); + +export type IntrospectParams = TypeOf; +export type Casing = TypeOf; + +export const configIntrospectCliSchema = object({ + schema: union([string(), string().array()]).optional(), + out: string().optional().default('./drizzle'), + breakpoints: boolean().default(true), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), string().array()]).default(['public']), + introspectCasing: union([literal('camel'), literal('preserve')]).default( + 'camel', + ), +}); + +export const configGenerateSchema = object({ + schema: union([string(), string().array()]), + out: string().optional().default('./drizzle'), + breakpoints: boolean().default(true), +}); + +export type GenerateSchema = TypeOf; + +export const configPushSchema = object({ + dialect: dialect, + schema: union([string(), string().array()]), + tablesFilter: union([string(), string().array()]).optional(), + schemaFilter: union([string(), string().array()]).default(['public']), + verbose: boolean().default(false), + strict: boolean().default(false), + out: string().optional(), +}); + +export type CliConfig = TypeOf; +export const drivers = ['d1-http', 'expo', 'aws-data-api', 'pglite', 'durable-sqlite'] as const; +export type Driver = (typeof drivers)[number]; +const _: Driver = '' as TypeOf; + +export const wrapParam = ( + name: string, + param: any | undefined, + optional: boolean = false, + type?: 'url' | 'secret', +) => { + const check = `[${chalk.green('✓')}]`; + const cross = `[${chalk.red('x')}]`; + if (typeof param === 'string') { + if (param.length === 0) { + return ` ${cross} ${name}: ''`; + } + if (type === 'secret') { + return ` ${check} ${name}: '*****'`; + } else if (type === 'url') { + return ` ${check} ${name}: '${param.replace(/(?<=:\/\/[^:\n]*:)([^@]*)/, '****')}'`; + } + return ` ${check} ${name}: '${param}'`; + } + if (optional) { + return chalk.gray(` ${name}?: `); + } + return ` ${cross} ${name}: ${chalk.gray('undefined')}`; +}; diff --git a/drizzle-kit/src/global.ts b/drizzle-kit/src/legacy/global.ts similarity index 92% rename from drizzle-kit/src/global.ts rename to drizzle-kit/src/legacy/global.ts index 4cea3d15ea..d7b6f1d5af 100644 --- a/drizzle-kit/src/global.ts +++ b/drizzle-kit/src/legacy/global.ts @@ -1,12 +1,12 @@ export const originUUID = '00000000-0000-0000-0000-000000000000'; export const snapshotVersion = '7'; -export function assertUnreachable(x: never | undefined): never { +export function assertUnreachable(_x: never | undefined): never { throw new Error("Didn't expect to get here"); } // don't fail in runtime, types only -export function softAssertUnreachable(x: never) { +export function softAssertUnreachable(_x: never) { return null as never; } diff --git a/drizzle-kit/src/jsonDiffer.js b/drizzle-kit/src/legacy/jsonDiffer.js similarity index 100% rename from drizzle-kit/src/jsonDiffer.js rename to drizzle-kit/src/legacy/jsonDiffer.js diff --git a/drizzle-kit/src/jsonStatements.ts b/drizzle-kit/src/legacy/jsonStatements.ts similarity index 58% rename from drizzle-kit/src/jsonStatements.ts rename to drizzle-kit/src/legacy/jsonStatements.ts index 4929519758..1c449bf1e0 100644 --- a/drizzle-kit/src/jsonStatements.ts +++ b/drizzle-kit/src/legacy/jsonStatements.ts @@ -1,46 +1,18 @@ -import chalk from 'chalk'; -import { getNewTableName } from './cli/commands/sqlitePushUtils'; -import { warning } from './cli/views'; -import { CommonSquashedSchema } from './schemaValidator'; -import { MySqlKitInternals, MySqlSchema, MySqlSquasher, View as MySqlView } from './serializer/mysqlSchema'; -import { +import type { MySqlView } from 'drizzle-orm/mysql-core/view'; +import type { MySqlSchema } from './mysql-v5/mysqlSchema'; +import { MySqlSquasher } from './mysql-v5/mysqlSchema'; +import type { Index, MatViewWithOption, PgSchema, PgSchemaSquashed, - PgSquasher, Policy, Role, View as PgView, ViewWithOption, -} from './serializer/pgSchema'; -import { SingleStoreKitInternals, SingleStoreSchema, SingleStoreSquasher } from './serializer/singlestoreSchema'; -import { - SQLiteKitInternals, - SQLiteSchemaInternal, - SQLiteSchemaSquashed, - SQLiteSquasher, - View as SqliteView, -} from './serializer/sqliteSchema'; -import { AlteredColumn, Column, Sequence, Table } from './snapshotsDiffer'; - -export interface JsonSqliteCreateTableStatement { - type: 'sqlite_create_table'; - tableName: string; - columns: Column[]; - referenceData: { - name: string; - tableFrom: string; - columnsFrom: string[]; - tableTo: string; - columnsTo: string[]; - onUpdate?: string | undefined; - onDelete?: string | undefined; - }[]; - compositePKs: string[][]; - uniqueConstraints?: string[]; - checkConstraints?: string[]; -} +} from './postgres-v7/pgSchema'; +import { PgSquasher } from './postgres-v7/pgSchema'; +import type { AlteredColumn, Column, Sequence, Table } from './snapshotsDiffer'; export interface JsonCreateTableStatement { type: 'create_table'; @@ -52,7 +24,6 @@ export interface JsonCreateTableStatement { uniqueConstraints?: string[]; policies?: string[]; checkConstraints?: string[]; - internals?: MySqlKitInternals | SingleStoreKitInternals; isRLSEnabled?: boolean; } @@ -74,14 +45,6 @@ export interface JsonRecreateTableStatement { checkConstraints: string[]; } -export interface JsonRecreateSingleStoreTableStatement { - type: 'singlestore_recreate_table'; - tableName: string; - columns: Column[]; - compositePKs: string[]; - uniqueConstraints?: string[]; -} - export interface JsonDropTableStatement { type: 'drop_table'; tableName: string; @@ -237,13 +200,6 @@ export interface JsonAddColumnStatement { schema: string; } -export interface JsonSqliteAddColumnStatement { - type: 'sqlite_alter_table_add_column'; - tableName: string; - column: Column; - referenceData?: string; -} - export interface JsonCreatePolicyStatement { type: 'create_policy'; tableName: string; @@ -316,7 +272,6 @@ export interface JsonCreateIndexStatement { tableName: string; data: string; schema: string; - internal?: MySqlKitInternals | SQLiteKitInternals | SingleStoreKitInternals; } export interface JsonPgCreateIndexStatement { @@ -360,6 +315,10 @@ export interface JsonDeleteUniqueConstraint { constraintName?: string; } +export type JsonAlterMySqlViewStatement = { + type: 'alter_mysql_view'; +} & Omit; + export interface JsonAlterUniqueConstraint { type: 'alter_unique_constraint'; tableName: string; @@ -430,6 +389,16 @@ export interface JsonAlterTableSetNewSchema { to: string; } +export type JsonCreateMySqlViewStatement = { + type: 'mysql_create_view'; + replace: boolean; + name: string; + definition: string; + algorithm: 'undefined' | 'merge' | 'temptable'; + sqlSecurity: 'definer' | 'invoker'; + withCheckOption: 'local' | 'cascaded' | undefined; +}; + export interface JsonCreateReferenceStatement extends JsonReferenceStatement { type: 'create_reference'; } @@ -694,20 +663,11 @@ export type JsonCreatePgViewStatement = { type: 'create_view'; } & Omit; -export type JsonCreateMySqlViewStatement = { - type: 'mysql_create_view'; - replace: boolean; -} & Omit; - /* export type JsonCreateSingleStoreViewStatement = { type: 'singlestore_create_view'; replace: boolean; } & Omit; */ -export type JsonCreateSqliteViewStatement = { - type: 'sqlite_create_view'; -} & Omit; - export interface JsonDropViewStatement { type: 'drop_view'; name: string; @@ -783,10 +743,6 @@ export interface JsonAlterViewAlterUsingStatement { materialized: true; } -export type JsonAlterMySqlViewStatement = { - type: 'alter_mysql_view'; -} & Omit; - /* export type JsonAlterSingleStoreViewStatement = { type: 'alter_singlestore_view'; } & Omit; */ @@ -820,7 +776,6 @@ export type JsonAlterColumnStatement = | JsonAlterColumnDropIdentityStatement; export type JsonStatement = - | JsonRecreateSingleStoreTableStatement | JsonRecreateTableStatement | JsonAlterColumnStatement | JsonCreateTableStatement @@ -839,8 +794,6 @@ export type JsonStatement = | JsonDeleteReferenceStatement | JsonDropIndexStatement | JsonReferenceStatement - | JsonSqliteCreateTableStatement - | JsonSqliteAddColumnStatement | JsonCreateCompositePK | JsonDeleteCompositePK | JsonAlterCompositePK @@ -873,29 +826,24 @@ export type JsonStatement = | JsonDropViewStatement | JsonRenameViewStatement | JsonAlterViewStatement - | JsonCreateMySqlViewStatement - | JsonAlterMySqlViewStatement - /* | JsonCreateSingleStoreViewStatement - | JsonAlterSingleStoreViewStatement */ - | JsonCreateSqliteViewStatement | JsonCreateCheckConstraint | JsonDeleteCheckConstraint | JsonDropValueFromEnumStatement | JsonIndRenamePolicyStatement | JsonDropIndPolicyStatement | JsonCreateIndPolicyStatement - | JsonAlterIndPolicyStatement; + | JsonAlterIndPolicyStatement + | JsonAlterMySqlViewStatement + | JsonCreateMySqlViewStatement; export const preparePgCreateTableJson = ( table: Table, - // TODO: remove? json2: PgSchema, ): JsonCreateTableStatement => { const { name, schema, columns, compositePrimaryKeys, uniqueConstraints, checkConstraints, policies, isRLSEnabled } = table; const tableKey = `${schema || 'public'}.${name}`; - // TODO: @AndriiSherman. We need this, will add test cases const compositePkName = Object.values(compositePrimaryKeys).length > 0 ? json2.tables[tableKey].compositePrimaryKeys[ `${PgSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]).name}` @@ -916,92 +864,6 @@ export const preparePgCreateTableJson = ( }; }; -export const prepareMySqlCreateTableJson = ( - table: Table, - // TODO: remove? - json2: MySqlSchema, - // we need it to know if some of the indexes(and in future other parts) are expressions or columns - // didn't change mysqlserialaizer, because it will break snapshots and diffs and it's hard to detect - // if previously it was an expression or column - internals: MySqlKitInternals, -): JsonCreateTableStatement => { - const { name, schema, columns, compositePrimaryKeys, uniqueConstraints, checkConstraints } = table; - - return { - type: 'create_table', - tableName: name, - schema, - columns: Object.values(columns), - compositePKs: Object.values(compositePrimaryKeys), - compositePkName: Object.values(compositePrimaryKeys).length > 0 - ? json2.tables[name].compositePrimaryKeys[ - MySqlSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]) - .name - ].name - : '', - uniqueConstraints: Object.values(uniqueConstraints), - internals, - checkConstraints: Object.values(checkConstraints), - }; -}; - -export const prepareSingleStoreCreateTableJson = ( - table: Table, - // TODO: remove? - json2: SingleStoreSchema, - // we need it to know if some of the indexes(and in future other parts) are expressions or columns - // didn't change singlestoreserialaizer, because it will break snapshots and diffs and it's hard to detect - // if previously it was an expression or column - internals: SingleStoreKitInternals, -): JsonCreateTableStatement => { - const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table; - - return { - type: 'create_table', - tableName: name, - schema, - columns: Object.values(columns), - compositePKs: Object.values(compositePrimaryKeys), - compositePkName: Object.values(compositePrimaryKeys).length > 0 - ? json2.tables[name].compositePrimaryKeys[ - SingleStoreSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]) - .name - ].name - : '', - uniqueConstraints: Object.values(uniqueConstraints), - internals, - }; -}; - -export const prepareSQLiteCreateTable = ( - table: Table, - action?: 'push' | undefined, -): JsonSqliteCreateTableStatement => { - const { name, columns, uniqueConstraints, checkConstraints } = table; - - const references: string[] = Object.values(table.foreignKeys); - - const composites: string[][] = Object.values(table.compositePrimaryKeys).map( - (it) => SQLiteSquasher.unsquashPK(it), - ); - - const fks = references.map((it) => - action === 'push' - ? SQLiteSquasher.unsquashPushFK(it) - : SQLiteSquasher.unsquashFK(it) - ); - - return { - type: 'sqlite_create_table', - tableName: name, - columns: Object.values(columns), - referenceData: fks, - compositePKs: composites, - uniqueConstraints: Object.values(uniqueConstraints), - checkConstraints: Object.values(checkConstraints), - }; -}; - export const prepareDropTableJson = (table: Table): JsonDropTableStatement => { return { type: 'drop_table', @@ -1284,7 +1146,6 @@ export const prepareDeleteSchemasJson = ( export const prepareRenameColumns = ( tableName: string, - // TODO: split for pg and mysql+sqlite and singlestore without schema schema: string, pairs: { from: Column; to: Column }[], ): JsonRenameColumnStatement[] => { @@ -1329,45 +1190,15 @@ export const _prepareAddColumns = ( }); }; -export const _prepareSqliteAddColumns = ( - tableName: string, - columns: Column[], - referenceData: string[], -): JsonSqliteAddColumnStatement[] => { - const unsquashed = referenceData.map((addedFkValue) => SQLiteSquasher.unsquashFK(addedFkValue)); - - return columns - .map((it) => { - const columnsWithReference = unsquashed.find((t) => t.columnsFrom.includes(it.name)); - - if (it.generated?.type === 'stored') { - warning( - `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`, - ); - return undefined; - } - - return { - type: 'sqlite_alter_table_add_column', - tableName: tableName, - column: it, - referenceData: columnsWithReference - ? SQLiteSquasher.squashFK(columnsWithReference) - : undefined, - }; - }) - .filter(Boolean) as JsonSqliteAddColumnStatement[]; -}; - -export const prepareAlterColumnsMysql = ( - tableName: string, +export const preparePgAlterColumns = ( + _tableName: string, schema: string, columns: AlteredColumn[], - // TODO: remove? - json1: CommonSquashedSchema, - json2: CommonSquashedSchema, + json2: PgSchemaSquashed, + json1: PgSchemaSquashed, action?: 'push' | undefined, ): JsonAlterColumnStatement[] => { + const tableKey = `${schema || 'public'}.${_tableName}`; let statements: JsonAlterColumnStatement[] = []; let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; @@ -1375,90 +1206,25 @@ export const prepareAlterColumnsMysql = ( for (const column of columns) { const columnName = typeof column.name !== 'string' ? column.name.new : column.name; - const table = json2.tables[tableName]; - const snapshotColumn = table.columns[columnName]; - - const columnType = snapshotColumn.type; - const columnDefault = snapshotColumn.default; - const columnOnUpdate = 'onUpdate' in snapshotColumn ? snapshotColumn.onUpdate : undefined; - const columnNotNull = table.columns[columnName].notNull; - - const columnAutoIncrement = 'autoincrement' in snapshotColumn - ? snapshotColumn.autoincrement ?? false - : false; - - const columnPk = table.columns[columnName].primaryKey; - - if (column.autoincrement?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_autoincrement', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.autoincrement?.type === 'changed') { - const type = column.autoincrement.new - ? 'alter_table_alter_column_set_autoincrement' - : 'alter_table_alter_column_drop_autoincrement'; - - statements.push({ - type, - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.autoincrement?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_autoincrement', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - } - - for (const column of columns) { - const columnName = typeof column.name !== 'string' ? column.name.new : column.name; + const tableName = json2.tables[tableKey].name; // I used any, because those fields are available only for mysql dialect // For other dialects it will become undefined, that is fine for json statements - const columnType = json2.tables[tableName].columns[columnName].type; - const columnDefault = json2.tables[tableName].columns[columnName].default; - const columnGenerated = json2.tables[tableName].columns[columnName].generated; - const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any) + const columnType = json2.tables[tableKey].columns[columnName].type; + const columnDefault = json2.tables[tableKey].columns[columnName].default; + const columnGenerated = json2.tables[tableKey].columns[columnName].generated; + const columnOnUpdate = (json2.tables[tableKey].columns[columnName] as any) .onUpdate; - const columnNotNull = json2.tables[tableName].columns[columnName].notNull; + const columnNotNull = json2.tables[tableKey].columns[columnName].notNull; const columnAutoIncrement = ( - json2.tables[tableName].columns[columnName] as any + json2.tables[tableKey].columns[columnName] as any ).autoincrement; - const columnPk = (json2.tables[tableName].columns[columnName] as any) + const columnPk = (json2.tables[tableKey].columns[columnName] as any) .primaryKey; + const typeSchema = json2.tables[tableKey].columns[columnName].typeSchema; + const json1ColumnTypeSchema = json1.tables[tableKey].columns[columnName].typeSchema; - const compositePk = json2.tables[tableName].compositePrimaryKeys[ - `${tableName}_${columnName}` - ]; + const compositePk = json2.tables[tableKey].compositePrimaryKeys[`${tableName}_${columnName}`]; if (typeof column.name !== 'string') { statements.push({ @@ -1471,19 +1237,32 @@ export const prepareAlterColumnsMysql = ( } if (column.type?.type === 'changed') { + const arrayDefinitionRegex = /\[\d*(?:\[\d*\])*\]/g; + const parsedNewColumnType = column.type.new.replace(arrayDefinitionRegex, ''); + const parsedOldColumnType = column.type.old.replace(arrayDefinitionRegex, ''); + + const isNewTypeIsEnum = json2.enums[`${typeSchema}.${parsedNewColumnType}`]; + const isOldTypeIsEnum = json1.enums[`${json1ColumnTypeSchema}.${parsedOldColumnType}`]; + statements.push({ - type: 'alter_table_alter_column_set_type', + type: 'pg_alter_table_alter_column_set_type', tableName, columnName, - newDataType: column.type.new, - oldDataType: column.type.old, + typeSchema: typeSchema, + newDataType: { + name: column.type.new, + isEnum: isNewTypeIsEnum ? true : false, + }, + oldDataType: { + name: column.type.old, + isEnum: isOldTypeIsEnum ? true : false, + }, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, - columnGenerated, }); } @@ -1596,16 +1375,37 @@ export const prepareAlterColumnsMysql = ( }); } + if (column.identity?.type === 'added') { + statements.push({ + type: 'alter_table_alter_column_set_identity', + tableName, + columnName, + schema, + identity: column.identity.value, + }); + } + + if (column.identity?.type === 'changed') { + statements.push({ + type: 'alter_table_alter_column_change_identity', + tableName, + columnName, + schema, + identity: column.identity.new, + oldIdentity: column.identity.old, + }); + } + + if (column.identity?.type === 'deleted') { + statements.push({ + type: 'alter_table_alter_column_drop_identity', + tableName, + columnName, + schema, + }); + } + if (column.generated?.type === 'added') { - if (columnGenerated?.type === 'virtual') { - warning( - `You are trying to add virtual generated constraint to ${ - chalk.blue( - columnName, - ) - } column. As MySQL docs mention: "Nongenerated columns can be altered to stored but not virtual generated columns". We will drop an existing column and add it with a virtual generated statement. This means that the data previously stored in this column will be wiped, and new data will be generated on each read for this column\n`, - ); - } statements.push({ type: 'alter_table_alter_column_set_generated', tableName, @@ -1638,15 +1438,6 @@ export const prepareAlterColumnsMysql = ( } if (column.generated?.type === 'deleted') { - if (columnGenerated?.type === 'virtual') { - warning( - `You are trying to remove virtual generated constraint from ${ - chalk.blue( - columnName, - ) - } column. As MySQL docs mention: "Stored but not virtual generated columns can be altered to nongenerated columns. The stored generated values become the values of the nongenerated column". We will drop an existing column and add it without a virtual generated statement. This means that this column will have no data after migration\n`, - ); - } statements.push({ type: 'alter_table_alter_column_drop_generated', tableName, @@ -1659,7 +1450,6 @@ export const prepareAlterColumnsMysql = ( columnAutoIncrement, columnPk, columnGenerated, - oldColumn: json1.tables[tableName].columns[columnName], }); } @@ -1680,1051 +1470,51 @@ export const prepareAlterColumnsMysql = ( } } - if (column.onUpdate?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_on_update', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } + // if (column.primaryKey?.type === "added") { + // statements.push({ + // type: "alter_table_alter_column_set_primarykey", + // tableName, + // columnName, + // schema, + // newDataType: columnType, + // columnDefault, + // columnOnUpdate, + // columnNotNull, + // columnAutoIncrement, + // }); + // } - if (column.onUpdate?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_on_update', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - } - - return [...dropPkStatements, ...setPkStatements, ...statements]; -}; - -export const prepareAlterColumnsSingleStore = ( - tableName: string, - schema: string, - columns: AlteredColumn[], - // TODO: remove? - json1: CommonSquashedSchema, - json2: CommonSquashedSchema, - action?: 'push' | undefined, -): JsonAlterColumnStatement[] => { - let statements: JsonAlterColumnStatement[] = []; - let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; - let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; - - for (const column of columns) { - const columnName = typeof column.name !== 'string' ? column.name.new : column.name; - - const table = json2.tables[tableName]; - const snapshotColumn = table.columns[columnName]; - - const columnType = snapshotColumn.type; - const columnDefault = snapshotColumn.default; - const columnOnUpdate = 'onUpdate' in snapshotColumn ? snapshotColumn.onUpdate : undefined; - const columnNotNull = table.columns[columnName].notNull; - - const columnAutoIncrement = 'autoincrement' in snapshotColumn - ? snapshotColumn.autoincrement ?? false - : false; - - const columnPk = table.columns[columnName].primaryKey; - - if (column.autoincrement?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_autoincrement', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.autoincrement?.type === 'changed') { - const type = column.autoincrement.new - ? 'alter_table_alter_column_set_autoincrement' - : 'alter_table_alter_column_drop_autoincrement'; - - statements.push({ - type, - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.autoincrement?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_autoincrement', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - } - - for (const column of columns) { - const columnName = typeof column.name !== 'string' ? column.name.new : column.name; - - // I used any, because those fields are available only for mysql and singlestore dialect - // For other dialects it will become undefined, that is fine for json statements - const columnType = json2.tables[tableName].columns[columnName].type; - const columnDefault = json2.tables[tableName].columns[columnName].default; - const columnGenerated = json2.tables[tableName].columns[columnName].generated; - const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any) - .onUpdate; - const columnNotNull = json2.tables[tableName].columns[columnName].notNull; - const columnAutoIncrement = ( - json2.tables[tableName].columns[columnName] as any - ).autoincrement; - const columnPk = (json2.tables[tableName].columns[columnName] as any) - .primaryKey; - - const compositePk = json2.tables[tableName].compositePrimaryKeys[ - `${tableName}_${columnName}` - ]; - - if (typeof column.name !== 'string') { - statements.push({ - type: 'alter_table_rename_column', - tableName, - oldColumnName: column.name.old, - newColumnName: column.name.new, - schema, - }); - } - - if (column.type?.type === 'changed') { - statements.push({ - type: 'alter_table_alter_column_set_type', - tableName, - columnName, - newDataType: column.type.new, - oldDataType: column.type.old, - schema, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } - - if ( - column.primaryKey?.type === 'deleted' - || (column.primaryKey?.type === 'changed' - && !column.primaryKey.new - && typeof compositePk === 'undefined') - ) { - dropPkStatements.push({ - //// - type: 'alter_table_alter_column_drop_pk', - tableName, - columnName, - schema, - }); - } - - if (column.default?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_default', - tableName, - columnName, - newDefaultValue: column.default.value, - schema, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.default?.type === 'changed') { - statements.push({ - type: 'alter_table_alter_column_set_default', - tableName, - columnName, - newDefaultValue: column.default.new, - oldDefaultValue: column.default.old, - schema, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.default?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_default', - tableName, - columnName, - schema, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.notNull?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_notnull', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.notNull?.type === 'changed') { - const type = column.notNull.new - ? 'alter_table_alter_column_set_notnull' - : 'alter_table_alter_column_drop_notnull'; - statements.push({ - type: type, - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.notNull?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_notnull', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.generated?.type === 'added') { - if (columnGenerated?.type === 'virtual') { - // TODO: Change warning message according to SingleStore docs - warning( - `You are trying to add virtual generated constraint to ${ - chalk.blue( - columnName, - ) - } column. As MySQL docs mention: "Nongenerated columns can be altered to stored but not virtual generated columns". We will drop an existing column and add it with a virtual generated statement. This means that the data previously stored in this column will be wiped, and new data will be generated on each read for this column\n`, - ); - } - statements.push({ - type: 'alter_table_alter_column_set_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } - - if (column.generated?.type === 'changed' && action !== 'push') { - statements.push({ - type: 'alter_table_alter_column_alter_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } - - if (column.generated?.type === 'deleted') { - if (columnGenerated?.type === 'virtual') { - // TODO: Change warning message according to SingleStore docs - warning( - `You are trying to remove virtual generated constraint from ${ - chalk.blue( - columnName, - ) - } column. As MySQL docs mention: "Stored but not virtual generated columns can be altered to nongenerated columns. The stored generated values become the values of the nongenerated column". We will drop an existing column and add it without a virtual generated statement. This means that this column will have no data after migration\n`, - ); - } - statements.push({ - type: 'alter_table_alter_column_drop_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - oldColumn: json1.tables[tableName].columns[columnName], - }); - } - - if ( - column.primaryKey?.type === 'added' - || (column.primaryKey?.type === 'changed' && column.primaryKey.new) - ) { - const wasAutoincrement = statements.filter( - (it) => it.type === 'alter_table_alter_column_set_autoincrement', - ); - if (wasAutoincrement.length === 0) { - setPkStatements.push({ - type: 'alter_table_alter_column_set_pk', - tableName, - schema, - columnName, - }); - } - } - - if (column.onUpdate?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_on_update', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.onUpdate?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_on_update', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - } - - return [...dropPkStatements, ...setPkStatements, ...statements]; -}; - -export const preparePgAlterColumns = ( - _tableName: string, - schema: string, - columns: AlteredColumn[], - // TODO: remove? - json2: PgSchemaSquashed, - json1: PgSchemaSquashed, - action?: 'push' | undefined, -): JsonAlterColumnStatement[] => { - const tableKey = `${schema || 'public'}.${_tableName}`; - let statements: JsonAlterColumnStatement[] = []; - let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; - let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; - - for (const column of columns) { - const columnName = typeof column.name !== 'string' ? column.name.new : column.name; - - const tableName = json2.tables[tableKey].name; - - // I used any, because those fields are available only for mysql dialect - // For other dialects it will become undefined, that is fine for json statements - const columnType = json2.tables[tableKey].columns[columnName].type; - const columnDefault = json2.tables[tableKey].columns[columnName].default; - const columnGenerated = json2.tables[tableKey].columns[columnName].generated; - const columnOnUpdate = (json2.tables[tableKey].columns[columnName] as any) - .onUpdate; - const columnNotNull = json2.tables[tableKey].columns[columnName].notNull; - const columnAutoIncrement = ( - json2.tables[tableKey].columns[columnName] as any - ).autoincrement; - const columnPk = (json2.tables[tableKey].columns[columnName] as any) - .primaryKey; - const typeSchema = json2.tables[tableKey].columns[columnName].typeSchema; - const json1ColumnTypeSchema = json1.tables[tableKey].columns[columnName].typeSchema; - - const compositePk = json2.tables[tableKey].compositePrimaryKeys[`${tableName}_${columnName}`]; - - if (typeof column.name !== 'string') { - statements.push({ - type: 'alter_table_rename_column', - tableName, - oldColumnName: column.name.old, - newColumnName: column.name.new, - schema, - }); - } - - if (column.type?.type === 'changed') { - const arrayDefinitionRegex = /\[\d*(?:\[\d*\])*\]/g; - const parsedNewColumnType = column.type.new.replace(arrayDefinitionRegex, ''); - const parsedOldColumnType = column.type.old.replace(arrayDefinitionRegex, ''); - - const isNewTypeIsEnum = json2.enums[`${typeSchema}.${parsedNewColumnType}`]; - const isOldTypeIsEnum = json1.enums[`${json1ColumnTypeSchema}.${parsedOldColumnType}`]; - - statements.push({ - type: 'pg_alter_table_alter_column_set_type', - tableName, - columnName, - typeSchema: typeSchema, - newDataType: { - name: column.type.new, - isEnum: isNewTypeIsEnum ? true : false, - }, - oldDataType: { - name: column.type.old, - isEnum: isOldTypeIsEnum ? true : false, - }, - schema, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if ( - column.primaryKey?.type === 'deleted' - || (column.primaryKey?.type === 'changed' - && !column.primaryKey.new - && typeof compositePk === 'undefined') - ) { - dropPkStatements.push({ - //// - type: 'alter_table_alter_column_drop_pk', - tableName, - columnName, - schema, - }); - } - - if (column.default?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_default', - tableName, - columnName, - newDefaultValue: column.default.value, - schema, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.default?.type === 'changed') { - statements.push({ - type: 'alter_table_alter_column_set_default', - tableName, - columnName, - newDefaultValue: column.default.new, - oldDefaultValue: column.default.old, - schema, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.default?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_default', - tableName, - columnName, - schema, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.notNull?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_notnull', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.notNull?.type === 'changed') { - const type = column.notNull.new - ? 'alter_table_alter_column_set_notnull' - : 'alter_table_alter_column_drop_notnull'; - statements.push({ - type: type, - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.notNull?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_notnull', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.identity?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_identity', - tableName, - columnName, - schema, - identity: column.identity.value, - }); - } - - if (column.identity?.type === 'changed') { - statements.push({ - type: 'alter_table_alter_column_change_identity', - tableName, - columnName, - schema, - identity: column.identity.new, - oldIdentity: column.identity.old, - }); - } - - if (column.identity?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_identity', - tableName, - columnName, - schema, - }); - } - - if (column.generated?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } - - if (column.generated?.type === 'changed' && action !== 'push') { - statements.push({ - type: 'alter_table_alter_column_alter_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } - - if (column.generated?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } - - if ( - column.primaryKey?.type === 'added' - || (column.primaryKey?.type === 'changed' && column.primaryKey.new) - ) { - const wasAutoincrement = statements.filter( - (it) => it.type === 'alter_table_alter_column_set_autoincrement', - ); - if (wasAutoincrement.length === 0) { - setPkStatements.push({ - type: 'alter_table_alter_column_set_pk', - tableName, - schema, - columnName, - }); - } - } - - // if (column.primaryKey?.type === "added") { - // statements.push({ - // type: "alter_table_alter_column_set_primarykey", - // tableName, - // columnName, - // schema, - // newDataType: columnType, - // columnDefault, - // columnOnUpdate, - // columnNotNull, - // columnAutoIncrement, - // }); - // } - - // if (column.primaryKey?.type === "changed") { - // const type = column.primaryKey.new - // ? "alter_table_alter_column_set_primarykey" - // : "alter_table_alter_column_drop_primarykey"; - - // statements.push({ - // type, - // tableName, - // columnName, - // schema, - // newDataType: columnType, - // columnDefault, - // columnOnUpdate, - // columnNotNull, - // columnAutoIncrement, - // }); - // } - - // if (column.primaryKey?.type === "deleted") { - // statements.push({ - // type: "alter_table_alter_column_drop_primarykey", - // tableName, - // columnName, - // schema, - // newDataType: columnType, - // columnDefault, - // columnOnUpdate, - // columnNotNull, - // columnAutoIncrement, - // }); - // } - - if (column.onUpdate?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_on_update', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.onUpdate?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_on_update', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - } - - return [...dropPkStatements, ...setPkStatements, ...statements]; -}; - -export const prepareSqliteAlterColumns = ( - tableName: string, - schema: string, - columns: AlteredColumn[], - // TODO: remove? - json2: CommonSquashedSchema, -): JsonAlterColumnStatement[] => { - let statements: JsonAlterColumnStatement[] = []; - let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; - let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; - - for (const column of columns) { - const columnName = typeof column.name !== 'string' ? column.name.new : column.name; - - // I used any, because those fields are available only for mysql dialect - // For other dialects it will become undefined, that is fine for json statements - const columnType = json2.tables[tableName].columns[columnName].type; - const columnDefault = json2.tables[tableName].columns[columnName].default; - const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any) - .onUpdate; - const columnNotNull = json2.tables[tableName].columns[columnName].notNull; - const columnAutoIncrement = ( - json2.tables[tableName].columns[columnName] as any - ).autoincrement; - const columnPk = (json2.tables[tableName].columns[columnName] as any) - .primaryKey; - - const columnGenerated = json2.tables[tableName].columns[columnName].generated; - - const compositePk = json2.tables[tableName].compositePrimaryKeys[ - `${tableName}_${columnName}` - ]; - - if (column.autoincrement?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_autoincrement', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.autoincrement?.type === 'changed') { - const type = column.autoincrement.new - ? 'alter_table_alter_column_set_autoincrement' - : 'alter_table_alter_column_drop_autoincrement'; - - statements.push({ - type, - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.autoincrement?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_autoincrement', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (typeof column.name !== 'string') { - statements.push({ - type: 'alter_table_rename_column', - tableName, - oldColumnName: column.name.old, - newColumnName: column.name.new, - schema, - }); - } - - if (column.type?.type === 'changed') { - statements.push({ - type: 'alter_table_alter_column_set_type', - tableName, - columnName, - newDataType: column.type.new, - oldDataType: column.type.old, - schema, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if ( - column.primaryKey?.type === 'deleted' - || (column.primaryKey?.type === 'changed' - && !column.primaryKey.new - && typeof compositePk === 'undefined') - ) { - dropPkStatements.push({ - //// - type: 'alter_table_alter_column_drop_pk', - tableName, - columnName, - schema, - }); - } - - if (column.default?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_default', - tableName, - columnName, - newDefaultValue: column.default.value, - schema, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.default?.type === 'changed') { - statements.push({ - type: 'alter_table_alter_column_set_default', - tableName, - columnName, - newDefaultValue: column.default.new, - oldDefaultValue: column.default.old, - schema, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.default?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_default', - tableName, - columnName, - schema, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - newDataType: columnType, - columnPk, - }); - } - - if (column.notNull?.type === 'added') { - statements.push({ - type: 'alter_table_alter_column_set_notnull', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.notNull?.type === 'changed') { - const type = column.notNull.new - ? 'alter_table_alter_column_set_notnull' - : 'alter_table_alter_column_drop_notnull'; - statements.push({ - type: type, - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.notNull?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_notnull', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - }); - } - - if (column.generated?.type === 'added') { - if (columnGenerated?.type === 'virtual') { - statements.push({ - type: 'alter_table_alter_column_set_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } else { - warning( - `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`, - ); - } - } - - if (column.generated?.type === 'changed') { - if (columnGenerated?.type === 'virtual') { - statements.push({ - type: 'alter_table_alter_column_alter_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } else { - warning( - `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`, - ); - } - } - - if (column.generated?.type === 'deleted') { - statements.push({ - type: 'alter_table_alter_column_drop_generated', - tableName, - columnName, - schema, - newDataType: columnType, - columnDefault, - columnOnUpdate, - columnNotNull, - columnAutoIncrement, - columnPk, - columnGenerated, - }); - } + // if (column.primaryKey?.type === "changed") { + // const type = column.primaryKey.new + // ? "alter_table_alter_column_set_primarykey" + // : "alter_table_alter_column_drop_primarykey"; - if ( - column.primaryKey?.type === 'added' - || (column.primaryKey?.type === 'changed' && column.primaryKey.new) - ) { - const wasAutoincrement = statements.filter( - (it) => it.type === 'alter_table_alter_column_set_autoincrement', - ); - if (wasAutoincrement.length === 0) { - setPkStatements.push({ - type: 'alter_table_alter_column_set_pk', - tableName, - schema, - columnName, - }); - } - } + // statements.push({ + // type, + // tableName, + // columnName, + // schema, + // newDataType: columnType, + // columnDefault, + // columnOnUpdate, + // columnNotNull, + // columnAutoIncrement, + // }); + // } + + // if (column.primaryKey?.type === "deleted") { + // statements.push({ + // type: "alter_table_alter_column_drop_primarykey", + // tableName, + // columnName, + // schema, + // newDataType: columnType, + // columnDefault, + // columnOnUpdate, + // columnNotNull, + // columnAutoIncrement, + // }); + // } if (column.onUpdate?.type === 'added') { statements.push({ @@ -2909,7 +1699,6 @@ export const prepareCreateIndexesJson = ( tableName: string, schema: string, indexes: Record, - internal?: MySqlKitInternals | SQLiteKitInternals, ): JsonCreateIndexStatement[] => { return Object.values(indexes).map((indexData) => { return { @@ -2917,7 +1706,6 @@ export const prepareCreateIndexesJson = ( tableName, data: indexData, schema, - internal, }; }); }; @@ -2936,54 +1724,6 @@ export const prepareCreateReferencesJson = ( }; }); }; -export const prepareLibSQLCreateReferencesJson = ( - tableName: string, - schema: string, - foreignKeys: Record, - json2: SQLiteSchemaSquashed, - action?: 'push', -): JsonCreateReferenceStatement[] => { - return Object.values(foreignKeys).map((fkData) => { - const { columnsFrom, tableFrom, columnsTo } = action === 'push' - ? SQLiteSquasher.unsquashPushFK(fkData) - : SQLiteSquasher.unsquashFK(fkData); - - // When trying to alter table in lib sql it is necessary to pass all config for column like "NOT NULL", "DEFAULT", etc. - // If it is multicolumn reference it is not possible to pass this data for all columns - // Pass multicolumn flag for sql statements to not generate migration - let isMulticolumn = false; - - if (columnsFrom.length > 1 || columnsTo.length > 1) { - isMulticolumn = true; - - return { - type: 'create_reference', - tableName, - data: fkData, - schema, - isMulticolumn, - }; - } - - const columnFrom = columnsFrom[0]; - - const { - notNull: columnNotNull, - default: columnDefault, - type: columnType, - } = json2.tables[tableFrom].columns[columnFrom]; - - return { - type: 'create_reference', - tableName, - data: fkData, - schema, - columnNotNull, - columnDefault, - columnType, - }; - }); -}; export const prepareDropReferencesJson = ( tableName: string, @@ -2999,77 +1739,6 @@ export const prepareDropReferencesJson = ( }; }); }; -export const prepareLibSQLDropReferencesJson = ( - tableName: string, - schema: string, - foreignKeys: Record, - json2: SQLiteSchemaSquashed, - meta: SQLiteSchemaInternal['_meta'], - action?: 'push', -): JsonDeleteReferenceStatement[] => { - const statements = Object.values(foreignKeys).map((fkData) => { - const { columnsFrom, tableFrom, columnsTo, name, tableTo, onDelete, onUpdate } = action === 'push' - ? SQLiteSquasher.unsquashPushFK(fkData) - : SQLiteSquasher.unsquashFK(fkData); - - // If all columns from where were references were deleted -> skip this logic - // Drop columns will cover this scenario - const keys = Object.keys(json2.tables[tableName].columns); - const filtered = columnsFrom.filter((it) => keys.includes(it)); - const fullDrop = filtered.length === 0; - if (fullDrop) return; - - // When trying to alter table in lib sql it is necessary to pass all config for column like "NOT NULL", "DEFAULT", etc. - // If it is multicolumn reference it is not possible to pass this data for all columns - // Pass multicolumn flag for sql statements to not generate migration - let isMulticolumn = false; - - if (columnsFrom.length > 1 || columnsTo.length > 1) { - isMulticolumn = true; - - return { - type: 'delete_reference', - tableName, - data: fkData, - schema, - isMulticolumn, - }; - } - - const columnFrom = columnsFrom[0]; - const newTableName = getNewTableName(tableFrom, meta); - - const { - notNull: columnNotNull, - default: columnDefault, - type: columnType, - } = json2.tables[newTableName].columns[columnFrom]; - - const fkToSquash = { - columnsFrom, - columnsTo, - name, - tableFrom: newTableName, - tableTo, - onDelete, - onUpdate, - }; - const foreignKey = action === 'push' - ? SQLiteSquasher.squashPushFK(fkToSquash) - : SQLiteSquasher.squashFK(fkToSquash); - return { - type: 'delete_reference', - tableName, - data: foreignKey, - schema, - columnNotNull, - columnDefault, - columnType, - }; - }); - - return statements.filter((it) => it) as JsonDeleteReferenceStatement[]; -}; // alter should create 2 statements. It's important to make only 1 sql per statement(for breakpoints) export const prepareAlterReferencesJson = ( @@ -3155,11 +1824,8 @@ export const prepareAddCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, - // TODO: remove? - json2: PgSchema, ): JsonCreateCompositePK[] => { return Object.values(pks).map((it) => { - const unsquashed = PgSquasher.unsquashPK(it); return { type: 'create_composite_pk', tableName, @@ -3174,8 +1840,6 @@ export const prepareDeleteCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, - // TODO: remove? - json1: PgSchema, ): JsonDeleteCompositePK[] => { return Object.values(pks).map((it) => { return { @@ -3192,9 +1856,6 @@ export const prepareAlterCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, - // TODO: remove? - json1: PgSchema, - json2: PgSchema, ): JsonAlterCompositePK[] => { return Object.values(pks).map((it) => { return { @@ -3239,68 +1900,12 @@ export const prepareDeleteUniqueConstraintPg = ( }); }; -export const prepareAddCheckConstraint = ( - tableName: string, - schema: string, - check: Record, -): JsonCreateCheckConstraint[] => { - return Object.values(check).map((it) => { - return { - type: 'create_check_constraint', - tableName, - data: it, - schema, - } as JsonCreateCheckConstraint; - }); -}; - -export const prepareDeleteCheckConstraint = ( - tableName: string, - schema: string, - check: Record, -): JsonDeleteCheckConstraint[] => { - return Object.values(check).map((it) => { - return { - type: 'delete_check_constraint', - tableName, - constraintName: PgSquasher.unsquashCheck(it).name, - schema, - } as JsonDeleteCheckConstraint; - }); -}; - -// add create table changes -// add handler to make drop and add and not alter(looking at __old and __new) -// add serializer for mysql and sqlite + types -// add introspect serializer for pg+sqlite+mysql -// add introspect actual code -// add push sqlite handler -// add push mysql warning if data exists and may have unique conflict -// add release notes -// add docs changes - -export const prepareAlterUniqueConstraintPg = ( - tableName: string, - schema: string, - unqs: Record, -): JsonAlterUniqueConstraint[] => { - return Object.values(unqs).map((it) => { - return { - type: 'alter_unique_constraint', - tableName, - old: it.__old, - new: it.__new, - schema, - } as JsonAlterUniqueConstraint; - }); -}; - export const prepareAddCompositePrimaryKeyMySql = ( tableName: string, pks: Record, // TODO: remove? json1: MySqlSchema, - json2: MySqlSchema, + _json2: MySqlSchema, ): JsonCreateCompositePK[] => { const res: JsonCreateCompositePK[] = []; for (const it of Object.values(pks)) { @@ -3326,11 +1931,8 @@ export const prepareAddCompositePrimaryKeyMySql = ( export const prepareDeleteCompositePrimaryKeyMySql = ( tableName: string, pks: Record, - // TODO: remove? - json1: MySqlSchema, ): JsonDeleteCompositePK[] => { return Object.values(pks).map((it) => { - const unsquashed = MySqlSquasher.unsquashPK(it); return { type: 'delete_composite_pk', tableName, @@ -3362,6 +1964,62 @@ export const prepareAlterCompositePrimaryKeyMySql = ( }); }; +export const prepareAddCheckConstraint = ( + tableName: string, + schema: string, + check: Record, +): JsonCreateCheckConstraint[] => { + return Object.values(check).map((it) => { + return { + type: 'create_check_constraint', + tableName, + data: it, + schema, + } as JsonCreateCheckConstraint; + }); +}; + +export const prepareDeleteCheckConstraint = ( + tableName: string, + schema: string, + check: Record, +): JsonDeleteCheckConstraint[] => { + return Object.values(check).map((it) => { + return { + type: 'delete_check_constraint', + tableName, + constraintName: PgSquasher.unsquashCheck(it).name, + schema, + } as JsonDeleteCheckConstraint; + }); +}; + +// add create table changes +// add handler to make drop and add and not alter(looking at __old and __new) +// add serializer for mysql and sqlite + types +// add introspect serializer for pg+sqlite+mysql +// add introspect actual code +// add push sqlite handler +// add push mysql warning if data exists and may have unique conflict +// add release notes +// add docs changes + +export const prepareAlterUniqueConstraintPg = ( + tableName: string, + schema: string, + unqs: Record, +): JsonAlterUniqueConstraint[] => { + return Object.values(unqs).map((it) => { + return { + type: 'alter_unique_constraint', + tableName, + old: it.__old, + new: it.__new, + schema, + } as JsonAlterUniqueConstraint; + }); +}; + export const preparePgCreateViewJson = ( name: string, schema: string, @@ -3385,24 +2043,6 @@ export const preparePgCreateViewJson = ( }; }; -export const prepareMySqlCreateViewJson = ( - name: string, - definition: string, - meta: string, - replace: boolean = false, -): JsonCreateMySqlViewStatement => { - const { algorithm, sqlSecurity, withCheckOption } = MySqlSquasher.unsquashView(meta); - return { - type: 'mysql_create_view', - name: name, - definition: definition, - algorithm, - sqlSecurity, - withCheckOption, - replace, - }; -}; - /* export const prepareSingleStoreCreateViewJson = ( name: string, definition: string, @@ -3421,17 +2061,6 @@ export const prepareMySqlCreateViewJson = ( }; }; */ -export const prepareSqliteCreateViewJson = ( - name: string, - definition: string, -): JsonCreateSqliteViewStatement => { - return { - type: 'sqlite_create_view', - name: name, - definition: definition, - }; -}; - export const prepareDropViewJson = ( name: string, schema?: string, @@ -3464,6 +2093,47 @@ export const prepareRenameViewJson = ( return resObject; }; +export const prepareMySqlCreateTableJson = ( + table: Table, + json2: MySqlSchema, +): JsonCreateTableStatement => { + const { name, schema, columns, compositePrimaryKeys, uniqueConstraints, checkConstraints } = table; + + return { + type: 'create_table', + tableName: name, + schema, + columns: Object.values(columns), + compositePKs: Object.values(compositePrimaryKeys), + compositePkName: Object.values(compositePrimaryKeys).length > 0 + ? json2.tables[name].compositePrimaryKeys[ + MySqlSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]) + .name + ].name + : '', + uniqueConstraints: Object.values(uniqueConstraints), + checkConstraints: Object.values(checkConstraints), + }; +}; + +export const prepareMySqlCreateViewJson = ( + name: string, + definition: string, + meta: string, + replace: boolean = false, +): JsonCreateMySqlViewStatement => { + const { algorithm, sqlSecurity, withCheckOption } = MySqlSquasher.unsquashView(meta); + return { + type: 'mysql_create_view', + name: name, + definition: definition, + algorithm, + sqlSecurity, + withCheckOption, + replace, + }; +}; + export const preparePgAlterViewAlterSchemaJson = ( to: string, from: string, @@ -3541,12 +2211,6 @@ export const preparePgAlterViewAlterUsingJson = ( } as JsonAlterViewAlterUsingStatement; }; -export const prepareMySqlAlterView = ( - view: Omit, -): JsonAlterMySqlViewStatement => { - return { type: 'alter_mysql_view', ...view }; -}; - /* export const prepareSingleStoreAlterView = ( view: Omit, ): JsonAlterSingleStoreViewStatement => { diff --git a/drizzle-kit/src/legacy/mysql-v5/mysqlDiff.ts b/drizzle-kit/src/legacy/mysql-v5/mysqlDiff.ts new file mode 100644 index 0000000000..a297502fc1 --- /dev/null +++ b/drizzle-kit/src/legacy/mysql-v5/mysqlDiff.ts @@ -0,0 +1,656 @@ +import { applyJsonDiff, diffColumns, diffSchemasOrTables } from '../jsonDiffer'; +import { fromJson } from '../sqlgenerator2'; + +import type { + JsonAddColumnStatement, + JsonAlterCompositePK, + JsonAlterUniqueConstraint, + JsonCreateCheckConstraint, + JsonCreateCompositePK, + JsonCreateMySqlViewStatement, + JsonCreateReferenceStatement, + JsonCreateUniqueConstraint, + JsonDeleteCheckConstraint, + JsonDeleteCompositePK, + JsonDeleteUniqueConstraint, + JsonDropColumnStatement, + JsonDropViewStatement, + JsonReferenceStatement, + JsonRenameColumnStatement, + JsonRenameViewStatement, + JsonStatement, +} from '../jsonStatements'; +import { + _prepareAddColumns, + _prepareDropColumns, + prepareAddCheckConstraint, + prepareAddCompositePrimaryKeyMySql, + prepareAddUniqueConstraintPg as prepareAddUniqueConstraint, + prepareAlterCompositePrimaryKeyMySql, + prepareAlterReferencesJson, + prepareCreateIndexesJson, + prepareCreateReferencesJson, + prepareDeleteCheckConstraint, + prepareDeleteCompositePrimaryKeyMySql, + prepareDeleteUniqueConstraintPg as prepareDeleteUniqueConstraint, + prepareDropIndexesJson, + prepareDropReferencesJson, + prepareDropTableJson, + prepareDropViewJson, + prepareMySqlCreateTableJson, + prepareMySqlCreateViewJson, + prepareRenameColumns, + prepareRenameTableJson, + prepareRenameViewJson, +} from '../jsonStatements'; + +import { mapEntries, mapKeys } from '../global'; +import type { + Column, + ColumnsResolverInput, + ColumnsResolverOutput, + DiffResultMysql, + Named, + ResolverInput, + ResolverOutputWithMoved, + Table, +} from '../snapshotsDiffer'; +import { + columnChangeFor, + columnsResolver, + diffResultSchemeMysql, + mySqlViewsResolver, + nameChangeFor, + tablesResolver, +} from '../snapshotsDiffer'; +import { copy } from '../utils'; +import type { MySqlSchema, MySqlSchemaSquashed, ViewSquashed } from './mysqlSchema'; +import { dryMySql, MySqlSquasher, squashMysqlScheme } from './mysqlSchema'; + +export const diff = async (opts: { + left?: MySqlSchema; + right: MySqlSchema; + mode?: 'push'; +}) => { + const left = opts.left ?? dryMySql; + const json1 = squashMysqlScheme(left); + const json2 = squashMysqlScheme(opts.right); + return _diff( + json1, + json2, + tablesResolver, + columnsResolver, + mySqlViewsResolver, + left, + opts.right, + opts.mode, + ); +}; + +export const _diff = async ( + json1: MySqlSchemaSquashed, + json2: MySqlSchemaSquashed, + tablesResolver: ( + input: ResolverInput
, + ) => Promise>, + columnsResolver: ( + input: ColumnsResolverInput, + ) => Promise>, + viewsResolver: ( + input: ResolverInput, + ) => Promise>, + prevFull: MySqlSchema, + curFull: MySqlSchema, + action?: 'push' | undefined, +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + _meta: + | { + schemas: {}; + tables: {}; + columns: {}; + } + | undefined; +}> => { + // squash indexes and fks + + // squash uniqueIndexes and uniqueConstraint into constraints object + // it should be done for mysql only because it has no diffs for it + + // TODO: @AndriiSherman + // Add an upgrade to v6 and move all snaphosts to this strcutre + // After that we can generate mysql in 1 object directly(same as sqlite) + for (const tableName in json1.tables) { + const table = json1.tables[tableName]; + for (const indexName in table.indexes) { + const index = MySqlSquasher.unsquashIdx(table.indexes[indexName]); + if (index.isUnique) { + table.uniqueConstraints[indexName] = MySqlSquasher.squashUnique({ + name: index.name, + columns: index.columns, + }); + delete json1.tables[tableName].indexes[index.name]; + } + } + } + + for (const tableName in json2.tables) { + const table = json2.tables[tableName]; + for (const indexName in table.indexes) { + const index = MySqlSquasher.unsquashIdx(table.indexes[indexName]); + if (index.isUnique) { + table.uniqueConstraints[indexName] = MySqlSquasher.squashUnique({ + name: index.name, + columns: index.columns, + }); + delete json2.tables[tableName].indexes[index.name]; + } + } + } + + const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); + + const { + created: createdTables, + deleted: deletedTables, + renamed: renamedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.added, + deleted: tablesDiff.deleted, + }); + + const tablesPatchedSnap1 = copy(json1); + tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { + const { name } = nameChangeFor(it, renamedTables); + it.name = name; + return [name, it]; + }); + + const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); + const columnRenames = [] as { + table: string; + renames: { from: Column; to: Column }[]; + }[]; + + const columnCreates = [] as { + table: string; + columns: Column[]; + }[]; + + const columnDeletes = [] as { + table: string; + columns: Column[]; + }[]; + + for (let entry of Object.values(res)) { + const { renamed, created, deleted } = await columnsResolver({ + tableName: entry.name, + schema: entry.schema, + deleted: entry.columns.deleted, + created: entry.columns.added, + }); + + if (created.length > 0) { + columnCreates.push({ + table: entry.name, + columns: created, + }); + } + + if (deleted.length > 0) { + columnDeletes.push({ + table: entry.name, + columns: deleted, + }); + } + + if (renamed.length > 0) { + columnRenames.push({ + table: entry.name, + renames: renamed, + }); + } + } + + const columnRenamesDict = columnRenames.reduce( + (acc, it) => { + acc[it.table] = it.renames; + return acc; + }, + {} as Record< + string, + { + from: Named; + to: Named; + }[] + >, + ); + + const columnsPatchedSnap1 = copy(tablesPatchedSnap1); + columnsPatchedSnap1.tables = mapEntries( + columnsPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapKeys( + tableValue.columns, + (columnKey, column) => { + const rens = columnRenamesDict[tableValue.name] || []; + const newName = columnChangeFor(columnKey, rens); + column.name = newName; + return newName; + }, + ); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + const viewsDiff = diffSchemasOrTables(json1.views, json2.views); + + const { + created: createdViews, + deleted: deletedViews, + renamed: renamedViews, // renamed or moved + } = await viewsResolver({ + created: viewsDiff.added, + deleted: viewsDiff.deleted, + }); + + const renamesViewDic: Record = {}; + renamedViews.forEach((it) => { + renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; + }); + + const viewsPatchedSnap1 = copy(columnsPatchedSnap1); + viewsPatchedSnap1.views = mapEntries( + viewsPatchedSnap1.views, + (viewKey, viewValue) => { + const rename = renamesViewDic[viewValue.name]; + + if (rename) { + viewValue.name = rename.to; + viewKey = rename.to; + } + + return [viewKey, viewValue]; + }, + ); + + const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); + + const typedResult: DiffResultMysql = diffResultSchemeMysql.parse(diffResult); + + const jsonStatements: JsonStatement[] = []; + + const jsonCreateIndexesForCreatedTables = createdTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.indexes, + ); + }) + .flat(); + + const jsonDropTables = deletedTables.map((it) => { + return prepareDropTableJson(it); + }); + + const jsonRenameTables = renamedTables.map((it) => { + return prepareRenameTableJson(it.from, it.to); + }); + + const alteredTables = typedResult.alteredTablesWithColumns; + + const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; + const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; + const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; + + const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; + const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + + const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames + .map((it) => prepareRenameColumns(it.table, '', it.renames)) + .flat(); + + const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates + .map((it) => _prepareAddColumns(it.table, '', it.columns)) + .flat(); + + const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes + .map((it) => _prepareDropColumns(it.table, '', it.columns)) + .flat(); + + alteredTables.forEach((it) => { + // This part is needed to make sure that same columns in a table are not triggered for change + // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name + // We double-check that pk with same set of columns are both in added and deleted diffs + // let addedColumns: string[] = []; + // for (const addedPkName of Object.keys(it.addedCompositePKs)) { + // const addedPkColumns = it.addedCompositePKs[addedPkName]; + // addedColumns = MySqlSquasher.unsquashPK(addedPkColumns).columns; + // } + + // let deletedColumns: string[] = []; + // for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { + // const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; + // deletedColumns = MySqlSquasher.unsquashPK(deletedPkColumns).columns; + // } + + // Don't need to sort, but need to add tests for it + // addedColumns.sort(); + // deletedColumns.sort(); + // const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); + + let addedCompositePKs: JsonCreateCompositePK[] = []; + let deletedCompositePKs: JsonDeleteCompositePK[] = []; + let alteredCompositePKs: JsonAlterCompositePK[] = []; + + addedCompositePKs = prepareAddCompositePrimaryKeyMySql( + it.name, + it.addedCompositePKs, + prevFull, + curFull, + ); + deletedCompositePKs = prepareDeleteCompositePrimaryKeyMySql( + it.name, + it.deletedCompositePKs, + ); + // } + alteredCompositePKs = prepareAlterCompositePrimaryKeyMySql( + it.name, + it.alteredCompositePKs, + prevFull, + curFull, + ); + + // add logic for unique constraints + let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + let createdCheckConstraints: JsonCreateCheckConstraint[] = []; + let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + + addedUniqueConstraints = prepareAddUniqueConstraint( + it.name, + it.schema, + it.addedUniqueConstraints, + ); + deletedUniqueConstraints = prepareDeleteUniqueConstraint( + it.name, + it.schema, + it.deletedUniqueConstraints, + ); + if (it.alteredUniqueConstraints) { + const added: Record = {}; + const deleted: Record = {}; + for (const k of Object.keys(it.alteredUniqueConstraints)) { + added[k] = it.alteredUniqueConstraints[k].__new; + deleted[k] = it.alteredUniqueConstraints[k].__old; + } + addedUniqueConstraints.push( + ...prepareAddUniqueConstraint(it.name, it.schema, added), + ); + deletedUniqueConstraints.push( + ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), + ); + } + + createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); + deletedCheckConstraints = prepareDeleteCheckConstraint( + it.name, + it.schema, + it.deletedCheckConstraints, + ); + + // skip for push + if (it.alteredCheckConstraints && action !== 'push') { + const added: Record = {}; + const deleted: Record = {}; + + for (const k of Object.keys(it.alteredCheckConstraints)) { + added[k] = it.alteredCheckConstraints[k].__new; + deleted[k] = it.alteredCheckConstraints[k].__old; + } + createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); + deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); + } + + jsonAddedCompositePKs.push(...addedCompositePKs); + jsonDeletedCompositePKs.push(...deletedCompositePKs); + jsonAlteredCompositePKs.push(...alteredCompositePKs); + + jsonAddedUniqueConstraints.push(...addedUniqueConstraints); + jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); + jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + + jsonCreatedCheckConstraints.push(...createdCheckConstraints); + jsonDeletedCheckConstraints.push(...deletedCheckConstraints); + }); + + // const rColumns = jsonRenameColumnsStatements.map((it) => { + // const tableName = it.tableName; + // const schema = it.schema; + // return { + // from: { schema, table: tableName, column: it.oldColumnName }, + // to: { schema, table: tableName, column: it.newColumnName }, + // }; + // }); + + const jsonTableAlternations = alteredTables + .map(() => { + throw new Error('unexpected'); + }) + .flat(); + + const jsonCreateIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareCreateIndexesJson( + it.name, + it.schema, + it.addedIndexes || {}, + ); + }) + .flat(); + + const jsonDropIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareDropIndexesJson( + it.name, + it.schema, + it.deletedIndexes || {}, + ); + }) + .flat(); + + alteredTables.forEach((it) => { + const droppedIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__old; + return current; + }, + {} as Record, + ); + const createdIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__new; + return current; + }, + {} as Record, + ); + + jsonCreateIndexesForAllAlteredTables.push( + ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}), + ); + jsonDropIndexesForAllAlteredTables.push( + ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), + ); + }); + + const jsonCreateReferencesForCreatedTables: JsonCreateReferenceStatement[] = createdTables + .map((it) => { + return prepareCreateReferencesJson(it.name, it.schema, it.foreignKeys); + }) + .flat(); + + const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = alteredTables + .map((it) => { + const forAdded = prepareCreateReferencesJson( + it.name, + it.schema, + it.addedForeignKeys, + ); + + const forAltered = prepareDropReferencesJson( + it.name, + it.schema, + it.deletedForeignKeys, + ); + + const alteredFKs = prepareAlterReferencesJson( + it.name, + it.schema, + it.alteredForeignKeys, + ); + + return [...forAdded, ...forAltered, ...alteredFKs]; + }) + .flat(); + + const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( + (t) => t.type === 'create_reference', + ); + const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( + (t) => t.type === 'delete_reference', + ); + + const jsonMySqlCreateTables = createdTables.map((it) => { + return prepareMySqlCreateTableJson( + it, + curFull as MySqlSchema, + ); + }); + + const createViews: JsonCreateMySqlViewStatement[] = []; + const dropViews: JsonDropViewStatement[] = []; + const renameViews: JsonRenameViewStatement[] = []; + + createViews.push( + ...createdViews.filter((it) => !it.isExisting).map((it) => { + return prepareMySqlCreateViewJson( + it.name, + it.definition!, + it.meta, + ); + }), + ); + + dropViews.push( + ...deletedViews.filter((it) => !it.isExisting).map((it) => { + return prepareDropViewJson(it.name); + }), + ); + + renameViews.push( + ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[it.from.name].isExisting).map((it) => { + return prepareRenameViewJson(it.to.name, it.from.name); + }), + ); + + const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); + + for (const alteredView of alteredViews) { + const { definition, meta } = json2.views[alteredView.name]; + + if (alteredView.alteredExisting) { + dropViews.push(prepareDropViewJson(alteredView.name)); + + createViews.push( + prepareMySqlCreateViewJson( + alteredView.name, + definition!, + meta, + ), + ); + + continue; + } + + if (alteredView.alteredDefinition && action !== 'push') { + createViews.push( + prepareMySqlCreateViewJson( + alteredView.name, + definition!, + meta, + true, + ), + ); + continue; + } + + if (alteredView.alteredMeta) { + throw new Error('unexpected'); + } + } + + jsonStatements.push(...jsonMySqlCreateTables); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...dropViews); + jsonStatements.push(...renameViews); + + jsonStatements.push(...jsonDeletedUniqueConstraints); + jsonStatements.push(...jsonDeletedCheckConstraints); + + jsonStatements.push(...jsonDroppedReferencesForAlteredTables); + + // Will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonDropIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDeletedCompositePKs); + jsonStatements.push(...jsonTableAlternations); + jsonStatements.push(...jsonAddedCompositePKs); + jsonStatements.push(...jsonAddColumnsStatemets); + + jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...jsonDeletedUniqueConstraints); + + jsonStatements.push(...jsonCreateReferencesForCreatedTables); + jsonStatements.push(...jsonCreateIndexesForCreatedTables); + jsonStatements.push(...jsonCreatedCheckConstraints); + + jsonStatements.push(...jsonCreatedReferencesForAlteredTables); + jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDropColumnsStatemets); + + // jsonStatements.push(...jsonDeletedCompositePKs); + // jsonStatements.push(...jsonAddedCompositePKs); + jsonStatements.push(...jsonAlteredCompositePKs); + + jsonStatements.push(...createViews); + + jsonStatements.push(...jsonAlteredUniqueConstraints); + + const sqlStatements = fromJson(jsonStatements, 'mysql'); + + // const rTables = renamedTables.map((it) => { + // return { from: it.from, to: it.to }; + // }); + + return { + statements: jsonStatements, + sqlStatements, + _meta: { columns: [], schemas: [], tables: [] }, + }; +}; diff --git a/drizzle-kit/src/serializer/mysqlSchema.ts b/drizzle-kit/src/legacy/mysql-v5/mysqlSchema.ts similarity index 98% rename from drizzle-kit/src/serializer/mysqlSchema.ts rename to drizzle-kit/src/legacy/mysql-v5/mysqlSchema.ts index 3a6fb91799..5b0005a4ae 100644 --- a/drizzle-kit/src/serializer/mysqlSchema.ts +++ b/drizzle-kit/src/legacy/mysql-v5/mysqlSchema.ts @@ -1,4 +1,5 @@ -import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; +import type { TypeOf } from 'zod'; +import { any, boolean, enum as enumType, literal, object, record, string, union } from 'zod'; import { mapValues, originUUID } from '../global'; // ------- V3 -------- @@ -30,7 +31,7 @@ const column = object({ default: any().optional(), onUpdate: any().optional(), generated: object({ - type: enumType(['stored', 'virtual']), + type: enumType(['stored', 'virtual', 'persisted']), as: string(), }).optional(), }).strict(); diff --git a/drizzle-kit/src/legacy/mysql-v5/mysqlSerializer.ts b/drizzle-kit/src/legacy/mysql-v5/mysqlSerializer.ts new file mode 100644 index 0000000000..3464057557 --- /dev/null +++ b/drizzle-kit/src/legacy/mysql-v5/mysqlSerializer.ts @@ -0,0 +1,558 @@ +import chalk from 'chalk'; +import { getTableName, is, SQL } from 'orm044'; +import type { AnyMySqlTable, MySqlView } from 'orm044/mysql-core'; +import { + getTableConfig, + getViewConfig, + MySqlColumn, + MySqlDialect, + type PrimaryKey as PrimaryKeyORM, + uniqueKeyName, +} from 'orm044/mysql-core'; +import type { CasingType } from 'src/cli/validations/common'; +import { withStyle } from '../outputs'; +import { escapeSingleQuotes } from '../utils'; +import { getColumnCasing, sqlToStr } from '../utils'; +import type { + CheckConstraint, + Column, + ForeignKey, + Index, + MySqlKitInternals, + MySqlSchemaInternal, + PrimaryKey, + Table, + UniqueConstraint, + View, +} from './mysqlSchema'; + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +const handleEnumType = (type: string) => { + let str = type.split('(')[1]; + str = str.substring(0, str.length - 1); + const values = str.split(',').map((v) => `'${escapeSingleQuotes(v.substring(1, v.length - 1))}'`); + return `enum(${values.join(',')})`; +}; + +export const generateMySqlSnapshot = ( + tables: AnyMySqlTable[], + views: MySqlView[], + casing: CasingType | undefined, +): MySqlSchemaInternal => { + const dialect = new MySqlDialect({ casing }); + const result: Record = {}; + const resultViews: Record = {}; + const internal: MySqlKitInternals = { tables: {}, indexes: {} }; + + for (const table of tables) { + const { + name: tableName, + columns, + indexes, + foreignKeys, + schema, + checks, + primaryKeys, + uniqueConstraints, + } = getTableConfig(table); + + const columnsObject: Record = {}; + const indexesObject: Record = {}; + const foreignKeysObject: Record = {}; + const primaryKeysObject: Record = {}; + const uniqueConstraintObject: Record = {}; + const checkConstraintObject: Record = {}; + + // this object will help to identify same check names + let checksInTable: Record = {}; + + columns.forEach((column) => { + const name = getColumnCasing(column, casing); + const notNull: boolean = column.notNull; + const sqlType = column.getSQLType(); + const sqlTypeLowered = sqlType.toLowerCase(); + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? false + : (column as any).autoIncrement; + + const generated = column.generated; + + const columnToSet: Column = { + name, + type: sqlType.startsWith('enum') ? handleEnumType(sqlType) : sqlType, + primaryKey: false, + // If field is autoincrement it's notNull by default + // notNull: autoIncrement ? true : notNull, + notNull, + autoincrement: autoIncrement, + onUpdate: (column as any).hasOnUpdateNow, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: generated.mode ?? 'stored', + } + : undefined, + }; + + if (column.primary) { + primaryKeysObject[`${tableName}_${name}`] = { + name: `${tableName}_${name}`, + columns: [name], + }; + } + + if (column.isUnique) { + const existingUnique = uniqueConstraintObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning(`We've found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. + The unique constraint ${ + chalk.underline.blue( + column.uniqueName, + ) + } on the ${ + chalk.underline.blue( + name, + ) + } column is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`) + }`, + ); + process.exit(1); + } + uniqueConstraintObject[column.uniqueName!] = { + name: column.uniqueName!, + columns: [columnToSet.name], + }; + } + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${escapeSingleQuotes(column.default)}'`; + } else { + if (sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if ( + sqlTypeLowered.startsWith('datetime') + || sqlTypeLowered.startsWith('timestamp') + ) { + columnToSet.default = `'${ + column.default + .toISOString() + .replace('T', ' ') + .slice(0, 23) + }'`; + } + } else { + columnToSet.default = column.default; + } + } + if (['blob', 'text', 'json'].includes(column.getSQLType())) { + columnToSet.default = `(${columnToSet.default})`; + } + } + } + columnsObject[name] = columnToSet; + }); + + primaryKeys.map((pk: PrimaryKeyORM) => { + const originalColumnNames = pk.columns.map((c) => c.name); + const columnNames = pk.columns.map((c: any) => getColumnCasing(c, casing)); + + let name = pk.getName(); + if (casing !== undefined) { + for (let i = 0; i < originalColumnNames.length; i++) { + name = name.replace(originalColumnNames[i], columnNames[i]); + } + } + + primaryKeysObject[name] = { + name, + columns: columnNames, + }; + + // all composite pk's should be treated as notNull + for (const column of pk.columns) { + columnsObject[getColumnCasing(column, casing)].notNull = true; + } + }); + + uniqueConstraints?.map((unq) => { + const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); + + const name = unq.name ?? uniqueKeyName(table, columnNames); + + const existingUnique = uniqueConstraintObject[name]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. \nThe unique constraint ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + columnNames.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + + uniqueConstraintObject[name] = { + name: unq.name!, + columns: columnNames, + }; + }); + + const fks: ForeignKey[] = foreignKeys.map((fk) => { + const tableFrom = tableName; + const onDelete = fk.onDelete ?? 'no action'; + const onUpdate = fk.onUpdate ?? 'no action'; + const reference = fk.reference(); + + const referenceFT = reference.foreignTable; + + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + const tableTo = getTableName(referenceFT); + + const originalColumnsFrom = reference.columns.map((it) => it.name); + const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); + const originalColumnsTo = reference.foreignColumns.map((it) => it.name); + const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); + + let name = fk.getName(); + if (casing !== undefined) { + for (let i = 0; i < originalColumnsFrom.length; i++) { + name = name.replace(originalColumnsFrom[i], columnsFrom[i]); + } + for (let i = 0; i < originalColumnsTo.length; i++) { + name = name.replace(originalColumnsTo[i], columnsTo[i]); + } + } + + return { + name, + tableFrom, + tableTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + } as ForeignKey; + }); + + fks.forEach((it) => { + foreignKeysObject[it.name] = it; + }); + + indexes.forEach((value) => { + const columns = value.config.columns; + const name = value.config.name; + + let indexColumns = columns.map((it) => { + if (is(it, SQL)) { + const sql = dialect.sqlToQuery(it, 'indexes').sql; + if (typeof internal!.indexes![name] === 'undefined') { + internal!.indexes![name] = { + columns: { + [sql]: { + isExpression: true, + }, + }, + }; + } else { + if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { + internal!.indexes![name]!.columns[sql] = { + isExpression: true, + }; + } else { + internal!.indexes![name]!.columns[sql]!.isExpression = true; + } + } + return sql; + } else { + return `${getColumnCasing(it, casing)}`; + } + }); + + if (value.config.unique) { + if (typeof uniqueConstraintObject[name] !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. \nThe unique index ${ + chalk.underline.blue( + name, + ) + } on the ${ + chalk.underline.blue( + indexColumns.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue( + uniqueConstraintObject[name].columns.join(','), + ) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + } else { + if (typeof foreignKeysObject[name] !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `In MySQL, when creating a foreign key, an index is automatically generated with the same name as the foreign key constraint.\n\nWe have encountered a collision between the index name on columns ${ + chalk.underline.blue( + indexColumns.join(','), + ) + } and the foreign key on columns ${ + chalk.underline.blue( + foreignKeysObject[name].columnsFrom.join(','), + ) + }. Please change either the index name or the foreign key name. For more information, please refer to https://dev.mysql.com/doc/refman/8.0/en/constraint-foreign-key.html\n + `, + ) + }`, + ); + process.exit(1); + } + } + + indexesObject[name] = { + name, + columns: indexColumns, + isUnique: value.config.unique ?? false, + using: value.config.using, + algorithm: value.config.algorythm, + lock: value.config.lock, + }; + }); + + checks.forEach((check) => { + const checkName = check.name; + if (typeof checksInTable[tableName] !== 'undefined') { + if (checksInTable[tableName].includes(check.name)) { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated check constraint name in ${ + chalk.underline.blue( + tableName, + ) + }. Please rename your check constraint in the ${ + chalk.underline.blue( + tableName, + ) + } table`, + ) + }`, + ); + process.exit(1); + } + checksInTable[tableName].push(checkName); + } else { + checksInTable[tableName] = [check.name]; + } + + checkConstraintObject[checkName] = { + name: checkName, + value: dialect.sqlToQuery(check.value).sql, + }; + }); + + // only handle tables without schemas + if (!schema) { + result[tableName] = { + name: tableName, + columns: columnsObject, + indexes: indexesObject, + foreignKeys: foreignKeysObject, + compositePrimaryKeys: primaryKeysObject, + uniqueConstraints: uniqueConstraintObject, + checkConstraint: checkConstraintObject, + }; + } + } + + for (const view of views) { + const { + isExisting, + name, + query, + schema, + selectedFields, + algorithm, + sqlSecurity, + withCheckOption, + } = getViewConfig(view); + + const columnsObject: Record = {}; + + const existingView = resultViews[name]; + if (typeof existingView !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated view name across ${ + chalk.underline.blue( + schema ?? 'public', + ) + } schema. Please rename your view`, + ) + }`, + ); + process.exit(1); + } + + for (const key in selectedFields) { + if (is(selectedFields[key], MySqlColumn)) { + const column = selectedFields[key]; + + const notNull: boolean = column.notNull; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + const autoIncrement = typeof (column as any).autoIncrement === 'undefined' + ? false + : (column as any).autoIncrement; + + const generated = column.generated; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + primaryKey: false, + // If field is autoincrement it's notNull by default + // notNull: autoIncrement ? true : notNull, + notNull, + autoincrement: autoIncrement, + onUpdate: (column as any).hasOnUpdateNow, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: generated.mode ?? 'stored', + } + : undefined, + }; + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if ( + sqlTypeLowered.startsWith('datetime') + || sqlTypeLowered.startsWith('timestamp') + ) { + columnToSet.default = `'${ + column.default + .toISOString() + .replace('T', ' ') + .slice(0, 23) + }'`; + } + } else { + columnToSet.default = column.default; + } + } + if (['blob', 'text', 'json'].includes(column.getSQLType())) { + columnToSet.default = `(${columnToSet.default})`; + } + } + } + columnsObject[column.name] = columnToSet; + } + } + + resultViews[name] = { + columns: columnsObject, + name, + isExisting, + definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, + withCheckOption, + algorithm: algorithm ?? 'undefined', // set default values + sqlSecurity: sqlSecurity ?? 'definer', // set default values + }; + } + + return { + version: '5', + dialect: 'mysql', + tables: result, + views: resultViews, + _meta: { + tables: {}, + columns: {}, + }, + internal, + }; +}; + +// function clearDefaults(defaultValue: any, collate: string) { +// if (typeof collate === 'undefined' || collate === null) { +// collate = `utf8mb4`; +// } + +// let resultDefault = defaultValue; +// collate = `_${collate}`; +// if (defaultValue.startsWith(collate)) { +// resultDefault = resultDefault +// .substring(collate.length, defaultValue.length) +// .replace(/\\/g, ''); +// if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) { +// return `('${escapeSingleQuotes(resultDefault.substring(1, resultDefault.length - 1))}')`; +// } else { +// return `'${escapeSingleQuotes(resultDefault.substring(1, resultDefault.length - 1))}'`; +// } +// } else { +// return `(${resultDefault})`; +// } +// } diff --git a/drizzle-kit/src/legacy/mysql-v5/serializer.ts b/drizzle-kit/src/legacy/mysql-v5/serializer.ts new file mode 100644 index 0000000000..2a4a467ac7 --- /dev/null +++ b/drizzle-kit/src/legacy/mysql-v5/serializer.ts @@ -0,0 +1,30 @@ +import { is } from 'orm044'; +import { MySqlTable, MySqlView } from 'orm044/mysql-core'; +import type { CasingType } from '../common'; +import type { MySqlSchema as SCHEMA } from './mysqlSchema'; +import { generateMySqlSnapshot } from './mysqlSerializer'; + +export type MysqlSchema = Record< + string, + | MySqlTable + | MySqlView + | unknown +>; + +export const serializeMysql = async ( + schema: MysqlSchema, + casing: CasingType | undefined, +): Promise => { + const tables = Object.values(schema).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + const views = Object.values(schema).filter((it) => is(it, MySqlView)) as MySqlView[]; + const snapshot = generateMySqlSnapshot( + tables, + views, + casing, + ); + return { + id: 'id', + prevId: 'prev_id', + ...snapshot, + }; +}; diff --git a/drizzle-kit/src/legacy/outputs.ts b/drizzle-kit/src/legacy/outputs.ts new file mode 100644 index 0000000000..6e9d520dd6 --- /dev/null +++ b/drizzle-kit/src/legacy/outputs.ts @@ -0,0 +1,91 @@ +import chalk from 'chalk'; +import { sqliteDriversLiterals } from './common'; + +export const withStyle = { + error: (str: string) => `${chalk.red(`${chalk.white.bgRed(' Invalid input ')} ${str}`)}`, + warning: (str: string) => `${chalk.white.bgGray(' Warning ')} ${str}`, + errorWarning: (str: string) => `${chalk.red(`${chalk.white.bgRed(' Warning ')} ${str}`)}`, + fullWarning: (str: string) => `${chalk.black.bgYellow(' Warning ')} ${chalk.bold(str)}`, + suggestion: (str: string) => `${chalk.white.bgGray(' Suggestion ')} ${str}`, + info: (str: string) => `${chalk.grey(str)}`, +}; + +export const outputs = { + studio: { + drivers: (param: string) => + withStyle.error( + `"${param}" is not a valid driver. Available drivers: "pg", "mysql2", "better-sqlite", "libsql", "turso". You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + noCredentials: () => + withStyle.error( + `Please specify a 'dbCredentials' param in config. It will help drizzle to know how to query you database. You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + noDriver: () => + withStyle.error( + `Please specify a 'driver' param in config. It will help drizzle to know how to query you database. You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + noDialect: () => + withStyle.error( + `Please specify 'dialect' param in config, either of 'postgresql', 'mysql', 'sqlite', turso or singlestore`, + ), + }, + common: { + ambiguousParams: (command: string) => + withStyle.error( + `You can't use both --config and other cli options for ${command} command`, + ), + schema: (command: string) => withStyle.error(`"--schema" is a required field for ${command} command`), + }, + postgres: { + connection: { + required: () => + withStyle.error( + `Either "url" or "host", "database" are required for database connection`, + ), + awsDataApi: () => + withStyle.error( + "You need to provide 'database', 'secretArn' and 'resourceArn' for Drizzle Kit to connect to AWS Data API", + ), + }, + }, + mysql: { + connection: { + driver: () => withStyle.error(`Only "mysql2" is available options for "--driver"`), + required: () => + withStyle.error( + `Either "url" or "host", "database" are required for database connection`, + ), + }, + }, + sqlite: { + connection: { + driver: () => { + const listOfDrivers = sqliteDriversLiterals + .map((it) => `'${it.value}'`) + .join(', '); + return withStyle.error( + `Either ${listOfDrivers} are available options for 'driver' param`, + ); + }, + url: (driver: string) => + withStyle.error( + `"url" is a required option for driver "${driver}". You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + authToken: (driver: string) => + withStyle.error( + `"authToken" is a required option for driver "${driver}". You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, + ), + }, + introspect: {}, + push: {}, + }, + singlestore: { + connection: { + driver: () => withStyle.error(`Only "mysql2" is available options for "--driver"`), + required: () => + withStyle.error( + `Either "url" or "host", "database" are required for database connection`, + ), + }, + }, +}; diff --git a/drizzle-kit/src/legacy/postgres-v7/pgDiff.ts b/drizzle-kit/src/legacy/postgres-v7/pgDiff.ts new file mode 100644 index 0000000000..e2fd6e95b5 --- /dev/null +++ b/drizzle-kit/src/legacy/postgres-v7/pgDiff.ts @@ -0,0 +1,1725 @@ +import { mapEntries, mapKeys, mapValues } from '../global'; +import { applyJsonDiff, diffColumns, diffIndPolicies, diffPolicies, diffSchemasOrTables } from '../jsonDiffer'; +import type { + JsonAddColumnStatement, + JsonAlterCompositePK, + JsonAlterIndPolicyStatement, + JsonAlterPolicyStatement, + JsonAlterTableSetSchema, + JsonAlterUniqueConstraint, + JsonAlterViewStatement, + JsonCreateCheckConstraint, + JsonCreateCompositePK, + JsonCreateIndPolicyStatement, + JsonCreatePgViewStatement, + JsonCreatePolicyStatement, + JsonCreateReferenceStatement, + JsonCreateUniqueConstraint, + JsonDeleteCheckConstraint, + JsonDeleteCompositePK, + JsonDeleteUniqueConstraint, + JsonDisableRLSStatement, + JsonDropColumnStatement, + JsonDropIndPolicyStatement, + JsonDropPolicyStatement, + JsonDropViewStatement, + JsonEnableRLSStatement, + JsonIndRenamePolicyStatement, + JsonReferenceStatement, + JsonRenameColumnStatement, + JsonRenamePolicyStatement, + JsonRenameViewStatement, + JsonStatement, +} from '../jsonStatements'; +import { + _prepareAddColumns, + _prepareDropColumns, + prepareAddCheckConstraint, + prepareAddCompositePrimaryKeyPg, + prepareAddUniqueConstraintPg, + prepareAddValuesToEnumJson, + prepareAlterCompositePrimaryKeyPg, + prepareAlterIndPolicyJson, + prepareAlterPolicyJson, + prepareAlterReferencesJson, + prepareAlterRoleJson, + prepareAlterSequenceJson, + prepareCreateEnumJson, + prepareCreateIndPolicyJsons, + prepareCreatePolicyJsons, + prepareCreateReferencesJson, + prepareCreateRoleJson, + prepareCreateSchemasJson, + prepareCreateSequenceJson, + prepareDeleteCheckConstraint, + prepareDeleteCompositePrimaryKeyPg, + prepareDeleteSchemasJson, + prepareDeleteUniqueConstraintPg, + prepareDropEnumJson, + prepareDropEnumValues, + prepareDropIndexesJson, + prepareDropIndPolicyJsons, + prepareDropPolicyJsons, + prepareDropReferencesJson, + prepareDropRoleJson, + prepareDropSequenceJson, + prepareDropTableJson, + prepareDropViewJson, + prepareMoveEnumJson, + prepareMoveSequenceJson, + preparePgAlterColumns, + preparePgAlterViewAddWithOptionJson, + preparePgAlterViewAlterSchemaJson, + preparePgAlterViewAlterTablespaceJson, + preparePgAlterViewAlterUsingJson, + preparePgAlterViewDropWithOptionJson, + preparePgCreateIndexesJson, + preparePgCreateTableJson, + preparePgCreateViewJson, + prepareRenameColumns, + prepareRenameEnumJson, + prepareRenameIndPolicyJsons, + prepareRenamePolicyJsons, + prepareRenameRoleJson, + prepareRenameSchemasJson, + prepareRenameSequenceJson, + prepareRenameTableJson, + prepareRenameViewJson, +} from '../jsonStatements'; +import { copy } from '../utils'; + +import type { + Column, + ColumnsResolverInput, + ColumnsResolverOutput, + DiffResult, + Enum, + Named, + PolicyResolverInput, + PolicyResolverOutput, + ResolverInput, + ResolverOutput, + ResolverOutputWithMoved, + RolesResolverInput, + RolesResolverOutput, + Sequence, + Table, + TablePolicyResolverInput, + TablePolicyResolverOutput, +} from '../snapshotsDiffer'; +import { + columnChangeFor, + columnsResolver, + diffResultScheme, + enumsResolver, + indPolicyResolver, + nameChangeFor, + nameSchemaChangeFor, + policyResolver, + roleResolver, + schemaChangeFor, + schemasResolver, + sequencesResolver, + tablesResolver, + viewsResolver, +} from '../snapshotsDiffer'; +import { fromJson } from '../sqlgenerator'; +import type { PgSchema, PgSchemaSquashed, Policy, Role, View } from './pgSchema'; +import { dryPg, PgSquasher, squashPgScheme } from './pgSchema'; + +export const diff = async (opts: { + left?: PgSchema; + right: PgSchema; + mode?: 'push'; +}) => { + const left = opts.left ?? dryPg; + const json1 = squashPgScheme(left); + const json2 = squashPgScheme(opts.right); + return _diff( + json1, + json2, + schemasResolver, + enumsResolver, + sequencesResolver, + policyResolver, + indPolicyResolver, + roleResolver, + tablesResolver, + columnsResolver, + viewsResolver, + left, + opts.right, + opts.mode, + ); +}; + +export const _diff = async ( + json1: PgSchemaSquashed, + json2: PgSchemaSquashed, + schemasResolver: ( + input: ResolverInput, + ) => Promise>, + enumsResolver: ( + input: ResolverInput, + ) => Promise>, + sequencesResolver: ( + input: ResolverInput, + ) => Promise>, + policyResolver: ( + input: TablePolicyResolverInput, + ) => Promise>, + indPolicyResolver: ( + input: PolicyResolverInput, + ) => Promise>, + roleResolver: ( + input: RolesResolverInput, + ) => Promise>, + tablesResolver: ( + input: ResolverInput
, + ) => Promise>, + columnsResolver: ( + input: ColumnsResolverInput, + ) => Promise>, + viewsResolver: ( + input: ResolverInput, + ) => Promise>, + prevFull: PgSchema, + curFull: PgSchema, + action?: 'push' | undefined, +): Promise<{ + statements: JsonStatement[]; + sqlStatements: string[]; + _meta: + | { + schemas: {}; + tables: {}; + columns: {}; + } + | undefined; +}> => { + const schemasDiff = diffSchemasOrTables(json1.schemas, json2.schemas); + + const { + created: createdSchemas, + deleted: deletedSchemas, + renamed: renamedSchemas, + } = await schemasResolver({ + created: schemasDiff.added.map((it) => ({ name: it })), + deleted: schemasDiff.deleted.map((it) => ({ name: it })), + }); + + const schemasPatchedSnap1 = copy(json1); + schemasPatchedSnap1.tables = mapEntries( + schemasPatchedSnap1.tables, + (_, it) => { + const { key, schema } = schemaChangeFor(it, renamedSchemas); + it.schema = schema; + return [key, it]; + }, + ); + + schemasPatchedSnap1.enums = mapEntries(schemasPatchedSnap1.enums, (_, it) => { + const { key, schema } = schemaChangeFor(it, renamedSchemas); + it.schema = schema; + return [key, it]; + }); + + const enumsDiff = diffSchemasOrTables(schemasPatchedSnap1.enums, json2.enums); + + const { + created: createdEnums, + deleted: deletedEnums, + renamed: renamedEnums, + moved: movedEnums, + } = await enumsResolver({ + created: enumsDiff.added, + deleted: enumsDiff.deleted, + }); + + schemasPatchedSnap1.enums = mapEntries(schemasPatchedSnap1.enums, (_, it) => { + const { key, name, schema } = nameSchemaChangeFor(it, renamedEnums); + it.name = name; + it.schema = schema; + return [key, it]; + }); + + const columnTypesChangeMap = renamedEnums.reduce( + (acc, it) => { + acc[`${it.from.schema}.${it.from.name}`] = { + nameFrom: it.from.name, + nameTo: it.to.name, + schemaFrom: it.from.schema, + schemaTo: it.to.schema, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + schemaFrom: string; + schemaTo: string; + } + >, + ); + + const columnTypesMovesMap = movedEnums.reduce( + (acc, it) => { + acc[`${it.schemaFrom}.${it.name}`] = { + nameFrom: it.name, + nameTo: it.name, + schemaFrom: it.schemaFrom, + schemaTo: it.schemaTo, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + schemaFrom: string; + schemaTo: string; + } + >, + ); + + schemasPatchedSnap1.tables = mapEntries( + schemasPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapValues(tableValue.columns, (column) => { + const key = `${column.typeSchema || 'public'}.${column.type}`; + const change = columnTypesChangeMap[key] || columnTypesMovesMap[key]; + + if (change) { + column.type = change.nameTo; + column.typeSchema = change.schemaTo; + } + + return column; + }); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + schemasPatchedSnap1.sequences = mapEntries( + schemasPatchedSnap1.sequences, + (_, it) => { + const { key, schema } = schemaChangeFor(it, renamedSchemas); + it.schema = schema; + return [key, it]; + }, + ); + + const sequencesDiff = diffSchemasOrTables( + schemasPatchedSnap1.sequences, + json2.sequences, + ); + + const { + created: createdSequences, + deleted: deletedSequences, + renamed: renamedSequences, + moved: movedSequences, + } = await sequencesResolver({ + created: sequencesDiff.added, + deleted: sequencesDiff.deleted, + }); + + schemasPatchedSnap1.sequences = mapEntries( + schemasPatchedSnap1.sequences, + (_, it) => { + const { key, name, schema } = nameSchemaChangeFor(it, renamedSequences); + it.name = name; + it.schema = schema; + return [key, it]; + }, + ); + + const sequencesChangeMap = renamedSequences.reduce( + (acc, it) => { + acc[`${it.from.schema}.${it.from.name}`] = { + nameFrom: it.from.name, + nameTo: it.to.name, + schemaFrom: it.from.schema, + schemaTo: it.to.schema, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + schemaFrom: string; + schemaTo: string; + } + >, + ); + + const sequencesMovesMap = movedSequences.reduce( + (acc, it) => { + acc[`${it.schemaFrom}.${it.name}`] = { + nameFrom: it.name, + nameTo: it.name, + schemaFrom: it.schemaFrom, + schemaTo: it.schemaTo, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + schemaFrom: string; + schemaTo: string; + } + >, + ); + + schemasPatchedSnap1.tables = mapEntries( + schemasPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapValues(tableValue.columns, (column) => { + const key = `${column.typeSchema || 'public'}.${column.type}`; + const change = sequencesChangeMap[key] || sequencesMovesMap[key]; + + if (change) { + column.type = change.nameTo; + column.typeSchema = change.schemaTo; + } + + return column; + }); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + const rolesDiff = diffSchemasOrTables( + schemasPatchedSnap1.roles, + json2.roles, + ); + + const { + created: createdRoles, + deleted: deletedRoles, + renamed: renamedRoles, + } = await roleResolver({ + created: rolesDiff.added, + deleted: rolesDiff.deleted, + }); + + schemasPatchedSnap1.roles = mapEntries( + schemasPatchedSnap1.roles, + (_, it) => { + const { name } = nameChangeFor(it, renamedRoles); + it.name = name; + return [name, it]; + }, + ); + + const rolesChangeMap = renamedRoles.reduce( + (acc, it) => { + acc[it.from.name] = { + nameFrom: it.from.name, + nameTo: it.to.name, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + } + >, + ); + + schemasPatchedSnap1.roles = mapEntries( + schemasPatchedSnap1.roles, + (roleKey, roleValue) => { + const key = roleKey; + const change = rolesChangeMap[key]; + + if (change) { + roleValue.name = change.nameTo; + } + + return [roleKey, roleValue]; + }, + ); + + const tablesDiff = diffSchemasOrTables( + schemasPatchedSnap1.tables as Record, + json2.tables, + ); + + const { + created: createdTables, + deleted: deletedTables, + moved: movedTables, + renamed: renamedTables, // renamed or moved + } = await tablesResolver({ + created: tablesDiff.added, + deleted: tablesDiff.deleted, + }); + + const tablesPatchedSnap1 = copy(schemasPatchedSnap1); + tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { + const { key, name, schema } = nameSchemaChangeFor(it, renamedTables); + it.name = name; + it.schema = schema; + return [key, it]; + }); + + const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); + + const columnRenames = [] as { + table: string; + schema: string; + renames: { from: Column; to: Column }[]; + }[]; + + const columnCreates = [] as { + table: string; + schema: string; + columns: Column[]; + }[]; + + const columnDeletes = [] as { + table: string; + schema: string; + columns: Column[]; + }[]; + + for (let entry of Object.values(res)) { + const { renamed, created, deleted } = await columnsResolver({ + tableName: entry.name, + schema: entry.schema, + deleted: entry.columns.deleted, + created: entry.columns.added, + }); + + if (created.length > 0) { + columnCreates.push({ + table: entry.name, + schema: entry.schema, + columns: created, + }); + } + + if (deleted.length > 0) { + columnDeletes.push({ + table: entry.name, + schema: entry.schema, + columns: deleted, + }); + } + + if (renamed.length > 0) { + columnRenames.push({ + table: entry.name, + schema: entry.schema, + renames: renamed, + }); + } + } + + const columnRenamesDict = columnRenames.reduce( + (acc, it) => { + acc[`${it.schema || 'public'}.${it.table}`] = it.renames; + return acc; + }, + {} as Record< + string, + { + from: Named; + to: Named; + }[] + >, + ); + + const columnsPatchedSnap1 = copy(tablesPatchedSnap1); + columnsPatchedSnap1.tables = mapEntries( + columnsPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedColumns = mapKeys( + tableValue.columns, + (columnKey, column) => { + const rens = columnRenamesDict[ + `${tableValue.schema || 'public'}.${tableValue.name}` + ] || []; + + const newName = columnChangeFor(columnKey, rens); + column.name = newName; + return newName; + }, + ); + + tableValue.columns = patchedColumns; + return [tableKey, tableValue]; + }, + ); + + //// Policies + + const policyRes = diffPolicies(tablesPatchedSnap1.tables, json2.tables); + + const policyRenames = [] as { + table: string; + schema: string; + renames: { from: Policy; to: Policy }[]; + }[]; + + const policyCreates = [] as { + table: string; + schema: string; + columns: Policy[]; + }[]; + + const policyDeletes = [] as { + table: string; + schema: string; + columns: Policy[]; + }[]; + + for (let entry of Object.values(policyRes)) { + const { renamed, created, deleted } = await policyResolver({ + tableName: entry.name, + schema: entry.schema, + deleted: entry.policies.deleted.map( + action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy, + ), + created: entry.policies.added.map(action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy), + }); + + if (created.length > 0) { + policyCreates.push({ + table: entry.name, + schema: entry.schema, + columns: created, + }); + } + + if (deleted.length > 0) { + policyDeletes.push({ + table: entry.name, + schema: entry.schema, + columns: deleted, + }); + } + + if (renamed.length > 0) { + policyRenames.push({ + table: entry.name, + schema: entry.schema, + renames: renamed, + }); + } + } + + const policyRenamesDict = columnRenames.reduce( + (acc, it) => { + acc[`${it.schema || 'public'}.${it.table}`] = it.renames; + return acc; + }, + {} as Record< + string, + { + from: Named; + to: Named; + }[] + >, + ); + + const policyPatchedSnap1 = copy(tablesPatchedSnap1); + policyPatchedSnap1.tables = mapEntries( + policyPatchedSnap1.tables, + (tableKey, tableValue) => { + const patchedPolicies = mapKeys( + tableValue.policies, + (policyKey, policy) => { + const rens = policyRenamesDict[ + `${tableValue.schema || 'public'}.${tableValue.name}` + ] || []; + + const newName = columnChangeFor(policyKey, rens); + const unsquashedPolicy = action === 'push' + ? PgSquasher.unsquashPolicyPush(policy) + : PgSquasher.unsquashPolicy(policy); + unsquashedPolicy.name = newName; + policy = PgSquasher.squashPolicy(unsquashedPolicy); + return newName; + }, + ); + + tableValue.policies = patchedPolicies; + return [tableKey, tableValue]; + }, + ); + + //// Individual policies + + const indPolicyRes = diffIndPolicies(policyPatchedSnap1.policies, json2.policies); + + const indPolicyCreates = [] as { + policies: Policy[]; + }[]; + + const indPolicyDeletes = [] as { + policies: Policy[]; + }[]; + + const { renamed: indPolicyRenames, created, deleted } = await indPolicyResolver({ + deleted: indPolicyRes.deleted.map((t) => + action === 'push' ? PgSquasher.unsquashPolicyPush(t.values) : PgSquasher.unsquashPolicy(t.values) + ), + created: indPolicyRes.added.map((t) => + action === 'push' ? PgSquasher.unsquashPolicyPush(t.values) : PgSquasher.unsquashPolicy(t.values) + ), + }); + + if (created.length > 0) { + indPolicyCreates.push({ + policies: created, + }); + } + + if (deleted.length > 0) { + indPolicyDeletes.push({ + policies: deleted, + }); + } + + const indPolicyRenamesDict = indPolicyRenames.reduce( + (acc, it) => { + acc[it.from.name] = { + nameFrom: it.from.name, + nameTo: it.to.name, + }; + return acc; + }, + {} as Record< + string, + { + nameFrom: string; + nameTo: string; + } + >, + ); + + const indPolicyPatchedSnap1 = copy(policyPatchedSnap1); + indPolicyPatchedSnap1.policies = mapEntries( + indPolicyPatchedSnap1.policies, + (policyKey, policyValue) => { + const key = policyKey; + const change = indPolicyRenamesDict[key]; + + if (change) { + policyValue.name = change.nameTo; + } + + return [policyKey, policyValue]; + }, + ); + + //// + const viewsDiff = diffSchemasOrTables(indPolicyPatchedSnap1.views, json2.views); + + const { + created: createdViews, + deleted: deletedViews, + renamed: renamedViews, + moved: movedViews, + } = await viewsResolver({ + created: viewsDiff.added, + deleted: viewsDiff.deleted, + }); + + const renamesViewDic: Record = {}; + renamedViews.forEach((it) => { + renamesViewDic[`${it.from.schema}.${it.from.name}`] = { to: it.to.name, from: it.from.name }; + }); + + const movedViewDic: Record = {}; + movedViews.forEach((it) => { + movedViewDic[`${it.schemaFrom}.${it.name}`] = { to: it.schemaTo, from: it.schemaFrom }; + }); + + const viewsPatchedSnap1 = copy(policyPatchedSnap1); + viewsPatchedSnap1.views = mapEntries( + viewsPatchedSnap1.views, + (viewKey, viewValue) => { + const rename = renamesViewDic[`${viewValue.schema}.${viewValue.name}`]; + const moved = movedViewDic[`${viewValue.schema}.${viewValue.name}`]; + + if (rename) { + viewValue.name = rename.to; + viewKey = `${viewValue.schema}.${viewValue.name}`; + } + + if (moved) viewKey = `${moved.to}.${viewValue.name}`; + + return [viewKey, viewValue]; + }, + ); + + const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); + + const typedResult: DiffResult = diffResultScheme.parse(diffResult); + + const jsonStatements: JsonStatement[] = []; + + const jsonCreateIndexesForCreatedTables = createdTables + .map((it) => { + return preparePgCreateIndexesJson( + it.name, + it.schema, + it.indexes, + curFull, + action, + ); + }) + .flat(); + + const jsonDropTables = deletedTables.map((it) => { + return prepareDropTableJson(it); + }); + + const jsonRenameTables = renamedTables.map((it) => { + return prepareRenameTableJson(it.from, it.to); + }); + + const alteredTables = typedResult.alteredTablesWithColumns; + + const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = []; + const jsonDropColumnsStatemets: JsonDropColumnStatement[] = []; + const jsonAddColumnsStatemets: JsonAddColumnStatement[] = []; + + for (let it of columnRenames) { + jsonRenameColumnsStatements.push( + ...prepareRenameColumns(it.table, it.schema, it.renames), + ); + } + + for (let it of columnDeletes) { + jsonDropColumnsStatemets.push( + ..._prepareDropColumns(it.table, it.schema, it.columns), + ); + } + + for (let it of columnCreates) { + jsonAddColumnsStatemets.push( + ..._prepareAddColumns(it.table, it.schema, it.columns), + ); + } + + const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; + const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; + const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; + + const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + + const jsonSetTableSchemas: JsonAlterTableSetSchema[] = []; + + if (movedTables) { + for (let it of movedTables) { + jsonSetTableSchemas.push({ + type: 'alter_table_set_schema', + tableName: it.name, + schemaFrom: it.schemaFrom || 'public', + schemaTo: it.schemaTo || 'public', + }); + } + } + + const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; + const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; + + for (let it of alteredTables) { + // This part is needed to make sure that same columns in a table are not triggered for change + // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name + // We double-check that pk with same set of columns are both in added and deleted diffs + let addedColumns: { name: string; columns: string[] } | undefined; + for (const addedPkName of Object.keys(it.addedCompositePKs)) { + const addedPkColumns = it.addedCompositePKs[addedPkName]; + addedColumns = PgSquasher.unsquashPK(addedPkColumns); + } + + let deletedColumns: { name: string; columns: string[] } | undefined; + for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { + const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; + deletedColumns = PgSquasher.unsquashPK(deletedPkColumns); + } + + // Don't need to sort, but need to add tests for it + // addedColumns.sort(); + // deletedColumns.sort(); + const doPerformDeleteAndCreate = JSON.stringify(addedColumns ?? {}) !== JSON.stringify(deletedColumns ?? {}); + + let addedCompositePKs: JsonCreateCompositePK[] = []; + let deletedCompositePKs: JsonDeleteCompositePK[] = []; + let alteredCompositePKs: JsonAlterCompositePK[] = []; + if (doPerformDeleteAndCreate) { + addedCompositePKs = prepareAddCompositePrimaryKeyPg( + it.name, + it.schema, + it.addedCompositePKs, + ); + deletedCompositePKs = prepareDeleteCompositePrimaryKeyPg( + it.name, + it.schema, + it.deletedCompositePKs, + ); + } + alteredCompositePKs = prepareAlterCompositePrimaryKeyPg( + it.name, + it.schema, + it.alteredCompositePKs, + ); + + // add logic for unique constraints + let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; + let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; + let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; + let createCheckConstraints: JsonCreateCheckConstraint[] = []; + let deleteCheckConstraints: JsonDeleteCheckConstraint[] = []; + + addedUniqueConstraints = prepareAddUniqueConstraintPg( + it.name, + it.schema, + it.addedUniqueConstraints, + ); + deletedUniqueConstraints = prepareDeleteUniqueConstraintPg( + it.name, + it.schema, + it.deletedUniqueConstraints, + ); + if (it.alteredUniqueConstraints) { + const added: Record = {}; + const deleted: Record = {}; + for (const k of Object.keys(it.alteredUniqueConstraints)) { + added[k] = it.alteredUniqueConstraints[k].__new; + deleted[k] = it.alteredUniqueConstraints[k].__old; + } + addedUniqueConstraints.push( + ...prepareAddUniqueConstraintPg(it.name, it.schema, added), + ); + deletedUniqueConstraints.push( + ...prepareDeleteUniqueConstraintPg(it.name, it.schema, deleted), + ); + } + + createCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); + deleteCheckConstraints = prepareDeleteCheckConstraint( + it.name, + it.schema, + it.deletedCheckConstraints, + ); + + if (it.alteredCheckConstraints && action !== 'push') { + const added: Record = {}; + const deleted: Record = {}; + + for (const k of Object.keys(it.alteredCheckConstraints)) { + added[k] = it.alteredCheckConstraints[k].__new; + deleted[k] = it.alteredCheckConstraints[k].__old; + } + createCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); + deleteCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); + } + + jsonCreatedCheckConstraints.push(...createCheckConstraints); + jsonDeletedCheckConstraints.push(...deleteCheckConstraints); + + jsonAddedCompositePKs.push(...addedCompositePKs); + jsonDeletedCompositePKs.push(...deletedCompositePKs); + jsonAlteredCompositePKs.push(...alteredCompositePKs); + + jsonAddedUniqueConstraints.push(...addedUniqueConstraints); + jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); + jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); + } + + // const rColumns = jsonRenameColumnsStatements.map((it) => { + // const tableName = it.tableName; + // const schema = it.schema; + // return { + // from: { schema, table: tableName, column: it.oldColumnName }, + // to: { schema, table: tableName, column: it.newColumnName }, + // }; + // }); + + const jsonTableAlternations = alteredTables + .map((it) => { + return preparePgAlterColumns( + it.name, + it.schema, + it.altered, + json2, + json1, + action, + ); + }) + .flat(); + + const jsonCreateIndexesFoAlteredTables = alteredTables + .map((it) => { + return preparePgCreateIndexesJson( + it.name, + it.schema, + it.addedIndexes || {}, + curFull, + action, + ); + }) + .flat(); + + const jsonDropIndexesForAllAlteredTables = alteredTables + .map((it) => { + return prepareDropIndexesJson( + it.name, + it.schema, + it.deletedIndexes || {}, + ); + }) + .flat(); + + const jsonCreatePoliciesStatements: JsonCreatePolicyStatement[] = []; + const jsonDropPoliciesStatements: JsonDropPolicyStatement[] = []; + const jsonAlterPoliciesStatements: JsonAlterPolicyStatement[] = []; + const jsonRenamePoliciesStatements: JsonRenamePolicyStatement[] = []; + + const jsonRenameIndPoliciesStatements: JsonIndRenamePolicyStatement[] = []; + const jsonCreateIndPoliciesStatements: JsonCreateIndPolicyStatement[] = []; + const jsonDropIndPoliciesStatements: JsonDropIndPolicyStatement[] = []; + const jsonAlterIndPoliciesStatements: JsonAlterIndPolicyStatement[] = []; + + const jsonEnableRLSStatements: JsonEnableRLSStatement[] = []; + const jsonDisableRLSStatements: JsonDisableRLSStatement[] = []; + + for (let it of indPolicyRenames) { + jsonRenameIndPoliciesStatements.push( + ...prepareRenameIndPolicyJsons([it]), + ); + } + + for (const it of indPolicyCreates) { + jsonCreateIndPoliciesStatements.push( + ...prepareCreateIndPolicyJsons( + it.policies, + ), + ); + } + + for (const it of indPolicyDeletes) { + jsonDropIndPoliciesStatements.push( + ...prepareDropIndPolicyJsons( + it.policies, + ), + ); + } + + typedResult.alteredPolicies.forEach(({ values }) => { + // return prepareAlterIndPolicyJson(json1.policies[it.name], json2.policies[it.name]); + + const policy = action === 'push' ? PgSquasher.unsquashPolicyPush(values) : PgSquasher.unsquashPolicy(values); + + const newPolicy = action === 'push' + ? PgSquasher.unsquashPolicyPush(json2.policies[policy.name].values) + : PgSquasher.unsquashPolicy(json2.policies[policy.name].values); + const oldPolicy = action === 'push' + ? PgSquasher.unsquashPolicyPush(json2.policies[policy.name].values) + : PgSquasher.unsquashPolicy(json1.policies[policy.name].values); + + if (newPolicy.as !== oldPolicy.as) { + jsonDropIndPoliciesStatements.push( + ...prepareDropIndPolicyJsons( + [oldPolicy], + ), + ); + + jsonCreateIndPoliciesStatements.push( + ...prepareCreateIndPolicyJsons( + [newPolicy], + ), + ); + return; + } + + if (newPolicy.for !== oldPolicy.for) { + jsonDropIndPoliciesStatements.push( + ...prepareDropIndPolicyJsons( + [oldPolicy], + ), + ); + + jsonCreateIndPoliciesStatements.push( + ...prepareCreateIndPolicyJsons( + [newPolicy], + ), + ); + return; + } + + // alter + jsonAlterIndPoliciesStatements.push( + prepareAlterIndPolicyJson( + oldPolicy, + newPolicy, + ), + ); + }); + + for (let it of policyRenames) { + jsonRenamePoliciesStatements.push( + ...prepareRenamePolicyJsons(it.table, it.schema, it.renames), + ); + } + + for (const it of policyCreates) { + jsonCreatePoliciesStatements.push( + ...prepareCreatePolicyJsons( + it.table, + it.schema, + it.columns, + ), + ); + } + + for (const it of policyDeletes) { + jsonDropPoliciesStatements.push( + ...prepareDropPolicyJsons( + it.table, + it.schema, + it.columns, + ), + ); + } + + alteredTables.forEach((it) => { + // handle policies + Object.keys(it.alteredPolicies).forEach((policyName: string) => { + const newPolicy = action === 'push' + ? PgSquasher.unsquashPolicyPush(it.alteredPolicies[policyName].__new) + : PgSquasher.unsquashPolicy(it.alteredPolicies[policyName].__new); + const oldPolicy = action === 'push' + ? PgSquasher.unsquashPolicyPush(it.alteredPolicies[policyName].__old) + : PgSquasher.unsquashPolicy(it.alteredPolicies[policyName].__old); + + if (newPolicy.as !== oldPolicy.as) { + jsonDropPoliciesStatements.push( + ...prepareDropPolicyJsons( + it.name, + it.schema, + [oldPolicy], + ), + ); + + jsonCreatePoliciesStatements.push( + ...prepareCreatePolicyJsons( + it.name, + it.schema, + [newPolicy], + ), + ); + return; + } + + if (newPolicy.for !== oldPolicy.for) { + jsonDropPoliciesStatements.push( + ...prepareDropPolicyJsons( + it.name, + it.schema, + [oldPolicy], + ), + ); + + jsonCreatePoliciesStatements.push( + ...prepareCreatePolicyJsons( + it.name, + it.schema, + [newPolicy], + ), + ); + return; + } + + // alter + jsonAlterPoliciesStatements.push( + prepareAlterPolicyJson( + it.name, + it.schema, + it.alteredPolicies[policyName].__old, + it.alteredPolicies[policyName].__new, + ), + ); + }); + + // Handle enabling and disabling RLS + for (const table of Object.values(json2.tables)) { + const policiesInCurrentState = Object.keys(table.policies); + const tableInPreviousState = + columnsPatchedSnap1.tables[`${table.schema === '' ? 'public' : table.schema}.${table.name}`]; + const policiesInPreviousState = tableInPreviousState ? Object.keys(tableInPreviousState.policies) : []; + + // const indPoliciesInCurrentState = Object.keys(table.policies); + // const indPoliciesInPreviousState = Object.keys(columnsPatchedSnap1.policies); + + if ( + (policiesInPreviousState.length === 0 && policiesInCurrentState.length > 0) && !table.isRLSEnabled + ) { + jsonEnableRLSStatements.push({ type: 'enable_rls', tableName: table.name, schema: table.schema }); + } + + if ( + (policiesInPreviousState.length > 0 && policiesInCurrentState.length === 0) && !table.isRLSEnabled + ) { + jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); + } + + // handle table.isRLSEnabled + const wasRlsEnabled = tableInPreviousState ? tableInPreviousState.isRLSEnabled : false; + if (table.isRLSEnabled !== wasRlsEnabled) { + if (table.isRLSEnabled) { + // was force enabled + jsonEnableRLSStatements.push({ type: 'enable_rls', tableName: table.name, schema: table.schema }); + } else if ( + !table.isRLSEnabled && policiesInCurrentState.length === 0 + ) { + // was force disabled + jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); + } + } + } + + for (const table of Object.values(columnsPatchedSnap1.tables)) { + const tableInCurrentState = json2.tables[`${table.schema === '' ? 'public' : table.schema}.${table.name}`]; + + if (tableInCurrentState === undefined && !table.isRLSEnabled) { + jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); + } + } + + // handle indexes + const droppedIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__old; + return current; + }, + {} as Record, + ); + const createdIndexes = Object.keys(it.alteredIndexes).reduce( + (current, item: string) => { + current[item] = it.alteredIndexes[item].__new; + return current; + }, + {} as Record, + ); + + jsonCreateIndexesFoAlteredTables.push( + ...preparePgCreateIndexesJson( + it.name, + it.schema, + createdIndexes || {}, + curFull, + action, + ), + ); + jsonDropIndexesForAllAlteredTables.push( + ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), + ); + }); + + const jsonCreateReferencesForCreatedTables: JsonCreateReferenceStatement[] = createdTables + .map((it) => { + return prepareCreateReferencesJson(it.name, it.schema, it.foreignKeys); + }) + .flat(); + + const jsonReferencesForAlteredTables: JsonReferenceStatement[] = alteredTables + .map((it) => { + const forAdded = prepareCreateReferencesJson( + it.name, + it.schema, + it.addedForeignKeys, + ); + + const forAltered = prepareDropReferencesJson( + it.name, + it.schema, + it.deletedForeignKeys, + ); + + const alteredFKs = prepareAlterReferencesJson( + it.name, + it.schema, + it.alteredForeignKeys, + ); + + return [...forAdded, ...forAltered, ...alteredFKs]; + }) + .flat(); + + const jsonCreatedReferencesForAlteredTables = jsonReferencesForAlteredTables.filter((t) => + t.type === 'create_reference' + ); + + const jsonDroppedReferencesForAlteredTables = jsonReferencesForAlteredTables.filter((t) => + t.type === 'delete_reference' + ); + + // Sequences + // - create sequence ✅ + // - create sequence inside schema ✅ + // - rename sequence ✅ + // - change sequence schema ✅ + // - change sequence schema + name ✅ + // - drop sequence - check if sequence is in use. If yes - ??? + // - change sequence values ✅ + + // Generated columns + // - add generated + // - drop generated + // - create table with generated + // - alter - should be not triggered, but should get warning + + const createEnums = createdEnums.map((it) => { + return prepareCreateEnumJson(it.name, it.schema, it.values); + }) ?? []; + + const dropEnums = deletedEnums.map((it) => { + return prepareDropEnumJson(it.name, it.schema); + }); + + const moveEnums = movedEnums.map((it) => { + return prepareMoveEnumJson(it.name, it.schemaFrom, it.schemaTo); + }); + + const renameEnums = renamedEnums.map((it) => { + return prepareRenameEnumJson(it.from.name, it.to.name, it.to.schema); + }); + + const jsonAlterEnumsWithAddedValues = typedResult.alteredEnums + .map((it) => { + return prepareAddValuesToEnumJson(it.name, it.schema, it.addedValues); + }) + .flat() ?? []; + + const jsonAlterEnumsWithDroppedValues = typedResult.alteredEnums + .map((it) => { + return prepareDropEnumValues(it.name, it.schema, it.deletedValues, curFull); + }) + .flat() ?? []; + + const createSequences = createdSequences.map((it) => { + return prepareCreateSequenceJson(it); + }) ?? []; + + const dropSequences = deletedSequences.map((it) => { + return prepareDropSequenceJson(it.name, it.schema); + }); + + const moveSequences = movedSequences.map((it) => { + return prepareMoveSequenceJson(it.name, it.schemaFrom, it.schemaTo); + }); + + const renameSequences = renamedSequences.map((it) => { + return prepareRenameSequenceJson(it.from.name, it.to.name, it.to.schema); + }); + + const jsonAlterSequences = typedResult.alteredSequences + .map((it) => { + return prepareAlterSequenceJson(it); + }) + .flat() ?? []; + + //////////// + + const createRoles = createdRoles.map((it) => { + return prepareCreateRoleJson(it); + }) ?? []; + + const dropRoles = deletedRoles.map((it) => { + return prepareDropRoleJson(it.name); + }); + + const renameRoles = renamedRoles.map((it) => { + return prepareRenameRoleJson(it.from.name, it.to.name); + }); + + const jsonAlterRoles = typedResult.alteredRoles + .map((it) => { + return prepareAlterRoleJson(it); + }) + .flat() ?? []; + + //////////// + const createSchemas = prepareCreateSchemasJson( + createdSchemas.map((it) => it.name), + ); + + const renameSchemas = prepareRenameSchemasJson( + renamedSchemas.map((it) => ({ from: it.from.name, to: it.to.name })), + ); + + const dropSchemas = prepareDeleteSchemasJson( + deletedSchemas.map((it) => it.name), + ); + + const createTables = createdTables.map((it) => { + return preparePgCreateTableJson(it, curFull); + }); + + jsonCreatePoliciesStatements.push(...([] as JsonCreatePolicyStatement[]).concat( + ...(createdTables.map((it) => + prepareCreatePolicyJsons( + it.name, + it.schema, + Object.values(it.policies).map(action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy), + ) + )), + )); + const createViews: JsonCreatePgViewStatement[] = []; + const dropViews: JsonDropViewStatement[] = []; + const renameViews: JsonRenameViewStatement[] = []; + const alterViews: JsonAlterViewStatement[] = []; + + createViews.push( + ...createdViews.filter((it) => !it.isExisting).map((it) => { + return preparePgCreateViewJson( + it.name, + it.schema, + it.definition!, + it.materialized, + it.withNoData, + it.with, + it.using, + it.tablespace, + ); + }), + ); + + dropViews.push( + ...deletedViews.filter((it) => !it.isExisting).map((it) => { + return prepareDropViewJson(it.name, it.schema, it.materialized); + }), + ); + + renameViews.push( + ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[`${it.from.schema}.${it.from.name}`].isExisting) + .map((it) => { + return prepareRenameViewJson(it.to.name, it.from.name, it.to.schema, it.to.materialized); + }), + ); + + alterViews.push( + ...movedViews.filter((it) => + !json2.views[`${it.schemaTo}.${it.name}`].isExisting && !json1.views[`${it.schemaFrom}.${it.name}`].isExisting + ).map((it) => { + return preparePgAlterViewAlterSchemaJson( + it.schemaTo, + it.schemaFrom, + it.name, + json2.views[`${it.schemaTo}.${it.name}`].materialized, + ); + }), + ); + + const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[`${it.schema}.${it.name}`].isExisting); + + for (const alteredView of alteredViews) { + const viewKey = `${alteredView.schema}.${alteredView.name}`; + + const { materialized, with: withOption, definition, withNoData, using, tablespace } = json2.views[viewKey]; + + if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { + dropViews.push(prepareDropViewJson(alteredView.name, alteredView.schema, materialized)); + + createViews.push( + preparePgCreateViewJson( + alteredView.name, + alteredView.schema, + definition!, + materialized, + withNoData, + withOption, + using, + tablespace, + ), + ); + + continue; + } + + if (alteredView.addedWithOption) { + alterViews.push( + preparePgAlterViewAddWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.addedWithOption, + ), + ); + } + + if (alteredView.deletedWithOption) { + alterViews.push( + preparePgAlterViewDropWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.deletedWithOption, + ), + ); + } + + if (alteredView.addedWith) { + alterViews.push( + preparePgAlterViewAddWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.addedWith, + ), + ); + } + + if (alteredView.deletedWith) { + alterViews.push( + preparePgAlterViewDropWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.deletedWith, + ), + ); + } + + if (alteredView.alteredWith) { + alterViews.push( + preparePgAlterViewAddWithOptionJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.alteredWith, + ), + ); + } + + if (alteredView.alteredTablespace) { + alterViews.push( + preparePgAlterViewAlterTablespaceJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.alteredTablespace.__new, + ), + ); + } + + if (alteredView.alteredUsing) { + alterViews.push( + preparePgAlterViewAlterUsingJson( + alteredView.name, + alteredView.schema, + materialized, + alteredView.alteredUsing.__new, + ), + ); + } + } + + jsonStatements.push(...createSchemas); + jsonStatements.push(...renameSchemas); + jsonStatements.push(...createEnums); + jsonStatements.push(...moveEnums); + jsonStatements.push(...renameEnums); + jsonStatements.push(...jsonAlterEnumsWithAddedValues); + + jsonStatements.push(...createSequences); + jsonStatements.push(...moveSequences); + jsonStatements.push(...renameSequences); + jsonStatements.push(...jsonAlterSequences); + + jsonStatements.push(...renameRoles); + jsonStatements.push(...dropRoles); + jsonStatements.push(...createRoles); + jsonStatements.push(...jsonAlterRoles); + + jsonStatements.push(...createTables); + + jsonStatements.push(...jsonEnableRLSStatements); + jsonStatements.push(...jsonDisableRLSStatements); + jsonStatements.push(...dropViews); + jsonStatements.push(...renameViews); + jsonStatements.push(...alterViews); + + jsonStatements.push(...jsonDropTables); + jsonStatements.push(...jsonSetTableSchemas); + jsonStatements.push(...jsonRenameTables); + jsonStatements.push(...jsonRenameColumnsStatements); + + jsonStatements.push(...jsonDeletedUniqueConstraints); + jsonStatements.push(...jsonDeletedCheckConstraints); + + jsonStatements.push(...jsonDroppedReferencesForAlteredTables); + + jsonStatements.push(...jsonAlterEnumsWithDroppedValues); + + // Will need to drop indexes before changing any columns in table + // Then should go column alternations and then index creation + jsonStatements.push(...jsonDropIndexesForAllAlteredTables); + + jsonStatements.push(...jsonDeletedCompositePKs); + jsonStatements.push(...jsonTableAlternations); + jsonStatements.push(...jsonAddedCompositePKs); + jsonStatements.push(...jsonAddColumnsStatemets); + + // PATCHED, need to run before fks + jsonStatements.push(...jsonAddedUniqueConstraints); + jsonStatements.push(...jsonCreateIndexesForCreatedTables); + jsonStatements.push(...jsonCreateIndexesFoAlteredTables); + jsonStatements.push(...jsonAlteredUniqueConstraints); + + // ---- + + jsonStatements.push(...jsonCreateReferencesForCreatedTables); + + jsonStatements.push(...jsonCreatedReferencesForAlteredTables); + + jsonStatements.push(...jsonDropColumnsStatemets); + jsonStatements.push(...jsonAlteredCompositePKs); + + jsonStatements.push(...jsonCreatedCheckConstraints); + + jsonStatements.push(...createViews); + + jsonStatements.push(...jsonRenamePoliciesStatements); + jsonStatements.push(...jsonDropPoliciesStatements); + jsonStatements.push(...jsonCreatePoliciesStatements); + jsonStatements.push(...jsonAlterPoliciesStatements); + + jsonStatements.push(...jsonRenameIndPoliciesStatements); + jsonStatements.push(...jsonDropIndPoliciesStatements); + jsonStatements.push(...jsonCreateIndPoliciesStatements); + jsonStatements.push(...jsonAlterIndPoliciesStatements); + + jsonStatements.push(...dropEnums); + jsonStatements.push(...dropSequences); + jsonStatements.push(...dropSchemas); + + // generate filters + const filteredJsonStatements = jsonStatements.filter((st) => { + if (st.type === 'alter_table_alter_column_drop_notnull') { + if ( + jsonStatements.find( + (it) => + it.type === 'alter_table_alter_column_drop_identity' + && it.tableName === st.tableName + && it.schema === st.schema, + ) + ) { + return false; + } + } + if (st.type === 'alter_table_alter_column_set_notnull') { + if ( + jsonStatements.find( + (it) => + it.type === 'alter_table_alter_column_set_identity' + && it.tableName === st.tableName + && it.schema === st.schema, + ) + ) { + return false; + } + } + return true; + }); + + // enum filters + // Need to find add and drop enum values in same enum and remove add values + const filteredEnumsJsonStatements = filteredJsonStatements.filter((st) => { + if (st.type === 'alter_type_add_value') { + if ( + filteredJsonStatements.find( + (it) => + it.type === 'alter_type_drop_value' + && it.name === st.name + && it.enumSchema === st.schema, + ) + ) { + return false; + } + } + return true; + }); + + // This is needed because in sql generator on type pg_alter_table_alter_column_set_type and alter_type_drop_value + // drizzle kit checks whether column has defaults to cast them to new types properly + const filteredEnums2JsonStatements = filteredEnumsJsonStatements.filter((st) => { + if (st.type === 'alter_table_alter_column_set_default') { + if ( + filteredEnumsJsonStatements.find( + (it) => + it.type === 'pg_alter_table_alter_column_set_type' + && it.columnDefault === st.newDefaultValue + && it.columnName === st.columnName + && it.tableName === st.tableName + && it.schema === st.schema, + ) + ) { + return false; + } + + if ( + filteredEnumsJsonStatements.find( + (it) => + it.type === 'alter_type_drop_value' + && it.columnsWithEnum.find((column) => + column.default === st.newDefaultValue + && column.column === st.columnName + && column.table === st.tableName + && column.tableSchema === st.schema + ), + ) + ) { + return false; + } + } + return true; + }); + + const sqlStatements = fromJson(filteredEnums2JsonStatements, 'postgresql', action); + + const uniqueSqlStatements: string[] = []; + sqlStatements.forEach((ss) => { + if (!uniqueSqlStatements.includes(ss)) { + uniqueSqlStatements.push(ss); + } + }); + + // const rSchemas = renamedSchemas.map((it) => ({ + // from: it.from.name, + // to: it.to.name, + // })); + + // const rTables = renamedTables.map((it) => { + // return { from: it.from, to: it.to }; + // }); + + return { + statements: filteredEnums2JsonStatements, + sqlStatements: uniqueSqlStatements, + _meta: { columns: [], schemas: [], tables: [] }, + }; +}; diff --git a/drizzle-kit/src/serializer/pgSchema.ts b/drizzle-kit/src/legacy/postgres-v7/pgSchema.ts similarity index 99% rename from drizzle-kit/src/serializer/pgSchema.ts rename to drizzle-kit/src/legacy/postgres-v7/pgSchema.ts index d7604d645c..0b5f2b2197 100644 --- a/drizzle-kit/src/serializer/pgSchema.ts +++ b/drizzle-kit/src/legacy/postgres-v7/pgSchema.ts @@ -1,7 +1,7 @@ +import type { TypeOf } from 'zod'; +import { any, array, boolean, enum as enumType, literal, number, object, record, string, union } from 'zod'; import { mapValues, originUUID, snapshotVersion } from '../global'; -import { any, array, boolean, enum as enumType, literal, number, object, record, string, TypeOf, union } from 'zod'; - const indexV2 = object({ name: string(), columns: record( @@ -610,7 +610,7 @@ export const PgSquasher = { const columns: IndexColumnType[] = []; for (const column of columnString) { - const [expression, asc, nulls, opclass] = column.split(','); + const [expression, asc, nulls] = column.split(','); columns.push({ nulls: nulls as IndexColumnType['nulls'], isExpression: expression === '', diff --git a/drizzle-kit/src/legacy/postgres-v7/pgSerializer.ts b/drizzle-kit/src/legacy/postgres-v7/pgSerializer.ts new file mode 100644 index 0000000000..bc9a3da0be --- /dev/null +++ b/drizzle-kit/src/legacy/postgres-v7/pgSerializer.ts @@ -0,0 +1,958 @@ +import chalk from 'chalk'; +import { getTableName, is, SQL } from 'orm044'; +import { CasingCache, toCamelCase, toSnakeCase } from 'orm044/casing'; +import type { + AnyPgTable, + IndexedColumn, + PgEnum, + PgMaterializedView, + PgPolicy, + PgSchema, + PgSequence, +} from 'orm044/pg-core'; +import { + getMaterializedViewConfig, + getTableConfig, + getViewConfig, + PgArray, + PgColumn, + PgDialect, + PgEnumColumn, + PgRole, + PgView, + uniqueKeyName, +} from 'orm044/pg-core'; +import type { CasingType } from '../common'; +import { withStyle } from '../outputs'; +import { escapeSingleQuotes, isPgArrayType } from '../utils'; +import type { + CheckConstraint, + Column, + Enum, + ForeignKey, + Index, + IndexColumnType, + PgSchemaInternal, + Policy, + PrimaryKey, + Role, + Sequence, + Table, + UniqueConstraint, + View, +} from './pgSchema'; +import { vectorOps } from './vector'; + +export function getColumnCasing( + column: { keyAsName: boolean; name: string | undefined }, + casing: CasingType | undefined, +) { + if (!column.name) return ''; + return !column.keyAsName || casing === undefined + ? column.name + : casing === 'camelCase' + ? toCamelCase(column.name) + : toSnakeCase(column.name); +} + +export const sqlToStr = (sql: SQL, casing: CasingType | undefined) => { + return sql.toQuery({ + escapeName: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeParam: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeString: () => { + throw new Error("we don't support params for `sql` default values"); + }, + casing: new CasingCache(casing), + }).sql; +}; + +export const sqlToStrGenerated = (sql: SQL, casing: CasingType | undefined) => { + return sql.toQuery({ + escapeName: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeParam: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeString: () => { + throw new Error("we don't support params for `sql` default values"); + }, + casing: new CasingCache(casing), + }).sql; +}; + +export const indexName = (tableName: string, columns: string[]) => { + return `${tableName}_${columns.join('_')}_index`; +}; + +function stringFromIdentityProperty(field: string | number | undefined): string | undefined { + return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : String(field); +} + +function maxRangeForIdentityBasedOn(columnType: string) { + return columnType === 'integer' ? '2147483647' : columnType === 'bigint' ? '9223372036854775807' : '32767'; +} + +function minRangeForIdentityBasedOn(columnType: string) { + return columnType === 'integer' ? '-2147483648' : columnType === 'bigint' ? '-9223372036854775808' : '-32768'; +} + +// function stringFromDatabaseIdentityProperty(field: any): string | undefined { +// return typeof field === 'string' +// ? (field as string) +// : typeof field === 'undefined' +// ? undefined +// : typeof field === 'bigint' +// ? field.toString() +// : String(field); +// } + +export function buildArrayString(array: any[], sqlType: string): string { + // patched + if (array.flat(5).length === 0) { + return '{}'; + } + + sqlType = sqlType.split('[')[0]; + const values = array + .map((value) => { + if (typeof value === 'number' || typeof value === 'bigint') { + return value.toString(); + } else if (typeof value === 'boolean') { + return value ? 'true' : 'false'; + } else if (Array.isArray(value)) { + return buildArrayString(value, sqlType); + } else if (value instanceof Date) { + if (sqlType === 'date') { + return `"${value.toISOString().split('T')[0]}"`; + } else if (sqlType === 'timestamp') { + return `"${value.toISOString().replace('T', ' ').slice(0, 23)}"`; + } else { + return `"${value.toISOString()}"`; + } + } else if (typeof value === 'object') { + return `"${JSON.stringify(value).replaceAll('"', '\\"')}"`; + } + + return `"${value}"`; + }) + .join(','); + + return `{${values}}`; +} + +export const generatePgSnapshot = ( + tables: AnyPgTable[], + enums: PgEnum[], + schemas: PgSchema[], + sequences: PgSequence[], + roles: PgRole[], + policies: PgPolicy[], + views: PgView[], + matViews: PgMaterializedView[], + casing: CasingType | undefined, + schemaFilter?: string[], +): PgSchemaInternal => { + const dialect = new PgDialect({ casing }); + const result: Record = {}; + const resultViews: Record = {}; + const sequencesToReturn: Record = {}; + const rolesToReturn: Record = {}; + // this policies are a separate objects that were linked to a table outside of it + const policiesToReturn: Record = {}; + + // This object stores unique names for indexes and will be used to detect if you have the same names for indexes + // within the same PostgreSQL schema + + const indexesInSchema: Record = {}; + + for (const table of tables) { + // This object stores unique names for checks and will be used to detect if you have the same names for checks + // within the same PostgreSQL table + const checksInTable: Record = {}; + + const { + name: tableName, + columns, + indexes, + foreignKeys, + checks, + schema, + primaryKeys, + uniqueConstraints, + policies, + enableRLS, + } = getTableConfig(table); + + if (schemaFilter && !schemaFilter.includes(schema ?? 'public')) { + continue; + } + + const columnsObject: Record = {}; + const indexesObject: Record = {}; + const checksObject: Record = {}; + const foreignKeysObject: Record = {}; + const primaryKeysObject: Record = {}; + const uniqueConstraintObject: Record = {}; + const policiesObject: Record = {}; + + columns.forEach((column) => { + const name = getColumnCasing(column, casing); + const notNull: boolean = column.notNull; + const primaryKey: boolean = column.primary; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + + const getEnumSchema = (column: PgColumn) => { + while (is(column, PgArray)) { + column = column.baseColumn; + } + return is(column, PgEnumColumn) ? column.enum.schema || 'public' : undefined; + }; + const typeSchema: string | undefined = getEnumSchema(column); + + const generated = column.generated; + const identity = column.generatedIdentity; + + const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; + const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) + ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); + const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) + ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); + const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; + + const columnToSet: Column = { + name, + type: column.getSQLType(), + typeSchema: typeSchema, + primaryKey, + notNull, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: 'stored', + } + : undefined, + identity: identity + ? { + type: identity.type, + name: identity.sequenceName ?? `${tableName}_${name}_seq`, + schema: schema ?? 'public', + increment, + startWith, + minValue, + maxValue, + cache, + cycle: identity?.sequenceOptions?.cycle ?? false, + } + : undefined, + }; + + if (column.isUnique) { + const existingUnique = uniqueConstraintObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning(`We've found duplicated unique constraint names in ${ + chalk.underline.blue( + tableName, + ) + } table. + The unique constraint ${ + chalk.underline.blue( + column.uniqueName, + ) + } on the ${ + chalk.underline.blue( + name, + ) + } column is conflicting with a unique constraint name already defined for ${ + chalk.underline.blue( + existingUnique.columns.join(','), + ) + } columns\n`) + }`, + ); + process.exit(1); + } + uniqueConstraintObject[column.uniqueName!] = { + name: column.uniqueName!, + nullsNotDistinct: column.uniqueType === 'not distinct', + columns: [columnToSet.name], + }; + } + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${escapeSingleQuotes(column.default)}'`; + } else { + if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if (sqlTypeLowered === 'timestamp') { + columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; + } else { + columnToSet.default = `'${column.default.toISOString()}'`; + } + } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { + columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; + } else { + // Should do for all types + // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; + columnToSet.default = column.default; + } + } + } + } + columnsObject[name] = columnToSet; + }); + + primaryKeys.map((pk) => { + const originalColumnNames = pk.columns.map((c) => c.name); + const columnNames = pk.columns.map((c) => getColumnCasing(c, casing)); + + let name = pk.getName(); + if (casing !== undefined) { + for (let i = 0; i < originalColumnNames.length; i++) { + name = name.replace(originalColumnNames[i], columnNames[i]); + } + } + + primaryKeysObject[name] = { + name, + columns: columnNames, + }; + }); + + uniqueConstraints?.map((unq) => { + const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); + + const name = unq.name ?? uniqueKeyName(table, columnNames); + + const existingUnique = uniqueConstraintObject[name]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated unique constraint names in ${chalk.underline.blue(tableName)} table. + The unique constraint ${chalk.underline.blue(name)} on the ${ + chalk.underline.blue( + columnNames.join(','), + ) + } columns is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue(existingUnique.columns.join(',')) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + + uniqueConstraintObject[name] = { + name: unq.name!, + nullsNotDistinct: unq.nullsNotDistinct, + columns: columnNames, + }; + }); + + const fks: ForeignKey[] = foreignKeys.map((fk) => { + const tableFrom = tableName; + const onDelete = fk.onDelete; + const onUpdate = fk.onUpdate; + const reference = fk.reference(); + const tableTo = getTableName(reference.foreignTable); + // getTableConfig(reference.foreignTable).schema || "public"; + const schemaTo = getTableConfig(reference.foreignTable).schema; + + const originalColumnsFrom = reference.columns.map((it) => it.name); + const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); + const originalColumnsTo = reference.foreignColumns.map((it) => it.name); + const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); + + let name = fk.getName(); + if (casing !== undefined) { + for (let i = 0; i < originalColumnsFrom.length; i++) { + name = name.replace(originalColumnsFrom[i], columnsFrom[i]); + } + for (let i = 0; i < originalColumnsTo.length; i++) { + name = name.replace(originalColumnsTo[i], columnsTo[i]); + } + } + + return { + name, + tableFrom, + tableTo, + schemaTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + } as ForeignKey; + }); + + fks.forEach((it) => { + foreignKeysObject[it.name] = it; + }); + + indexes.forEach((value) => { + const columns = value.config.columns; + + let indexColumnNames: string[] = []; + columns.forEach((it) => { + if (is(it, SQL)) { + if (typeof value.config.name === 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `Please specify an index name in ${getTableName(value.config.table)} table that has "${ + dialect.sqlToQuery(it).sql + }" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`, + ) + }`, + ); + process.exit(1); + } + } + it = it as IndexedColumn; + const name = getColumnCasing(it as IndexedColumn, casing); + if ( + !is(it, SQL) + && it.type! === 'PgVector' + && typeof it.indexConfig!.opClass === 'undefined' + ) { + console.log( + `\n${ + withStyle.errorWarning( + `You are specifying an index on the ${ + chalk.blueBright( + name, + ) + } column inside the ${ + chalk.blueBright( + tableName, + ) + } table with the ${ + chalk.blueBright( + 'vector', + ) + } type without specifying an operator class. Vector extension doesn't have a default operator class, so you need to specify one of the available options. Here is a list of available op classes for the vector extension: [${ + vectorOps + .map((it) => `${chalk.underline(`${it}`)}`) + .join(', ') + }].\n\nYou can specify it using current syntax: ${ + chalk.underline( + `index("${value.config.name}").using("${value.config.method}", table.${name}.op("${ + vectorOps[0] + }"))`, + ) + }\n\nYou can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing\n`, + ) + }`, + ); + process.exit(1); + } + indexColumnNames.push(name); + }); + + const name = value.config.name ? value.config.name : indexName(tableName, indexColumnNames); + + let indexColumns: IndexColumnType[] = columns.map( + (it): IndexColumnType => { + if (is(it, SQL)) { + return { + expression: dialect.sqlToQuery(it, 'indexes').sql, + asc: true, + isExpression: true, + nulls: 'last', + }; + } else { + it = it as IndexedColumn; + return { + expression: getColumnCasing(it as IndexedColumn, casing), + isExpression: false, + asc: it.indexConfig?.order === 'asc', + nulls: it.indexConfig?.nulls + ? it.indexConfig?.nulls + : it.indexConfig?.order === 'desc' + ? 'first' + : 'last', + opclass: it.indexConfig?.opClass, + }; + } + }, + ); + + // check for index names duplicates + if (typeof indexesInSchema[schema ?? 'public'] !== 'undefined') { + if (indexesInSchema[schema ?? 'public'].includes(name)) { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated index name across ${ + chalk.underline.blue(schema ?? 'public') + } schema. Please rename your index in either the ${ + chalk.underline.blue( + tableName, + ) + } table or the table with the duplicated index name`, + ) + }`, + ); + process.exit(1); + } + indexesInSchema[schema ?? 'public'].push(name); + } else { + indexesInSchema[schema ?? 'public'] = [name]; + } + + indexesObject[name] = { + name, + columns: indexColumns, + isUnique: value.config.unique ?? false, + where: value.config.where ? dialect.sqlToQuery(value.config.where).sql : undefined, + concurrently: value.config.concurrently ?? false, + method: value.config.method ?? 'btree', + with: value.config.with ?? {}, + }; + }); + + policies.forEach((policy) => { + const mappedTo = []; + + if (!policy.to) { + mappedTo.push('public'); + } else { + if (policy.to && typeof policy.to === 'string') { + mappedTo.push(policy.to); + } else if (policy.to && is(policy.to, PgRole)) { + mappedTo.push(policy.to.name); + } else if (policy.to && Array.isArray(policy.to)) { + policy.to.forEach((it) => { + if (typeof it === 'string') { + mappedTo.push(it); + } else if (is(it, PgRole)) { + mappedTo.push(it.name); + } + }); + } + } + + if (policiesObject[policy.name] !== undefined) { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated policy name across ${ + chalk.underline.blue(tableKey) + } table. Please rename one of the policies with ${ + chalk.underline.blue( + policy.name, + ) + } name`, + ) + }`, + ); + process.exit(1); + } + + policiesObject[policy.name] = { + name: policy.name, + as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', + for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', + to: mappedTo.sort(), + using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, + withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, + }; + }); + + checks.forEach((check) => { + const checkName = check.name; + + if (typeof checksInTable[`"${schema ?? 'public'}"."${tableName}"`] !== 'undefined') { + if (checksInTable[`"${schema ?? 'public'}"."${tableName}"`].includes(check.name)) { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated check constraint name across ${ + chalk.underline.blue( + schema ?? 'public', + ) + } schema in ${ + chalk.underline.blue( + tableName, + ) + }. Please rename your check constraint in either the ${ + chalk.underline.blue( + tableName, + ) + } table or the table with the duplicated check contraint name`, + ) + }`, + ); + process.exit(1); + } + checksInTable[`"${schema ?? 'public'}"."${tableName}"`].push(checkName); + } else { + checksInTable[`"${schema ?? 'public'}"."${tableName}"`] = [check.name]; + } + + checksObject[checkName] = { + name: checkName, + value: dialect.sqlToQuery(check.value).sql, + }; + }); + + const tableKey = `${schema ?? 'public'}.${tableName}`; + + result[tableKey] = { + name: tableName, + schema: schema ?? '', + columns: columnsObject, + indexes: indexesObject, + foreignKeys: foreignKeysObject, + compositePrimaryKeys: primaryKeysObject, + uniqueConstraints: uniqueConstraintObject, + policies: policiesObject, + checkConstraints: checksObject, + isRLSEnabled: enableRLS, + }; + } + + for (const policy of policies) { + // @ts-ignore + if (!policy._linkedTable) { + console.log( + `\n${ + withStyle.errorWarning( + `"Policy ${policy.name} was skipped because it was not linked to any table. You should either include the policy in a table or use .link() on the policy to link it to any table you have. For more information, please check:`, + ) + }`, + ); + continue; + } + + // @ts-ignore + const tableConfig = getTableConfig(policy._linkedTable); + + const tableKey = `${tableConfig.schema ?? 'public'}.${tableConfig.name}`; + + const mappedTo = []; + + if (!policy.to) { + mappedTo.push('public'); + } else { + if (policy.to && typeof policy.to === 'string') { + mappedTo.push(policy.to); + } else if (policy.to && is(policy.to, PgRole)) { + mappedTo.push(policy.to.name); + } else if (policy.to && Array.isArray(policy.to)) { + policy.to.forEach((it) => { + if (typeof it === 'string') { + mappedTo.push(it); + } else if (is(it, PgRole)) { + mappedTo.push(it.name); + } + }); + } + } + + // add separate policies object, that will be only responsible for policy creation + // but we would need to track if a policy was enabled for a specific table or not + // enable only if jsonStatements for enable rls was not already there + filter it + + if (result[tableKey]?.policies[policy.name] !== undefined || policiesToReturn[policy.name] !== undefined) { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated policy name across ${ + chalk.underline.blue(tableKey) + } table. Please rename one of the policies with ${ + chalk.underline.blue( + policy.name, + ) + } name`, + ) + }`, + ); + process.exit(1); + } + + const mappedPolicy = { + name: policy.name, + as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', + for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', + to: mappedTo.sort(), + using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, + withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, + }; + + if (result[tableKey]) { + result[tableKey].policies[policy.name] = mappedPolicy; + } else { + policiesToReturn[policy.name] = { + ...mappedPolicy, + schema: tableConfig.schema ?? 'public', + on: `"${tableConfig.schema ?? 'public'}"."${tableConfig.name}"`, + }; + } + } + + for (const sequence of sequences) { + const name = sequence.seqName!; + if (typeof sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] === 'undefined') { + const increment = stringFromIdentityProperty(sequence?.seqOptions?.increment) ?? '1'; + const minValue = stringFromIdentityProperty(sequence?.seqOptions?.minValue) + ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); + const maxValue = stringFromIdentityProperty(sequence?.seqOptions?.maxValue) + ?? (parseFloat(increment) < 0 ? '-1' : '9223372036854775807'); + const startWith = stringFromIdentityProperty(sequence?.seqOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = stringFromIdentityProperty(sequence?.seqOptions?.cache) ?? '1'; + + sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] = { + name, + schema: sequence.schema ?? 'public', + increment, + startWith, + minValue, + maxValue, + cache, + cycle: sequence.seqOptions?.cycle ?? false, + }; + } else { + // duplicate seq error + } + } + + for (const role of roles) { + if (!(role as any)._existing) { + rolesToReturn[role.name] = { + name: role.name, + createDb: (role as any).createDb === undefined ? false : (role as any).createDb, + createRole: (role as any).createRole === undefined ? false : (role as any).createRole, + inherit: (role as any).inherit === undefined ? true : (role as any).inherit, + }; + } + } + const combinedViews = [...views, ...matViews]; + for (const view of combinedViews) { + let viewName; + let schema; + let query; + let selectedFields; + let isExisting; + let withOption; + let tablespace; + let using; + let withNoData; + let materialized: boolean = false; + + if (is(view, PgView)) { + ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption } = getViewConfig(view)); + } else { + ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption, tablespace, using, withNoData } = + getMaterializedViewConfig(view)); + + materialized = true; + } + + const viewSchema = schema ?? 'public'; + + const viewKey = `${viewSchema}.${viewName}`; + + const columnsObject: Record = {}; + const uniqueConstraintObject: Record = {}; + + const existingView = resultViews[viewKey]; + if (typeof existingView !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated view name across ${ + chalk.underline.blue(schema ?? 'public') + } schema. Please rename your view`, + ) + }`, + ); + process.exit(1); + } + + for (const key in selectedFields) { + if (is(selectedFields[key], PgColumn)) { + const column = selectedFields[key]; + + const notNull: boolean = column.notNull; + const primaryKey: boolean = column.primary; + const sqlTypeLowered = column.getSQLType().toLowerCase(); + + const typeSchema = is(column, PgEnumColumn) ? column.enum.schema || 'public' : undefined; + const generated = column.generated; + const identity = column.generatedIdentity; + + const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; + const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) + ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); + const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) + ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); + const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) + ?? (parseFloat(increment) < 0 ? maxValue : minValue); + const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; + + const columnToSet: Column = { + name: column.name, + type: column.getSQLType(), + typeSchema: typeSchema, + primaryKey, + notNull, + generated: generated + ? { + as: is(generated.as, SQL) + ? dialect.sqlToQuery(generated.as as SQL).sql + : typeof generated.as === 'function' + ? dialect.sqlToQuery(generated.as() as SQL).sql + : (generated.as as any), + type: 'stored', + } + : undefined, + identity: identity + ? { + type: identity.type, + name: identity.sequenceName ?? `${viewName}_${column.name}_seq`, + schema: schema ?? 'public', + increment, + startWith, + minValue, + maxValue, + cache, + cycle: identity?.sequenceOptions?.cycle ?? false, + } + : undefined, + }; + + if (column.isUnique) { + const existingUnique = uniqueConstraintObject[column.uniqueName!]; + if (typeof existingUnique !== 'undefined') { + console.log( + `\n${ + withStyle.errorWarning( + `We've found duplicated unique constraint names in ${chalk.underline.blue(viewName)} table. + The unique constraint ${chalk.underline.blue(column.uniqueName)} on the ${ + chalk.underline.blue( + column.name, + ) + } column is confilcting with a unique constraint name already defined for ${ + chalk.underline.blue(existingUnique.columns.join(',')) + } columns\n`, + ) + }`, + ); + process.exit(1); + } + uniqueConstraintObject[column.uniqueName!] = { + name: column.uniqueName!, + nullsNotDistinct: column.uniqueType === 'not distinct', + columns: [columnToSet.name], + }; + } + + if (column.default !== undefined) { + if (is(column.default, SQL)) { + columnToSet.default = sqlToStr(column.default, casing); + } else { + if (typeof column.default === 'string') { + columnToSet.default = `'${column.default}'`; + } else { + if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { + columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; + } else if (column.default instanceof Date) { + if (sqlTypeLowered === 'date') { + columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; + } else if (sqlTypeLowered === 'timestamp') { + columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; + } else { + columnToSet.default = `'${column.default.toISOString()}'`; + } + } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { + columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; + } else { + // Should do for all types + // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; + columnToSet.default = column.default; + } + } + } + } + columnsObject[column.name] = columnToSet; + } + } + + resultViews[viewKey] = { + columns: columnsObject, + definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, + name: viewName, + schema: viewSchema, + isExisting, + with: withOption, + withNoData, + materialized, + tablespace, + using, + }; + } + + const enumsToReturn: Record = enums.reduce<{ + [key: string]: Enum; + }>((map, obj) => { + const enumSchema = obj.schema || 'public'; + const key = `${enumSchema}.${obj.enumName}`; + map[key] = { + name: obj.enumName, + schema: enumSchema, + values: obj.enumValues, + }; + return map; + }, {}); + + const schemasObject = Object.fromEntries( + schemas + .filter((it) => { + if (schemaFilter) { + return schemaFilter.includes(it.schemaName) && it.schemaName !== 'public'; + } else { + return it.schemaName !== 'public'; + } + }) + .map((it) => [it.schemaName, it.schemaName]), + ); + + return { + version: '7', + dialect: 'postgresql', + tables: result, + enums: enumsToReturn, + schemas: schemasObject, + sequences: sequencesToReturn, + roles: rolesToReturn, + policies: policiesToReturn, + views: resultViews, + _meta: { + schemas: {}, + tables: {}, + columns: {}, + }, + }; +}; diff --git a/drizzle-kit/src/legacy/postgres-v7/serializer.ts b/drizzle-kit/src/legacy/postgres-v7/serializer.ts new file mode 100644 index 0000000000..dbe0ac083c --- /dev/null +++ b/drizzle-kit/src/legacy/postgres-v7/serializer.ts @@ -0,0 +1,61 @@ +import { is } from 'orm044'; +import type { PgEnum, PgEnumObject, PgMaterializedView, PgSequence, PgView } from 'orm044/pg-core'; +import { + isPgEnum, + isPgMaterializedView, + isPgSequence, + isPgView, + PgPolicy, + PgRole, + PgSchema, + PgTable, +} from 'orm044/pg-core'; +import type { CasingType } from '../common'; +import type { PgSchema as SCHEMA } from './pgSchema'; +import { generatePgSnapshot } from './pgSerializer'; + +export type PostgresSchema = Record< + string, + | PgTable + | PgEnum + | PgEnumObject + | PgSchema + | PgSequence + | PgView + | PgMaterializedView + | PgRole + | PgPolicy + | unknown +>; + +export const serializePg = async ( + schema: PostgresSchema, + casing: CasingType | undefined, + schemaFilter?: string[], +): Promise => { + const tables = Object.values(schema).filter((it) => is(it, PgTable)) as PgTable[]; + const schemas = Object.values(schema).filter((it) => is(it, PgSchema)) as PgSchema[]; + const enums = Object.values(schema).filter((it) => isPgEnum(it)) as PgEnum[]; + const sequences = Object.values(schema).filter((it) => isPgSequence(it)) as PgSequence[]; + const roles = Object.values(schema).filter((it) => is(it, PgRole)) as PgRole[]; + const policies = Object.values(schema).filter((it) => is(it, PgPolicy)) as PgPolicy[]; + const views = Object.values(schema).filter((it) => isPgView(it)) as PgView[]; + const materializedViews = Object.values(schema).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; + + return { + id: 'id', + prevId: 'prev_id', + ...generatePgSnapshot( + tables, + enums, + schemas, + sequences, + roles, + policies, + views, + materializedViews, + casing, + schemaFilter, + ), + }; +}; diff --git a/drizzle-kit/src/extensions/vector.ts b/drizzle-kit/src/legacy/postgres-v7/vector.ts similarity index 100% rename from drizzle-kit/src/extensions/vector.ts rename to drizzle-kit/src/legacy/postgres-v7/vector.ts diff --git a/drizzle-kit/src/legacy/schemaValidator.ts b/drizzle-kit/src/legacy/schemaValidator.ts new file mode 100644 index 0000000000..c575070fd4 --- /dev/null +++ b/drizzle-kit/src/legacy/schemaValidator.ts @@ -0,0 +1,8 @@ +import type { TypeOf } from 'zod'; +import { enum as enumType } from 'zod'; + +export const dialects = ['postgresql', 'mysql', 'sqlite', 'turso', 'singlestore', 'gel'] as const; +export const dialect = enumType(dialects); + +export type Dialect = (typeof dialects)[number]; +const _: Dialect = '' as TypeOf; diff --git a/drizzle-kit/src/legacy/snapshotsDiffer.ts b/drizzle-kit/src/legacy/snapshotsDiffer.ts new file mode 100644 index 0000000000..dd9834bd9c --- /dev/null +++ b/drizzle-kit/src/legacy/snapshotsDiffer.ts @@ -0,0 +1,898 @@ +import chalk from 'chalk'; +import { render } from 'hanji'; +import type { TypeOf, ZodTypeAny } from 'zod'; +import { any, array, boolean, enum as enumType, literal, never, object, record, string, union } from 'zod'; +import { ResolveColumnSelect, ResolveSchemasSelect, ResolveSelect, ResolveSelectNamed } from '../cli/views'; +import { _prepareAddColumns, _prepareDropColumns } from './jsonStatements'; +import type { ViewSquashed } from './mysql-v5/mysqlSchema'; +import type { Policy, Role, View } from './postgres-v7/pgSchema'; +import { mergedViewWithOption, policySquashed, roleSchema, sequenceSquashed } from './postgres-v7/pgSchema'; + +export type Named = { name: string }; +export type NamedWithSchema = { + name: string; + schema: string; +}; + +const makeChanged = (schema: T) => { + return object({ + type: enumType(['changed']), + old: schema, + new: schema, + }); +}; + +const makeSelfOrChanged = (schema: T) => { + return union([ + schema, + object({ + type: enumType(['changed']), + old: schema, + new: schema, + }), + ]); +}; + +export const makePatched = (schema: T) => { + return union([ + object({ + type: literal('added'), + value: schema, + }), + object({ + type: literal('deleted'), + value: schema, + }), + object({ + type: literal('changed'), + old: schema, + new: schema, + }), + ]); +}; + +export const makeSelfOrPatched = (schema: T) => { + return union([ + object({ + type: literal('none'), + value: schema, + }), + object({ + type: literal('added'), + value: schema, + }), + object({ + type: literal('deleted'), + value: schema, + }), + object({ + type: literal('changed'), + old: schema, + new: schema, + }), + ]); +}; + +const columnSchema = object({ + name: string(), + type: string(), + typeSchema: string().optional(), + primaryKey: boolean().optional(), + default: any().optional(), + notNull: boolean().optional(), + // should it be optional? should if be here? + autoincrement: boolean().optional(), + onUpdate: boolean().optional(), + isUnique: any().optional(), + uniqueName: string().optional(), + nullsNotDistinct: boolean().optional(), + generated: object({ + as: string(), + type: enumType(['stored', 'virtual']).default('stored'), + }).optional(), + identity: string().optional(), +}).strict(); + +const alteredColumnSchema = object({ + name: makeSelfOrChanged(string()), + type: makeChanged(string()).optional(), + default: makePatched(any()).optional(), + primaryKey: makePatched(boolean()).optional(), + notNull: makePatched(boolean()).optional(), + typeSchema: makePatched(string()).optional(), + onUpdate: makePatched(boolean()).optional(), + autoincrement: makePatched(boolean()).optional(), + generated: makePatched( + object({ + as: string(), + type: enumType(['stored', 'virtual']).default('stored'), + }), + ).optional(), + + identity: makePatched(string()).optional(), +}).strict(); + +const enumSchema = object({ + name: string(), + schema: string(), + values: array(string()), +}).strict(); + +const changedEnumSchema = object({ + name: string(), + schema: string(), + addedValues: object({ + before: string(), + value: string(), + }).array(), + deletedValues: array(string()), +}).strict(); + +const tableScheme = object({ + name: string(), + schema: string().default(''), + columns: record(string(), columnSchema), + indexes: record(string(), string()), + foreignKeys: record(string(), string()), + compositePrimaryKeys: record(string(), string()).default({}), + uniqueConstraints: record(string(), string()).default({}), + policies: record(string(), string()).default({}), + checkConstraints: record(string(), string()).default({}), + isRLSEnabled: boolean().default(false), +}).strict(); + +export const alteredTableScheme = object({ + name: string(), + schema: string(), + altered: alteredColumnSchema.array(), + addedIndexes: record(string(), string()), + deletedIndexes: record(string(), string()), + alteredIndexes: record( + string(), + object({ + __new: string(), + __old: string(), + }).strict(), + ), + addedForeignKeys: record(string(), string()), + deletedForeignKeys: record(string(), string()), + alteredForeignKeys: record( + string(), + object({ + __new: string(), + __old: string(), + }).strict(), + ), + addedCompositePKs: record(string(), string()), + deletedCompositePKs: record(string(), string()), + alteredCompositePKs: record( + string(), + object({ + __new: string(), + __old: string(), + }), + ), + addedUniqueConstraints: record(string(), string()), + deletedUniqueConstraints: record(string(), string()), + alteredUniqueConstraints: record( + string(), + object({ + __new: string(), + __old: string(), + }), + ), + addedPolicies: record(string(), string()), + deletedPolicies: record(string(), string()), + alteredPolicies: record( + string(), + object({ + __new: string(), + __old: string(), + }), + ), + addedCheckConstraints: record( + string(), + string(), + ), + deletedCheckConstraints: record( + string(), + string(), + ), + alteredCheckConstraints: record( + string(), + object({ + __new: string(), + __old: string(), + }), + ), +}).strict(); + +const alteredViewCommon = object({ + name: string(), + alteredDefinition: object({ + __old: string(), + __new: string(), + }).strict().optional(), + alteredExisting: object({ + __old: boolean(), + __new: boolean(), + }).strict().optional(), +}); + +export const alteredPgViewSchema = alteredViewCommon.merge( + object({ + schema: string(), + deletedWithOption: mergedViewWithOption.optional(), + addedWithOption: mergedViewWithOption.optional(), + addedWith: mergedViewWithOption.optional(), + deletedWith: mergedViewWithOption.optional(), + alteredWith: mergedViewWithOption.optional(), + alteredSchema: object({ + __old: string(), + __new: string(), + }).strict().optional(), + alteredTablespace: object({ + __old: string(), + __new: string(), + }).strict().optional(), + alteredUsing: object({ + __old: string(), + __new: string(), + }).strict().optional(), + }).strict(), +); + +const alteredMySqlViewSchema = alteredViewCommon.merge( + object({ + alteredMeta: object({ + __old: string(), + __new: string(), + }).strict().optional(), + }).strict(), +); + +export const diffResultScheme = object({ + alteredTablesWithColumns: alteredTableScheme.array(), + alteredEnums: changedEnumSchema.array(), + alteredSequences: sequenceSquashed.array(), + alteredRoles: roleSchema.array(), + alteredPolicies: policySquashed.array(), + alteredViews: alteredPgViewSchema.array(), +}).strict(); + +export const diffResultSchemeMysql = object({ + alteredTablesWithColumns: alteredTableScheme.array(), + alteredEnums: never().array(), + alteredViews: alteredMySqlViewSchema.array(), +}); + +export type Column = TypeOf; +export type AlteredColumn = TypeOf; +export type Enum = TypeOf; +export type Sequence = TypeOf; +export type Table = TypeOf; +export type AlteredTable = TypeOf; +export type DiffResult = TypeOf; + +export type DiffResultMysql = TypeOf; + +export interface ResolverInput { + created: T[]; + deleted: T[]; +} + +export interface ResolverOutput { + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +export interface ResolverOutputWithMoved { + created: T[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +export interface ColumnsResolverInput { + tableName: string; + schema: string; + created: T[]; + deleted: T[]; +} + +export interface TablePolicyResolverInput { + tableName: string; + schema: string; + created: T[]; + deleted: T[]; +} + +export interface TablePolicyResolverOutput { + tableName: string; + schema: string; + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +export interface PolicyResolverInput { + created: T[]; + deleted: T[]; +} + +export interface PolicyResolverOutput { + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +export interface RolesResolverInput { + created: T[]; + deleted: T[]; +} + +export interface RolesResolverOutput { + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +export interface ColumnsResolverOutput { + tableName: string; + schema: string; + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +} + +export const schemaChangeFor = ( + table: NamedWithSchema, + renamedSchemas: { from: Named; to: Named }[], +) => { + for (let ren of renamedSchemas) { + if (table.schema === ren.from.name) { + return { key: `${ren.to.name}.${table.name}`, schema: ren.to.name }; + } + } + + return { + key: `${table.schema || 'public'}.${table.name}`, + schema: table.schema, + }; +}; + +export const nameChangeFor = (table: Named, renamed: { from: Named; to: Named }[]) => { + for (let ren of renamed) { + if (table.name === ren.from.name) { + return { name: ren.to.name }; + } + } + + return { + name: table.name, + }; +}; + +export const nameSchemaChangeFor = ( + table: NamedWithSchema, + renamedTables: { from: NamedWithSchema; to: NamedWithSchema }[], +) => { + for (let ren of renamedTables) { + if (table.name === ren.from.name && table.schema === ren.from.schema) { + return { + key: `${ren.to.schema || 'public'}.${ren.to.name}`, + name: ren.to.name, + schema: ren.to.schema, + }; + } + } + + return { + key: `${table.schema || 'public'}.${table.name}`, + name: table.name, + schema: table.schema, + }; +}; + +export const columnChangeFor = ( + column: string, + renamedColumns: { from: Named; to: Named }[], +) => { + for (let ren of renamedColumns) { + if (column === ren.from.name) { + return ren.to.name; + } + } + + return column; +}; + +export const schemasResolver = async ( + input: ResolverInput
, +): Promise> => { + try { + const { created, deleted, renamed } = await promptSchemasConflict( + input.created, + input.deleted, + ); + + return { created: created, deleted: deleted, renamed: renamed }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const tablesResolver = async ( + input: ResolverInput
, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'table', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const mySqlViewsResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'view', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const viewsResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'view', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export interface RenamePropmtItem { + from: T; + to: T; +} + +export const isRenamePromptItem = ( + item: RenamePropmtItem | T, +): item is RenamePropmtItem => { + return 'from' in item && 'to' in item; +}; + +export const sequencesResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'sequence', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const roleResolver = async ( + input: RolesResolverInput, +): Promise> => { + const result = await promptNamedConflict( + input.created, + input.deleted, + 'role', + ); + return { + created: result.created, + deleted: result.deleted, + renamed: result.renamed, + }; +}; + +export const policyResolver = async ( + input: TablePolicyResolverInput, +): Promise> => { + const result = await promptColumnsConflicts( + input.tableName, + input.created, + input.deleted, + ); + return { + tableName: input.tableName, + schema: input.schema, + created: result.created, + deleted: result.deleted, + renamed: result.renamed, + }; +}; + +export const indPolicyResolver = async ( + input: PolicyResolverInput, +): Promise> => { + const result = await promptNamedConflict( + input.created, + input.deleted, + 'policy', + ); + return { + created: result.created, + deleted: result.deleted, + renamed: result.renamed, + }; +}; + +export const enumsResolver = async ( + input: ResolverInput, +): Promise> => { + try { + const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( + input.created, + input.deleted, + 'enum', + ); + + return { + created: created, + deleted: deleted, + moved: moved, + renamed: renamed, + }; + } catch (e) { + console.error(e); + throw e; + } +}; + +export const columnsResolver = async ( + input: ColumnsResolverInput, +): Promise> => { + const result = await promptColumnsConflicts( + input.tableName, + input.created, + input.deleted, + ); + return { + tableName: input.tableName, + schema: input.schema, + created: result.created, + deleted: result.deleted, + renamed: result.renamed, + }; +}; + +export const promptColumnsConflicts = async ( + tableName: string, + newColumns: T[], + missingColumns: T[], +) => { + if (newColumns.length === 0 || missingColumns.length === 0) { + return { created: newColumns, renamed: [], deleted: missingColumns }; + } + const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[] } = { + created: [], + renamed: [], + deleted: [], + }; + + let index = 0; + let leftMissing = [...missingColumns]; + + do { + const created = newColumns[index]; + + const renames: RenamePropmtItem[] = leftMissing.map((it) => { + return { from: it, to: created }; + }); + + const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; + + const { status, data } = await render( + new ResolveColumnSelect(tableName, created, promptData), + ); + if (status === 'aborted') { + console.error('ERROR'); + process.exit(1); + } + + if (isRenamePromptItem(data)) { + console.log( + `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ + chalk.gray( + 'column will be renamed', + ) + }`, + ); + result.renamed.push(data); + // this will make [item1, undefined, item2] + delete leftMissing[leftMissing.indexOf(data.from)]; + // this will make [item1, item2] + leftMissing = leftMissing.filter(Boolean); + } else { + console.log( + `${chalk.green('+')} ${data.name} ${ + chalk.gray( + 'column will be created', + ) + }`, + ); + result.created.push(created); + } + index += 1; + } while (index < newColumns.length); + console.log( + chalk.gray(`--- all columns conflicts in ${tableName} table resolved ---\n`), + ); + + result.deleted.push(...leftMissing); + return result; +}; + +export const promptNamedConflict = async ( + newItems: T[], + missingItems: T[], + entity: 'role' | 'policy', +): Promise<{ + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; +}> => { + if (missingItems.length === 0 || newItems.length === 0) { + return { + created: newItems, + renamed: [], + deleted: missingItems, + }; + } + + const result: { + created: T[]; + renamed: { from: T; to: T }[]; + deleted: T[]; + } = { created: [], renamed: [], deleted: [] }; + let index = 0; + let leftMissing = [...missingItems]; + do { + const created = newItems[index]; + const renames: RenamePropmtItem[] = leftMissing.map((it) => { + return { from: it, to: created }; + }); + + const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; + + const { status, data } = await render( + new ResolveSelectNamed(created, promptData, entity), + ); + if (status === 'aborted') { + console.error('ERROR'); + process.exit(1); + } + + if (isRenamePromptItem(data)) { + console.log( + `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ + chalk.gray( + `${entity} will be renamed/moved`, + ) + }`, + ); + + if (data.from.name !== data.to.name) { + result.renamed.push(data); + } + + delete leftMissing[leftMissing.indexOf(data.from)]; + leftMissing = leftMissing.filter(Boolean); + } else { + console.log( + `${chalk.green('+')} ${data.name} ${ + chalk.gray( + `${entity} will be created`, + ) + }`, + ); + result.created.push(created); + } + index += 1; + } while (index < newItems.length); + console.log(chalk.gray(`--- all ${entity} conflicts resolved ---\n`)); + result.deleted.push(...leftMissing); + return result; +}; + +export const promptNamedWithSchemasConflict = async ( + newItems: T[], + missingItems: T[], + entity: 'table' | 'enum' | 'sequence' | 'view', +): Promise<{ + created: T[]; + renamed: { from: T; to: T }[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + deleted: T[]; +}> => { + if (missingItems.length === 0 || newItems.length === 0) { + return { + created: newItems, + renamed: [], + moved: [], + deleted: missingItems, + }; + } + + const result: { + created: T[]; + renamed: { from: T; to: T }[]; + moved: { name: string; schemaFrom: string; schemaTo: string }[]; + deleted: T[]; + } = { created: [], renamed: [], moved: [], deleted: [] }; + let index = 0; + let leftMissing = [...missingItems]; + do { + const created = newItems[index]; + const renames: RenamePropmtItem[] = leftMissing.map((it) => { + return { from: it, to: created }; + }); + + const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; + + const { status, data } = await render( + new ResolveSelect(created, promptData, entity), + ); + if (status === 'aborted') { + console.error('ERROR'); + process.exit(1); + } + + if (isRenamePromptItem(data)) { + const schemaFromPrefix = !data.from.schema || data.from.schema === 'public' + ? '' + : `${data.from.schema}.`; + const schemaToPrefix = !data.to.schema || data.to.schema === 'public' + ? '' + : `${data.to.schema}.`; + + console.log( + `${chalk.yellow('~')} ${schemaFromPrefix}${data.from.name} › ${schemaToPrefix}${data.to.name} ${ + chalk.gray( + `${entity} will be renamed/moved`, + ) + }`, + ); + + if (data.from.name !== data.to.name) { + result.renamed.push(data); + } + + if (data.from.schema !== data.to.schema) { + result.moved.push({ + name: data.from.name, + schemaFrom: data.from.schema || 'public', + schemaTo: data.to.schema || 'public', + }); + } + + delete leftMissing[leftMissing.indexOf(data.from)]; + leftMissing = leftMissing.filter(Boolean); + } else { + console.log( + `${chalk.green('+')} ${data.name} ${ + chalk.gray( + `${entity} will be created`, + ) + }`, + ); + result.created.push(created); + } + index += 1; + } while (index < newItems.length); + console.log(chalk.gray(`--- all ${entity} conflicts resolved ---\n`)); + result.deleted.push(...leftMissing); + return result; +}; + +export const promptSchemasConflict = async ( + newSchemas: T[], + missingSchemas: T[], +): Promise<{ created: T[]; renamed: { from: T; to: T }[]; deleted: T[] }> => { + if (missingSchemas.length === 0 || newSchemas.length === 0) { + return { created: newSchemas, renamed: [], deleted: missingSchemas }; + } + + const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[] } = { + created: [], + renamed: [], + deleted: [], + }; + let index = 0; + let leftMissing = [...missingSchemas]; + do { + const created = newSchemas[index]; + const renames: RenamePropmtItem[] = leftMissing.map((it) => { + return { from: it, to: created }; + }); + + const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; + + const { status, data } = await render( + new ResolveSchemasSelect(created, promptData), + ); + if (status === 'aborted') { + console.error('ERROR'); + process.exit(1); + } + + if (isRenamePromptItem(data)) { + console.log( + `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ + chalk.gray( + 'schema will be renamed', + ) + }`, + ); + result.renamed.push(data); + delete leftMissing[leftMissing.indexOf(data.from)]; + leftMissing = leftMissing.filter(Boolean); + } else { + console.log( + `${chalk.green('+')} ${data.name} ${ + chalk.gray( + 'schema will be created', + ) + }`, + ); + result.created.push(created); + } + index += 1; + } while (index < newSchemas.length); + console.log(chalk.gray('--- all schemas conflicts resolved ---\n')); + result.deleted.push(...leftMissing); + return result; +}; diff --git a/drizzle-kit/src/legacy/sqlgenerator.ts b/drizzle-kit/src/legacy/sqlgenerator.ts new file mode 100644 index 0000000000..e332e706ff --- /dev/null +++ b/drizzle-kit/src/legacy/sqlgenerator.ts @@ -0,0 +1,2125 @@ +import type { + JsonAddColumnStatement, + JsonAddValueToEnumStatement, + JsonAlterColumnAlterGeneratedStatement, + JsonAlterColumnAlterIdentityStatement, + JsonAlterColumnDropDefaultStatement, + JsonAlterColumnDropGeneratedStatement, + JsonAlterColumnDropIdentityStatement, + JsonAlterColumnDropNotNullStatement, + JsonAlterColumnDropPrimaryKeyStatement, + JsonAlterColumnPgTypeStatement, + JsonAlterColumnSetDefaultStatement, + JsonAlterColumnSetGeneratedStatement, + JsonAlterColumnSetIdentityStatement, + JsonAlterColumnSetNotNullStatement, + JsonAlterColumnSetPrimaryKeyStatement, + JsonAlterCompositePK, + JsonAlterIndPolicyStatement, + JsonAlterPolicyStatement, + JsonAlterReferenceStatement, + JsonAlterRoleStatement, + JsonAlterSequenceStatement, + JsonAlterTableRemoveFromSchema, + JsonAlterTableSetNewSchema, + JsonAlterTableSetSchema, + JsonAlterViewAddWithOptionStatement, + JsonAlterViewAlterSchemaStatement, + JsonAlterViewAlterTablespaceStatement, + JsonAlterViewAlterUsingStatement, + JsonAlterViewDropWithOptionStatement, + JsonCreateCheckConstraint, + JsonCreateCompositePK, + JsonCreateEnumStatement, + JsonCreateIndPolicyStatement, + JsonCreatePgViewStatement, + JsonCreatePolicyStatement, + JsonCreateReferenceStatement, + JsonCreateRoleStatement, + JsonCreateSchema, + JsonCreateSequenceStatement, + JsonCreateTableStatement, + JsonCreateUniqueConstraint, + JsonDeleteCheckConstraint, + JsonDeleteCompositePK, + JsonDeleteReferenceStatement, + JsonDeleteUniqueConstraint, + JsonDisableRLSStatement, + JsonDropColumnStatement, + JsonDropEnumStatement, + JsonDropIndexStatement, + JsonDropIndPolicyStatement, + JsonDropPolicyStatement, + JsonDropRoleStatement, + JsonDropSequenceStatement, + JsonDropTableStatement, + JsonDropValueFromEnumStatement, + JsonDropViewStatement, + JsonEnableRLSStatement, + JsonIndRenamePolicyStatement, + JsonMoveEnumStatement, + JsonMoveSequenceStatement, + JsonPgCreateIndexStatement, + JsonRenameColumnStatement, + JsonRenameEnumStatement, + JsonRenamePolicyStatement, + JsonRenameRoleStatement, + JsonRenameSchema, + JsonRenameSequenceStatement, + JsonRenameTableStatement, + JsonRenameViewStatement, + JsonStatement, +} from './jsonStatements'; +import { PgSquasher } from './postgres-v7/pgSchema'; +import type { Dialect } from './schemaValidator'; + +export const BREAKPOINT = '--> statement-breakpoint\n'; + +import { escapeSingleQuotes } from './utils'; + +const parseType = (schemaPrefix: string, type: string) => { + const pgNativeTypes = [ + 'uuid', + 'smallint', + 'integer', + 'bigint', + 'boolean', + 'text', + 'varchar', + 'serial', + 'bigserial', + 'decimal', + 'numeric', + 'real', + 'json', + 'jsonb', + 'time', + 'time with time zone', + 'time without time zone', + 'time', + 'timestamp', + 'timestamp with time zone', + 'timestamp without time zone', + 'date', + 'interval', + 'bigint', + 'bigserial', + 'double precision', + 'interval year', + 'interval month', + 'interval day', + 'interval hour', + 'interval minute', + 'interval second', + 'interval year to month', + 'interval day to hour', + 'interval day to minute', + 'interval day to second', + 'interval hour to minute', + 'interval hour to second', + 'interval minute to second', + 'char', + 'vector', + 'geometry', + ]; + const arrayDefinitionRegex = /\[\d*(?:\[\d*\])*\]/g; + const arrayDefinition = (type.match(arrayDefinitionRegex) ?? []).join(''); + const withoutArrayDefinition = type.replace(arrayDefinitionRegex, ''); + return pgNativeTypes.some((it) => type.startsWith(it)) + ? `${withoutArrayDefinition}${arrayDefinition}` + : `${schemaPrefix}"${withoutArrayDefinition}"${arrayDefinition}`; +}; + +abstract class Convertor { + abstract can( + statement: JsonStatement, + dialect: Dialect, + ): boolean; + abstract convert( + statement: JsonStatement, + action?: 'push', + ): string | string[]; +} + +class PgCreateRoleConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_role' && dialect === 'postgresql'; + } + override convert(statement: JsonCreateRoleStatement): string | string[] { + return `CREATE ROLE "${statement.name}"${ + statement.values.createDb || statement.values.createRole || !statement.values.inherit + ? ` WITH${statement.values.createDb ? ' CREATEDB' : ''}${statement.values.createRole ? ' CREATEROLE' : ''}${ + statement.values.inherit ? '' : ' NOINHERIT' + }` + : '' + };`; + } +} + +class PgDropRoleConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_role' && dialect === 'postgresql'; + } + override convert(statement: JsonDropRoleStatement): string | string[] { + return `DROP ROLE "${statement.name}";`; + } +} + +class PgRenameRoleConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_role' && dialect === 'postgresql'; + } + override convert(statement: JsonRenameRoleStatement): string | string[] { + return `ALTER ROLE "${statement.nameFrom}" RENAME TO "${statement.nameTo}";`; + } +} + +class PgAlterRoleConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_role' && dialect === 'postgresql'; + } + override convert(statement: JsonAlterRoleStatement): string | string[] { + return `ALTER ROLE "${statement.name}"${` WITH${statement.values.createDb ? ' CREATEDB' : ' NOCREATEDB'}${ + statement.values.createRole ? ' CREATEROLE' : ' NOCREATEROLE' + }${statement.values.inherit ? ' INHERIT' : ' NOINHERIT'}`};`; + } +} + +///// + +class PgCreatePolicyConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_policy' && dialect === 'postgresql'; + } + override convert(statement: JsonCreatePolicyStatement): string | string[] { + const policy = statement.data; + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + const usingPart = policy.using ? ` USING (${policy.using})` : ''; + + const withCheckPart = policy.withCheck ? ` WITH CHECK (${policy.withCheck})` : ''; + + const policyToPart = policy.to?.map((v) => + ['current_user', 'current_role', 'session_user', 'public'].includes(v) ? v : `"${v}"` + ).join(', '); + + return `CREATE POLICY "${policy.name}" ON ${tableNameWithSchema} AS ${policy.as?.toUpperCase()} FOR ${policy.for?.toUpperCase()} TO ${policyToPart}${usingPart}${withCheckPart};`; + } +} + +class PgDropPolicyConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_policy' && dialect === 'postgresql'; + } + override convert(statement: JsonDropPolicyStatement): string | string[] { + const policy = statement.data; + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `DROP POLICY "${policy.name}" ON ${tableNameWithSchema} CASCADE;`; + } +} + +class PgRenamePolicyConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_policy' && dialect === 'postgresql'; + } + override convert(statement: JsonRenamePolicyStatement): string | string[] { + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER POLICY "${statement.oldName}" ON ${tableNameWithSchema} RENAME TO "${statement.newName}";`; + } +} + +class PgAlterPolicyConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_policy' && dialect === 'postgresql'; + } + override convert(statement: JsonAlterPolicyStatement, _dialect: any, action?: string): string | string[] { + const newPolicy = action === 'push' + ? PgSquasher.unsquashPolicyPush(statement.newData) + : PgSquasher.unsquashPolicy(statement.newData); + const oldPolicy = action === 'push' + ? PgSquasher.unsquashPolicyPush(statement.oldData) + : PgSquasher.unsquashPolicy(statement.oldData); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + const usingPart = newPolicy.using + ? ` USING (${newPolicy.using})` + : oldPolicy.using + ? ` USING (${oldPolicy.using})` + : ''; + + const withCheckPart = newPolicy.withCheck + ? ` WITH CHECK (${newPolicy.withCheck})` + : oldPolicy.withCheck + ? ` WITH CHECK (${oldPolicy.withCheck})` + : ''; + + return `ALTER POLICY "${oldPolicy.name}" ON ${tableNameWithSchema} TO ${newPolicy.to}${usingPart}${withCheckPart};`; + } +} + +//// + +class PgCreateIndPolicyConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_ind_policy' && dialect === 'postgresql'; + } + override convert(statement: JsonCreateIndPolicyStatement): string | string[] { + const policy = statement.data; + + const usingPart = policy.using ? ` USING (${policy.using})` : ''; + + const withCheckPart = policy.withCheck ? ` WITH CHECK (${policy.withCheck})` : ''; + + const policyToPart = policy.to?.map((v) => + ['current_user', 'current_role', 'session_user', 'public'].includes(v) ? v : `"${v}"` + ).join(', '); + + return `CREATE POLICY "${policy.name}" ON ${policy.on} AS ${policy.as?.toUpperCase()} FOR ${policy.for?.toUpperCase()} TO ${policyToPart}${usingPart}${withCheckPart};`; + } +} + +class PgDropIndPolicyConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_ind_policy' && dialect === 'postgresql'; + } + override convert(statement: JsonDropIndPolicyStatement): string | string[] { + const policy = statement.data; + + return `DROP POLICY "${policy.name}" ON ${policy.on} CASCADE;`; + } +} + +class PgRenameIndPolicyConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_ind_policy' && dialect === 'postgresql'; + } + override convert(statement: JsonIndRenamePolicyStatement): string | string[] { + return `ALTER POLICY "${statement.oldName}" ON ${statement.tableKey} RENAME TO "${statement.newName}";`; + } +} + +class PgAlterIndPolicyConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_ind_policy' && dialect === 'postgresql'; + } + override convert(statement: JsonAlterIndPolicyStatement): string | string[] { + const newPolicy = statement.newData; + const oldPolicy = statement.oldData; + + const usingPart = newPolicy.using + ? ` USING (${newPolicy.using})` + : oldPolicy.using + ? ` USING (${oldPolicy.using})` + : ''; + + const withCheckPart = newPolicy.withCheck + ? ` WITH CHECK (${newPolicy.withCheck})` + : oldPolicy.withCheck + ? ` WITH CHECK (${oldPolicy.withCheck})` + : ''; + + return `ALTER POLICY "${oldPolicy.name}" ON ${oldPolicy.on} TO ${newPolicy.to}${usingPart}${withCheckPart};`; + } +} + +//// + +class PgEnableRlsConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'enable_rls' && dialect === 'postgresql'; + } + override convert(statement: JsonEnableRLSStatement): string { + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ENABLE ROW LEVEL SECURITY;`; + } +} + +class PgDisableRlsConvertor extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'disable_rls' && dialect === 'postgresql'; + } + override convert(statement: JsonDisableRLSStatement): string { + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DISABLE ROW LEVEL SECURITY;`; + } +} + +class PgCreateTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_table' && dialect === 'postgresql'; + } + + convert(st: JsonCreateTableStatement) { + const { tableName, schema, columns, compositePKs, uniqueConstraints, checkConstraints, policies, isRLSEnabled } = + st; + + let statement = ''; + const name = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; + + statement += `CREATE TABLE ${name} (\n`; + for (let i = 0; i < columns.length; i++) { + const column = columns[i]; + + const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; + const notNullStatement = column.notNull && !column.identity ? ' NOT NULL' : ''; + const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}::${column.type}` : ''; + + const uniqueConstraint = column.isUnique + ? ` CONSTRAINT "${column.uniqueName}" UNIQUE${column.nullsNotDistinct ? ' NULLS NOT DISTINCT' : ''}` + : ''; + + const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' + ? `"${column.typeSchema}".` + : ''; + + const type = parseType(schemaPrefix, column.type); + const generated = column.generated; + + const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) STORED` : ''; + + const unsquashedIdentity = column.identity + ? PgSquasher.unsquashIdentity(column.identity) + : undefined; + + const identityWithSchema = schema + ? `"${schema}"."${unsquashedIdentity?.name}"` + : `"${unsquashedIdentity?.name}"`; + + const identity = unsquashedIdentity + ? ` GENERATED ${ + unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + } AS IDENTITY (sequence name ${identityWithSchema}${ + unsquashedIdentity.increment + ? ` INCREMENT BY ${unsquashedIdentity.increment}` + : '' + }${ + unsquashedIdentity.minValue + ? ` MINVALUE ${unsquashedIdentity.minValue}` + : '' + }${ + unsquashedIdentity.maxValue + ? ` MAXVALUE ${unsquashedIdentity.maxValue}` + : '' + }${ + unsquashedIdentity.startWith + ? ` START WITH ${unsquashedIdentity.startWith}` + : '' + }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ + unsquashedIdentity.cycle ? ` CYCLE` : '' + })` + : ''; + + statement += '\t' + + `"${column.name}" ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${uniqueConstraint}${identity}`; + statement += i === columns.length - 1 ? '' : ',\n'; + } + + if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { + statement += ',\n'; + const compositePK = PgSquasher.unsquashPK(compositePKs[0]); + statement += `\tCONSTRAINT "${st.compositePkName}" PRIMARY KEY("${compositePK.columns.join(`","`)}")`; + // statement += `\n`; + } + + if ( + typeof uniqueConstraints !== 'undefined' + && uniqueConstraints.length > 0 + ) { + for (const uniqueConstraint of uniqueConstraints) { + statement += ',\n'; + const unsquashedUnique = PgSquasher.unsquashUnique(uniqueConstraint); + statement += `\tCONSTRAINT "${unsquashedUnique.name}" UNIQUE${ + unsquashedUnique.nullsNotDistinct ? ' NULLS NOT DISTINCT' : '' + }("${unsquashedUnique.columns.join(`","`)}")`; + // statement += `\n`; + } + } + + if (typeof checkConstraints !== 'undefined' && checkConstraints.length > 0) { + for (const checkConstraint of checkConstraints) { + statement += ',\n'; + const unsquashedCheck = PgSquasher.unsquashCheck(checkConstraint); + statement += `\tCONSTRAINT "${unsquashedCheck.name}" CHECK (${unsquashedCheck.value})`; + } + } + + statement += `\n);`; + statement += `\n`; + + const enableRls = new PgEnableRlsConvertor().convert({ + type: 'enable_rls', + tableName, + schema, + }); + + return [statement, ...(policies && policies.length > 0 || isRLSEnabled ? [enableRls] : [])]; + } +} + +class PgCreateViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_view' && dialect === 'postgresql'; + } + + convert(st: JsonCreatePgViewStatement) { + const { definition, name: viewName, schema, with: withOption, materialized, withNoData, tablespace, using } = st; + + const name = schema ? `"${schema}"."${viewName}"` : `"${viewName}"`; + + let statement = materialized ? `CREATE MATERIALIZED VIEW ${name}` : `CREATE VIEW ${name}`; + + if (using) statement += ` USING "${using}"`; + + const options: string[] = []; + if (withOption) { + statement += ` WITH (`; + + Object.entries(withOption).forEach(([key, value]) => { + if (typeof value === 'undefined') return; + + options.push(`${key.snake_case()} = ${value}`); + }); + + statement += options.join(', '); + + statement += `)`; + } + + if (tablespace) statement += ` TABLESPACE ${tablespace}`; + + statement += ` AS (${definition})`; + + if (withNoData) statement += ` WITH NO DATA`; + + statement += `;`; + + return statement; + } +} + +class PgDropViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_view' && dialect === 'postgresql'; + } + + convert(st: JsonDropViewStatement) { + const { name: viewName, schema, materialized } = st; + + const name = schema ? `"${schema}"."${viewName}"` : `"${viewName}"`; + + return `DROP${materialized ? ' MATERIALIZED' : ''} VIEW ${name};`; + } +} + +class PgRenameViewConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_view' && dialect === 'postgresql'; + } + + convert(st: JsonRenameViewStatement) { + const { nameFrom: from, nameTo: to, schema, materialized } = st; + + const nameFrom = `"${schema}"."${from}"`; + + return `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW ${nameFrom} RENAME TO "${to}";`; + } +} + +class PgAlterViewSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_alter_schema' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewAlterSchemaStatement) { + const { fromSchema, toSchema, name, materialized } = st; + + const statement = `ALTER${ + materialized ? ' MATERIALIZED' : '' + } VIEW "${fromSchema}"."${name}" SET SCHEMA "${toSchema}";`; + + return statement; + } +} + +class PgAlterViewAddWithOptionConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_add_with_option' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewAddWithOptionStatement) { + const { schema, with: withOption, name, materialized } = st; + + let statement = `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW "${schema}"."${name}" SET (`; + + const options: string[] = []; + + Object.entries(withOption).forEach(([key, value]) => { + options.push(`${key.snake_case()} = ${value}`); + }); + + statement += options.join(', '); + + statement += `);`; + + return statement; + } +} + +class PgAlterViewDropWithOptionConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_drop_with_option' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewDropWithOptionStatement) { + const { schema, name, materialized, with: withOptions } = st; + + let statement = `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW "${schema}"."${name}" RESET (`; + + const options: string[] = []; + + Object.entries(withOptions).forEach(([key]) => { + options.push(`${key.snake_case()}`); + }); + + statement += options.join(', '); + + statement += ');'; + + return statement; + } +} + +class PgAlterViewAlterTablespaceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_alter_tablespace' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewAlterTablespaceStatement) { + const { schema, name, toTablespace } = st; + + const statement = `ALTER MATERIALIZED VIEW "${schema}"."${name}" SET TABLESPACE ${toTablespace};`; + + return statement; + } +} + +class PgAlterViewAlterUsingConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_view_alter_using' && dialect === 'postgresql'; + } + + convert(st: JsonAlterViewAlterUsingStatement) { + const { schema, name, toUsing } = st; + + const statement = `ALTER MATERIALIZED VIEW "${schema}"."${name}" SET ACCESS METHOD "${toUsing}";`; + + return statement; + } +} + +class PgAlterTableAlterColumnSetGenerated extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_identity' + && dialect === 'postgresql' + ); + } + override convert( + statement: JsonAlterColumnSetIdentityStatement, + ): string | string[] { + const { identity, tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const unsquashedIdentity = PgSquasher.unsquashIdentity(identity); + + const identityWithSchema = schema + ? `"${schema}"."${unsquashedIdentity?.name}"` + : `"${unsquashedIdentity?.name}"`; + + const identityStatement = unsquashedIdentity + ? ` GENERATED ${ + unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + } AS IDENTITY (sequence name ${identityWithSchema}${ + unsquashedIdentity.increment + ? ` INCREMENT BY ${unsquashedIdentity.increment}` + : '' + }${ + unsquashedIdentity.minValue + ? ` MINVALUE ${unsquashedIdentity.minValue}` + : '' + }${ + unsquashedIdentity.maxValue + ? ` MAXVALUE ${unsquashedIdentity.maxValue}` + : '' + }${ + unsquashedIdentity.startWith + ? ` START WITH ${unsquashedIdentity.startWith}` + : '' + }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ + unsquashedIdentity.cycle ? ` CYCLE` : '' + })` + : ''; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" ADD${identityStatement};`; + } +} + +class PgAlterTableAlterColumnDropGenerated extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_identity' + && dialect === 'postgresql' + ); + } + override convert( + statement: JsonAlterColumnDropIdentityStatement, + ): string | string[] { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP IDENTITY;`; + } +} + +class PgAlterTableAlterColumnAlterGenerated extends Convertor { + override can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_change_identity' + && dialect === 'postgresql' + ); + } + override convert( + statement: JsonAlterColumnAlterIdentityStatement, + ): string | string[] { + const { identity, oldIdentity, tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const unsquashedIdentity = PgSquasher.unsquashIdentity(identity); + const unsquashedOldIdentity = PgSquasher.unsquashIdentity(oldIdentity); + + const statementsToReturn: string[] = []; + + if (unsquashedOldIdentity.type !== unsquashedIdentity.type) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET GENERATED ${ + unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + };`, + ); + } + + if (unsquashedOldIdentity.minValue !== unsquashedIdentity.minValue) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET MINVALUE ${unsquashedIdentity.minValue};`, + ); + } + + if (unsquashedOldIdentity.maxValue !== unsquashedIdentity.maxValue) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET MAXVALUE ${unsquashedIdentity.maxValue};`, + ); + } + + if (unsquashedOldIdentity.increment !== unsquashedIdentity.increment) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET INCREMENT BY ${unsquashedIdentity.increment};`, + ); + } + + if (unsquashedOldIdentity.startWith !== unsquashedIdentity.startWith) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET START WITH ${unsquashedIdentity.startWith};`, + ); + } + + if (unsquashedOldIdentity.cache !== unsquashedIdentity.cache) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET CACHE ${unsquashedIdentity.cache};`, + ); + } + + if (unsquashedOldIdentity.cycle !== unsquashedIdentity.cycle) { + statementsToReturn.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET ${ + unsquashedIdentity.cycle ? `CYCLE` : 'NO CYCLE' + };`, + ); + } + + return statementsToReturn; + } +} + +class PgAlterTableAddUniqueConstraintConvertor extends Convertor { + can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'create_unique_constraint' && dialect === 'postgresql' + ); + } + convert(statement: JsonCreateUniqueConstraint): string { + const unsquashed = PgSquasher.unsquashUnique(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${unsquashed.name}" UNIQUE${ + unsquashed.nullsNotDistinct ? ' NULLS NOT DISTINCT' : '' + }("${unsquashed.columns.join('","')}");`; + } +} + +class PgAlterTableDropUniqueConstraintConvertor extends Convertor { + can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'delete_unique_constraint' && dialect === 'postgresql' + ); + } + convert(statement: JsonDeleteUniqueConstraint): string { + const unsquashed = PgSquasher.unsquashUnique(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${unsquashed.name}";`; + } +} + +class PgAlterTableAddCheckConstraintConvertor extends Convertor { + can(statement: JsonCreateCheckConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'create_check_constraint' && dialect === 'postgresql' + ); + } + convert(statement: JsonCreateCheckConstraint): string { + const unsquashed = PgSquasher.unsquashCheck(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${unsquashed.name}" CHECK (${unsquashed.value});`; + } +} + +class PgAlterTableDeleteCheckConstraintConvertor extends Convertor { + can(statement: JsonDeleteCheckConstraint, dialect: Dialect): boolean { + return ( + statement.type === 'delete_check_constraint' && dialect === 'postgresql' + ); + } + convert(statement: JsonDeleteCheckConstraint): string { + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`; + } +} + +class CreatePgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonCreateSequenceStatement) { + const { name, values, schema } = st; + + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `CREATE SEQUENCE ${sequenceWithSchema}${values.increment ? ` INCREMENT BY ${values.increment}` : ''}${ + values.minValue ? ` MINVALUE ${values.minValue}` : '' + }${values.maxValue ? ` MAXVALUE ${values.maxValue}` : ''}${ + values.startWith ? ` START WITH ${values.startWith}` : '' + }${values.cache ? ` CACHE ${values.cache}` : ''}${values.cycle ? ` CYCLE` : ''};`; + } +} + +class DropPgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonDropSequenceStatement) { + const { name, schema } = st; + + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `DROP SEQUENCE ${sequenceWithSchema};`; + } +} + +class RenamePgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonRenameSequenceStatement) { + const { nameFrom, nameTo, schema } = st; + + const sequenceWithSchemaFrom = schema + ? `"${schema}"."${nameFrom}"` + : `"${nameFrom}"`; + // const sequenceWithSchemaTo = schema + // ? `"${schema}"."${nameTo}"` + // : `"${nameTo}"`; + + return `ALTER SEQUENCE ${sequenceWithSchemaFrom} RENAME TO "${nameTo}";`; + } +} + +class MovePgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'move_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonMoveSequenceStatement) { + const { schemaFrom, schemaTo, name } = st; + + const sequenceWithSchema = schemaFrom + ? `"${schemaFrom}"."${name}"` + : `"${name}"`; + + const seqSchemaTo = schemaTo ? `"${schemaTo}"` : `public`; + + return `ALTER SEQUENCE ${sequenceWithSchema} SET SCHEMA ${seqSchemaTo};`; + } +} + +class AlterPgSequenceConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_sequence' && dialect === 'postgresql'; + } + + convert(st: JsonAlterSequenceStatement) { + const { name, schema, values } = st; + + const { increment, minValue, maxValue, startWith, cache, cycle } = values; + + const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `ALTER SEQUENCE ${sequenceWithSchema}${increment ? ` INCREMENT BY ${increment}` : ''}${ + minValue ? ` MINVALUE ${minValue}` : '' + }${maxValue ? ` MAXVALUE ${maxValue}` : ''}${startWith ? ` START WITH ${startWith}` : ''}${ + cache ? ` CACHE ${cache}` : '' + }${cycle ? ` CYCLE` : ''};`; + } +} + +class CreateTypeEnumConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'create_type_enum'; + } + + convert(st: JsonCreateEnumStatement) { + const { name, values, schema } = st; + + const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + let valuesStatement = '('; + valuesStatement += values.map((it) => `'${escapeSingleQuotes(it)}'`).join(', '); + valuesStatement += ')'; + + // let statement = 'DO $$ BEGIN'; + // statement += '\n'; + let statement = `CREATE TYPE ${enumNameWithSchema} AS ENUM${valuesStatement};`; + // statement += '\n'; + // statement += 'EXCEPTION'; + // statement += '\n'; + // statement += ' WHEN duplicate_object THEN null;'; + // statement += '\n'; + // statement += 'END $$;'; + // statement += '\n'; + return statement; + } +} + +class DropTypeEnumConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'drop_type_enum'; + } + + convert(st: JsonDropEnumStatement) { + const { name, schema } = st; + + const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + let statement = `DROP TYPE ${enumNameWithSchema};`; + + return statement; + } +} + +class AlterTypeAddValueConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'alter_type_add_value'; + } + + convert(st: JsonAddValueToEnumStatement) { + const { name, schema, value, before } = st; + + const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `ALTER TYPE ${enumNameWithSchema} ADD VALUE '${value}'${before.length ? ` BEFORE '${before}'` : ''};`; + } +} + +class AlterTypeSetSchemaConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'move_type_enum'; + } + + convert(st: JsonMoveEnumStatement) { + const { name, schemaFrom, schemaTo } = st; + + const enumNameWithSchema = schemaFrom ? `"${schemaFrom}"."${name}"` : `"${name}"`; + + return `ALTER TYPE ${enumNameWithSchema} SET SCHEMA "${schemaTo}";`; + } +} + +class AlterRenameTypeConvertor extends Convertor { + can(statement: JsonStatement): boolean { + return statement.type === 'rename_type_enum'; + } + + convert(st: JsonRenameEnumStatement) { + const { nameTo, nameFrom, schema } = st; + + const enumNameWithSchema = schema ? `"${schema}"."${nameFrom}"` : `"${nameFrom}"`; + + return `ALTER TYPE ${enumNameWithSchema} RENAME TO "${nameTo}";`; + } +} + +class AlterTypeDropValueConvertor extends Convertor { + can(statement: JsonDropValueFromEnumStatement): boolean { + return statement.type === 'alter_type_drop_value'; + } + + convert(st: JsonDropValueFromEnumStatement) { + const { columnsWithEnum, name, newValues, enumSchema } = st; + + const statements: string[] = []; + + for (const withEnum of columnsWithEnum) { + const tableNameWithSchema = withEnum.tableSchema + ? `"${withEnum.tableSchema}"."${withEnum.table}"` + : `"${withEnum.table}"`; + + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${withEnum.column}" SET DATA TYPE text;`, + ); + if (withEnum.default) { + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${withEnum.column}" SET DEFAULT ${withEnum.default}::text;`, + ); + } + } + + statements.push(new DropTypeEnumConvertor().convert({ name: name, schema: enumSchema, type: 'drop_type_enum' })); + + statements.push(new CreateTypeEnumConvertor().convert({ + name: name, + schema: enumSchema, + values: newValues, + type: 'create_type_enum', + })); + + for (const withEnum of columnsWithEnum) { + const tableNameWithSchema = withEnum.tableSchema + ? `"${withEnum.tableSchema}"."${withEnum.table}"` + : `"${withEnum.table}"`; + + const parsedType = parseType(`"${enumSchema}".`, withEnum.columnType); + if (withEnum.default) { + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${withEnum.column}" SET DEFAULT ${withEnum.default}::${parsedType};`, + ); + } + + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${withEnum.column}" SET DATA TYPE ${parsedType} USING "${withEnum.column}"::${parsedType};`, + ); + } + + return statements; + } +} + +class PgDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && dialect === 'postgresql'; + } + + convert(statement: JsonDropTableStatement, _d: any, action?: string) { + const { tableName, schema, policies } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const dropPolicyConvertor = new PgDropPolicyConvertor(); + const droppedPolicies = policies?.map((p) => { + return dropPolicyConvertor.convert({ + type: 'drop_policy', + tableName, + data: action === 'push' + ? PgSquasher.unsquashPolicyPush(p) + : PgSquasher.unsquashPolicy(p), + schema, + }) as string; + }) ?? []; + + return [ + ...droppedPolicies, + `DROP TABLE ${tableNameWithSchema} CASCADE;`, + ]; + } +} + +class MySQLDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && dialect === 'mysql'; + } + + convert(statement: JsonDropTableStatement) { + const { tableName } = statement; + return `DROP TABLE \`${tableName}\`;`; + } +} + +export class SingleStoreDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && dialect === 'singlestore'; + } + + convert(statement: JsonDropTableStatement) { + const { tableName } = statement; + return `DROP TABLE \`${tableName}\`;`; + } +} + +export class SQLiteDropTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_table' && (dialect === 'sqlite' || dialect === 'turso'); + } + + convert(statement: JsonDropTableStatement) { + const { tableName } = statement; + return `DROP TABLE \`${tableName}\`;`; + } +} + +class PgRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && dialect === 'postgresql'; + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo, fromSchema } = statement; + const from = fromSchema + ? `"${fromSchema}"."${tableNameFrom}"` + : `"${tableNameFrom}"`; + const to = `"${tableNameTo}"`; + return `ALTER TABLE ${from} RENAME TO ${to};`; + } +} + +export class SqliteRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && (dialect === 'sqlite' || dialect === 'turso'); + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo } = statement; + return `ALTER TABLE \`${tableNameFrom}\` RENAME TO \`${tableNameTo}\`;`; + } +} + +class MySqlRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && dialect === 'mysql'; + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo } = statement; + return `RENAME TABLE \`${tableNameFrom}\` TO \`${tableNameTo}\`;`; + } +} + +export class SingleStoreRenameTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_table' && dialect === 'singlestore'; + } + + convert(statement: JsonRenameTableStatement) { + const { tableNameFrom, tableNameTo } = statement; + return `ALTER TABLE \`${tableNameFrom}\` RENAME TO \`${tableNameTo}\`;`; + } +} + +class PgAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && dialect === 'postgresql' + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} RENAME COLUMN "${oldColumnName}" TO "${newColumnName}";`; + } +} + +class MySqlAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && dialect === 'mysql' + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName } = statement; + return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`; + } +} + +class SingleStoreAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && dialect === 'singlestore' + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName } = statement; + return `ALTER TABLE \`${tableName}\` CHANGE \`${oldColumnName}\` \`${newColumnName}\`;`; + } +} + +class SQLiteAlterTableRenameColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_rename_column' && (dialect === 'sqlite' || dialect === 'turso') + ); + } + + convert(statement: JsonRenameColumnStatement) { + const { tableName, oldColumnName, newColumnName } = statement; + return `ALTER TABLE \`${tableName}\` RENAME COLUMN "${oldColumnName}" TO "${newColumnName}";`; + } +} + +class PgAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_drop_column' && dialect === 'postgresql' + ); + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP COLUMN "${columnName}";`; + } +} + +class MySqlAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_drop_column' && dialect === 'mysql'; + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; + } +} + +class SingleStoreAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_drop_column' && dialect === 'singlestore'; + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; + } +} + +class SQLiteAlterTableDropColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_table_drop_column' && (dialect === 'sqlite' || dialect === 'turso'); + } + + convert(statement: JsonDropColumnStatement) { + const { tableName, columnName } = statement; + return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; + } +} + +class PgAlterTableAddColumnConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_add_column' && dialect === 'postgresql' + ); + } + + convert(statement: JsonAddColumnStatement) { + const { tableName, column, schema } = statement; + const { name, notNull, generated, primaryKey, identity } = column; + + const primaryKeyStatement = primaryKey ? ' PRIMARY KEY' : ''; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; + + const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' + ? `"${column.typeSchema}".` + : ''; + + const fixedType = parseType(schemaPrefix, column.type); + + const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; + + const unsquashedIdentity = identity + ? PgSquasher.unsquashIdentity(identity) + : undefined; + + const identityWithSchema = schema + ? `"${schema}"."${unsquashedIdentity?.name}"` + : `"${unsquashedIdentity?.name}"`; + + const identityStatement = unsquashedIdentity + ? ` GENERATED ${ + unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' + } AS IDENTITY (sequence name ${identityWithSchema}${ + unsquashedIdentity.increment + ? ` INCREMENT BY ${unsquashedIdentity.increment}` + : '' + }${ + unsquashedIdentity.minValue + ? ` MINVALUE ${unsquashedIdentity.minValue}` + : '' + }${ + unsquashedIdentity.maxValue + ? ` MAXVALUE ${unsquashedIdentity.maxValue}` + : '' + }${ + unsquashedIdentity.startWith + ? ` START WITH ${unsquashedIdentity.startWith}` + : '' + }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ + unsquashedIdentity.cycle ? ` CYCLE` : '' + })` + : ''; + + const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) STORED` : ''; + + return `ALTER TABLE ${tableNameWithSchema} ADD COLUMN "${name}" ${fixedType}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${identityStatement};`; + } +} + +class PgAlterTableAlterColumnSetTypeConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'pg_alter_table_alter_column_set_type' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnPgTypeStatement) { + const { tableName, columnName, newDataType, schema, oldDataType, columnDefault, typeSchema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const statements: string[] = []; + + const type = parseType(`"${typeSchema}".`, newDataType.name); + + if (!oldDataType.isEnum && !newDataType.isEnum) { + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${type};`, + ); + if (columnDefault) { + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${columnDefault};`, + ); + } + } + + if (oldDataType.isEnum && !newDataType.isEnum) { + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${type};`, + ); + if (columnDefault) { + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${columnDefault};`, + ); + } + } + + if (!oldDataType.isEnum && newDataType.isEnum) { + if (columnDefault) { + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${columnDefault}::${type};`, + ); + } + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${type} USING "${columnName}"::${type};`, + ); + } + + if (oldDataType.isEnum && newDataType.isEnum) { + const alterType = + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${type} USING "${columnName}"::text::${type};`; + + if (newDataType.name !== oldDataType.name && columnDefault) { + statements.push( + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP DEFAULT;`, + alterType, + `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${columnDefault};`, + ); + } else { + statements.push(alterType); + } + } + + return statements; + } +} + +class PgAlterTableAlterColumnSetDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_default' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnSetDefaultStatement) { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${statement.newDefaultValue};`; + } +} + +class PgAlterTableAlterColumnDropDefaultConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_default' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnDropDefaultStatement) { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP DEFAULT;`; + } +} + +class PgAlterTableAlterColumnDropGeneratedConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_generated' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnDropGeneratedStatement) { + const { tableName, columnName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP EXPRESSION;`; + } +} + +class PgAlterTableAlterColumnSetExpressionConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_generated' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnSetGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnNotNull: notNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const addColumnStatement = new PgAlterTableAddColumnConvertor().convert({ + schema, + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'alter_table_add_column', + }); + + return [ + `ALTER TABLE ${tableNameWithSchema} drop column "${columnName}";`, + addColumnStatement, + ]; + } +} + +class PgAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_alter_generated' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnAlterGeneratedStatement) { + const { + tableName, + columnName, + schema, + columnNotNull: notNull, + columnDefault, + columnOnUpdate, + columnAutoIncrement, + columnPk, + columnGenerated, + } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + const addColumnStatement = new PgAlterTableAddColumnConvertor().convert({ + schema, + tableName, + column: { + name: columnName, + type: statement.newDataType, + notNull, + default: columnDefault, + onUpdate: columnOnUpdate, + autoincrement: columnAutoIncrement, + primaryKey: columnPk, + generated: columnGenerated, + }, + type: 'alter_table_add_column', + }); + + return [ + `ALTER TABLE ${tableNameWithSchema} drop column "${columnName}";`, + addColumnStatement, + ]; + } +} + +//// + +class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateCompositePK) { + const { columns } = PgSquasher.unsquashPK(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.constraintName}" PRIMARY KEY("${ + columns.join('","') + }");`; + } +} +class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonDeleteCompositePK) { + // const { name, columns } = PgSquasher.unsquashPK(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`; + } +} + +class PgAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_composite_pk' && dialect === 'postgresql'; + } + + convert(statement: JsonAlterCompositePK) { + // const { name, columns } = PgSquasher.unsquashPK(statement.old); + const { columns: newColumns } = PgSquasher.unsquashPK( + statement.new, + ); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.oldConstraintName}";\n${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.newConstraintName}" PRIMARY KEY("${ + newColumns.join('","') + }");`; + } +} + +class PgAlterTableAlterColumnSetPrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_pk' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnSetPrimaryKeyStatement) { + const { columnName } = statement; + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ADD PRIMARY KEY ("${columnName}");`; + } +} + +class PgAlterTableAlterColumnDropPrimaryKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_pk' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnDropPrimaryKeyStatement) { + const { tableName, schema } = statement; + return `/* + Unfortunately in current drizzle-kit version we can't automatically get name for primary key. + We are working on making it available! + + Meanwhile you can: + 1. Check pk name in your database, by running + SELECT constraint_name FROM information_schema.table_constraints + WHERE table_schema = '${typeof schema === 'undefined' || schema === '' ? 'public' : schema}' + AND table_name = '${tableName}' + AND constraint_type = 'PRIMARY KEY'; + 2. Uncomment code below and paste pk name manually + + Hope to release this update as soon as possible +*/ + +-- ALTER TABLE "${tableName}" DROP CONSTRAINT "";`; + } +} + +class PgAlterTableAlterColumnSetNotNullConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_set_notnull' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnSetNotNullStatement) { + const { columnName } = statement; + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET NOT NULL;`; + } +} + +class PgAlterTableAlterColumnDropNotNullConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_alter_column_drop_notnull' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterColumnDropNotNullStatement) { + const { columnName } = statement; + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP NOT NULL;`; + } +} + +// FK +class PgCreateForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_reference' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateReferenceStatement): string { + const { + name, + tableFrom, + tableTo, + columnsFrom, + columnsTo, + onDelete, + onUpdate, + schemaTo, + } = PgSquasher.unsquashFK(statement.data); + const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; + const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : ''; + const fromColumnsString = columnsFrom.map((it) => `"${it}"`).join(','); + const toColumnsString = columnsTo.map((it) => `"${it}"`).join(','); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${tableFrom}"` + : `"${tableFrom}"`; + + const tableToNameWithSchema = schemaTo + ? `"${schemaTo}"."${tableTo}"` + : `"${tableTo}"`; + + const alterStatement = + `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${name}" FOREIGN KEY (${fromColumnsString}) REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; + + return alterStatement; + } +} + +class PgAlterForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'alter_reference' && dialect === 'postgresql'; + } + + convert(statement: JsonAlterReferenceStatement): string { + const newFk = PgSquasher.unsquashFK(statement.data); + const oldFk = PgSquasher.unsquashFK(statement.oldFkey); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${oldFk.tableFrom}"` + : `"${oldFk.tableFrom}"`; + + let sql = `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${oldFk.name}";\n`; + + const onDeleteStatement = newFk.onDelete + ? ` ON DELETE ${newFk.onDelete}` + : ''; + const onUpdateStatement = newFk.onUpdate + ? ` ON UPDATE ${newFk.onUpdate}` + : ''; + + const fromColumnsString = newFk.columnsFrom + .map((it) => `"${it}"`) + .join(','); + const toColumnsString = newFk.columnsTo.map((it) => `"${it}"`).join(','); + + const tableFromNameWithSchema = oldFk.schemaTo + ? `"${oldFk.schemaTo}"."${oldFk.tableFrom}"` + : `"${oldFk.tableFrom}"`; + + const tableToNameWithSchema = newFk.schemaTo + ? `"${newFk.schemaTo}"."${newFk.tableFrom}"` + : `"${newFk.tableFrom}"`; + + const alterStatement = + `ALTER TABLE ${tableFromNameWithSchema} ADD CONSTRAINT "${newFk.name}" FOREIGN KEY (${fromColumnsString}) REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; + + sql += alterStatement; + return sql; + } +} + +class PgDeleteForeignKeyConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'delete_reference' && dialect === 'postgresql'; + } + + convert(statement: JsonDeleteReferenceStatement): string { + const tableFrom = statement.tableName; // delete fk from renamed table case + const { name } = PgSquasher.unsquashFK(statement.data); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${tableFrom}"` + : `"${tableFrom}"`; + + return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${name}";\n`; + } +} + +class CreatePgIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_index_pg' && dialect === 'postgresql'; + } + + convert(statement: JsonPgCreateIndexStatement): string { + const { + name, + columns, + isUnique, + concurrently, + with: withMap, + method, + where, + } = statement.data; + // // since postgresql 9.5 + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + const value = columns + .map( + (it) => + `${it.isExpression ? it.expression : `"${it.expression}"`}${ + it.opclass ? ` ${it.opclass}` : it.asc ? '' : ' DESC' + }${ + (it.asc && it.nulls && it.nulls === 'last') || it.opclass + ? '' + : ` NULLS ${it.nulls!.toUpperCase()}` + }`, + ) + .join(','); + + const tableNameWithSchema = statement.schema + ? `"${statement.schema}"."${statement.tableName}"` + : `"${statement.tableName}"`; + + function reverseLogic(mappedWith: Record): string { + let reversedString = ''; + for (const key in mappedWith) { + if (mappedWith.hasOwnProperty(key)) { + reversedString += `${key}=${mappedWith[key]},`; + } + } + reversedString = reversedString.slice(0, -1); + return reversedString; + } + + return `CREATE ${indexPart}${ + concurrently ? ' CONCURRENTLY' : '' + } "${name}" ON ${tableNameWithSchema} USING ${method} (${value})${ + Object.keys(withMap!).length !== 0 + ? ` WITH (${reverseLogic(withMap!)})` + : '' + }${where ? ` WHERE ${where}` : ''};`; + } +} + +class PgDropIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_index' && dialect === 'postgresql'; + } + + convert(statement: JsonDropIndexStatement): string { + const { schema } = statement; + const { name } = PgSquasher.unsquashIdx(statement.data); + + const indexNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; + + return `DROP INDEX ${indexNameWithSchema};`; + } +} + +class PgCreateSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'create_schema' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateSchema) { + const { name } = statement; + return `CREATE SCHEMA "${name}";\n`; + } +} + +class PgRenameSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'rename_schema' && dialect === 'postgresql'; + } + + convert(statement: JsonRenameSchema) { + const { from, to } = statement; + return `ALTER SCHEMA "${from}" RENAME TO "${to}";\n`; + } +} + +class PgDropSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_schema' && dialect === 'postgresql'; + } + + convert(statement: JsonCreateSchema) { + const { name } = statement; + return `DROP SCHEMA "${name}";\n`; + } +} + +class PgAlterTableSetSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_set_schema' && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterTableSetSchema) { + const { tableName, schemaFrom, schemaTo } = statement; + + return `ALTER TABLE "${schemaFrom}"."${tableName}" SET SCHEMA "${schemaTo}";\n`; + } +} + +class PgAlterTableSetNewSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_set_new_schema' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterTableSetNewSchema) { + const { tableName, to, from } = statement; + + const tableNameWithSchema = from + ? `"${from}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} SET SCHEMA "${to}";\n`; + } +} + +class PgAlterTableRemoveFromSchemaConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return ( + statement.type === 'alter_table_remove_from_schema' + && dialect === 'postgresql' + ); + } + + convert(statement: JsonAlterTableRemoveFromSchema) { + const { tableName, schema } = statement; + + const tableNameWithSchema = schema + ? `"${schema}"."${tableName}"` + : `"${tableName}"`; + + return `ALTER TABLE ${tableNameWithSchema} SET SCHEMA public;\n`; + } +} + +export class SqliteDropIndexConvertor extends Convertor { + can(statement: JsonStatement, dialect: Dialect): boolean { + return statement.type === 'drop_index' && (dialect === 'sqlite' || dialect === 'turso'); + } + + convert(statement: JsonDropIndexStatement): string { + const { name } = PgSquasher.unsquashIdx(statement.data); + return `DROP INDEX \`${name}\`;`; + } +} + +const convertors: Convertor[] = []; +convertors.push(new PgCreateTableConvertor()); + +convertors.push(new PgCreateViewConvertor()); +convertors.push(new PgDropViewConvertor()); +convertors.push(new PgRenameViewConvertor()); +convertors.push(new PgAlterViewSchemaConvertor()); +convertors.push(new PgAlterViewAddWithOptionConvertor()); +convertors.push(new PgAlterViewDropWithOptionConvertor()); +convertors.push(new PgAlterViewAlterTablespaceConvertor()); +convertors.push(new PgAlterViewAlterUsingConvertor()); + +convertors.push(new CreateTypeEnumConvertor()); +convertors.push(new DropTypeEnumConvertor()); +convertors.push(new AlterTypeAddValueConvertor()); +convertors.push(new AlterTypeSetSchemaConvertor()); +convertors.push(new AlterRenameTypeConvertor()); +convertors.push(new AlterTypeDropValueConvertor()); + +convertors.push(new CreatePgSequenceConvertor()); +convertors.push(new DropPgSequenceConvertor()); +convertors.push(new RenamePgSequenceConvertor()); +convertors.push(new MovePgSequenceConvertor()); +convertors.push(new AlterPgSequenceConvertor()); + +convertors.push(new PgDropTableConvertor()); +convertors.push(new MySQLDropTableConvertor()); +convertors.push(new SingleStoreDropTableConvertor()); +convertors.push(new SQLiteDropTableConvertor()); + +convertors.push(new PgRenameTableConvertor()); +convertors.push(new MySqlRenameTableConvertor()); +convertors.push(new SingleStoreRenameTableConvertor()); +convertors.push(new SqliteRenameTableConvertor()); + +convertors.push(new PgAlterTableRenameColumnConvertor()); +convertors.push(new MySqlAlterTableRenameColumnConvertor()); +convertors.push(new SingleStoreAlterTableRenameColumnConvertor()); +convertors.push(new SQLiteAlterTableRenameColumnConvertor()); + +convertors.push(new PgAlterTableDropColumnConvertor()); +convertors.push(new MySqlAlterTableDropColumnConvertor()); +convertors.push(new SingleStoreAlterTableDropColumnConvertor()); +convertors.push(new SQLiteAlterTableDropColumnConvertor()); + +convertors.push(new PgAlterTableAddColumnConvertor()); + +convertors.push(new PgAlterTableAlterColumnSetTypeConvertor()); + +convertors.push(new PgAlterTableAddUniqueConstraintConvertor()); +convertors.push(new PgAlterTableDropUniqueConstraintConvertor()); + +convertors.push(new PgAlterTableAddCheckConstraintConvertor()); +convertors.push(new PgAlterTableDeleteCheckConstraintConvertor()); + +convertors.push(new CreatePgIndexConvertor()); + +convertors.push(new PgDropIndexConvertor()); +convertors.push(new SqliteDropIndexConvertor()); + +convertors.push(new PgAlterTableAlterColumnSetPrimaryKeyConvertor()); +convertors.push(new PgAlterTableAlterColumnDropPrimaryKeyConvertor()); +convertors.push(new PgAlterTableAlterColumnSetNotNullConvertor()); +convertors.push(new PgAlterTableAlterColumnDropNotNullConvertor()); +convertors.push(new PgAlterTableAlterColumnSetDefaultConvertor()); +convertors.push(new PgAlterTableAlterColumnDropDefaultConvertor()); + +convertors.push(new PgAlterPolicyConvertor()); +convertors.push(new PgCreatePolicyConvertor()); +convertors.push(new PgDropPolicyConvertor()); +convertors.push(new PgRenamePolicyConvertor()); + +convertors.push(new PgAlterIndPolicyConvertor()); +convertors.push(new PgCreateIndPolicyConvertor()); +convertors.push(new PgDropIndPolicyConvertor()); +convertors.push(new PgRenameIndPolicyConvertor()); + +convertors.push(new PgEnableRlsConvertor()); +convertors.push(new PgDisableRlsConvertor()); + +convertors.push(new PgDropRoleConvertor()); +convertors.push(new PgAlterRoleConvertor()); +convertors.push(new PgCreateRoleConvertor()); +convertors.push(new PgRenameRoleConvertor()); + +/// generated +convertors.push(new PgAlterTableAlterColumnSetExpressionConvertor()); +convertors.push(new PgAlterTableAlterColumnDropGeneratedConvertor()); +convertors.push(new PgAlterTableAlterColumnAlterrGeneratedConvertor()); + +// convertors.push(new MySqlAlterTableAlterColumnSetDefaultConvertor()); +// convertors.push(new MySqlAlterTableAlterColumnDropDefaultConvertor()); + +convertors.push(new PgCreateForeignKeyConvertor()); + +convertors.push(new PgAlterForeignKeyConvertor()); + +convertors.push(new PgDeleteForeignKeyConvertor()); + +convertors.push(new PgCreateSchemaConvertor()); +convertors.push(new PgRenameSchemaConvertor()); +convertors.push(new PgDropSchemaConvertor()); +convertors.push(new PgAlterTableSetSchemaConvertor()); +convertors.push(new PgAlterTableSetNewSchemaConvertor()); +convertors.push(new PgAlterTableRemoveFromSchemaConvertor()); + +convertors.push(new PgAlterTableAlterColumnDropGenerated()); +convertors.push(new PgAlterTableAlterColumnSetGenerated()); +convertors.push(new PgAlterTableAlterColumnAlterGenerated()); + +convertors.push(new PgAlterTableCreateCompositePrimaryKeyConvertor()); +convertors.push(new PgAlterTableDeleteCompositePrimaryKeyConvertor()); +convertors.push(new PgAlterTableAlterCompositePrimaryKeyConvertor()); + +export function fromJson( + statements: JsonStatement[], + dialect: Dialect, + action?: 'push', +) { + const result = statements + .flatMap((statement) => { + const filtered = convertors.filter((it) => { + return it.can(statement, dialect); + }); + + const convertor = filtered.length === 1 ? filtered[0] : undefined; + + if (!convertor) throw new Error(`Unexpected json statement: ${statement.type} ${dialect}`); + + return convertor.convert(statement, action); + }) + .filter((it) => it !== ''); + return result; +} diff --git a/drizzle-kit/src/sqlgenerator.ts b/drizzle-kit/src/legacy/sqlgenerator2.ts similarity index 73% rename from drizzle-kit/src/sqlgenerator.ts rename to drizzle-kit/src/legacy/sqlgenerator2.ts index 64d3c4063c..15d7d2618a 100644 --- a/drizzle-kit/src/sqlgenerator.ts +++ b/drizzle-kit/src/legacy/sqlgenerator2.ts @@ -1,5 +1,4 @@ -import { BREAKPOINT } from './cli/commands/migrate'; -import { +import type { JsonAddColumnStatement, JsonAddValueToEnumStatement, JsonAlterColumnAlterGeneratedStatement, @@ -22,7 +21,6 @@ import { JsonAlterColumnTypeStatement, JsonAlterCompositePK, JsonAlterIndPolicyStatement, - JsonAlterMySqlViewStatement, JsonAlterPolicyStatement, JsonAlterReferenceStatement, JsonAlterRoleStatement, @@ -47,7 +45,6 @@ import { JsonCreateRoleStatement, JsonCreateSchema, JsonCreateSequenceStatement, - JsonCreateSqliteViewStatement, JsonCreateTableStatement, JsonCreateUniqueConstraint, JsonDeleteCheckConstraint, @@ -70,8 +67,6 @@ import { JsonMoveEnumStatement, JsonMoveSequenceStatement, JsonPgCreateIndexStatement, - JsonRecreateSingleStoreTableStatement, - JsonRecreateTableStatement, JsonRenameColumnStatement, JsonRenameEnumStatement, JsonRenamePolicyStatement, @@ -80,15 +75,12 @@ import { JsonRenameSequenceStatement, JsonRenameTableStatement, JsonRenameViewStatement, - JsonSqliteAddColumnStatement, - JsonSqliteCreateTableStatement, JsonStatement, } from './jsonStatements'; -import { Dialect } from './schemaValidator'; -import { MySqlSquasher } from './serializer/mysqlSchema'; -import { PgSquasher, policy } from './serializer/pgSchema'; -import { SingleStoreSquasher } from './serializer/singlestoreSchema'; -import { SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema'; +import { MySqlSquasher } from './mysql-v5/mysqlSchema'; +import { PgSquasher } from './postgres-v7/pgSchema'; +import type { Dialect } from './schemaValidator'; +import { BREAKPOINT } from './sqlgenerator'; import { escapeSingleQuotes } from './utils'; @@ -155,7 +147,6 @@ abstract class Convertor { ): boolean; abstract convert( statement: JsonStatement, - json2?: SQLiteSchemaSquashed, action?: 'push', ): string | string[]; } @@ -455,7 +446,7 @@ class PgCreateTableConvertor extends Convertor { if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { statement += ',\n'; const compositePK = PgSquasher.unsquashPK(compositePKs[0]); - statement += `\tCONSTRAINT "${st.compositePkName}" PRIMARY KEY(\"${compositePK.columns.join(`","`)}\")`; + statement += `\tCONSTRAINT "${st.compositePkName}" PRIMARY KEY("${compositePK.columns.join(`","`)}")`; // statement += `\n`; } @@ -468,7 +459,7 @@ class PgCreateTableConvertor extends Convertor { const unsquashedUnique = PgSquasher.unsquashUnique(uniqueConstraint); statement += `\tCONSTRAINT "${unsquashedUnique.name}" UNIQUE${ unsquashedUnique.nullsNotDistinct ? ' NULLS NOT DISTINCT' : '' - }(\"${unsquashedUnique.columns.join(`","`)}\")`; + }("${unsquashedUnique.columns.join(`","`)}")`; // statement += `\n`; } } @@ -503,11 +494,9 @@ class MySqlCreateTableConvertor extends Convertor { const { tableName, columns, - schema, checkConstraints, compositePKs, uniqueConstraints, - internals, } = st; let statement = ''; @@ -550,16 +539,7 @@ class MySqlCreateTableConvertor extends Convertor { statement += ',\n'; const unsquashedUnique = MySqlSquasher.unsquashUnique(uniqueConstraint); - const uniqueString = unsquashedUnique.columns - .map((it) => { - return internals?.indexes - ? internals?.indexes[unsquashedUnique.name]?.columns[it] - ?.isExpression - ? it - : `\`${it}\`` - : `\`${it}\``; - }) - .join(','); + const uniqueString = unsquashedUnique.columns.join(','); statement += `\tCONSTRAINT \`${unsquashedUnique.name}\` UNIQUE(${uniqueString})`; } @@ -579,176 +559,6 @@ class MySqlCreateTableConvertor extends Convertor { return statement; } } -export class SingleStoreCreateTableConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'create_table' && dialect === 'singlestore'; - } - - convert(st: JsonCreateTableStatement) { - const { - tableName, - columns, - schema, - compositePKs, - uniqueConstraints, - internals, - } = st; - - let statement = ''; - statement += `CREATE TABLE \`${tableName}\` (\n`; - for (let i = 0; i < columns.length; i++) { - const column = columns[i]; - - const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; - const notNullStatement = column.notNull ? ' NOT NULL' : ''; - const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; - - const onUpdateStatement = column.onUpdate - ? ` ON UPDATE CURRENT_TIMESTAMP` - : ''; - - const autoincrementStatement = column.autoincrement - ? ' AUTO_INCREMENT' - : ''; - - const generatedStatement = column.generated - ? ` GENERATED ALWAYS AS (${column.generated?.as}) ${column.generated?.type.toUpperCase()}` - : ''; - - statement += '\t' - + `\`${column.name}\` ${column.type}${autoincrementStatement}${primaryKeyStatement}${notNullStatement}${defaultStatement}${onUpdateStatement}${generatedStatement}`; - statement += i === columns.length - 1 ? '' : ',\n'; - } - - if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { - statement += ',\n'; - const compositePK = SingleStoreSquasher.unsquashPK(compositePKs[0]); - statement += `\tCONSTRAINT \`${compositePK.name}\` PRIMARY KEY(\`${compositePK.columns.join(`\`,\``)}\`)`; - } - - if ( - typeof uniqueConstraints !== 'undefined' - && uniqueConstraints.length > 0 - ) { - for (const uniqueConstraint of uniqueConstraints) { - statement += ',\n'; - const unsquashedUnique = SingleStoreSquasher.unsquashUnique(uniqueConstraint); - - const uniqueString = unsquashedUnique.columns - .map((it) => { - return internals?.indexes - ? internals?.indexes[unsquashedUnique.name]?.columns[it] - ?.isExpression - ? it - : `\`${it}\`` - : `\`${it}\``; - }) - .join(','); - - statement += `\tCONSTRAINT \`${unsquashedUnique.name}\` UNIQUE(${uniqueString})`; - } - } - - statement += `\n);`; - statement += `\n`; - return statement; - } -} - -export class SQLiteCreateTableConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'sqlite_create_table' && (dialect === 'sqlite' || dialect === 'turso'); - } - - convert(st: JsonSqliteCreateTableStatement) { - const { - tableName, - columns, - referenceData, - compositePKs, - uniqueConstraints, - checkConstraints, - } = st; - - let statement = ''; - statement += `CREATE TABLE \`${tableName}\` (\n`; - for (let i = 0; i < columns.length; i++) { - const column = columns[i]; - - const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; - const notNullStatement = column.notNull ? ' NOT NULL' : ''; - const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; - - const autoincrementStatement = column.autoincrement - ? ' AUTOINCREMENT' - : ''; - - const generatedStatement = column.generated - ? ` GENERATED ALWAYS AS ${column.generated.as} ${column.generated.type.toUpperCase()}` - : ''; - - statement += '\t'; - statement += - `\`${column.name}\` ${column.type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${generatedStatement}${notNullStatement}`; - - statement += i === columns.length - 1 ? '' : ',\n'; - } - - compositePKs.forEach((it) => { - statement += ',\n\t'; - statement += `PRIMARY KEY(${it.map((it) => `\`${it}\``).join(', ')})`; - }); - - for (let i = 0; i < referenceData.length; i++) { - const { - name, - tableFrom, - tableTo, - columnsFrom, - columnsTo, - onDelete, - onUpdate, - } = referenceData[i]; - - const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; - const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : ''; - const fromColumnsString = columnsFrom.map((it) => `\`${it}\``).join(','); - const toColumnsString = columnsTo.map((it) => `\`${it}\``).join(','); - - statement += ','; - statement += '\n\t'; - statement += - `FOREIGN KEY (${fromColumnsString}) REFERENCES \`${tableTo}\`(${toColumnsString})${onUpdateStatement}${onDeleteStatement}`; - } - - if ( - typeof uniqueConstraints !== 'undefined' - && uniqueConstraints.length > 0 - ) { - for (const uniqueConstraint of uniqueConstraints) { - statement += ',\n'; - const unsquashedUnique = SQLiteSquasher.unsquashUnique(uniqueConstraint); - statement += `\tCONSTRAINT ${unsquashedUnique.name} UNIQUE(\`${unsquashedUnique.columns.join(`\`,\``)}\`)`; - } - } - - if ( - typeof checkConstraints !== 'undefined' - && checkConstraints.length > 0 - ) { - for (const check of checkConstraints) { - statement += ',\n'; - const { value, name } = SQLiteSquasher.unsquashCheck(check); - statement += `\tCONSTRAINT "${name}" CHECK(${value})`; - } - } - - statement += `\n`; - statement += `);`; - statement += `\n`; - return statement; - } -} class PgCreateViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { @@ -812,18 +622,6 @@ class MySqlCreateViewConvertor extends Convertor { } } -class SqliteCreateViewConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'sqlite_create_view' && (dialect === 'sqlite' || dialect === 'turso'); - } - - convert(st: JsonCreateSqliteViewStatement) { - const { definition, name } = st; - - return `CREATE VIEW \`${name}\` AS ${definition};`; - } -} - class PgDropViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_view' && dialect === 'postgresql'; @@ -862,26 +660,6 @@ class SqliteDropViewConvertor extends Convertor { } } -class MySqlAlterViewConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'alter_mysql_view' && dialect === 'mysql'; - } - - convert(st: JsonAlterMySqlViewStatement) { - const { name, algorithm, definition, sqlSecurity, withCheckOption } = st; - - let statement = `ALTER `; - statement += algorithm ? `ALGORITHM = ${algorithm}\n` : ''; - statement += sqlSecurity ? `SQL SECURITY ${sqlSecurity}\n` : ''; - statement += `VIEW \`${name}\` AS ${definition}`; - statement += withCheckOption ? `\nWITH ${withCheckOption} CHECK OPTION` : ''; - - statement += ';'; - - return statement; - } -} - class PgRenameViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_view' && dialect === 'postgresql'; @@ -960,7 +738,7 @@ class PgAlterViewDropWithOptionConvertor extends Convertor { const options: string[] = []; - Object.entries(withOptions).forEach(([key, value]) => { + Object.entries(withOptions).forEach(([key]) => { options.push(`${key.snake_case()}`); }); @@ -1247,29 +1025,6 @@ class MySqlAlterTableAddCheckConstraintConvertor extends Convertor { } } -class SingleStoreAlterTableAddUniqueConstraintConvertor extends Convertor { - can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { - return statement.type === 'create_unique_constraint' && dialect === 'singlestore'; - } - convert(statement: JsonCreateUniqueConstraint): string { - const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); - - return `ALTER TABLE \`${statement.tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` UNIQUE(\`${ - unsquashed.columns.join('`,`') - }\`);`; - } -} -class SingleStoreAlterTableDropUniqueConstraintConvertor extends Convertor { - can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { - return statement.type === 'delete_unique_constraint' && dialect === 'singlestore'; - } - convert(statement: JsonDeleteUniqueConstraint): string { - const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); - - return `ALTER TABLE \`${statement.tableName}\` DROP INDEX \`${unsquashed.name}\`;`; - } -} - class MySqlAlterTableDeleteCheckConstraintConvertor extends Convertor { can(statement: JsonDeleteCheckConstraint, dialect: Dialect): boolean { return ( @@ -1326,9 +1081,9 @@ class RenamePgSequenceConvertor extends Convertor { const sequenceWithSchemaFrom = schema ? `"${schema}"."${nameFrom}"` : `"${nameFrom}"`; - const sequenceWithSchemaTo = schema - ? `"${schema}"."${nameTo}"` - : `"${nameTo}"`; + // const sequenceWithSchemaTo = schema + // ? `"${schema}"."${nameTo}"` + // : `"${nameTo}"`; return `ALTER SEQUENCE ${sequenceWithSchemaFrom} RENAME TO "${nameTo}";`; } @@ -1584,7 +1339,7 @@ class PgRenameTableConvertor extends Convertor { } convert(statement: JsonRenameTableStatement) { - const { tableNameFrom, tableNameTo, toSchema, fromSchema } = statement; + const { tableNameFrom, tableNameTo, fromSchema } = statement; const from = fromSchema ? `"${fromSchema}"."${tableNameFrom}"` : `"${tableNameFrom}"`; @@ -1743,7 +1498,7 @@ class PgAlterTableAddColumnConvertor extends Convertor { convert(statement: JsonAddColumnStatement) { const { tableName, column, schema } = statement; - const { name, type, notNull, generated, primaryKey, identity } = column; + const { name, notNull, generated, primaryKey, identity } = column; const primaryKeyStatement = primaryKey ? ' PRIMARY KEY' : ''; @@ -1861,37 +1616,6 @@ class SingleStoreAlterTableAddColumnConvertor extends Convertor { } } -export class SQLiteAlterTableAddColumnConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'sqlite_alter_table_add_column' && (dialect === 'sqlite' || dialect === 'turso') - ); - } - - convert(statement: JsonSqliteAddColumnStatement) { - const { tableName, column, referenceData } = statement; - const { name, type, notNull, primaryKey, generated } = column; - - const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; - const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; - const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`; - const referenceAsObject = referenceData - ? SQLiteSquasher.unsquashFK(referenceData) - : undefined; - const referenceStatement = `${ - referenceAsObject - ? ` REFERENCES ${referenceAsObject.tableTo}(${referenceAsObject.columnsTo})` - : '' - }`; - // const autoincrementStatement = `${autoincrement ? 'AUTO_INCREMENT' : ''}` - const generatedStatement = generated - ? ` GENERATED ALWAYS AS ${generated.as} ${generated.type.toUpperCase()}` - : ''; - - return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${referenceStatement};`; - } -} - class PgAlterTableAlterColumnSetTypeConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( @@ -2117,410 +1841,47 @@ class PgAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { } //// -class SqliteAlterTableAlterColumnDropGeneratedConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'alter_table_alter_column_drop_generated' - && (dialect === 'sqlite' || dialect === 'turso') - ); - } - - convert(statement: JsonAlterColumnDropGeneratedStatement) { - const { - tableName, - columnName, - schema, - columnDefault, - columnOnUpdate, - columnAutoIncrement, - columnPk, - columnGenerated, - columnNotNull, - } = statement; - - const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( - { - tableName, - column: { - name: columnName, - type: statement.newDataType, - notNull: columnNotNull, - default: columnDefault, - onUpdate: columnOnUpdate, - autoincrement: columnAutoIncrement, - primaryKey: columnPk, - generated: columnGenerated, - }, - type: 'sqlite_alter_table_add_column', - }, - ); - - const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ - tableName, - columnName, - schema, - type: 'alter_table_drop_column', - }); - return [dropColumnStatement, addColumnStatement]; - } -} +type MySqlModifyColumnStatement = + | JsonAlterColumnDropNotNullStatement + | JsonAlterColumnSetNotNullStatement + | JsonAlterColumnTypeStatement + | JsonAlterColumnDropOnUpdateStatement + | JsonAlterColumnSetOnUpdateStatement + | JsonAlterColumnDropAutoincrementStatement + | JsonAlterColumnSetAutoincrementStatement + | JsonAlterColumnSetDefaultStatement + | JsonAlterColumnDropDefaultStatement + | JsonAlterColumnSetGeneratedStatement + | JsonAlterColumnDropGeneratedStatement; -class SqliteAlterTableAlterColumnSetExpressionConvertor extends Convertor { +class MySqlModifyColumn extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( - statement.type === 'alter_table_alter_column_set_generated' - && (dialect === 'sqlite' || dialect === 'turso') + (statement.type === 'alter_table_alter_column_set_type' + || statement.type === 'alter_table_alter_column_set_notnull' + || statement.type === 'alter_table_alter_column_drop_notnull' + || statement.type === 'alter_table_alter_column_drop_on_update' + || statement.type === 'alter_table_alter_column_set_on_update' + || statement.type === 'alter_table_alter_column_set_autoincrement' + || statement.type === 'alter_table_alter_column_drop_autoincrement' + || statement.type === 'alter_table_alter_column_set_default' + || statement.type === 'alter_table_alter_column_drop_default' + || statement.type === 'alter_table_alter_column_set_generated' + || statement.type === 'alter_table_alter_column_drop_generated') + && dialect === 'mysql' ); } - convert(statement: JsonAlterColumnSetGeneratedStatement) { - const { - tableName, - columnName, - schema, - columnNotNull: notNull, - columnDefault, - columnOnUpdate, - columnAutoIncrement, - columnPk, - columnGenerated, - } = statement; - - const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( - { - tableName, - column: { - name: columnName, - type: statement.newDataType, - notNull, - default: columnDefault, - onUpdate: columnOnUpdate, - autoincrement: columnAutoIncrement, - primaryKey: columnPk, - generated: columnGenerated, - }, - type: 'sqlite_alter_table_add_column', - }, - ); - - const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ - tableName, - columnName, - schema, - type: 'alter_table_drop_column', - }); - - return [dropColumnStatement, addColumnStatement]; - } -} - -class SqliteAlterTableAlterColumnAlterGeneratedConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'alter_table_alter_column_alter_generated' - && (dialect === 'sqlite' || dialect === 'turso') - ); - } - - convert(statement: JsonAlterColumnAlterGeneratedStatement) { - const { - tableName, - columnName, - schema, - columnNotNull, - columnDefault, - columnOnUpdate, - columnAutoIncrement, - columnPk, - columnGenerated, - } = statement; - - const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( - { - tableName, - column: { - name: columnName, - type: statement.newDataType, - notNull: columnNotNull, - default: columnDefault, - onUpdate: columnOnUpdate, - autoincrement: columnAutoIncrement, - primaryKey: columnPk, - generated: columnGenerated, - }, - type: 'sqlite_alter_table_add_column', - }, - ); - - const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ - tableName, - columnName, - schema, - type: 'alter_table_drop_column', - }); - - return [dropColumnStatement, addColumnStatement]; - } -} - -//// - -class MySqlAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'alter_table_alter_column_alter_generated' - && dialect === 'mysql' - ); - } - - convert(statement: JsonAlterColumnAlterGeneratedStatement) { - const { - tableName, - columnName, - schema, - columnNotNull: notNull, - columnDefault, - columnOnUpdate, - columnAutoIncrement, - columnPk, - columnGenerated, - } = statement; - - const tableNameWithSchema = schema - ? `\`${schema}\`.\`${tableName}\`` - : `\`${tableName}\``; - - const addColumnStatement = new MySqlAlterTableAddColumnConvertor().convert({ - schema, - tableName, - column: { - name: columnName, - type: statement.newDataType, - notNull, - default: columnDefault, - onUpdate: columnOnUpdate, - autoincrement: columnAutoIncrement, - primaryKey: columnPk, - generated: columnGenerated, - }, - type: 'alter_table_add_column', - }); - - return [ - `ALTER TABLE ${tableNameWithSchema} drop column \`${columnName}\`;`, - addColumnStatement, - ]; - } -} - -class MySqlAlterTableAlterColumnSetDefaultConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'alter_table_alter_column_set_default' - && dialect === 'mysql' - ); - } - - convert(statement: JsonAlterColumnSetDefaultStatement) { - const { tableName, columnName } = statement; - return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`; - } -} - -class MySqlAlterTableAlterColumnDropDefaultConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'alter_table_alter_column_drop_default' - && dialect === 'mysql' - ); - } - - convert(statement: JsonAlterColumnDropDefaultStatement) { - const { tableName, columnName } = statement; - return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`; - } -} - -class MySqlAlterTableAddPk extends Convertor { - can(statement: JsonStatement, dialect: string): boolean { - return ( - statement.type === 'alter_table_alter_column_set_pk' - && dialect === 'mysql' - ); - } - convert(statement: JsonAlterColumnSetPrimaryKeyStatement): string { - return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`; - } -} - -class MySqlAlterTableDropPk extends Convertor { - can(statement: JsonStatement, dialect: string): boolean { - return ( - statement.type === 'alter_table_alter_column_drop_pk' - && dialect === 'mysql' - ); - } - convert(statement: JsonAlterColumnDropPrimaryKeyStatement): string { - return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`; - } -} - -type LibSQLModifyColumnStatement = - | JsonAlterColumnTypeStatement - | JsonAlterColumnDropNotNullStatement - | JsonAlterColumnSetNotNullStatement - | JsonAlterColumnSetDefaultStatement - | JsonAlterColumnDropDefaultStatement; - -export class LibSQLModifyColumn extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - (statement.type === 'alter_table_alter_column_set_type' - || statement.type === 'alter_table_alter_column_drop_notnull' - || statement.type === 'alter_table_alter_column_set_notnull' - || statement.type === 'alter_table_alter_column_set_default' - || statement.type === 'alter_table_alter_column_drop_default' - || statement.type === 'create_check_constraint' - || statement.type === 'delete_check_constraint') - && dialect === 'turso' - ); - } - - convert(statement: LibSQLModifyColumnStatement, json2: SQLiteSchemaSquashed) { - const { tableName, columnName } = statement; - - let columnType = ``; - let columnDefault: any = ''; - let columnNotNull = ''; - - const sqlStatements: string[] = []; - - // collect index info - const indexes: { - name: string; - tableName: string; - columns: string[]; - isUnique: boolean; - where?: string | undefined; - }[] = []; - for (const table of Object.values(json2.tables)) { - for (const index of Object.values(table.indexes)) { - const unsquashed = SQLiteSquasher.unsquashIdx(index); - sqlStatements.push(`DROP INDEX "${unsquashed.name}";`); - indexes.push({ ...unsquashed, tableName: table.name }); - } - } - - switch (statement.type) { - case 'alter_table_alter_column_set_type': - columnType = ` ${statement.newDataType}`; - - columnDefault = statement.columnDefault - ? ` DEFAULT ${statement.columnDefault}` - : ''; - - columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; - - break; - case 'alter_table_alter_column_drop_notnull': - columnType = ` ${statement.newDataType}`; - - columnDefault = statement.columnDefault - ? ` DEFAULT ${statement.columnDefault}` - : ''; - - columnNotNull = ''; - break; - case 'alter_table_alter_column_set_notnull': - columnType = ` ${statement.newDataType}`; - - columnDefault = statement.columnDefault - ? ` DEFAULT ${statement.columnDefault}` - : ''; - - columnNotNull = ` NOT NULL`; - break; - case 'alter_table_alter_column_set_default': - columnType = ` ${statement.newDataType}`; - - columnDefault = ` DEFAULT ${statement.newDefaultValue}`; - - columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; - break; - case 'alter_table_alter_column_drop_default': - columnType = ` ${statement.newDataType}`; - - columnDefault = ''; - - columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; - break; - } - - // Seems like getting value from simple json2 shanpshot makes dates be dates - columnDefault = columnDefault instanceof Date - ? columnDefault.toISOString() - : columnDefault; - - sqlStatements.push( - `ALTER TABLE \`${tableName}\` ALTER COLUMN "${columnName}" TO "${columnName}"${columnType}${columnNotNull}${columnDefault};`, - ); - - for (const index of indexes) { - const indexPart = index.isUnique ? 'UNIQUE INDEX' : 'INDEX'; - const whereStatement = index.where ? ` WHERE ${index.where}` : ''; - const uniqueString = index.columns.map((it) => `\`${it}\``).join(','); - const tableName = index.tableName; - - sqlStatements.push( - `CREATE ${indexPart} \`${index.name}\` ON \`${tableName}\` (${uniqueString})${whereStatement};`, - ); - } - - return sqlStatements; - } -} - -type MySqlModifyColumnStatement = - | JsonAlterColumnDropNotNullStatement - | JsonAlterColumnSetNotNullStatement - | JsonAlterColumnTypeStatement - | JsonAlterColumnDropOnUpdateStatement - | JsonAlterColumnSetOnUpdateStatement - | JsonAlterColumnDropAutoincrementStatement - | JsonAlterColumnSetAutoincrementStatement - | JsonAlterColumnSetDefaultStatement - | JsonAlterColumnDropDefaultStatement - | JsonAlterColumnSetGeneratedStatement - | JsonAlterColumnDropGeneratedStatement; - -class MySqlModifyColumn extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - (statement.type === 'alter_table_alter_column_set_type' - || statement.type === 'alter_table_alter_column_set_notnull' - || statement.type === 'alter_table_alter_column_drop_notnull' - || statement.type === 'alter_table_alter_column_drop_on_update' - || statement.type === 'alter_table_alter_column_set_on_update' - || statement.type === 'alter_table_alter_column_set_autoincrement' - || statement.type === 'alter_table_alter_column_drop_autoincrement' - || statement.type === 'alter_table_alter_column_set_default' - || statement.type === 'alter_table_alter_column_drop_default' - || statement.type === 'alter_table_alter_column_set_generated' - || statement.type === 'alter_table_alter_column_drop_generated') - && dialect === 'mysql' - ); - } - - convert(statement: MySqlModifyColumnStatement) { - const { tableName, columnName } = statement; - let columnType = ``; - let columnDefault: any = ''; - let columnNotNull = ''; - let columnOnUpdate = ''; - let columnAutoincrement = ''; - let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; - let columnGenerated = ''; + convert(statement: MySqlModifyColumnStatement) { + const { tableName, columnName } = statement; + let columnType = ``; + let columnDefault: any = ''; + let columnNotNull = ''; + let columnOnUpdate = ''; + let columnAutoincrement = ''; + // let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; + let columnGenerated = ''; if (statement.type === 'alter_table_alter_column_drop_notnull') { columnType = ` ${statement.newDataType}`; @@ -2764,33 +2125,33 @@ class SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor } } -class SingleStoreAlterTableAlterColumnSetDefaultConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'alter_table_alter_column_set_default' - && dialect === 'singlestore' - ); - } +// class SingleStoreAlterTableAlterColumnSetDefaultConvertor extends Convertor { +// can(statement: JsonStatement, dialect: Dialect): boolean { +// return ( +// statement.type === 'alter_table_alter_column_set_default' +// && dialect === 'singlestore' +// ); +// } - convert(statement: JsonAlterColumnSetDefaultStatement) { - const { tableName, columnName } = statement; - return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`; - } -} +// convert(statement: JsonAlterColumnSetDefaultStatement) { +// const { tableName, columnName } = statement; +// return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`; +// } +// } -class SingleStoreAlterTableAlterColumnDropDefaultConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'alter_table_alter_column_drop_default' - && dialect === 'singlestore' - ); - } +// class SingleStoreAlterTableAlterColumnDropDefaultConvertor extends Convertor { +// can(statement: JsonStatement, dialect: Dialect): boolean { +// return ( +// statement.type === 'alter_table_alter_column_drop_default' +// && dialect === 'singlestore' +// ); +// } - convert(statement: JsonAlterColumnDropDefaultStatement) { - const { tableName, columnName } = statement; - return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`; - } -} +// convert(statement: JsonAlterColumnDropDefaultStatement) { +// const { tableName, columnName } = statement; +// return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`; +// } +// } class SingleStoreAlterTableAddPk extends Convertor { can(statement: JsonStatement, dialect: string): boolean { @@ -2854,7 +2215,7 @@ class SingleStoreModifyColumn extends Convertor { let columnNotNull = ''; let columnOnUpdate = ''; let columnAutoincrement = ''; - let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; + // let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; let columnGenerated = ''; if (statement.type === 'alter_table_alter_column_drop_notnull') { @@ -3050,25 +2411,25 @@ class SingleStoreModifyColumn extends Convertor { return `ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${columnName}\`${columnType}${columnAutoincrement}${columnNotNull}${columnDefault}${columnOnUpdate}${columnGenerated};`; } } -class SqliteAlterTableAlterColumnDropDefaultConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'alter_table_alter_column_drop_default' - && dialect === 'sqlite' - ); - } - - convert(statement: JsonAlterColumnDropDefaultStatement) { - return ( - '/*\n SQLite does not support "Drop default from column" out of the box, we do not generate automatic migration for that, so it has to be done manually' - + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' - + '\n https://www.sqlite.org/lang_altertable.html' - + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' - + "\n\n Due to that we don't generate migration automatically and it has to be done manually" - + '\n*/' - ); - } -} +// class SqliteAlterTableAlterColumnDropDefaultConvertor extends Convertor { +// can(statement: JsonStatement, dialect: Dialect): boolean { +// return ( +// statement.type === 'alter_table_alter_column_drop_default' +// && dialect === 'sqlite' +// ); +// } + +// convert(statement: JsonAlterColumnDropDefaultStatement) { +// return ( +// '/*\n SQLite does not support "Drop default from column" out of the box, we do not generate automatic migration for that, so it has to be done manually' +// + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' +// + '\n https://www.sqlite.org/lang_altertable.html' +// + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' +// + "\n\n Due to that we don't generate migration automatically and it has to be done manually" +// + '\n*/' +// ); +// } +// } class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { @@ -3076,7 +2437,7 @@ class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { } convert(statement: JsonCreateCompositePK) { - const { name, columns } = PgSquasher.unsquashPK(statement.data); + const { columns } = PgSquasher.unsquashPK(statement.data); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` @@ -3093,7 +2454,7 @@ class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { } convert(statement: JsonDeleteCompositePK) { - const { name, columns } = PgSquasher.unsquashPK(statement.data); + // const { name, columns } = PgSquasher.unsquashPK(statement.data); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` @@ -3109,8 +2470,8 @@ class PgAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { } convert(statement: JsonAlterCompositePK) { - const { name, columns } = PgSquasher.unsquashPK(statement.old); - const { name: newName, columns: newColumns } = PgSquasher.unsquashPK( + // const { name, columns } = PgSquasher.unsquashPK(statement.old); + const { columns: newColumns } = PgSquasher.unsquashPK( statement.new, ); @@ -3130,7 +2491,7 @@ class MySqlAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { } convert(statement: JsonCreateCompositePK) { - const { name, columns } = MySqlSquasher.unsquashPK(statement.data); + const { columns } = MySqlSquasher.unsquashPK(statement.data); return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY(\`${columns.join('`,`')}\`);`; } } @@ -3141,7 +2502,7 @@ class MySqlAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { } convert(statement: JsonDeleteCompositePK) { - const { name, columns } = MySqlSquasher.unsquashPK(statement.data); + // const { name, columns } = MySqlSquasher.unsquashPK(statement.data); return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY;`; } } @@ -3152,96 +2513,96 @@ class MySqlAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { } convert(statement: JsonAlterCompositePK) { - const { name, columns } = MySqlSquasher.unsquashPK(statement.old); - const { name: newName, columns: newColumns } = MySqlSquasher.unsquashPK( + // const { name, columns } = MySqlSquasher.unsquashPK(statement.old); + const { columns: newColumns } = MySqlSquasher.unsquashPK( statement.new, ); return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY, ADD PRIMARY KEY(\`${newColumns.join('`,`')}\`);`; } } -class SqliteAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'create_composite_pk' && dialect === 'sqlite'; - } - - convert(statement: JsonCreateCompositePK) { - let msg = '/*\n'; - msg += `You're trying to add PRIMARY KEY(${statement.data}) to '${statement.tableName}' table\n`; - msg += 'SQLite does not support adding primary key to an already created table\n'; - msg += 'You can do it in 3 steps with drizzle orm:\n'; - msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; - msg += ' - migrate old data from one table to another\n'; - msg += ' - delete old_table in schema, generate sql\n\n'; - msg += 'or create manual migration like below:\n\n'; - msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; - msg += 'CREATE TABLE table_name (\n'; - msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; - msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; - msg += '\t...\n'; - msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; - msg += ' );\n'; - msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; - msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; - msg += '*/\n'; - return msg; - } -} -class SqliteAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'delete_composite_pk' && dialect === 'sqlite'; - } - - convert(statement: JsonDeleteCompositePK) { - let msg = '/*\n'; - msg += `You're trying to delete PRIMARY KEY(${statement.data}) from '${statement.tableName}' table\n`; - msg += 'SQLite does not supportprimary key deletion from existing table\n'; - msg += 'You can do it in 3 steps with drizzle orm:\n'; - msg += ' - create new mirror table table without pk, rename current table to old_table, generate SQL\n'; - msg += ' - migrate old data from one table to another\n'; - msg += ' - delete old_table in schema, generate sql\n\n'; - msg += 'or create manual migration like below:\n\n'; - msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; - msg += 'CREATE TABLE table_name (\n'; - msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; - msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; - msg += '\t...\n'; - msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; - msg += ' );\n'; - msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; - msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; - msg += '*/\n'; - return msg; - } -} - -class SqliteAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'alter_composite_pk' && dialect === 'sqlite'; - } - - convert(statement: JsonAlterCompositePK) { - let msg = '/*\n'; - msg += 'SQLite does not support altering primary key\n'; - msg += 'You can do it in 3 steps with drizzle orm:\n'; - msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; - msg += ' - migrate old data from one table to another\n'; - msg += ' - delete old_table in schema, generate sql\n\n'; - msg += 'or create manual migration like below:\n\n'; - msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; - msg += 'CREATE TABLE table_name (\n'; - msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; - msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; - msg += '\t...\n'; - msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; - msg += ' );\n'; - msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; - msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; - msg += '*/\n'; - - return msg; - } -} +// class SqliteAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { +// can(statement: JsonStatement, dialect: Dialect): boolean { +// return statement.type === 'create_composite_pk' && dialect === 'sqlite'; +// } + +// convert(statement: JsonCreateCompositePK) { +// let msg = '/*\n'; +// msg += `You're trying to add PRIMARY KEY(${statement.data}) to '${statement.tableName}' table\n`; +// msg += 'SQLite does not support adding primary key to an already created table\n'; +// msg += 'You can do it in 3 steps with drizzle orm:\n'; +// msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; +// msg += ' - migrate old data from one table to another\n'; +// msg += ' - delete old_table in schema, generate sql\n\n'; +// msg += 'or create manual migration like below:\n\n'; +// msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; +// msg += 'CREATE TABLE table_name (\n'; +// msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; +// msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; +// msg += '\t...\n'; +// msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; +// msg += ' );\n'; +// msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; +// msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; +// msg += '*/\n'; +// return msg; +// } +// } +// class SqliteAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { +// can(statement: JsonStatement, dialect: Dialect): boolean { +// return statement.type === 'delete_composite_pk' && dialect === 'sqlite'; +// } + +// convert(statement: JsonDeleteCompositePK) { +// let msg = '/*\n'; +// msg += `You're trying to delete PRIMARY KEY(${statement.data}) from '${statement.tableName}' table\n`; +// msg += 'SQLite does not supportprimary key deletion from existing table\n'; +// msg += 'You can do it in 3 steps with drizzle orm:\n'; +// msg += ' - create new mirror table table without pk, rename current table to old_table, generate SQL\n'; +// msg += ' - migrate old data from one table to another\n'; +// msg += ' - delete old_table in schema, generate sql\n\n'; +// msg += 'or create manual migration like below:\n\n'; +// msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; +// msg += 'CREATE TABLE table_name (\n'; +// msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; +// msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; +// msg += '\t...\n'; +// msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; +// msg += ' );\n'; +// msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; +// msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; +// msg += '*/\n'; +// return msg; +// } +// } + +// class SqliteAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { +// can(statement: JsonStatement, dialect: Dialect): boolean { +// return statement.type === 'alter_composite_pk' && dialect === 'sqlite'; +// } + +// convert(statement: JsonAlterCompositePK) { +// let msg = '/*\n'; +// msg += 'SQLite does not support altering primary key\n'; +// msg += 'You can do it in 3 steps with drizzle orm:\n'; +// msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; +// msg += ' - migrate old data from one table to another\n'; +// msg += ' - delete old_table in schema, generate sql\n\n'; +// msg += 'or create manual migration like below:\n\n'; +// msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; +// msg += 'CREATE TABLE table_name (\n'; +// msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; +// msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; +// msg += '\t...\n'; +// msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; +// msg += ' );\n'; +// msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; +// msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; +// msg += '*/\n'; + +// return msg; +// } +// } class PgAlterTableAlterColumnSetPrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { @@ -3252,7 +2613,7 @@ class PgAlterTableAlterColumnSetPrimaryKeyConvertor extends Convertor { } convert(statement: JsonAlterColumnSetPrimaryKeyStatement) { - const { tableName, columnName } = statement; + const { columnName } = statement; const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` @@ -3271,7 +2632,7 @@ class PgAlterTableAlterColumnDropPrimaryKeyConvertor extends Convertor { } convert(statement: JsonAlterColumnDropPrimaryKeyStatement) { - const { tableName, columnName, schema } = statement; + const { tableName, schema } = statement; return `/* Unfortunately in current drizzle-kit version we can't automatically get name for primary key. We are working on making it available! @@ -3300,7 +2661,7 @@ class PgAlterTableAlterColumnSetNotNullConvertor extends Convertor { } convert(statement: JsonAlterColumnSetNotNullStatement) { - const { tableName, columnName } = statement; + const { columnName } = statement; const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` @@ -3319,7 +2680,7 @@ class PgAlterTableAlterColumnDropNotNullConvertor extends Convertor { } convert(statement: JsonAlterColumnDropNotNullStatement) { - const { tableName, columnName } = statement; + const { columnName } = statement; const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` @@ -3366,39 +2727,6 @@ class PgCreateForeignKeyConvertor extends Convertor { } } -class LibSQLCreateForeignKeyConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'create_reference' - && dialect === 'turso' - ); - } - - convert( - statement: JsonCreateReferenceStatement, - json2?: SQLiteSchemaSquashed, - action?: 'push', - ): string { - const { columnsFrom, columnsTo, tableFrom, onDelete, onUpdate, tableTo } = action === 'push' - ? SQLiteSquasher.unsquashPushFK(statement.data) - : SQLiteSquasher.unsquashFK(statement.data); - const { columnDefault, columnNotNull, columnType } = statement; - - const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; - const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : ''; - const columnsDefaultValue = columnDefault - ? ` DEFAULT ${columnDefault}` - : ''; - const columnNotNullValue = columnNotNull ? ` NOT NULL` : ''; - const columnTypeValue = columnType ? ` ${columnType}` : ''; - - const columnFrom = columnsFrom[0]; - const columnTo = columnsTo[0]; - - return `ALTER TABLE \`${tableFrom}\` ALTER COLUMN "${columnFrom}" TO "${columnFrom}"${columnTypeValue}${columnNotNullValue}${columnsDefaultValue} REFERENCES ${tableTo}(${columnTo})${onDeleteStatement}${onUpdateStatement};`; - } -} - class MySqlCreateForeignKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_reference' && dialect === 'mysql'; @@ -3550,84 +2878,6 @@ class CreatePgIndexConvertor extends Convertor { } } -class CreateMySqlIndexConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'create_index' && dialect === 'mysql'; - } - - convert(statement: JsonCreateIndexStatement): string { - // should be changed - const { name, columns, isUnique } = MySqlSquasher.unsquashIdx( - statement.data, - ); - const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; - - const uniqueString = columns - .map((it) => { - return statement.internal?.indexes - ? statement.internal?.indexes[name]?.columns[it]?.isExpression - ? it - : `\`${it}\`` - : `\`${it}\``; - }) - .join(','); - - return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`; - } -} - -export class CreateSingleStoreIndexConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'create_index' && dialect === 'singlestore'; - } - - convert(statement: JsonCreateIndexStatement): string { - // should be changed - const { name, columns, isUnique } = SingleStoreSquasher.unsquashIdx( - statement.data, - ); - const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; - - const uniqueString = columns - .map((it) => { - return statement.internal?.indexes - ? statement.internal?.indexes[name]?.columns[it]?.isExpression - ? it - : `\`${it}\`` - : `\`${it}\``; - }) - .join(','); - - return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`; - } -} - -export class CreateSqliteIndexConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'create_index' && (dialect === 'sqlite' || dialect === 'turso'); - } - - convert(statement: JsonCreateIndexStatement): string { - // should be changed - const { name, columns, isUnique, where } = SQLiteSquasher.unsquashIdx( - statement.data, - ); - // // since postgresql 9.5 - const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; - const whereStatement = where ? ` WHERE ${where}` : ''; - const uniqueString = columns - .map((it) => { - return statement.internal?.indexes - ? statement.internal?.indexes[name]?.columns[it]?.isExpression - ? it - : `\`${it}\`` - : `\`${it}\``; - }) - .join(','); - return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString})${whereStatement};`; - } -} - class PgDropIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_index' && dialect === 'postgresql'; @@ -3750,214 +3000,26 @@ class MySqlDropIndexConvertor extends Convertor { } } -class SingleStoreDropIndexConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return statement.type === 'drop_index' && dialect === 'singlestore'; - } - - convert(statement: JsonDropIndexStatement): string { - const { name } = SingleStoreSquasher.unsquashIdx(statement.data); - return `DROP INDEX \`${name}\` ON \`${statement.tableName}\`;`; - } -} - -class SQLiteRecreateTableConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'recreate_table' && dialect === 'sqlite' - ); - } - - convert(statement: JsonRecreateTableStatement): string | string[] { - const { tableName, columns, compositePKs, referenceData, checkConstraints } = statement; - - const columnNames = columns.map((it) => `"${it.name}"`).join(', '); - const newTableName = `__new_${tableName}`; - - const sqlStatements: string[] = []; - - sqlStatements.push(`PRAGMA foreign_keys=OFF;`); - - // map all possible variants - const mappedCheckConstraints: string[] = checkConstraints.map((it) => - it.replaceAll(`"${tableName}".`, `"${newTableName}".`).replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) - .replaceAll(`${tableName}.`, `${newTableName}.`).replaceAll(`'${tableName}'.`, `'${newTableName}'.`) - ); - - // create new table - sqlStatements.push( - new SQLiteCreateTableConvertor().convert({ - type: 'sqlite_create_table', - tableName: newTableName, - columns, - referenceData, - compositePKs, - checkConstraints: mappedCheckConstraints, - }), - ); - - // migrate data - sqlStatements.push( - `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`, - ); - - // drop table - sqlStatements.push( - new SQLiteDropTableConvertor().convert({ - type: 'drop_table', - tableName: tableName, - schema: '', - }), - ); - - // rename table - sqlStatements.push( - new SqliteRenameTableConvertor().convert({ - fromSchema: '', - tableNameFrom: newTableName, - tableNameTo: tableName, - toSchema: '', - type: 'rename_table', - }), - ); - - sqlStatements.push(`PRAGMA foreign_keys=ON;`); - - return sqlStatements; - } -} - -class LibSQLRecreateTableConvertor extends Convertor { - can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'recreate_table' - && dialect === 'turso' - ); - } - - convert(statement: JsonRecreateTableStatement): string[] { - const { tableName, columns, compositePKs, referenceData, checkConstraints } = statement; - - const columnNames = columns.map((it) => `"${it.name}"`).join(', '); - const newTableName = `__new_${tableName}`; - - const sqlStatements: string[] = []; - - const mappedCheckConstraints: string[] = checkConstraints.map((it) => - it.replaceAll(`"${tableName}".`, `"${newTableName}".`).replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) - .replaceAll(`${tableName}.`, `${newTableName}.`).replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) - ); - - sqlStatements.push(`PRAGMA foreign_keys=OFF;`); - - // create new table - sqlStatements.push( - new SQLiteCreateTableConvertor().convert({ - type: 'sqlite_create_table', - tableName: newTableName, - columns, - referenceData, - compositePKs, - checkConstraints: mappedCheckConstraints, - }), - ); - - // migrate data - sqlStatements.push( - `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`, - ); - - // drop table - sqlStatements.push( - new SQLiteDropTableConvertor().convert({ - type: 'drop_table', - tableName: tableName, - schema: '', - }), - ); - - // rename table - sqlStatements.push( - new SqliteRenameTableConvertor().convert({ - fromSchema: '', - tableNameFrom: newTableName, - tableNameTo: tableName, - toSchema: '', - type: 'rename_table', - }), - ); - - sqlStatements.push(`PRAGMA foreign_keys=ON;`); - - return sqlStatements; - } -} - -class SingleStoreRecreateTableConvertor extends Convertor { +class CreateMySqlIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { - return ( - statement.type === 'singlestore_recreate_table' - && dialect === 'singlestore' - ); + return statement.type === 'create_index' && dialect === 'mysql'; } - convert(statement: JsonRecreateSingleStoreTableStatement): string[] { - const { tableName, columns, compositePKs, uniqueConstraints } = statement; - - const columnNames = columns.map((it) => `\`${it.name}\``).join(', '); - const newTableName = `__new_${tableName}`; - - const sqlStatements: string[] = []; - - // create new table - sqlStatements.push( - new SingleStoreCreateTableConvertor().convert({ - type: 'create_table', - tableName: newTableName, - columns, - compositePKs, - uniqueConstraints, - schema: '', - }), - ); - - // migrate data - sqlStatements.push( - `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`, - ); - - // drop table - sqlStatements.push( - new SingleStoreDropTableConvertor().convert({ - type: 'drop_table', - tableName: tableName, - schema: '', - }), - ); - - // rename table - sqlStatements.push( - new SingleStoreRenameTableConvertor().convert({ - fromSchema: '', - tableNameFrom: newTableName, - tableNameTo: tableName, - toSchema: '', - type: 'rename_table', - }), + convert(statement: JsonCreateIndexStatement): string { + // should be changed + const { name, columns, isUnique } = MySqlSquasher.unsquashIdx( + statement.data, ); + const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; + const uniqueString = columns.join(','); - return sqlStatements; + return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`; } } const convertors: Convertor[] = []; convertors.push(new PgCreateTableConvertor()); convertors.push(new MySqlCreateTableConvertor()); -convertors.push(new SingleStoreCreateTableConvertor()); -convertors.push(new SingleStoreRecreateTableConvertor()); -convertors.push(new SQLiteCreateTableConvertor()); -convertors.push(new SQLiteRecreateTableConvertor()); -convertors.push(new LibSQLRecreateTableConvertor()); convertors.push(new PgCreateViewConvertor()); convertors.push(new PgDropViewConvertor()); @@ -3971,9 +3033,7 @@ convertors.push(new PgAlterViewAlterUsingConvertor()); convertors.push(new MySqlCreateViewConvertor()); convertors.push(new MySqlDropViewConvertor()); convertors.push(new MySqlRenameViewConvertor()); -convertors.push(new MySqlAlterViewConvertor()); -convertors.push(new SqliteCreateViewConvertor()); convertors.push(new SqliteDropViewConvertor()); convertors.push(new CreateTypeEnumConvertor()); @@ -4012,7 +3072,6 @@ convertors.push(new SQLiteAlterTableDropColumnConvertor()); convertors.push(new PgAlterTableAddColumnConvertor()); convertors.push(new MySqlAlterTableAddColumnConvertor()); convertors.push(new SingleStoreAlterTableAddColumnConvertor()); -convertors.push(new SQLiteAlterTableAddColumnConvertor()); convertors.push(new PgAlterTableAlterColumnSetTypeConvertor()); @@ -4027,18 +3086,12 @@ convertors.push(new MySqlAlterTableDeleteCheckConstraintConvertor()); convertors.push(new MySQLAlterTableAddUniqueConstraintConvertor()); convertors.push(new MySQLAlterTableDropUniqueConstraintConvertor()); -convertors.push(new SingleStoreAlterTableAddUniqueConstraintConvertor()); -convertors.push(new SingleStoreAlterTableDropUniqueConstraintConvertor()); - convertors.push(new CreatePgIndexConvertor()); convertors.push(new CreateMySqlIndexConvertor()); -convertors.push(new CreateSingleStoreIndexConvertor()); -convertors.push(new CreateSqliteIndexConvertor()); convertors.push(new PgDropIndexConvertor()); convertors.push(new SqliteDropIndexConvertor()); convertors.push(new MySqlDropIndexConvertor()); -convertors.push(new SingleStoreDropIndexConvertor()); convertors.push(new PgAlterTableAlterColumnSetPrimaryKeyConvertor()); convertors.push(new PgAlterTableAlterColumnDropPrimaryKeyConvertor()); @@ -4070,16 +3123,8 @@ convertors.push(new PgAlterTableAlterColumnSetExpressionConvertor()); convertors.push(new PgAlterTableAlterColumnDropGeneratedConvertor()); convertors.push(new PgAlterTableAlterColumnAlterrGeneratedConvertor()); -convertors.push(new MySqlAlterTableAlterColumnAlterrGeneratedConvertor()); - convertors.push(new SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor()); - -convertors.push(new SqliteAlterTableAlterColumnDropGeneratedConvertor()); -convertors.push(new SqliteAlterTableAlterColumnAlterGeneratedConvertor()); -convertors.push(new SqliteAlterTableAlterColumnSetExpressionConvertor()); - convertors.push(new MySqlModifyColumn()); -convertors.push(new LibSQLModifyColumn()); // convertors.push(new MySqlAlterTableAlterColumnSetDefaultConvertor()); // convertors.push(new MySqlAlterTableAlterColumnDropDefaultConvertor()); @@ -4100,8 +3145,6 @@ convertors.push(new PgAlterTableSetSchemaConvertor()); convertors.push(new PgAlterTableSetNewSchemaConvertor()); convertors.push(new PgAlterTableRemoveFromSchemaConvertor()); -convertors.push(new LibSQLCreateForeignKeyConvertor()); - convertors.push(new PgAlterTableAlterColumnDropGenerated()); convertors.push(new PgAlterTableAlterColumnSetGenerated()); convertors.push(new PgAlterTableAlterColumnAlterGenerated()); @@ -4111,9 +3154,7 @@ convertors.push(new PgAlterTableDeleteCompositePrimaryKeyConvertor()); convertors.push(new PgAlterTableAlterCompositePrimaryKeyConvertor()); convertors.push(new MySqlAlterTableDeleteCompositePrimaryKeyConvertor()); -convertors.push(new MySqlAlterTableDropPk()); convertors.push(new MySqlAlterTableCreateCompositePrimaryKeyConvertor()); -convertors.push(new MySqlAlterTableAddPk()); convertors.push(new MySqlAlterTableAlterCompositePrimaryKeyConvertor()); convertors.push(new SingleStoreAlterTableDropPk()); @@ -4123,7 +3164,6 @@ export function fromJson( statements: JsonStatement[], dialect: Dialect, action?: 'push', - json2?: SQLiteSchemaSquashed, ) { const result = statements .flatMap((statement) => { @@ -4133,11 +3173,9 @@ export function fromJson( const convertor = filtered.length === 1 ? filtered[0] : undefined; - if (!convertor) { - return ''; - } + if (!convertor) throw new Error(`Unexpected statement: ${dialect}:${statement.type}`); - return convertor.convert(statement, json2, action); + return convertor.convert(statement, action); }) .filter((it) => it !== ''); return result; @@ -4145,7 +3183,8 @@ export function fromJson( // blog.yo1.dog/updating-enum-values-in-postgresql-the-safe-and-easy-way/ // test case for enum altering -https: ` +// oxlint-disable-next-line no-unused-expressions +` create table users ( id int, name character varying(128) diff --git a/drizzle-kit/src/legacy/utils.ts b/drizzle-kit/src/legacy/utils.ts new file mode 100644 index 0000000000..d5c644f58a --- /dev/null +++ b/drizzle-kit/src/legacy/utils.ts @@ -0,0 +1,212 @@ +import chalk from 'chalk'; +import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'fs'; +import type { SQL } from 'orm044'; +import { CasingCache, toCamelCase, toSnakeCase } from 'orm044/casing'; +import { join } from 'path'; +import { parse } from 'url'; +import type { CasingType } from './common'; +import { assertUnreachable, snapshotVersion } from './global'; +import type { Dialect } from './schemaValidator'; + +export type DB = { + query: (sql: string, params?: any[]) => Promise; +}; + +export type SQLiteDB = { + query: (sql: string, params?: any[]) => Promise; + run(query: string): Promise; +}; + +export type LibSQLDB = { + query: (sql: string, params?: any[]) => Promise; + run(query: string): Promise; + batchWithPragma?(queries: string[]): Promise; +}; + +export const copy = (it: T): T => { + return JSON.parse(JSON.stringify(it)); +}; + +export const objectValues = (obj: T): Array => { + return Object.values(obj); +}; + +export const assertV1OutFolder = (out: string) => { + if (!existsSync(out)) return; + + const oldMigrationFolders = readdirSync(out).filter( + (it) => it.length === 14 && /^\d+$/.test(it), + ); + + if (oldMigrationFolders.length > 0) { + console.log( + `Your migrations folder format is outdated, please run ${ + chalk.green.bold( + `drizzle-kit up`, + ) + }`, + ); + process.exit(1); + } +}; + +export type Journal = { + version: string; + dialect: Dialect; + entries: { + idx: number; + version: string; + when: number; + tag: string; + breakpoints: boolean; + }[]; +}; + +export const dryJournal = (dialect: Dialect): Journal => { + return { + version: snapshotVersion, + dialect, + entries: [], + }; +}; + +// export const preparePushFolder = (dialect: Dialect) => { +// const out = ".drizzle"; +// let snapshot: string = ""; +// if (!existsSync(join(out))) { +// mkdirSync(out); +// snapshot = JSON.stringify(dryJournal(dialect)); +// } else { +// snapshot = readdirSync(out)[0]; +// } + +// return { snapshot }; +// }; + +export const prepareOutFolder = (out: string, dialect: Dialect) => { + const meta = join(out, 'meta'); + const journalPath = join(meta, '_journal.json'); + + if (!existsSync(join(out, 'meta'))) { + mkdirSync(meta, { recursive: true }); + writeFileSync(journalPath, JSON.stringify(dryJournal(dialect))); + } + + const journal = JSON.parse(readFileSync(journalPath).toString()); + + const snapshots = readdirSync(meta) + .filter((it) => !it.startsWith('_')) + .map((it) => join(meta, it)); + + snapshots.sort(); + return { meta, snapshots, journal }; +}; + +export const columnRenameKey = ( + table: string, + schema: string, + column: string, +) => { + const out = schema + ? `"${schema}"."${table}"."${column}"` + : `"${table}"."${column}"`; + return out; +}; + +export const kloudMeta = () => { + return { + pg: [5], + mysql: [] as number[], + sqlite: [] as number[], + }; +}; + +export const normaliseSQLiteUrl = ( + it: string, + type: 'libsql' | 'better-sqlite', +) => { + if (type === 'libsql') { + if (it.startsWith('file:')) { + return it; + } + try { + const url = parse(it); + if (url.protocol === null) { + return `file:${it}`; + } + return it; + } catch { + return `file:${it}`; + } + } + + if (type === 'better-sqlite') { + if (it.startsWith('file:')) { + return it.substring(5); + } + + return it; + } + + assertUnreachable(type); +}; + +export const normalisePGliteUrl = ( + it: string, +) => { + if (it.startsWith('file:')) { + return it.substring(5); + } + + return it; +}; + +export function isPgArrayType(sqlType: string) { + return sqlType.match(/.*\[\d*\].*|.*\[\].*/g) !== null; +} + +export function findAddedAndRemoved(columnNames1: string[], columnNames2: string[]) { + const set1 = new Set(columnNames1); + const set2 = new Set(columnNames2); + + const addedColumns = columnNames2.filter((it) => !set1.has(it)); + const removedColumns = columnNames1.filter((it) => !set2.has(it)); + + return { addedColumns, removedColumns }; +} + +export function escapeSingleQuotes(str: string) { + return str.replace(/'/g, "''"); +} + +export function unescapeSingleQuotes(str: string, ignoreFirstAndLastChar: boolean) { + const regex = ignoreFirstAndLastChar ? /(? { + return sql.toQuery({ + escapeName: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeParam: () => { + throw new Error("we don't support params for `sql` default values"); + }, + escapeString: () => { + throw new Error("we don't support params for `sql` default values"); + }, + casing: new CasingCache(casing), + }).sql; +}; diff --git a/drizzle-kit/src/loader.mjs b/drizzle-kit/src/loader.mjs deleted file mode 100644 index 488f5712c8..0000000000 --- a/drizzle-kit/src/loader.mjs +++ /dev/null @@ -1,57 +0,0 @@ -import esbuild from 'esbuild'; -import { readFileSync } from 'fs'; -import * as path from 'path'; - -const parse = (it) => { - if (!it) return { drizzle: false }; - - if (it.endsWith('__drizzle__')) { - const offset = it.startsWith('file://') ? 'file://'.length : 0; - const clean = it.slice(offset, -'__drizzle__'.length); - return { drizzle: true, clean, original: it }; - } - return { drizzle: false, clean: it }; -}; - -export function resolve(specifier, context, nextResolve) { - const { drizzle, clean } = parse(specifier); - if (drizzle && !clean.endsWith('.ts') && !clean.endsWith('.mts')) { - return nextResolve(clean); - } - - if (drizzle) { - return { - shortCircuit: true, - url: `file://${specifier}`, - }; - } - - const parsedParent = parse(context.parentURL); - const parentURL = parsedParent.drizzle - ? new URL(`file://${path.resolve(parsedParent.clean)}`) - : context.parentURL; - - // Let Node.js handle all other specifiers. - return nextResolve(specifier, { ...context, parentURL }); -} - -export async function load(url, context, defaultLoad) { - const { drizzle, clean } = parse(url); - if (drizzle) { - const file = readFileSync(clean, 'utf-8'); - if (clean.endsWith('.ts') || clean.endsWith('.mts')) { - const source = esbuild.transformSync(file, { - loader: 'ts', - format: 'esm', - }); - return { - format: 'module', - shortCircuit: true, - source: source.code, - }; - } - } - - // let Node.js handle all other URLs - return defaultLoad(url, context, defaultLoad); -} diff --git a/drizzle-kit/src/migrationPreparator.ts b/drizzle-kit/src/migrationPreparator.ts deleted file mode 100644 index 2846044418..0000000000 --- a/drizzle-kit/src/migrationPreparator.ts +++ /dev/null @@ -1,208 +0,0 @@ -import { randomUUID } from 'crypto'; -import fs from 'fs'; -import { CasingType } from './cli/validations/common'; -import { serializeMySql, serializePg, serializeSingleStore, serializeSQLite } from './serializer'; -import { dryMySql, MySqlSchema, mysqlSchema } from './serializer/mysqlSchema'; -import { dryPg, PgSchema, pgSchema } from './serializer/pgSchema'; -import { drySingleStore, SingleStoreSchema, singlestoreSchema } from './serializer/singlestoreSchema'; -import { drySQLite, SQLiteSchema, sqliteSchema } from './serializer/sqliteSchema'; - -export const prepareMySqlDbPushSnapshot = async ( - prev: MySqlSchema, - schemaPath: string | string[], - casing: CasingType | undefined, -): Promise<{ prev: MySqlSchema; cur: MySqlSchema }> => { - const serialized = await serializeMySql(schemaPath, casing); - - const id = randomUUID(); - const idPrev = prev.id; - - const { version, dialect, ...rest } = serialized; - const result: MySqlSchema = { version, dialect, id, prevId: idPrev, ...rest }; - - return { prev, cur: result }; -}; - -export const prepareSingleStoreDbPushSnapshot = async ( - prev: SingleStoreSchema, - schemaPath: string | string[], - casing: CasingType | undefined, -): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema }> => { - const serialized = await serializeSingleStore(schemaPath, casing); - - const id = randomUUID(); - const idPrev = prev.id; - - const { version, dialect, ...rest } = serialized; - const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest }; - - return { prev, cur: result }; -}; - -export const prepareSQLiteDbPushSnapshot = async ( - prev: SQLiteSchema, - schemaPath: string | string[], - casing: CasingType | undefined, -): Promise<{ prev: SQLiteSchema; cur: SQLiteSchema }> => { - const serialized = await serializeSQLite(schemaPath, casing); - - const id = randomUUID(); - const idPrev = prev.id; - - const { version, dialect, ...rest } = serialized; - const result: SQLiteSchema = { - version, - dialect, - id, - prevId: idPrev, - ...rest, - }; - - return { prev, cur: result }; -}; - -export const preparePgDbPushSnapshot = async ( - prev: PgSchema, - schemaPath: string | string[], - casing: CasingType | undefined, - schemaFilter: string[] = ['public'], -): Promise<{ prev: PgSchema; cur: PgSchema }> => { - const serialized = await serializePg(schemaPath, casing, schemaFilter); - - const id = randomUUID(); - const idPrev = prev.id; - - const { version, dialect, ...rest } = serialized; - const result: PgSchema = { version, dialect, id, prevId: idPrev, ...rest }; - - return { prev, cur: result }; -}; - -export const prepareMySqlMigrationSnapshot = async ( - migrationFolders: string[], - schemaPath: string | string[], - casing: CasingType | undefined, -): Promise<{ prev: MySqlSchema; cur: MySqlSchema; custom: MySqlSchema }> => { - const prevSnapshot = mysqlSchema.parse( - preparePrevSnapshot(migrationFolders, dryMySql), - ); - const serialized = await serializeMySql(schemaPath, casing); - - const id = randomUUID(); - const idPrev = prevSnapshot.id; - - const { version, dialect, ...rest } = serialized; - const result: MySqlSchema = { version, dialect, id, prevId: idPrev, ...rest }; - - const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; - - // that's for custom migrations, when we need new IDs, but old snapshot - const custom: MySqlSchema = { - id, - prevId: idPrev, - ...prevRest, - }; - - return { prev: prevSnapshot, cur: result, custom }; -}; - -export const prepareSingleStoreMigrationSnapshot = async ( - migrationFolders: string[], - schemaPath: string | string[], - casing: CasingType | undefined, -): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema; custom: SingleStoreSchema }> => { - const prevSnapshot = singlestoreSchema.parse( - preparePrevSnapshot(migrationFolders, drySingleStore), - ); - const serialized = await serializeSingleStore(schemaPath, casing); - - const id = randomUUID(); - const idPrev = prevSnapshot.id; - - const { version, dialect, ...rest } = serialized; - const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest }; - - const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; - - // that's for custom migrations, when we need new IDs, but old snapshot - const custom: SingleStoreSchema = { - id, - prevId: idPrev, - ...prevRest, - }; - - return { prev: prevSnapshot, cur: result, custom }; -}; - -export const prepareSqliteMigrationSnapshot = async ( - snapshots: string[], - schemaPath: string | string[], - casing: CasingType | undefined, -): Promise<{ prev: SQLiteSchema; cur: SQLiteSchema; custom: SQLiteSchema }> => { - const prevSnapshot = sqliteSchema.parse( - preparePrevSnapshot(snapshots, drySQLite), - ); - const serialized = await serializeSQLite(schemaPath, casing); - - const id = randomUUID(); - const idPrev = prevSnapshot.id; - - const { version, dialect, ...rest } = serialized; - const result: SQLiteSchema = { - version, - dialect, - id, - prevId: idPrev, - ...rest, - }; - - const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; - - // that's for custom migrations, when we need new IDs, but old snapshot - const custom: SQLiteSchema = { - id, - prevId: idPrev, - ...prevRest, - }; - - return { prev: prevSnapshot, cur: result, custom }; -}; - -export const preparePgMigrationSnapshot = async ( - snapshots: string[], - schemaPath: string | string[], - casing: CasingType | undefined, -): Promise<{ prev: PgSchema; cur: PgSchema; custom: PgSchema }> => { - const prevSnapshot = pgSchema.parse(preparePrevSnapshot(snapshots, dryPg)); - const serialized = await serializePg(schemaPath, casing); - - const id = randomUUID(); - const idPrev = prevSnapshot.id; - - // const { version, dialect, ...rest } = serialized; - - const result: PgSchema = { id, prevId: idPrev, ...serialized }; - - const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; - - // that's for custom migrations, when we need new IDs, but old snapshot - const custom: PgSchema = { - id, - prevId: idPrev, - ...prevRest, - }; - - return { prev: prevSnapshot, cur: result, custom }; -}; - -const preparePrevSnapshot = (snapshots: string[], defaultPrev: any) => { - let prevSnapshot: any; - - if (snapshots.length === 0) { - prevSnapshot = defaultPrev; - } else { - const lastSnapshot = snapshots[snapshots.length - 1]; - prevSnapshot = JSON.parse(fs.readFileSync(lastSnapshot).toString()); - } - return prevSnapshot; -}; diff --git a/drizzle-kit/src/schemaValidator.ts b/drizzle-kit/src/schemaValidator.ts deleted file mode 100644 index ce4b2e59c4..0000000000 --- a/drizzle-kit/src/schemaValidator.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { enum as enumType, TypeOf, union } from 'zod'; -import { mysqlSchema, mysqlSchemaSquashed } from './serializer/mysqlSchema'; -import { pgSchema, pgSchemaSquashed } from './serializer/pgSchema'; -import { singlestoreSchema, singlestoreSchemaSquashed } from './serializer/singlestoreSchema'; -import { sqliteSchema, SQLiteSchemaSquashed } from './serializer/sqliteSchema'; - -export const dialects = ['postgresql', 'mysql', 'sqlite', 'turso', 'singlestore', 'gel'] as const; -export const dialect = enumType(dialects); - -export type Dialect = (typeof dialects)[number]; -const _: Dialect = '' as TypeOf; - -const commonSquashedSchema = union([ - pgSchemaSquashed, - mysqlSchemaSquashed, - SQLiteSchemaSquashed, - singlestoreSchemaSquashed, -]); - -const commonSchema = union([pgSchema, mysqlSchema, sqliteSchema, singlestoreSchema]); - -export type CommonSquashedSchema = TypeOf; -export type CommonSchema = TypeOf; diff --git a/drizzle-kit/src/serializer/gelSchema.ts b/drizzle-kit/src/serializer/gelSchema.ts deleted file mode 100644 index f7bf8b4bf2..0000000000 --- a/drizzle-kit/src/serializer/gelSchema.ts +++ /dev/null @@ -1,633 +0,0 @@ -import { mapValues, originUUID, snapshotVersion } from '../global'; - -import { any, array, boolean, enum as enumType, literal, number, object, record, string, TypeOf, union } from 'zod'; - -const enumSchema = object({ - name: string(), - schema: string(), - values: string().array(), -}).strict(); - -const enumSchemaV1 = object({ - name: string(), - values: record(string(), string()), -}).strict(); - -const indexColumn = object({ - expression: string(), - isExpression: boolean(), - asc: boolean(), - nulls: string().optional(), - opclass: string().optional(), -}); - -export type IndexColumnType = TypeOf; - -const index = object({ - name: string(), - columns: indexColumn.array(), - isUnique: boolean(), - with: record(string(), any()).optional(), - method: string().default('btree'), - where: string().optional(), - concurrently: boolean().default(false), -}).strict(); - -const fk = object({ - name: string(), - tableFrom: string(), - columnsFrom: string().array(), - tableTo: string(), - schemaTo: string().optional(), - columnsTo: string().array(), - onUpdate: string().optional(), - onDelete: string().optional(), -}).strict(); - -export const sequenceSchema = object({ - name: string(), - increment: string().optional(), - minValue: string().optional(), - maxValue: string().optional(), - startWith: string().optional(), - cache: string().optional(), - cycle: boolean().optional(), - schema: string(), -}).strict(); - -export const roleSchema = object({ - name: string(), - createDb: boolean().optional(), - createRole: boolean().optional(), - inherit: boolean().optional(), -}).strict(); - -export const sequenceSquashed = object({ - name: string(), - schema: string(), - values: string(), -}).strict(); - -const column = object({ - name: string(), - type: string(), - typeSchema: string().optional(), - primaryKey: boolean(), - notNull: boolean(), - default: any().optional(), - isUnique: any().optional(), - uniqueName: string().optional(), - nullsNotDistinct: boolean().optional(), - generated: object({ - type: literal('stored'), - as: string(), - }).optional(), - identity: sequenceSchema - .merge(object({ type: enumType(['always', 'byDefault']) })) - .optional(), -}).strict(); - -const checkConstraint = object({ - name: string(), - value: string(), -}).strict(); - -const columnSquashed = object({ - name: string(), - type: string(), - typeSchema: string().optional(), - primaryKey: boolean(), - notNull: boolean(), - default: any().optional(), - isUnique: any().optional(), - uniqueName: string().optional(), - nullsNotDistinct: boolean().optional(), - generated: object({ - type: literal('stored'), - as: string(), - }).optional(), - identity: string().optional(), -}).strict(); - -const compositePK = object({ - name: string(), - columns: string().array(), -}).strict(); - -const uniqueConstraint = object({ - name: string(), - columns: string().array(), - nullsNotDistinct: boolean(), -}).strict(); - -export const policy = object({ - name: string(), - as: enumType(['PERMISSIVE', 'RESTRICTIVE']).optional(), - for: enumType(['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE']).optional(), - to: string().array().optional(), - using: string().optional(), - withCheck: string().optional(), - on: string().optional(), - schema: string().optional(), -}).strict(); - -export const policySquashed = object({ - name: string(), - values: string(), -}).strict(); - -const viewWithOption = object({ - checkOption: enumType(['local', 'cascaded']).optional(), - securityBarrier: boolean().optional(), - securityInvoker: boolean().optional(), -}).strict(); - -const matViewWithOption = object({ - fillfactor: number().optional(), - toastTupleTarget: number().optional(), - parallelWorkers: number().optional(), - autovacuumEnabled: boolean().optional(), - vacuumIndexCleanup: enumType(['auto', 'off', 'on']).optional(), - vacuumTruncate: boolean().optional(), - autovacuumVacuumThreshold: number().optional(), - autovacuumVacuumScaleFactor: number().optional(), - autovacuumVacuumCostDelay: number().optional(), - autovacuumVacuumCostLimit: number().optional(), - autovacuumFreezeMinAge: number().optional(), - autovacuumFreezeMaxAge: number().optional(), - autovacuumFreezeTableAge: number().optional(), - autovacuumMultixactFreezeMinAge: number().optional(), - autovacuumMultixactFreezeMaxAge: number().optional(), - autovacuumMultixactFreezeTableAge: number().optional(), - logAutovacuumMinDuration: number().optional(), - userCatalogTable: boolean().optional(), -}).strict(); - -export const mergedViewWithOption = viewWithOption.merge(matViewWithOption).strict(); - -export const view = object({ - name: string(), - schema: string(), - columns: record(string(), column), - definition: string().optional(), - materialized: boolean(), - with: mergedViewWithOption.optional(), - isExisting: boolean(), - withNoData: boolean().optional(), - using: string().optional(), - tablespace: string().optional(), -}).strict(); - -const table = object({ - name: string(), - schema: string(), - columns: record(string(), column), - indexes: record(string(), index), - foreignKeys: record(string(), fk), - compositePrimaryKeys: record(string(), compositePK), - uniqueConstraints: record(string(), uniqueConstraint).default({}), - policies: record(string(), policy).default({}), - checkConstraints: record(string(), checkConstraint).default({}), - isRLSEnabled: boolean().default(false), -}).strict(); - -const schemaHash = object({ - id: string(), - prevId: string(), -}); - -export const kitInternals = object({ - tables: record( - string(), - object({ - columns: record( - string(), - object({ - isArray: boolean().optional(), - dimensions: number().optional(), - rawType: string().optional(), - isDefaultAnExpression: boolean().optional(), - }).optional(), - ), - }).optional(), - ), -}).optional(); - -export const gelSchemaExternal = object({ - version: literal('1'), - dialect: literal('gel'), - tables: array(table), - enums: array(enumSchemaV1), - schemas: array(object({ name: string() })), - _meta: object({ - schemas: record(string(), string()), - tables: record(string(), string()), - columns: record(string(), string()), - }), -}).strict(); - -export const gelSchemaInternal = object({ - version: literal('1'), - dialect: literal('gel'), - tables: record(string(), table), - enums: record(string(), enumSchema), - schemas: record(string(), string()), - views: record(string(), view).default({}), - sequences: record(string(), sequenceSchema).default({}), - roles: record(string(), roleSchema).default({}), - policies: record(string(), policy).default({}), - _meta: object({ - schemas: record(string(), string()), - tables: record(string(), string()), - columns: record(string(), string()), - }), - internal: kitInternals, -}).strict(); - -const tableSquashed = object({ - name: string(), - schema: string(), - columns: record(string(), columnSquashed), - indexes: record(string(), string()), - foreignKeys: record(string(), string()), - compositePrimaryKeys: record(string(), string()), - uniqueConstraints: record(string(), string()), - policies: record(string(), string()), - checkConstraints: record(string(), string()), - isRLSEnabled: boolean().default(false), -}).strict(); - -export const gelSchemaSquashed = object({ - version: literal('1'), - dialect: literal('gel'), - tables: record(string(), tableSquashed), - enums: record(string(), enumSchema), - schemas: record(string(), string()), - views: record(string(), view), - sequences: record(string(), sequenceSquashed), - roles: record(string(), roleSchema).default({}), - policies: record(string(), policySquashed).default({}), -}).strict(); - -export const gelSchema = gelSchemaInternal.merge(schemaHash); - -export type Enum = TypeOf; -export type Sequence = TypeOf; -export type Role = TypeOf; -export type Column = TypeOf; -export type Table = TypeOf; -export type GelSchema = TypeOf; -export type GelSchemaInternal = TypeOf; -export type GelSchemaExternal = TypeOf; -export type GelSchemaSquashed = TypeOf; -export type Index = TypeOf; -export type ForeignKey = TypeOf; -export type PrimaryKey = TypeOf; -export type UniqueConstraint = TypeOf; -export type Policy = TypeOf; -export type View = TypeOf; -export type MatViewWithOption = TypeOf; -export type ViewWithOption = TypeOf; - -export type GelKitInternals = TypeOf; -export type CheckConstraint = TypeOf; - -// no prev version -export const backwardCompatibleGelSchema = gelSchema; - -export const GelSquasher = { - squashIdx: (idx: Index) => { - index.parse(idx); - return `${idx.name};${ - idx.columns - .map( - (c) => `${c.expression}--${c.isExpression}--${c.asc}--${c.nulls}--${c.opclass ? c.opclass : ''}`, - ) - .join(',,') - };${idx.isUnique};${idx.concurrently};${idx.method};${idx.where};${JSON.stringify(idx.with)}`; - }, - unsquashIdx: (input: string): Index => { - const [ - name, - columnsString, - isUnique, - concurrently, - method, - where, - idxWith, - ] = input.split(';'); - - const columnString = columnsString.split(',,'); - const columns: IndexColumnType[] = []; - - for (const column of columnString) { - const [expression, isExpression, asc, nulls, opclass] = column.split('--'); - columns.push({ - nulls: nulls as IndexColumnType['nulls'], - isExpression: isExpression === 'true', - asc: asc === 'true', - expression: expression, - opclass: opclass === 'undefined' ? undefined : opclass, - }); - } - - const result: Index = index.parse({ - name, - columns: columns, - isUnique: isUnique === 'true', - concurrently: concurrently === 'true', - method, - where: where === 'undefined' ? undefined : where, - with: !idxWith || idxWith === 'undefined' ? undefined : JSON.parse(idxWith), - }); - return result; - }, - squashIdxPush: (idx: Index) => { - index.parse(idx); - return `${idx.name};${ - idx.columns - .map((c) => `${c.isExpression ? '' : c.expression}--${c.asc}--${c.nulls}`) - .join(',,') - };${idx.isUnique};${idx.method};${JSON.stringify(idx.with)}`; - }, - unsquashIdxPush: (input: string): Index => { - const [name, columnsString, isUnique, method, idxWith] = input.split(';'); - - const columnString = columnsString.split('--'); - const columns: IndexColumnType[] = []; - - for (const column of columnString) { - const [expression, asc, nulls, opclass] = column.split(','); - columns.push({ - nulls: nulls as IndexColumnType['nulls'], - isExpression: expression === '', - asc: asc === 'true', - expression: expression, - }); - } - - const result: Index = index.parse({ - name, - columns: columns, - isUnique: isUnique === 'true', - concurrently: false, - method, - with: idxWith === 'undefined' ? undefined : JSON.parse(idxWith), - }); - return result; - }, - squashFK: (fk: ForeignKey) => { - return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ - fk.onUpdate ?? '' - };${fk.onDelete ?? ''};${fk.schemaTo || 'public'}`; - }, - squashPolicy: (policy: Policy) => { - return `${policy.name}--${policy.as}--${policy.for}--${ - policy.to?.join(',') - }--${policy.using}--${policy.withCheck}--${policy.on}`; - }, - unsquashPolicy: (policy: string): Policy => { - const splitted = policy.split('--'); - return { - name: splitted[0], - as: splitted[1] as Policy['as'], - for: splitted[2] as Policy['for'], - to: splitted[3].split(','), - using: splitted[4] !== 'undefined' ? splitted[4] : undefined, - withCheck: splitted[5] !== 'undefined' ? splitted[5] : undefined, - on: splitted[6] !== 'undefined' ? splitted[6] : undefined, - }; - }, - squashPolicyPush: (policy: Policy) => { - return `${policy.name}--${policy.as}--${policy.for}--${policy.to?.join(',')}--${policy.on}`; - }, - unsquashPolicyPush: (policy: string): Policy => { - const splitted = policy.split('--'); - return { - name: splitted[0], - as: splitted[1] as Policy['as'], - for: splitted[2] as Policy['for'], - to: splitted[3].split(','), - on: splitted[4] !== 'undefined' ? splitted[4] : undefined, - }; - }, - squashPK: (pk: PrimaryKey) => { - return `${pk.columns.join(',')};${pk.name}`; - }, - unsquashPK: (pk: string): PrimaryKey => { - const splitted = pk.split(';'); - return { name: splitted[1], columns: splitted[0].split(',') }; - }, - squashUnique: (unq: UniqueConstraint) => { - return `${unq.name};${unq.columns.join(',')};${unq.nullsNotDistinct}`; - }, - unsquashUnique: (unq: string): UniqueConstraint => { - const [name, columns, nullsNotDistinct] = unq.split(';'); - return { - name, - columns: columns.split(','), - nullsNotDistinct: nullsNotDistinct === 'true', - }; - }, - unsquashFK: (input: string): ForeignKey => { - const [ - name, - tableFrom, - columnsFromStr, - tableTo, - columnsToStr, - onUpdate, - onDelete, - schemaTo, - ] = input.split(';'); - - const result: ForeignKey = fk.parse({ - name, - tableFrom, - columnsFrom: columnsFromStr.split(','), - schemaTo: schemaTo, - tableTo, - columnsTo: columnsToStr.split(','), - onUpdate, - onDelete, - }); - return result; - }, - squashSequence: (seq: Omit) => { - return `${seq.minValue};${seq.maxValue};${seq.increment};${seq.startWith};${seq.cache};${seq.cycle ?? ''}`; - }, - unsquashSequence: (seq: string): Omit => { - const splitted = seq.split(';'); - return { - minValue: splitted[0] !== 'undefined' ? splitted[0] : undefined, - maxValue: splitted[1] !== 'undefined' ? splitted[1] : undefined, - increment: splitted[2] !== 'undefined' ? splitted[2] : undefined, - startWith: splitted[3] !== 'undefined' ? splitted[3] : undefined, - cache: splitted[4] !== 'undefined' ? splitted[4] : undefined, - cycle: splitted[5] === 'true', - }; - }, - squashIdentity: ( - seq: Omit & { type: 'always' | 'byDefault' }, - ) => { - return `${seq.name};${seq.type};${seq.minValue};${seq.maxValue};${seq.increment};${seq.startWith};${seq.cache};${ - seq.cycle ?? '' - }`; - }, - unsquashIdentity: ( - seq: string, - ): Omit & { type: 'always' | 'byDefault' } => { - const splitted = seq.split(';'); - return { - name: splitted[0], - type: splitted[1] as 'always' | 'byDefault', - minValue: splitted[2] !== 'undefined' ? splitted[2] : undefined, - maxValue: splitted[3] !== 'undefined' ? splitted[3] : undefined, - increment: splitted[4] !== 'undefined' ? splitted[4] : undefined, - startWith: splitted[5] !== 'undefined' ? splitted[5] : undefined, - cache: splitted[6] !== 'undefined' ? splitted[6] : undefined, - cycle: splitted[7] === 'true', - }; - }, - squashCheck: (check: CheckConstraint) => { - return `${check.name};${check.value}`; - }, - unsquashCheck: (input: string): CheckConstraint => { - const [ - name, - value, - ] = input.split(';'); - - return { name, value }; - }, -}; - -export const squashGelScheme = ( - json: GelSchema, - action?: 'push' | undefined, -): GelSchemaSquashed => { - const mappedTables = Object.fromEntries( - Object.entries(json.tables).map((it) => { - const squashedIndexes = mapValues(it[1].indexes, (index) => { - return action === 'push' - ? GelSquasher.squashIdxPush(index) - : GelSquasher.squashIdx(index); - }); - - const squashedFKs = mapValues(it[1].foreignKeys, (fk) => { - return GelSquasher.squashFK(fk); - }); - - const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { - return GelSquasher.squashPK(pk); - }); - - const mappedColumns = Object.fromEntries( - Object.entries(it[1].columns).map((it) => { - const mappedIdentity = it[1].identity - ? GelSquasher.squashIdentity(it[1].identity) - : undefined; - return [ - it[0], - { - ...it[1], - identity: mappedIdentity, - }, - ]; - }), - ); - - const squashedUniqueConstraints = mapValues( - it[1].uniqueConstraints, - (unq) => { - return GelSquasher.squashUnique(unq); - }, - ); - - const squashedPolicies = mapValues(it[1].policies, (policy) => { - return action === 'push' - ? GelSquasher.squashPolicyPush(policy) - : GelSquasher.squashPolicy(policy); - }); - const squashedChecksContraints = mapValues( - it[1].checkConstraints, - (check) => { - return GelSquasher.squashCheck(check); - }, - ); - - return [ - it[0], - { - name: it[1].name, - schema: it[1].schema, - columns: mappedColumns, - indexes: squashedIndexes, - foreignKeys: squashedFKs, - compositePrimaryKeys: squashedPKs, - uniqueConstraints: squashedUniqueConstraints, - policies: squashedPolicies, - checkConstraints: squashedChecksContraints, - isRLSEnabled: it[1].isRLSEnabled ?? false, - }, - ]; - }), - ); - - const mappedSequences = Object.fromEntries( - Object.entries(json.sequences).map((it) => { - return [ - it[0], - { - name: it[1].name, - schema: it[1].schema, - values: GelSquasher.squashSequence(it[1]), - }, - ]; - }), - ); - - const mappedPolicies = Object.fromEntries( - Object.entries(json.policies).map((it) => { - return [ - it[0], - { - name: it[1].name, - values: action === 'push' - ? GelSquasher.squashPolicyPush(it[1]) - : GelSquasher.squashPolicy(it[1]), - }, - ]; - }), - ); - - return { - version: '1', - dialect: json.dialect, - tables: mappedTables, - enums: json.enums, - schemas: json.schemas, - views: json.views, - policies: mappedPolicies, - sequences: mappedSequences, - roles: json.roles, - }; -}; - -export const dryGel = gelSchema.parse({ - version: '1', - dialect: 'gel', - id: originUUID, - prevId: '', - tables: {}, - enums: {}, - schemas: {}, - policies: {}, - roles: {}, - sequences: {}, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, -}); diff --git a/drizzle-kit/src/serializer/gelSerializer.ts b/drizzle-kit/src/serializer/gelSerializer.ts deleted file mode 100644 index c3adf05f0e..0000000000 --- a/drizzle-kit/src/serializer/gelSerializer.ts +++ /dev/null @@ -1,1661 +0,0 @@ -import chalk from 'chalk'; -import { getTableName, is, SQL } from 'drizzle-orm'; -import { - AnyGelTable, - GelColumn, - GelDialect, - GelMaterializedView, - GelPolicy, - GelRole, - GelSchema, - GelSequence, - GelView, - getMaterializedViewConfig, - getTableConfig, - getViewConfig, - IndexedColumn, -} from 'drizzle-orm/gel-core'; -import { CasingType } from 'src/cli/validations/common'; -import { IntrospectStage, IntrospectStatus } from 'src/cli/views'; -import { vectorOps } from 'src/extensions/vector'; -import { withStyle } from '../cli/validations/outputs'; -import { type DB, escapeSingleQuotes } from '../utils'; -import { GelSchemaInternal } from './gelSchema'; -import type { - Column, - ForeignKey, - GelKitInternals, - Index, - IndexColumnType, - Policy, - PrimaryKey, - Role, - Sequence, - Table, - UniqueConstraint, - View, -} from './gelSchema'; -import { getColumnCasing, sqlToStr } from './utils'; - -export const indexName = (tableName: string, columns: string[]) => { - return `${tableName}_${columns.join('_')}_index`; -}; - -function stringFromIdentityProperty(field: string | number | undefined): string | undefined { - return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : String(field); -} - -function maxRangeForIdentityBasedOn(columnType: string) { - return columnType === 'integer' ? '2147483647' : columnType === 'bigint' ? '9223372036854775807' : '32767'; -} - -function minRangeForIdentityBasedOn(columnType: string) { - return columnType === 'integer' ? '-2147483648' : columnType === 'bigint' ? '-9223372036854775808' : '-32768'; -} - -function stringFromDatabaseIdentityProperty(field: any): string | undefined { - return typeof field === 'string' - ? (field as string) - : typeof field === 'undefined' - ? undefined - : typeof field === 'bigint' - ? field.toString() - : String(field); -} - -export function buildArrayString(array: any[], sqlType: string): string { - sqlType = sqlType.split('[')[0]; - const values = array - .map((value) => { - if (typeof value === 'number' || typeof value === 'bigint') { - return value.toString(); - } else if (typeof value === 'boolean') { - return value ? 'true' : 'false'; - } else if (Array.isArray(value)) { - return buildArrayString(value, sqlType); - } else if (value instanceof Date) { - if (sqlType === 'date') { - return `"${value.toISOString().split('T')[0]}"`; - } else if (sqlType === 'timestamp') { - return `"${value.toISOString().replace('T', ' ').slice(0, 23)}"`; - } else { - return `"${value.toISOString()}"`; - } - } else if (typeof value === 'object') { - return `"${JSON.stringify(value).replaceAll('"', '\\"')}"`; - } - - return `"${value}"`; - }) - .join(','); - - return `{${values}}`; -} - -const generateGelSnapshot = ( - tables: AnyGelTable[], - // enums: GelEnum[], - schemas: GelSchema[], - sequences: GelSequence[], - roles: GelRole[], - policies: GelPolicy[], - views: GelView[], - matViews: GelMaterializedView[], - casing: CasingType | undefined, - schemaFilter?: string[], -): GelSchemaInternal => { - const dialect = new GelDialect({ casing }); - const result: Record = {}; - const resultViews: Record = {}; - const sequencesToReturn: Record = {}; - const rolesToReturn: Record = {}; - // this policies are a separate objects that were linked to a table outside of it - const policiesToReturn: Record = {}; - - // This object stores unique names for indexes and will be used to detect if you have the same names for indexes - // within the same PostgreSQL schema - - const indexesInSchema: Record = {}; - - for (const table of tables) { - // This object stores unique names for checks and will be used to detect if you have the same names for checks - // within the same PostgreSQL table - const checksInTable: Record = {}; - - const { - name: tableName, - columns, - indexes, - foreignKeys, - checks, - schema, - primaryKeys, - uniqueConstraints, - policies, - enableRLS, - } = getTableConfig(table); - - if (schemaFilter && !schemaFilter.includes(schema ?? 'public')) { - continue; - } - - const columnsObject: Record = {}; - const indexesObject: Record = {}; - // const checksObject: Record = {}; - const foreignKeysObject: Record = {}; - const primaryKeysObject: Record = {}; - // const uniqueConstraintObject: Record = {}; - const policiesObject: Record = {}; - - columns.forEach((column) => { - const name = getColumnCasing(column, casing); - const notNull: boolean = column.notNull; - const primaryKey: boolean = column.primary; - const sqlTypeLowered = column.getSQLType().toLowerCase(); - - // const typeSchema = is(column, GelEnumColumn) ? column.enum.schema || 'public' : undefined; - const generated = column.generated; - const identity = column.generatedIdentity; - - const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; - const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) - ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); - const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) - ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); - const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) - ?? (parseFloat(increment) < 0 ? maxValue : minValue); - const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; - - const columnToSet: Column = { - name, - type: column.getSQLType(), - typeSchema: undefined, - primaryKey, - notNull, - generated: generated - ? { - as: is(generated.as, SQL) - ? dialect.sqlToQuery(generated.as as SQL).sql - : typeof generated.as === 'function' - ? dialect.sqlToQuery(generated.as() as SQL).sql - : (generated.as as any), - type: 'stored', - } - : undefined, - identity: identity - ? { - type: identity.type, - name: identity.sequenceName ?? `${tableName}_${name}_seq`, - schema: schema ?? 'public', - increment, - startWith, - minValue, - maxValue, - cache, - cycle: identity?.sequenceOptions?.cycle ?? false, - } - : undefined, - }; - - // if (column.isUnique) { - // const existingUnique = uniqueConstraintObject[column.uniqueName!]; - // if (typeof existingUnique !== 'undefined') { - // console.log( - // `\n${ - // withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ - // chalk.underline.blue( - // tableName, - // ) - // } table. - // The unique constraint ${ - // chalk.underline.blue( - // column.uniqueName, - // ) - // } on the ${ - // chalk.underline.blue( - // name, - // ) - // } column is conflicting with a unique constraint name already defined for ${ - // chalk.underline.blue( - // existingUnique.columns.join(','), - // ) - // } columns\n`) - // }`, - // ); - // process.exit(1); - // } - // uniqueConstraintObject[column.uniqueName!] = { - // name: column.uniqueName!, - // nullsNotDistinct: column.uniqueType === 'not distinct', - // columns: [columnToSet.name], - // }; - // } - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - if (typeof column.default === 'string') { - columnToSet.default = `'${escapeSingleQuotes(column.default)}'`; - } else { - if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { - columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; - } else if (column.default instanceof Date) { - if (sqlTypeLowered === 'date') { - columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; - } else if (sqlTypeLowered === 'timestamp') { - columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; - } else { - columnToSet.default = `'${column.default.toISOString()}'`; - } - } else if (Array.isArray(column.default)) { - columnToSet.default = columnToSet.default; - } else { - // Should do for all types - // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; - columnToSet.default = column.default; - } - } - } - } - columnsObject[name] = columnToSet; - }); - - primaryKeys.map((pk) => { - const originalColumnNames = pk.columns.map((c) => c.name); - const columnNames = pk.columns.map((c) => getColumnCasing(c, casing)); - - let name = pk.getName(); - if (casing !== undefined) { - for (let i = 0; i < originalColumnNames.length; i++) { - name = name.replace(originalColumnNames[i], columnNames[i]); - } - } - - primaryKeysObject[name] = { - name, - columns: columnNames, - }; - }); - - // uniqueConstraints?.map((unq) => { - // const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); - - // const name = unq.name ?? uniqueKeyName(table, columnNames); - - // // const existingUnique = uniqueConstraintObject[name]; - // // if (typeof existingUnique !== 'undefined') { - // // console.log( - // // `\n${ - // // withStyle.errorWarning( - // // `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(tableName)} table. - // // The unique constraint ${chalk.underline.blue(name)} on the ${ - // // chalk.underline.blue( - // // columnNames.join(','), - // // ) - // // } columns is confilcting with a unique constraint name already defined for ${ - // // chalk.underline.blue(existingUnique.columns.join(',')) - // // } columns\n`, - // // ) - // // }`, - // // ); - // // process.exit(1); - // // } - - // // uniqueConstraintObject[name] = { - // // name: unq.name!, - // // nullsNotDistinct: unq.nullsNotDistinct, - // // columns: columnNames, - // // }; - // }); - - const fks: ForeignKey[] = foreignKeys.map((fk) => { - const tableFrom = tableName; - const onDelete = fk.onDelete; - const onUpdate = fk.onUpdate; - const reference = fk.reference(); - - const tableTo = getTableName(reference.foreignTable); - // TODO: resolve issue with schema undefined/public for db push(or squasher) - // getTableConfig(reference.foreignTable).schema || "public"; - const schemaTo = getTableConfig(reference.foreignTable).schema; - - const originalColumnsFrom = reference.columns.map((it) => it.name); - const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); - const originalColumnsTo = reference.foreignColumns.map((it) => it.name); - const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); - - let name = fk.getName(); - if (casing !== undefined) { - for (let i = 0; i < originalColumnsFrom.length; i++) { - name = name.replace(originalColumnsFrom[i], columnsFrom[i]); - } - for (let i = 0; i < originalColumnsTo.length; i++) { - name = name.replace(originalColumnsTo[i], columnsTo[i]); - } - } - - return { - name, - tableFrom, - tableTo, - schemaTo, - columnsFrom, - columnsTo, - onDelete, - onUpdate, - } as ForeignKey; - }); - - fks.forEach((it) => { - foreignKeysObject[it.name] = it; - }); - - indexes.forEach((value) => { - const columns = value.config.columns; - - let indexColumnNames: string[] = []; - columns.forEach((it) => { - if (is(it, SQL)) { - if (typeof value.config.name === 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `Please specify an index name in ${getTableName(value.config.table)} table that has "${ - dialect.sqlToQuery(it).sql - }" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`, - ) - }`, - ); - process.exit(1); - } - } - it = it as IndexedColumn; - const name = getColumnCasing(it as IndexedColumn, casing); - if ( - !is(it, SQL) - && typeof it.indexConfig!.opClass === 'undefined' - ) { - console.log( - `\n${ - withStyle.errorWarning( - `You are specifying an index on the ${ - chalk.blueBright( - name, - ) - } column inside the ${ - chalk.blueBright( - tableName, - ) - } table with the ${ - chalk.blueBright( - 'vector', - ) - } type without specifying an operator class. Vector extension doesn't have a default operator class, so you need to specify one of the available options. Here is a list of available op classes for the vector extension: [${ - vectorOps - .map((it) => `${chalk.underline(`${it}`)}`) - .join(', ') - }].\n\nYou can specify it using current syntax: ${ - chalk.underline( - `index("${value.config.name}").using("${value.config.method}", table.${name}.op("${ - vectorOps[0] - }"))`, - ) - }\n\nYou can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing\n`, - ) - }`, - ); - process.exit(1); - } - indexColumnNames.push(name); - }); - - const name = value.config.name ? value.config.name : indexName(tableName, indexColumnNames); - - let indexColumns: IndexColumnType[] = columns.map( - (it): IndexColumnType => { - if (is(it, SQL)) { - return { - expression: dialect.sqlToQuery(it, 'indexes').sql, - asc: true, - isExpression: true, - nulls: 'last', - }; - } else { - it = it as IndexedColumn; - return { - expression: getColumnCasing(it as IndexedColumn, casing), - isExpression: false, - asc: it.indexConfig?.order === 'asc', - nulls: it.indexConfig?.nulls - ? it.indexConfig?.nulls - : it.indexConfig?.order === 'desc' - ? 'first' - : 'last', - opclass: it.indexConfig?.opClass, - }; - } - }, - ); - - // check for index names duplicates - if (typeof indexesInSchema[schema ?? 'public'] !== 'undefined') { - if (indexesInSchema[schema ?? 'public'].includes(name)) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated index name across ${ - chalk.underline.blue(schema ?? 'public') - } schema. Please rename your index in either the ${ - chalk.underline.blue( - tableName, - ) - } table or the table with the duplicated index name`, - ) - }`, - ); - process.exit(1); - } - indexesInSchema[schema ?? 'public'].push(name); - } else { - indexesInSchema[schema ?? 'public'] = [name]; - } - - indexesObject[name] = { - name, - columns: indexColumns, - isUnique: value.config.unique ?? false, - where: value.config.where ? dialect.sqlToQuery(value.config.where).sql : undefined, - concurrently: value.config.concurrently ?? false, - method: value.config.method ?? 'btree', - with: value.config.with ?? {}, - }; - }); - - policies.forEach((policy) => { - const mappedTo = []; - - if (!policy.to) { - mappedTo.push('public'); - } else { - if (policy.to && typeof policy.to === 'string') { - mappedTo.push(policy.to); - } else if (policy.to && is(policy.to, GelRole)) { - mappedTo.push(policy.to.name); - } else if (policy.to && Array.isArray(policy.to)) { - policy.to.forEach((it) => { - if (typeof it === 'string') { - mappedTo.push(it); - } else if (is(it, GelRole)) { - mappedTo.push(it.name); - } - }); - } - } - - if (policiesObject[policy.name] !== undefined) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated policy name across ${ - chalk.underline.blue(tableKey) - } table. Please rename one of the policies with ${ - chalk.underline.blue( - policy.name, - ) - } name`, - ) - }`, - ); - process.exit(1); - } - - policiesObject[policy.name] = { - name: policy.name, - as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', - for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', - to: mappedTo.sort(), - using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, - withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, - }; - }); - - // checks.forEach((check) => { - // const checkName = check.name; - - // if (typeof checksInTable[`"${schema ?? 'public'}"."${tableName}"`] !== 'undefined') { - // if (checksInTable[`"${schema ?? 'public'}"."${tableName}"`].includes(check.name)) { - // console.log( - // `\n${ - // withStyle.errorWarning( - // `We\'ve found duplicated check constraint name across ${ - // chalk.underline.blue( - // schema ?? 'public', - // ) - // } schema in ${ - // chalk.underline.blue( - // tableName, - // ) - // }. Please rename your check constraint in either the ${ - // chalk.underline.blue( - // tableName, - // ) - // } table or the table with the duplicated check contraint name`, - // ) - // }`, - // ); - // process.exit(1); - // } - // checksInTable[`"${schema ?? 'public'}"."${tableName}"`].push(checkName); - // } else { - // checksInTable[`"${schema ?? 'public'}"."${tableName}"`] = [check.name]; - // } - - // checksObject[checkName] = { - // name: checkName, - // value: dialect.sqlToQuery(check.value).sql, - // }; - // }); - - const tableKey = `${schema ?? 'public'}.${tableName}`; - - result[tableKey] = { - name: tableName, - schema: schema ?? '', - columns: columnsObject, - indexes: indexesObject, - foreignKeys: foreignKeysObject, - compositePrimaryKeys: primaryKeysObject, - uniqueConstraints: {}, // uniqueConstraintObject, - policies: policiesObject, - checkConstraints: {}, // checksObject, - isRLSEnabled: enableRLS, - }; - } - - for (const policy of policies) { - // @ts-ignore - if (!policy._linkedTable) { - console.log( - `\n${ - withStyle.errorWarning( - `"Policy ${policy.name} was skipped because it was not linked to any table. You should either include the policy in a table or use .link() on the policy to link it to any table you have. For more information, please check:`, - ) - }`, - ); - continue; - } - - // @ts-ignore - const tableConfig = getTableConfig(policy._linkedTable); - - const tableKey = `${tableConfig.schema ?? 'public'}.${tableConfig.name}`; - - const mappedTo = []; - - if (!policy.to) { - mappedTo.push('public'); - } else { - if (policy.to && typeof policy.to === 'string') { - mappedTo.push(policy.to); - } else if (policy.to && is(policy.to, GelRole)) { - mappedTo.push(policy.to.name); - } else if (policy.to && Array.isArray(policy.to)) { - policy.to.forEach((it) => { - if (typeof it === 'string') { - mappedTo.push(it); - } else if (is(it, GelRole)) { - mappedTo.push(it.name); - } - }); - } - } - - // add separate policies object, that will be only responsible for policy creation - // but we would need to track if a policy was enabled for a specific table or not - // enable only if jsonStatements for enable rls was not already there + filter it - - if (result[tableKey]?.policies[policy.name] !== undefined || policiesToReturn[policy.name] !== undefined) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated policy name across ${ - chalk.underline.blue(tableKey) - } table. Please rename one of the policies with ${ - chalk.underline.blue( - policy.name, - ) - } name`, - ) - }`, - ); - process.exit(1); - } - - const mappedPolicy = { - name: policy.name, - as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', - for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', - to: mappedTo.sort(), - using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, - withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, - }; - - if (result[tableKey]) { - result[tableKey].policies[policy.name] = mappedPolicy; - } else { - policiesToReturn[policy.name] = { - ...mappedPolicy, - schema: tableConfig.schema ?? 'public', - on: `"${tableConfig.schema ?? 'public'}"."${tableConfig.name}"`, - }; - } - } - - for (const sequence of sequences) { - const name = sequence.seqName!; - if (typeof sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] === 'undefined') { - const increment = stringFromIdentityProperty(sequence?.seqOptions?.increment) ?? '1'; - const minValue = stringFromIdentityProperty(sequence?.seqOptions?.minValue) - ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); - const maxValue = stringFromIdentityProperty(sequence?.seqOptions?.maxValue) - ?? (parseFloat(increment) < 0 ? '-1' : '9223372036854775807'); - const startWith = stringFromIdentityProperty(sequence?.seqOptions?.startWith) - ?? (parseFloat(increment) < 0 ? maxValue : minValue); - const cache = stringFromIdentityProperty(sequence?.seqOptions?.cache) ?? '1'; - - sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] = { - name, - schema: sequence.schema ?? 'public', - increment, - startWith, - minValue, - maxValue, - cache, - cycle: sequence.seqOptions?.cycle ?? false, - }; - } else { - // duplicate seq error - } - } - - for (const role of roles) { - if (!(role as any)._existing) { - rolesToReturn[role.name] = { - name: role.name, - createDb: (role as any).createDb === undefined ? false : (role as any).createDb, - createRole: (role as any).createRole === undefined ? false : (role as any).createRole, - inherit: (role as any).inherit === undefined ? true : (role as any).inherit, - }; - } - } - const combinedViews = [...views, ...matViews]; - for (const view of combinedViews) { - let viewName; - let schema; - let query; - let selectedFields; - let isExisting; - let withOption; - let tablespace; - let using; - let withNoData; - let materialized: boolean = false; - - if (is(view, GelView)) { - ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption } = getViewConfig(view)); - } else { - ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption, tablespace, using, withNoData } = - getMaterializedViewConfig(view)); - - materialized = true; - } - - const viewSchema = schema ?? 'public'; - - const viewKey = `${viewSchema}.${viewName}`; - - const columnsObject: Record = {}; - const uniqueConstraintObject: Record = {}; - - const existingView = resultViews[viewKey]; - if (typeof existingView !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated view name across ${ - chalk.underline.blue(schema ?? 'public') - } schema. Please rename your view`, - ) - }`, - ); - process.exit(1); - } - - for (const key in selectedFields) { - if (is(selectedFields[key], GelColumn)) { - const column = selectedFields[key]; - - const notNull: boolean = column.notNull; - const primaryKey: boolean = column.primary; - const sqlTypeLowered = column.getSQLType().toLowerCase(); - - // const typeSchema = is(column, GelEnumColumn) ? column.enum.schema || 'public' : undefined; - const generated = column.generated; - const identity = column.generatedIdentity; - - const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; - const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) - ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); - const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) - ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); - const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) - ?? (parseFloat(increment) < 0 ? maxValue : minValue); - const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; - - const columnToSet: Column = { - name: column.name, - type: column.getSQLType(), - typeSchema: undefined, - primaryKey, - notNull, - generated: generated - ? { - as: is(generated.as, SQL) - ? dialect.sqlToQuery(generated.as as SQL).sql - : typeof generated.as === 'function' - ? dialect.sqlToQuery(generated.as() as SQL).sql - : (generated.as as any), - type: 'stored', - } - : undefined, - identity: identity - ? { - type: identity.type, - name: identity.sequenceName ?? `${viewName}_${column.name}_seq`, - schema: schema ?? 'public', - increment, - startWith, - minValue, - maxValue, - cache, - cycle: identity?.sequenceOptions?.cycle ?? false, - } - : undefined, - }; - - if (column.isUnique) { - const existingUnique = uniqueConstraintObject[column.uniqueName!]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(viewName)} table. - The unique constraint ${chalk.underline.blue(column.uniqueName)} on the ${ - chalk.underline.blue( - column.name, - ) - } column is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue(existingUnique.columns.join(',')) - } columns\n`, - ) - }`, - ); - process.exit(1); - } - uniqueConstraintObject[column.uniqueName!] = { - name: column.uniqueName!, - nullsNotDistinct: column.uniqueType === 'not distinct', - columns: [columnToSet.name], - }; - } - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - if (typeof column.default === 'string') { - columnToSet.default = `'${column.default}'`; - } else { - if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { - columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; - } else if (column.default instanceof Date) { - if (sqlTypeLowered === 'date') { - columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; - } else if (sqlTypeLowered === 'timestamp') { - columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; - } else { - columnToSet.default = `'${column.default.toISOString()}'`; - } - } else if (Array.isArray(column.default)) { - columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; - } else { - // Should do for all types - // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; - columnToSet.default = column.default; - } - } - } - } - columnsObject[column.name] = columnToSet; - } - } - - resultViews[viewKey] = { - columns: columnsObject, - definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, - name: viewName, - schema: viewSchema, - isExisting, - with: withOption, - withNoData, - materialized, - tablespace, - using, - }; - } - - // const enumsToReturn: Record = enums.reduce<{ - // [key: string]: Enum; - // }>((map, obj) => { - // const enumSchema = obj.schema || 'public'; - // const key = `${enumSchema}.${obj.enumName}`; - // map[key] = { - // name: obj.enumName, - // schema: enumSchema, - // values: obj.enumValues, - // }; - // return map; - // }, {}); - - const schemasObject = Object.fromEntries( - schemas - .filter((it) => { - if (schemaFilter) { - return schemaFilter.includes(it.schemaName) && it.schemaName !== 'public'; - } else { - return it.schemaName !== 'public'; - } - }) - .map((it) => [it.schemaName, it.schemaName]), - ); - - return { - version: '1', - dialect: 'gel', - tables: result, - enums: {}, - schemas: schemasObject, - sequences: sequencesToReturn, - roles: rolesToReturn, - policies: policiesToReturn, - views: resultViews, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - }; -}; - -const trimChar = (str: string, char: string) => { - let start = 0; - let end = str.length; - - while (start < end && str[start] === char) ++start; - while (end > start && str[end - 1] === char) --end; - - // this.toString() due to ava deep equal issue with String { "value" } - return start > 0 || end < str.length ? str.substring(start, end) : str.toString(); -}; - -function prepareRoles(entities?: { - roles: boolean | { - provider?: string | undefined; - include?: string[] | undefined; - exclude?: string[] | undefined; - }; -}) { - let useRoles: boolean = false; - const includeRoles: string[] = []; - const excludeRoles: string[] = []; - - if (entities && entities.roles) { - if (typeof entities.roles === 'object') { - if (entities.roles.provider) { - if (entities.roles.provider === 'supabase') { - excludeRoles.push(...[ - 'anon', - 'authenticator', - 'authenticated', - 'service_role', - 'supabase_auth_admin', - 'supabase_storage_admin', - 'dashboard_user', - 'supabase_admin', - ]); - } else if (entities.roles.provider === 'neon') { - excludeRoles.push(...['authenticated', 'anonymous']); - } - } - if (entities.roles.include) { - includeRoles.push(...entities.roles.include); - } - if (entities.roles.exclude) { - excludeRoles.push(...entities.roles.exclude); - } - } else { - useRoles = entities.roles; - } - } - return { useRoles, includeRoles, excludeRoles }; -} - -export const fromDatabase = async ( - db: DB, - tablesFilter: (table: string) => boolean = () => true, - schemaFilters: string[], - entities?: { - roles: boolean | { - provider?: string | undefined; - include?: string[] | undefined; - exclude?: string[] | undefined; - }; - }, - progressCallback?: ( - stage: IntrospectStage, - count: number, - status: IntrospectStatus, - ) => void, - tsSchema?: GelSchemaInternal, -): Promise => { - const result: Record = {}; - // const views: Record = {}; - const policies: Record = {}; - const internals: GelKitInternals = { tables: {} }; - - const where = schemaFilters.map((t) => `n.nspname = '${t}'`).join(' or '); - - const allTables = await db.query<{ table_schema: string; table_name: string; type: string; rls_enabled: boolean }>( - `SELECT - n.nspname::text AS table_schema, - c.relname::text AS table_name, - CASE - WHEN c.relkind = 'r' THEN 'table' - WHEN c.relkind = 'v' THEN 'view' - WHEN c.relkind = 'm' THEN 'materialized_view' - END AS type, - c.relrowsecurity AS rls_enabled -FROM - pg_catalog.pg_class c -JOIN - pg_catalog.pg_namespace n ON n.oid::text = c.relnamespace::text -WHERE - c.relkind IN ('r', 'v', 'm') - ${where === '' ? '' : ` AND ${where}`};`, - ); - - const schemas = new Set(allTables.map((it) => it.table_schema)); - - const allSchemas = await db.query<{ - table_schema: string; - }>(`select s.nspname::text as table_schema - from pg_catalog.pg_namespace s - join pg_catalog.pg_user u on u.usesysid::text = s.nspowner::text - where nspname not in ('information_schema', 'pg_catalog', 'public') - and nspname::text not like 'pg_toast%' - and nspname::text not like 'pg_temp_%' - order by 1;`); - - allSchemas.forEach((item) => { - if (schemaFilters.includes(item.table_schema)) { - schemas.add(item.table_schema); - } - }); - - let columnsCount = 0; - let indexesCount = 0; - let foreignKeysCount = 0; - let tableCount = 0; - - const sequencesToReturn: Record = {}; - - const all = allTables - .filter((it) => it.type === 'table') - .map((row) => { - return new Promise(async (res, rej) => { - const tableName = row.table_name as string; - if (!tablesFilter(tableName)) return res(''); - tableCount += 1; - const tableSchema = row.table_schema; - - try { - const columnToReturn: Record = {}; - const indexToReturn: Record = {}; - const foreignKeysToReturn: Record = {}; - const primaryKeys: Record = {}; - // const uniqueConstrains: Record = {}; - // const checkConstraints: Record = {}; - - const tableResponse = await getColumnsInfoQuery({ schema: tableSchema, table: tableName, db }); - - // const tableConstraints = await db.query( - // `SELECT c.column_name::text, c.data_type::text, constraint_type::text, constraint_name::text, constraint_schema::text - // FROM information_schema.table_constraints tc - // JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) - // JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema - // AND tc.table_name = c.table_name AND ccu.column_name = c.column_name - // WHERE tc.table_name = '${tableName}' and constraint_schema = '${tableSchema}';`, - // ); - - // const tableChecks = await db.query(`SELECT - // tc.constraint_name::text, - // tc.constraint_type::text, - // pg_get_constraintdef(con.oid) AS constraint_definition - // FROM - // information_schema.table_constraints AS tc - // JOIN pg_constraint AS con - // ON tc.constraint_name = con.conname - // AND con.conrelid = ( - // SELECT oid - // FROM pg_class - // WHERE relname = tc.table_name - // AND relnamespace = ( - // SELECT oid - // FROM pg_namespace - // WHERE nspname = tc.constraint_schema - // ) - // ) - // WHERE - // tc.table_name = '${tableName}' - // AND tc.constraint_schema = '${tableSchema}' - // AND tc.constraint_type = 'CHECK';`); - - columnsCount += tableResponse.length; - if (progressCallback) { - progressCallback('columns', columnsCount, 'fetching'); - } - - const tableForeignKeys = await db.query( - `SELECT - con.contype::text AS constraint_type, - nsp.nspname::text AS constraint_schema, - con.conname::text AS constraint_name, - rel.relname::text AS table_name, - att.attname::text AS column_name, - fnsp.nspname::text AS foreign_table_schema, - frel.relname::text AS foreign_table_name, - fatt.attname::text AS foreign_column_name, - CASE con.confupdtype - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'n' THEN 'SET NULL' - WHEN 'c' THEN 'CASCADE' - WHEN 'd' THEN 'SET DEFAULT' - END AS update_rule, - CASE con.confdeltype - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'n' THEN 'SET NULL' - WHEN 'c' THEN 'CASCADE' - WHEN 'd' THEN 'SET DEFAULT' - END AS delete_rule - FROM - pg_catalog.pg_constraint con - JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid - JOIN pg_catalog.pg_namespace nsp ON nsp.oid = con.connamespace - LEFT JOIN pg_catalog.pg_attribute att ON att.attnum = ANY (con.conkey) - AND att.attrelid = con.conrelid - LEFT JOIN pg_catalog.pg_class frel ON frel.oid = con.confrelid - LEFT JOIN pg_catalog.pg_namespace fnsp ON fnsp.oid = frel.relnamespace - LEFT JOIN pg_catalog.pg_attribute fatt ON fatt.attnum = ANY (con.confkey) - AND fatt.attrelid = con.confrelid - WHERE - nsp.nspname = '${tableSchema}' - AND rel.relname = '${tableName}' - AND con.contype IN ('f');`, - ); - - foreignKeysCount += tableForeignKeys.length; - if (progressCallback) { - progressCallback('fks', foreignKeysCount, 'fetching'); - } - for (const fk of tableForeignKeys) { - // const tableFrom = fk.table_name; - const columnFrom: string = fk.column_name; - const tableTo = fk.foreign_table_name; - const columnTo: string = fk.foreign_column_name; - const schemaTo: string = fk.foreign_table_schema; - const foreignKeyName = fk.constraint_name; - const onUpdate = fk.update_rule?.toLowerCase(); - const onDelete = fk.delete_rule?.toLowerCase(); - - if (typeof foreignKeysToReturn[foreignKeyName] !== 'undefined') { - foreignKeysToReturn[foreignKeyName].columnsFrom.push(columnFrom); - foreignKeysToReturn[foreignKeyName].columnsTo.push(columnTo); - } else { - foreignKeysToReturn[foreignKeyName] = { - name: foreignKeyName, - tableFrom: tableName, - tableTo, - schemaTo, - columnsFrom: [columnFrom], - columnsTo: [columnTo], - onDelete, - onUpdate, - }; - } - - foreignKeysToReturn[foreignKeyName].columnsFrom = [ - ...new Set(foreignKeysToReturn[foreignKeyName].columnsFrom), - ]; - - foreignKeysToReturn[foreignKeyName].columnsTo = [...new Set(foreignKeysToReturn[foreignKeyName].columnsTo)]; - } - - // const uniqueConstrainsRows = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'UNIQUE'); - - // for (const unqs of uniqueConstrainsRows) { - // // const tableFrom = fk.table_name; - // const columnName: string = unqs.column_name; - // const constraintName: string = unqs.constraint_name; - - // if (typeof uniqueConstrains[constraintName] !== 'undefined') { - // uniqueConstrains[constraintName].columns.push(columnName); - // } else { - // uniqueConstrains[constraintName] = { - // columns: [columnName], - // nullsNotDistinct: false, - // name: constraintName, - // }; - // } - // } - - // checksCount += tableChecks.length; - // if (progressCallback) { - // progressCallback('checks', checksCount, 'fetching'); - // } - // for (const checks of tableChecks) { - // // CHECK (((email)::text <> 'test@gmail.com'::text)) - // // Where (email) is column in table - // let checkValue: string = checks.constraint_definition; - // const constraintName: string = checks.constraint_name; - - // checkValue = checkValue.replace(/^CHECK\s*\(\(/, '').replace(/\)\)\s*$/, ''); - - // checkConstraints[constraintName] = { - // name: constraintName, - // value: checkValue, - // }; - // } - - for (const columnResponse of tableResponse) { - const columnName = columnResponse.column_name; - if (columnName === '__type__') continue; - - const columnAdditionalDT = columnResponse.additional_dt; - const columnDimensions = columnResponse.array_dimensions; - const enumType: string = columnResponse.enum_name; - let columnType: string = columnResponse.data_type; - // const typeSchema = columnResponse.type_schema; - const defaultValueRes: string = columnResponse.column_default; - - const isGenerated = columnResponse.is_generated === 'ALWAYS'; - const generationExpression = columnResponse.generation_expression; - const isIdentity = columnResponse.is_identity === 'YES'; - const identityGeneration = columnResponse.identity_generation === 'ALWAYS' ? 'always' : 'byDefault'; - const identityStart = columnResponse.identity_start; - const identityIncrement = columnResponse.identity_increment; - const identityMaximum = columnResponse.identity_maximum; - const identityMinimum = columnResponse.identity_minimum; - const identityCycle = columnResponse.identity_cycle === 'YES'; - const identityName = columnResponse.seq_name; - - // const primaryKey = tableConstraints.filter((mapRow) => - // columnName === mapRow.column_name && mapRow.constraint_type === 'PRIMARY KEY' - // ); - - // const cprimaryKey = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'PRIMARY KEY'); - - // if (cprimaryKey.length > 1) { - // const tableCompositePkName = await db.query( - // `SELECT conname::text AS primary_key - // FROM pg_constraint join pg_class on (pg_class.oid = conrelid) - // WHERE contype = 'p' - // AND connamespace = $1::regnamespace - // AND pg_class.relname = $2;`, - // [tableSchema, tableName], - // ); - // primaryKeys[tableCompositePkName[0].primary_key] = { - // name: tableCompositePkName[0].primary_key, - // columns: cprimaryKey.map((c: any) => c.column_name), - // }; - // } - - let columnTypeMapped = columnType; - - // Set default to internal object - if (columnAdditionalDT === 'ARRAY') { - if (typeof internals.tables[tableName] === 'undefined') { - internals.tables[tableName] = { - columns: { - [columnName]: { - isArray: true, - dimensions: columnDimensions, - rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), - }, - }, - }; - } else { - if (typeof internals.tables[tableName]!.columns[columnName] === 'undefined') { - internals.tables[tableName]!.columns[columnName] = { - isArray: true, - dimensions: columnDimensions, - rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), - }; - } - } - } - - const defaultValue = defaultForColumn(columnResponse, internals, tableName); - if ( - defaultValue === 'NULL' - || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) - ) { - if (typeof internals!.tables![tableName] === 'undefined') { - internals!.tables![tableName] = { - columns: { - [columnName]: { - isDefaultAnExpression: true, - }, - }, - }; - } else { - if (typeof internals!.tables![tableName]!.columns[columnName] === 'undefined') { - internals!.tables![tableName]!.columns[columnName] = { - isDefaultAnExpression: true, - }; - } else { - internals!.tables![tableName]!.columns[columnName]!.isDefaultAnExpression = true; - } - } - } - - if (columnTypeMapped.startsWith('numeric(')) { - columnTypeMapped = columnTypeMapped.replace(',', ', '); - } - - if (columnAdditionalDT === 'ARRAY') { - for (let i = 1; i < Number(columnDimensions); i++) { - columnTypeMapped += '[]'; - } - } - - // TODO check if correct - // skip range and tuples - if (columnTypeMapped.includes('tuple<') || columnTypeMapped.includes('range')) continue; - - columnTypeMapped = trimChar(columnTypeMapped, '"'); - columnTypeMapped = columnTypeMapped.replace('pg_catalog.', ''); - - // patching array types - columnTypeMapped = columnTypeMapped.replace('float4[]', 'real[]').replace('float8[]', 'double precision[]') - .replace('"numeric"[]', 'numeric[]').replace('"time"[]', 'time without time zone[]').replace( - 'int2[]', - 'smallint[]', - ).replace( - 'int4[]', - 'integer[]', - ).replace( - 'int8[]', - 'bigint[]', - ).replace( - 'bool[]', - 'boolean[]', - ); - - columnToReturn[columnName] = { - name: columnName, - type: - // filter vectors, but in future we should filter any extension that was installed by user - columnAdditionalDT === 'USER-DEFINED' - && !['vector', 'geometry'].includes(enumType) - ? enumType - : columnTypeMapped, - typeSchema: undefined, - // typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined - // ? enumsToReturn[`${typeSchema}.${enumType}`].schema - // : undefined, - primaryKey: columnName === 'id', - default: defaultValue, - notNull: columnResponse.is_nullable === 'NO', - generated: isGenerated - ? { as: generationExpression, type: 'stored' } - : undefined, - identity: isIdentity - ? { - type: identityGeneration, - name: identityName, - increment: stringFromDatabaseIdentityProperty(identityIncrement), - minValue: stringFromDatabaseIdentityProperty(identityMinimum), - maxValue: stringFromDatabaseIdentityProperty(identityMaximum), - startWith: stringFromDatabaseIdentityProperty(identityStart), - cache: sequencesToReturn[identityName]?.cache - ? sequencesToReturn[identityName]?.cache - : sequencesToReturn[`${tableSchema}.${identityName}`]?.cache - ? sequencesToReturn[`${tableSchema}.${identityName}`]?.cache - : undefined, - cycle: identityCycle, - schema: tableSchema, - } - : undefined, - }; - - if (identityName && typeof identityName === 'string') { - // remove "" from sequence name - delete sequencesToReturn[ - `${tableSchema}.${ - identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName - }` - ]; - delete sequencesToReturn[identityName]; - } - } - - const dbIndexes = await db.query( - `SELECT DISTINCT ON (t.relname, ic.relname, k.i) t.relname::text as table_name, ic.relname::text AS indexname, - k.i AS index_order, - i.indisunique as is_unique, - am.amname::text as method, - ic.reloptions as with, - coalesce(a.attname, - (('{' || pg_get_expr( - i.indexprs, - i.indrelid - ) - || '}')::text[] - )[k.i] - )::text AS column_name, - CASE - WHEN pg_get_expr(i.indexprs, i.indrelid) IS NOT NULL THEN 1 - ELSE 0 - END AS is_expression, - i.indoption[k.i-1] & 1 = 1 AS descending, - i.indoption[k.i-1] & 2 = 2 AS nulls_first, - pg_get_expr( - i.indpred, - i.indrelid - ) as where, - opc.opcname::text - FROM pg_class t - LEFT JOIN pg_index i ON t.oid = i.indrelid - LEFT JOIN pg_class ic ON ic.oid = i.indexrelid - CROSS JOIN LATERAL (SELECT unnest(i.indkey), generate_subscripts(i.indkey, 1) + 1) AS k(attnum, i) - LEFT JOIN pg_attribute AS a - ON i.indrelid = a.attrelid AND k.attnum = a.attnum - JOIN pg_namespace c on c.oid = t.relnamespace - LEFT JOIN pg_am AS am ON ic.relam = am.oid - JOIN pg_opclass opc ON opc.oid = ANY(i.indclass) - WHERE - c.nspname = '${tableSchema}' AND - t.relname = '${tableName}';`, - ); - - const dbIndexFromConstraint = await db.query( - `SELECT - idx.indexrelname::text AS index_name, - idx.relname::text AS table_name, - schemaname::text, - CASE WHEN con.conname IS NOT NULL THEN 1 ELSE 0 END AS generated_by_constraint - FROM - pg_stat_user_indexes idx - LEFT JOIN - pg_constraint con ON con.conindid = idx.indexrelid - WHERE idx.relname = '${tableName}' and schemaname = '${tableSchema}' - group by index_name, table_name,schemaname, generated_by_constraint;`, - ); - - const idxsInConsteraint = dbIndexFromConstraint.filter((it) => it.generated_by_constraint === 1).map((it) => - it.index_name - ); - - for (const dbIndex of dbIndexes) { - const indexName: string = dbIndex.indexname; - const indexColumnName: string = dbIndex.column_name; - const indexIsUnique = dbIndex.is_unique; - const indexMethod = dbIndex.method; - const indexWith: string[] = dbIndex.with; - const indexWhere: string = dbIndex.where; - const opclass: string = dbIndex.opcname; - const isExpression = dbIndex.is_expression === 1; - - const desc: boolean = dbIndex.descending; - const nullsFirst: boolean = dbIndex.nulls_first; - - const mappedWith: Record = {}; - - if (indexWith !== null) { - indexWith - // .slice(1, indexWith.length - 1) - // .split(",") - .forEach((it) => { - const splitted = it.split('='); - mappedWith[splitted[0]] = splitted[1]; - }); - } - - if (idxsInConsteraint.includes(indexName)) continue; - - if (typeof indexToReturn[indexName] !== 'undefined') { - indexToReturn[indexName].columns.push({ - expression: indexColumnName, - asc: !desc, - nulls: nullsFirst ? 'first' : 'last', - opclass, - isExpression, - }); - } else { - indexToReturn[indexName] = { - name: indexName, - columns: [ - { - expression: indexColumnName, - asc: !desc, - nulls: nullsFirst ? 'first' : 'last', - opclass, - isExpression, - }, - ], - isUnique: indexIsUnique, - // should not be a part of diff detects - concurrently: false, - method: indexMethod, - where: indexWhere === null ? undefined : indexWhere, - with: mappedWith, - }; - } - } - - indexesCount += Object.keys(indexToReturn).length; - if (progressCallback) { - progressCallback('indexes', indexesCount, 'fetching'); - } - result[`${tableSchema}.${tableName}`] = { - name: tableName, - schema: tableSchema !== 'public' ? tableSchema : '', - columns: columnToReturn, - indexes: indexToReturn, - foreignKeys: foreignKeysToReturn, - compositePrimaryKeys: primaryKeys, - uniqueConstraints: {}, // uniqueConstrains, - checkConstraints: {}, // checkConstraints, - policies: {}, // policiesByTable[`${tableSchema}.${tableName}`] ?? {}, - isRLSEnabled: row.rls_enabled, - }; - } catch (e) { - rej(e); - return; - } - res(''); - }); - }); - - if (progressCallback) { - progressCallback('tables', tableCount, 'done'); - } - - for await (const _ of all) { - } - - if (progressCallback) { - progressCallback('columns', columnsCount, 'done'); - progressCallback('indexes', indexesCount, 'done'); - progressCallback('fks', foreignKeysCount, 'done'); - } - - const schemasObject = Object.fromEntries([...schemas].map((it) => [it, it])); - - return { - version: '1', - dialect: 'gel', - tables: result, - enums: {}, - schemas: schemasObject, - sequences: sequencesToReturn, - roles: {}, // rolesToReturn, - policies, - views: {}, // views, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - internal: internals, - }; -}; - -const defaultForColumn = (column: any, internals: GelKitInternals, tableName: string) => { - const columnName = column.column_name; - const isArray = internals?.tables[tableName]?.columns[columnName]?.isArray ?? false; - - if (column.column_default === null || column.column_default === undefined) return undefined; - - if (column.column_default.endsWith('[]')) { - column.column_default = column.column_default.slice(0, -2); - } - - column.column_default = column.column_default.replace(/::(.*?)(? psql stores like '99'::numeric - return columnDefaultAsString.includes("'") ? columnDefaultAsString : `'${columnDefaultAsString}'`; - } else if (column.data_type === 'json' || column.data_type === 'jsonb') { - return `'${columnDefaultAsString}'`; - } else if (column.data_type === 'boolean') { - return column.column_default === 'true'; - } else if (columnDefaultAsString === 'NULL') { - return `NULL`; - } else if (columnDefaultAsString.startsWith("'") && columnDefaultAsString.endsWith("'")) { - return columnDefaultAsString; - } else { - return `${columnDefaultAsString.replace(/\\/g, '`\\')}`; - } -}; - -const getColumnsInfoQuery = ({ schema, table, db }: { schema: string; table: string; db: DB }) => { - return db.query( - `SELECT - a.attrelid::regclass::text AS table_name, -- Table, view, or materialized view name - a.attname::text AS column_name, -- Column name - CASE - WHEN NOT a.attisdropped THEN - CASE - WHEN a.attnotnull THEN 'NO' - ELSE 'YES' - END - ELSE NULL - END AS is_nullable, -- NULL or NOT NULL constraint - a.attndims AS array_dimensions, -- Array dimensions - CASE - WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) - AND EXISTS ( - SELECT FROM pg_attrdef ad - WHERE ad.adrelid = a.attrelid - AND ad.adnum = a.attnum - AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' - || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)' - ) - THEN CASE a.atttypid - WHEN 'int'::regtype THEN 'serial' - WHEN 'int8'::regtype THEN 'bigserial' - WHEN 'int2'::regtype THEN 'smallserial' - END - ELSE format_type(a.atttypid, a.atttypmod) - END AS data_type, -- Column data type --- ns.nspname AS type_schema, -- Schema name - c.column_default::text, -- Column default value - c.data_type::text AS additional_dt, -- Data type from information_schema - c.udt_name::text AS enum_name, -- Enum type (if applicable) - c.is_generated::text, -- Is it a generated column? - c.generation_expression::text, -- Generation expression (if generated) - c.is_identity::text, -- Is it an identity column? - c.identity_generation::text, -- Identity generation strategy (ALWAYS or BY DEFAULT) - c.identity_start::text, -- Start value of identity column - c.identity_increment::text, -- Increment for identity column - c.identity_maximum::text, -- Maximum value for identity column - c.identity_minimum::text, -- Minimum value for identity column - c.identity_cycle::text, -- Does the identity column cycle? - ns.nspname::text AS type_schema -- Schema of the enum type -FROM - pg_attribute a -JOIN - pg_class cls ON cls.oid = a.attrelid -- Join pg_class to get table/view/materialized view info -JOIN - pg_namespace ns ON ns.oid = cls.relnamespace -- Join namespace to get schema info -LEFT JOIN - information_schema.columns c ON c.column_name = a.attname - AND c.table_schema = ns.nspname - AND c.table_name = cls.relname -- Match schema and table/view name -LEFT JOIN - pg_type enum_t ON enum_t.oid = a.atttypid -- Join to get the type info -LEFT JOIN - pg_namespace enum_ns ON enum_ns.oid = enum_t.typnamespace -- Join to get the enum schema -WHERE - a.attnum > 0 -- Valid column numbers only - AND NOT a.attisdropped -- Skip dropped columns - AND cls.relkind IN ('r', 'v', 'm') -- Include regular tables ('r'), views ('v'), and materialized views ('m') - AND ns.nspname::text = '${schema}' -- Filter by schema - AND cls.relname::text = '${table}' -- Filter by table name -ORDER BY - a.attnum; -- Order by column number`, - ); -}; diff --git a/drizzle-kit/src/serializer/index.ts b/drizzle-kit/src/serializer/index.ts deleted file mode 100644 index d24afbab08..0000000000 --- a/drizzle-kit/src/serializer/index.ts +++ /dev/null @@ -1,130 +0,0 @@ -import chalk from 'chalk'; -import fs from 'fs'; -import * as glob from 'glob'; -import Path from 'path'; -import { CasingType } from 'src/cli/validations/common'; -import { error } from '../cli/views'; -import type { MySqlSchemaInternal } from './mysqlSchema'; -import type { PgSchemaInternal } from './pgSchema'; -import { SingleStoreSchemaInternal } from './singlestoreSchema'; -import type { SQLiteSchemaInternal } from './sqliteSchema'; - -export const serializeMySql = async ( - path: string | string[], - casing: CasingType | undefined, -): Promise => { - const filenames = prepareFilenames(path); - - console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); - - const { prepareFromMySqlImports } = await import('./mysqlImports'); - const { generateMySqlSnapshot } = await import('./mysqlSerializer'); - - const { tables, views } = await prepareFromMySqlImports(filenames); - - return generateMySqlSnapshot(tables, views, casing); -}; - -export const serializePg = async ( - path: string | string[], - casing: CasingType | undefined, - schemaFilter?: string[], -): Promise => { - const filenames = prepareFilenames(path); - - const { prepareFromPgImports } = await import('./pgImports'); - const { generatePgSnapshot } = await import('./pgSerializer'); - - const { tables, enums, schemas, sequences, views, matViews, roles, policies } = await prepareFromPgImports( - filenames, - ); - - return generatePgSnapshot(tables, enums, schemas, sequences, roles, policies, views, matViews, casing, schemaFilter); -}; - -export const serializeSQLite = async ( - path: string | string[], - casing: CasingType | undefined, -): Promise => { - const filenames = prepareFilenames(path); - - const { prepareFromSqliteImports } = await import('./sqliteImports'); - const { generateSqliteSnapshot } = await import('./sqliteSerializer'); - const { tables, views } = await prepareFromSqliteImports(filenames); - return generateSqliteSnapshot(tables, views, casing); -}; - -export const serializeSingleStore = async ( - path: string | string[], - casing: CasingType | undefined, -): Promise => { - const filenames = prepareFilenames(path); - - console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); - - const { prepareFromSingleStoreImports } = await import('./singlestoreImports'); - const { generateSingleStoreSnapshot } = await import('./singlestoreSerializer'); - - const { tables /* views */ } = await prepareFromSingleStoreImports(filenames); - - return generateSingleStoreSnapshot(tables, /* views, */ casing); -}; - -export const prepareFilenames = (path: string | string[]) => { - if (typeof path === 'string') { - path = [path]; - } - const prefix = process.env.TEST_CONFIG_PATH_PREFIX || ''; - - const result = path.reduce((result, cur) => { - const globbed = glob.sync(`${prefix}${cur}`); - - globbed.forEach((it) => { - const fileName = fs.lstatSync(it).isDirectory() ? null : Path.resolve(it); - - const filenames = fileName - ? [fileName!] - : fs.readdirSync(it).map((file) => Path.join(Path.resolve(it), file)); - - filenames - .filter((file) => !fs.lstatSync(file).isDirectory()) - .forEach((file) => result.add(file)); - }); - - return result; - }, new Set()); - const res = [...result]; - - // TODO: properly handle and test - const errors = res.filter((it) => { - return !( - it.endsWith('.ts') - || it.endsWith('.js') - || it.endsWith('.cjs') - || it.endsWith('.mjs') - || it.endsWith('.mts') - || it.endsWith('.cts') - ); - }); - - // when schema: "./schema" and not "./schema.ts" - if (res.length === 0) { - console.log( - error( - `No schema files found for path config [${ - path - .map((it) => `'${it}'`) - .join(', ') - }]`, - ), - ); - console.log( - error( - `If path represents a file - please make sure to use .ts or other extension in the path`, - ), - ); - process.exit(1); - } - - return res; -}; diff --git a/drizzle-kit/src/serializer/mysqlImports.ts b/drizzle-kit/src/serializer/mysqlImports.ts deleted file mode 100644 index a8e8ead39d..0000000000 --- a/drizzle-kit/src/serializer/mysqlImports.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { is } from 'drizzle-orm'; -import { AnyMySqlTable, MySqlTable, MySqlView } from 'drizzle-orm/mysql-core'; -import { safeRegister } from '../cli/commands/utils'; - -export const prepareFromExports = (exports: Record) => { - const tables: AnyMySqlTable[] = []; - const views: MySqlView[] = []; - - const i0values = Object.values(exports); - i0values.forEach((t) => { - if (is(t, MySqlTable)) { - tables.push(t); - } - - if (is(t, MySqlView)) { - views.push(t); - } - }); - - return { tables, views }; -}; - -export const prepareFromMySqlImports = async (imports: string[]) => { - const tables: AnyMySqlTable[] = []; - const views: MySqlView[] = []; - - const { unregister } = await safeRegister(); - for (let i = 0; i < imports.length; i++) { - const it = imports[i]; - const i0: Record = require(`${it}`); - const prepared = prepareFromExports(i0); - - tables.push(...prepared.tables); - views.push(...prepared.views); - } - unregister(); - return { tables: Array.from(new Set(tables)), views }; -}; diff --git a/drizzle-kit/src/serializer/mysqlSerializer.ts b/drizzle-kit/src/serializer/mysqlSerializer.ts deleted file mode 100644 index 322d8957f8..0000000000 --- a/drizzle-kit/src/serializer/mysqlSerializer.ts +++ /dev/null @@ -1,999 +0,0 @@ -import chalk from 'chalk'; -import { getTableName, is, SQL } from 'drizzle-orm'; -import { - AnyMySqlTable, - getTableConfig, - getViewConfig, - MySqlColumn, - MySqlDialect, - MySqlView, - type PrimaryKey as PrimaryKeyORM, - uniqueKeyName, -} from 'drizzle-orm/mysql-core'; -import { RowDataPacket } from 'mysql2/promise'; -import { CasingType } from 'src/cli/validations/common'; -import { withStyle } from '../cli/validations/outputs'; -import { IntrospectStage, IntrospectStatus } from '../cli/views'; -import { - CheckConstraint, - Column, - ForeignKey, - Index, - MySqlKitInternals, - MySqlSchemaInternal, - PrimaryKey, - Table, - UniqueConstraint, - View, -} from '../serializer/mysqlSchema'; -import { type DB, escapeSingleQuotes } from '../utils'; -import { getColumnCasing, sqlToStr } from './utils'; - -export const indexName = (tableName: string, columns: string[]) => { - return `${tableName}_${columns.join('_')}_index`; -}; - -const handleEnumType = (type: string) => { - let str = type.split('(')[1]; - str = str.substring(0, str.length - 1); - const values = str.split(',').map((v) => `'${escapeSingleQuotes(v.substring(1, v.length - 1))}'`); - return `enum(${values.join(',')})`; -}; - -export const generateMySqlSnapshot = ( - tables: AnyMySqlTable[], - views: MySqlView[], - casing: CasingType | undefined, -): MySqlSchemaInternal => { - const dialect = new MySqlDialect({ casing }); - const result: Record = {}; - const resultViews: Record = {}; - const internal: MySqlKitInternals = { tables: {}, indexes: {} }; - - for (const table of tables) { - const { - name: tableName, - columns, - indexes, - foreignKeys, - schema, - checks, - primaryKeys, - uniqueConstraints, - } = getTableConfig(table); - - const columnsObject: Record = {}; - const indexesObject: Record = {}; - const foreignKeysObject: Record = {}; - const primaryKeysObject: Record = {}; - const uniqueConstraintObject: Record = {}; - const checkConstraintObject: Record = {}; - - // this object will help to identify same check names - let checksInTable: Record = {}; - - columns.forEach((column) => { - const name = getColumnCasing(column, casing); - const notNull: boolean = column.notNull; - const sqlType = column.getSQLType(); - const sqlTypeLowered = sqlType.toLowerCase(); - const autoIncrement = typeof (column as any).autoIncrement === 'undefined' - ? false - : (column as any).autoIncrement; - - const generated = column.generated; - - const columnToSet: Column = { - name, - type: sqlType.startsWith('enum') ? handleEnumType(sqlType) : sqlType, - primaryKey: false, - // If field is autoincrement it's notNull by default - // notNull: autoIncrement ? true : notNull, - notNull, - autoincrement: autoIncrement, - onUpdate: (column as any).hasOnUpdateNow, - generated: generated - ? { - as: is(generated.as, SQL) - ? dialect.sqlToQuery(generated.as as SQL).sql - : typeof generated.as === 'function' - ? dialect.sqlToQuery(generated.as() as SQL).sql - : (generated.as as any), - type: generated.mode ?? 'stored', - } - : undefined, - }; - - if (column.primary) { - primaryKeysObject[`${tableName}_${name}`] = { - name: `${tableName}_${name}`, - columns: [name], - }; - } - - if (column.isUnique) { - const existingUnique = uniqueConstraintObject[column.uniqueName!]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. - The unique constraint ${ - chalk.underline.blue( - column.uniqueName, - ) - } on the ${ - chalk.underline.blue( - name, - ) - } column is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - existingUnique.columns.join(','), - ) - } columns\n`) - }`, - ); - process.exit(1); - } - uniqueConstraintObject[column.uniqueName!] = { - name: column.uniqueName!, - columns: [columnToSet.name], - }; - } - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - if (typeof column.default === 'string') { - columnToSet.default = `'${escapeSingleQuotes(column.default)}'`; - } else { - if (sqlTypeLowered === 'json') { - columnToSet.default = `'${JSON.stringify(column.default)}'`; - } else if (column.default instanceof Date) { - if (sqlTypeLowered === 'date') { - columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; - } else if ( - sqlTypeLowered.startsWith('datetime') - || sqlTypeLowered.startsWith('timestamp') - ) { - columnToSet.default = `'${ - column.default - .toISOString() - .replace('T', ' ') - .slice(0, 23) - }'`; - } - } else { - columnToSet.default = column.default; - } - } - if (['blob', 'text', 'json'].includes(column.getSQLType())) { - columnToSet.default = `(${columnToSet.default})`; - } - } - } - columnsObject[name] = columnToSet; - }); - - primaryKeys.map((pk: PrimaryKeyORM) => { - const originalColumnNames = pk.columns.map((c) => c.name); - const columnNames = pk.columns.map((c: any) => getColumnCasing(c, casing)); - - let name = pk.getName(); - if (casing !== undefined) { - for (let i = 0; i < originalColumnNames.length; i++) { - name = name.replace(originalColumnNames[i], columnNames[i]); - } - } - - primaryKeysObject[name] = { - name, - columns: columnNames, - }; - - // all composite pk's should be treated as notNull - for (const column of pk.columns) { - columnsObject[getColumnCasing(column, casing)].notNull = true; - } - }); - - uniqueConstraints?.map((unq) => { - const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); - - const name = unq.name ?? uniqueKeyName(table, columnNames); - - const existingUnique = uniqueConstraintObject[name]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. \nThe unique constraint ${ - chalk.underline.blue( - name, - ) - } on the ${ - chalk.underline.blue( - columnNames.join(','), - ) - } columns is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - existingUnique.columns.join(','), - ) - } columns\n`, - ) - }`, - ); - process.exit(1); - } - - uniqueConstraintObject[name] = { - name: unq.name!, - columns: columnNames, - }; - }); - - const fks: ForeignKey[] = foreignKeys.map((fk) => { - const tableFrom = tableName; - const onDelete = fk.onDelete ?? 'no action'; - const onUpdate = fk.onUpdate ?? 'no action'; - const reference = fk.reference(); - - const referenceFT = reference.foreignTable; - - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - const tableTo = getTableName(referenceFT); - - const originalColumnsFrom = reference.columns.map((it) => it.name); - const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); - const originalColumnsTo = reference.foreignColumns.map((it) => it.name); - const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); - - let name = fk.getName(); - if (casing !== undefined) { - for (let i = 0; i < originalColumnsFrom.length; i++) { - name = name.replace(originalColumnsFrom[i], columnsFrom[i]); - } - for (let i = 0; i < originalColumnsTo.length; i++) { - name = name.replace(originalColumnsTo[i], columnsTo[i]); - } - } - - return { - name, - tableFrom, - tableTo, - columnsFrom, - columnsTo, - onDelete, - onUpdate, - } as ForeignKey; - }); - - fks.forEach((it) => { - foreignKeysObject[it.name] = it; - }); - - indexes.forEach((value) => { - const columns = value.config.columns; - const name = value.config.name; - - let indexColumns = columns.map((it) => { - if (is(it, SQL)) { - const sql = dialect.sqlToQuery(it, 'indexes').sql; - if (typeof internal!.indexes![name] === 'undefined') { - internal!.indexes![name] = { - columns: { - [sql]: { - isExpression: true, - }, - }, - }; - } else { - if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { - internal!.indexes![name]!.columns[sql] = { - isExpression: true, - }; - } else { - internal!.indexes![name]!.columns[sql]!.isExpression = true; - } - } - return sql; - } else { - return `${getColumnCasing(it, casing)}`; - } - }); - - if (value.config.unique) { - if (typeof uniqueConstraintObject[name] !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. \nThe unique index ${ - chalk.underline.blue( - name, - ) - } on the ${ - chalk.underline.blue( - indexColumns.join(','), - ) - } columns is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - uniqueConstraintObject[name].columns.join(','), - ) - } columns\n`, - ) - }`, - ); - process.exit(1); - } - } else { - if (typeof foreignKeysObject[name] !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `In MySQL, when creating a foreign key, an index is automatically generated with the same name as the foreign key constraint.\n\nWe have encountered a collision between the index name on columns ${ - chalk.underline.blue( - indexColumns.join(','), - ) - } and the foreign key on columns ${ - chalk.underline.blue( - foreignKeysObject[name].columnsFrom.join(','), - ) - }. Please change either the index name or the foreign key name. For more information, please refer to https://dev.mysql.com/doc/refman/8.0/en/constraint-foreign-key.html\n - `, - ) - }`, - ); - process.exit(1); - } - } - - indexesObject[name] = { - name, - columns: indexColumns, - isUnique: value.config.unique ?? false, - using: value.config.using, - algorithm: value.config.algorithm, - lock: value.config.lock, - }; - }); - - checks.forEach((check) => { - check; - const checkName = check.name; - if (typeof checksInTable[tableName] !== 'undefined') { - if (checksInTable[tableName].includes(check.name)) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated check constraint name in ${ - chalk.underline.blue( - tableName, - ) - }. Please rename your check constraint in the ${ - chalk.underline.blue( - tableName, - ) - } table`, - ) - }`, - ); - process.exit(1); - } - checksInTable[tableName].push(checkName); - } else { - checksInTable[tableName] = [check.name]; - } - - checkConstraintObject[checkName] = { - name: checkName, - value: dialect.sqlToQuery(check.value).sql, - }; - }); - - // only handle tables without schemas - if (!schema) { - result[tableName] = { - name: tableName, - columns: columnsObject, - indexes: indexesObject, - foreignKeys: foreignKeysObject, - compositePrimaryKeys: primaryKeysObject, - uniqueConstraints: uniqueConstraintObject, - checkConstraint: checkConstraintObject, - }; - } - } - - for (const view of views) { - const { - isExisting, - name, - query, - schema, - selectedFields, - algorithm, - sqlSecurity, - withCheckOption, - } = getViewConfig(view); - - const columnsObject: Record = {}; - - const existingView = resultViews[name]; - if (typeof existingView !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated view name across ${ - chalk.underline.blue( - schema ?? 'public', - ) - } schema. Please rename your view`, - ) - }`, - ); - process.exit(1); - } - - for (const key in selectedFields) { - if (is(selectedFields[key], MySqlColumn)) { - const column = selectedFields[key]; - - const notNull: boolean = column.notNull; - const sqlTypeLowered = column.getSQLType().toLowerCase(); - const autoIncrement = typeof (column as any).autoIncrement === 'undefined' - ? false - : (column as any).autoIncrement; - - const generated = column.generated; - - const columnToSet: Column = { - name: column.name, - type: column.getSQLType(), - primaryKey: false, - // If field is autoincrement it's notNull by default - // notNull: autoIncrement ? true : notNull, - notNull, - autoincrement: autoIncrement, - onUpdate: (column as any).hasOnUpdateNow, - generated: generated - ? { - as: is(generated.as, SQL) - ? dialect.sqlToQuery(generated.as as SQL).sql - : typeof generated.as === 'function' - ? dialect.sqlToQuery(generated.as() as SQL).sql - : (generated.as as any), - type: generated.mode ?? 'stored', - } - : undefined, - }; - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - if (typeof column.default === 'string') { - columnToSet.default = `'${column.default}'`; - } else { - if (sqlTypeLowered === 'json') { - columnToSet.default = `'${JSON.stringify(column.default)}'`; - } else if (column.default instanceof Date) { - if (sqlTypeLowered === 'date') { - columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; - } else if ( - sqlTypeLowered.startsWith('datetime') - || sqlTypeLowered.startsWith('timestamp') - ) { - columnToSet.default = `'${ - column.default - .toISOString() - .replace('T', ' ') - .slice(0, 23) - }'`; - } - } else { - columnToSet.default = column.default; - } - } - if (['blob', 'text', 'json'].includes(column.getSQLType())) { - columnToSet.default = `(${columnToSet.default})`; - } - } - } - columnsObject[column.name] = columnToSet; - } - } - - resultViews[name] = { - columns: columnsObject, - name, - isExisting, - definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, - withCheckOption, - algorithm: algorithm ?? 'undefined', // set default values - sqlSecurity: sqlSecurity ?? 'definer', // set default values - }; - } - - return { - version: '5', - dialect: 'mysql', - tables: result, - views: resultViews, - _meta: { - tables: {}, - columns: {}, - }, - internal, - }; -}; - -function clearDefaults(defaultValue: any, collate: string) { - if (typeof collate === 'undefined' || collate === null) { - collate = `utf8mb4`; - } - - let resultDefault = defaultValue; - collate = `_${collate}`; - if (defaultValue.startsWith(collate)) { - resultDefault = resultDefault - .substring(collate.length, defaultValue.length) - .replace(/\\/g, ''); - if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) { - return `('${escapeSingleQuotes(resultDefault.substring(1, resultDefault.length - 1))}')`; - } else { - return `'${escapeSingleQuotes(resultDefault.substring(1, resultDefault.length - 1))}'`; - } - } else { - return `(${resultDefault})`; - } -} - -export const fromDatabase = async ( - db: DB, - inputSchema: string, - tablesFilter: (table: string) => boolean = (table) => true, - progressCallback?: ( - stage: IntrospectStage, - count: number, - status: IntrospectStatus, - ) => void, -): Promise => { - const result: Record = {}; - const internals: MySqlKitInternals = { tables: {}, indexes: {} }; - - const columns = await db.query(`select * from information_schema.columns - where table_schema = '${inputSchema}' and table_name != '__drizzle_migrations' - order by table_name, ordinal_position;`); - - const response = columns as RowDataPacket[]; - - const schemas: string[] = []; - - let columnsCount = 0; - let tablesCount = new Set(); - let indexesCount = 0; - let foreignKeysCount = 0; - let checksCount = 0; - let viewsCount = 0; - - const idxs = await db.query( - `select * from INFORMATION_SCHEMA.STATISTICS - WHERE INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = '${inputSchema}' and INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY';`, - ); - - const idxRows = idxs as RowDataPacket[]; - - for (const column of response) { - if (!tablesFilter(column['TABLE_NAME'] as string)) continue; - - columnsCount += 1; - if (progressCallback) { - progressCallback('columns', columnsCount, 'fetching'); - } - const schema: string = column['TABLE_SCHEMA']; - const tableName = column['TABLE_NAME']; - - tablesCount.add(`${schema}.${tableName}`); - if (progressCallback) { - progressCallback('columns', tablesCount.size, 'fetching'); - } - const columnName: string = column['COLUMN_NAME']; - const isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO' - const dataType = column['DATA_TYPE']; // varchar - const columnType = column['COLUMN_TYPE']; // varchar(256) - const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', '' - const columnDefault: string = column['COLUMN_DEFAULT']; - const collation: string = column['CHARACTER_SET_NAME']; - const geenratedExpression: string = column['GENERATION_EXPRESSION']; - - let columnExtra = column['EXTRA']; - let isAutoincrement = false; // 'auto_increment', '' - let isDefaultAnExpression = false; // 'auto_increment', '' - - if (typeof column['EXTRA'] !== 'undefined') { - columnExtra = column['EXTRA']; - isAutoincrement = column['EXTRA'] === 'auto_increment'; // 'auto_increment', '' - isDefaultAnExpression = column['EXTRA'].includes('DEFAULT_GENERATED'); // 'auto_increment', '' - } - - // if (isPrimary) { - // if (typeof tableToPk[tableName] === "undefined") { - // tableToPk[tableName] = [columnName]; - // } else { - // tableToPk[tableName].push(columnName); - // } - // } - - if (schema !== inputSchema) { - schemas.push(schema); - } - - const table = result[tableName]; - - // let changedType = columnType.replace("bigint unsigned", "serial") - let changedType = columnType; - - if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) { - // check unique here - const uniqueIdx = idxRows.filter( - (it) => - it['COLUMN_NAME'] === columnName - && it['TABLE_NAME'] === tableName - && it['NON_UNIQUE'] === 0, - ); - if (uniqueIdx && uniqueIdx.length === 1) { - changedType = columnType.replace('bigint unsigned', 'serial'); - } - } - - if (columnType.includes('decimal(10,0)')) { - changedType = columnType.replace('decimal(10,0)', 'decimal'); - } - - let onUpdate: boolean | undefined = undefined; - if ( - columnType.startsWith('timestamp') - && typeof columnExtra !== 'undefined' - && columnExtra.includes('on update CURRENT_TIMESTAMP') - ) { - onUpdate = true; - } - - const newColumn: Column = { - default: columnDefault === null || columnDefault === undefined - ? undefined - : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) - && !['decimal', 'char', 'varchar'].some((type) => columnType.startsWith(type)) - ? Number(columnDefault) - : isDefaultAnExpression - ? clearDefaults(columnDefault, collation) - : `'${escapeSingleQuotes(columnDefault)}'`, - autoincrement: isAutoincrement, - name: columnName, - type: changedType, - primaryKey: false, - notNull: !isNullable, - onUpdate, - generated: geenratedExpression - ? { - as: geenratedExpression, - type: columnExtra === 'VIRTUAL GENERATED' ? 'virtual' : 'stored', - } - : undefined, - }; - - // Set default to internal object - if (isDefaultAnExpression) { - if (typeof internals!.tables![tableName] === 'undefined') { - internals!.tables![tableName] = { - columns: { - [columnName]: { - isDefaultAnExpression: true, - }, - }, - }; - } else { - if ( - typeof internals!.tables![tableName]!.columns[columnName] - === 'undefined' - ) { - internals!.tables![tableName]!.columns[columnName] = { - isDefaultAnExpression: true, - }; - } else { - internals!.tables![tableName]!.columns[ - columnName - ]!.isDefaultAnExpression = true; - } - } - } - - if (!table) { - result[tableName] = { - name: tableName, - columns: { - [columnName]: newColumn, - }, - compositePrimaryKeys: {}, - indexes: {}, - foreignKeys: {}, - uniqueConstraints: {}, - checkConstraint: {}, - }; - } else { - result[tableName]!.columns[columnName] = newColumn; - } - } - - const tablePks = await db.query( - `SELECT table_name, column_name, ordinal_position - FROM information_schema.table_constraints t - LEFT JOIN information_schema.key_column_usage k - USING(constraint_name,table_schema,table_name) - WHERE t.constraint_type='PRIMARY KEY' - and table_name != '__drizzle_migrations' - AND t.table_schema = '${inputSchema}' - ORDER BY ordinal_position`, - ); - - const tableToPk: { [tname: string]: string[] } = {}; - - const tableToPkRows = tablePks as RowDataPacket[]; - for (const tableToPkRow of tableToPkRows) { - const tableName: string = tableToPkRow['TABLE_NAME']; - const columnName: string = tableToPkRow['COLUMN_NAME']; - const position: string = tableToPkRow['ordinal_position']; - - if (typeof result[tableName] === 'undefined') { - continue; - } - - if (typeof tableToPk[tableName] === 'undefined') { - tableToPk[tableName] = [columnName]; - } else { - tableToPk[tableName].push(columnName); - } - } - - for (const [key, value] of Object.entries(tableToPk)) { - // if (value.length > 1) { - result[key].compositePrimaryKeys = { - [`${key}_${value.join('_')}`]: { - name: `${key}_${value.join('_')}`, - columns: value, - }, - }; - // } else if (value.length === 1) { - // result[key].columns[value[0]].primaryKey = true; - // } else { - // } - } - if (progressCallback) { - progressCallback('columns', columnsCount, 'done'); - progressCallback('tables', tablesCount.size, 'done'); - } - try { - const fks = await db.query( - `SELECT - kcu.TABLE_SCHEMA, - kcu.TABLE_NAME, - kcu.CONSTRAINT_NAME, - kcu.COLUMN_NAME, - kcu.REFERENCED_TABLE_SCHEMA, - kcu.REFERENCED_TABLE_NAME, - kcu.REFERENCED_COLUMN_NAME, - rc.UPDATE_RULE, - rc.DELETE_RULE - FROM - INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu - LEFT JOIN - information_schema.referential_constraints rc - ON kcu.CONSTRAINT_NAME = rc.CONSTRAINT_NAME - WHERE kcu.TABLE_SCHEMA = '${inputSchema}' AND kcu.CONSTRAINT_NAME != 'PRIMARY' - AND kcu.REFERENCED_TABLE_NAME IS NOT NULL;`, - ); - - const fkRows = fks as RowDataPacket[]; - - for (const fkRow of fkRows) { - foreignKeysCount += 1; - if (progressCallback) { - progressCallback('fks', foreignKeysCount, 'fetching'); - } - const tableSchema = fkRow['TABLE_SCHEMA']; - const tableName: string = fkRow['TABLE_NAME']; - const constraintName = fkRow['CONSTRAINT_NAME']; - const columnName: string = fkRow['COLUMN_NAME']; - const refTableSchema = fkRow['REFERENCED_TABLE_SCHEMA']; - const refTableName = fkRow['REFERENCED_TABLE_NAME']; - const refColumnName: string = fkRow['REFERENCED_COLUMN_NAME']; - const updateRule: string = fkRow['UPDATE_RULE']; - const deleteRule = fkRow['DELETE_RULE']; - - const tableInResult = result[tableName]; - if (typeof tableInResult === 'undefined') continue; - - if (typeof tableInResult.foreignKeys[constraintName] !== 'undefined') { - tableInResult.foreignKeys[constraintName]!.columnsFrom.push(columnName); - tableInResult.foreignKeys[constraintName]!.columnsTo.push( - refColumnName, - ); - } else { - tableInResult.foreignKeys[constraintName] = { - name: constraintName, - tableFrom: tableName, - tableTo: refTableName, - columnsFrom: [columnName], - columnsTo: [refColumnName], - onDelete: deleteRule?.toLowerCase(), - onUpdate: updateRule?.toLowerCase(), - }; - } - - tableInResult.foreignKeys[constraintName]!.columnsFrom = [ - ...new Set(tableInResult.foreignKeys[constraintName]!.columnsFrom), - ]; - - tableInResult.foreignKeys[constraintName]!.columnsTo = [ - ...new Set(tableInResult.foreignKeys[constraintName]!.columnsTo), - ]; - } - } catch (e) { - // console.log(`Can't proccess foreign keys`); - } - if (progressCallback) { - progressCallback('fks', foreignKeysCount, 'done'); - } - - for (const idxRow of idxRows) { - const tableSchema = idxRow['TABLE_SCHEMA']; - const tableName = idxRow['TABLE_NAME']; - const constraintName = idxRow['INDEX_NAME']; - const columnName: string = idxRow['COLUMN_NAME']; - const isUnique = idxRow['NON_UNIQUE'] === 0; - - const tableInResult = result[tableName]; - if (typeof tableInResult === 'undefined') continue; - - // if (tableInResult.columns[columnName].type === "serial") continue; - - indexesCount += 1; - if (progressCallback) { - progressCallback('indexes', indexesCount, 'fetching'); - } - - if (isUnique) { - if ( - typeof tableInResult.uniqueConstraints[constraintName] !== 'undefined' - ) { - tableInResult.uniqueConstraints[constraintName]!.columns.push( - columnName, - ); - } else { - tableInResult.uniqueConstraints[constraintName] = { - name: constraintName, - columns: [columnName], - }; - } - } else { - // in MySQL FK creates index by default. Name of index is the same as fk constraint name - // so for introspect we will just skip it - if (typeof tableInResult.foreignKeys[constraintName] === 'undefined') { - if (typeof tableInResult.indexes[constraintName] !== 'undefined') { - tableInResult.indexes[constraintName]!.columns.push(columnName); - } else { - tableInResult.indexes[constraintName] = { - name: constraintName, - columns: [columnName], - isUnique: isUnique, - }; - } - } - } - } - - const views = await db.query( - `select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema = '${inputSchema}';`, - ); - - const resultViews: Record = {}; - - viewsCount = views.length; - if (progressCallback) { - progressCallback('views', viewsCount, 'fetching'); - } - for await (const view of views) { - const viewName = view['TABLE_NAME']; - const definition = view['VIEW_DEFINITION']; - - const withCheckOption = view['CHECK_OPTION'] === 'NONE' ? undefined : view['CHECK_OPTION'].toLowerCase(); - const sqlSecurity = view['SECURITY_TYPE'].toLowerCase(); - - const [createSqlStatement] = await db.query(`SHOW CREATE VIEW \`${viewName}\`;`); - const algorithmMatch = createSqlStatement['Create View'].match(/ALGORITHM=([^ ]+)/); - const algorithm = algorithmMatch ? algorithmMatch[1].toLowerCase() : undefined; - - const columns = result[viewName].columns; - delete result[viewName]; - - resultViews[viewName] = { - columns: columns, - isExisting: false, - name: viewName, - algorithm, - definition, - sqlSecurity, - withCheckOption, - }; - } - - if (progressCallback) { - progressCallback('indexes', indexesCount, 'done'); - // progressCallback("enums", 0, "fetching"); - progressCallback('enums', 0, 'done'); - progressCallback('views', viewsCount, 'done'); - } - - const checkConstraints = await db.query( - `SELECT - tc.table_name, - tc.constraint_name, - cc.check_clause -FROM - information_schema.table_constraints tc -JOIN - information_schema.check_constraints cc - ON tc.constraint_name = cc.constraint_name -WHERE - tc.constraint_schema = '${inputSchema}' -AND - tc.constraint_type = 'CHECK';`, - ); - - checksCount += checkConstraints.length; - if (progressCallback) { - progressCallback('checks', checksCount, 'fetching'); - } - for (const checkConstraintRow of checkConstraints) { - const constraintName = checkConstraintRow['CONSTRAINT_NAME']; - const constraintValue = checkConstraintRow['CHECK_CLAUSE']; - const tableName = checkConstraintRow['TABLE_NAME']; - - const tableInResult = result[tableName]; - // if (typeof tableInResult === 'undefined') continue; - - tableInResult.checkConstraint[constraintName] = { - name: constraintName, - value: constraintValue, - }; - } - - if (progressCallback) { - progressCallback('checks', checksCount, 'done'); - } - - return { - version: '5', - dialect: 'mysql', - tables: result, - views: resultViews, - _meta: { - tables: {}, - columns: {}, - }, - internal: internals, - }; -}; diff --git a/drizzle-kit/src/serializer/pgImports.ts b/drizzle-kit/src/serializer/pgImports.ts deleted file mode 100644 index c12493a9a0..0000000000 --- a/drizzle-kit/src/serializer/pgImports.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { is } from 'drizzle-orm'; -import { Relations } from 'drizzle-orm/_relations'; -import { - AnyPgTable, - isPgEnum, - isPgMaterializedView, - isPgSequence, - isPgView, - PgEnum, - PgMaterializedView, - PgPolicy, - PgRole, - PgSchema, - PgSequence, - PgTable, - PgView, -} from 'drizzle-orm/pg-core'; -import { safeRegister } from '../cli/commands/utils'; - -export const prepareFromExports = (exports: Record) => { - const tables: AnyPgTable[] = []; - const enums: PgEnum[] = []; - const schemas: PgSchema[] = []; - const sequences: PgSequence[] = []; - const roles: PgRole[] = []; - const policies: PgPolicy[] = []; - const views: PgView[] = []; - const matViews: PgMaterializedView[] = []; - const relations: Relations[] = []; - - const i0values = Object.values(exports); - i0values.forEach((t) => { - if (isPgEnum(t)) { - enums.push(t); - return; - } - if (is(t, PgTable)) { - tables.push(t); - } - - if (is(t, PgSchema)) { - schemas.push(t); - } - - if (isPgView(t)) { - views.push(t); - } - - if (isPgMaterializedView(t)) { - matViews.push(t); - } - - if (isPgSequence(t)) { - sequences.push(t); - } - - if (is(t, PgRole)) { - roles.push(t); - } - - if (is(t, PgPolicy)) { - policies.push(t); - } - - if (is(t, Relations)) { - relations.push(t); - } - }); - - return { tables, enums, schemas, sequences, views, matViews, roles, policies, relations }; -}; - -export const prepareFromPgImports = async (imports: string[]) => { - const tables: AnyPgTable[] = []; - const enums: PgEnum[] = []; - const schemas: PgSchema[] = []; - const sequences: PgSequence[] = []; - const views: PgView[] = []; - const roles: PgRole[] = []; - const policies: PgPolicy[] = []; - const matViews: PgMaterializedView[] = []; - const relations: Relations[] = []; - - const { unregister } = await safeRegister(); - for (let i = 0; i < imports.length; i++) { - const it = imports[i]; - - const i0: Record = require(`${it}`); - const prepared = prepareFromExports(i0); - - tables.push(...prepared.tables); - enums.push(...prepared.enums); - schemas.push(...prepared.schemas); - sequences.push(...prepared.sequences); - views.push(...prepared.views); - matViews.push(...prepared.matViews); - roles.push(...prepared.roles); - policies.push(...prepared.policies); - relations.push(...prepared.relations); - } - unregister(); - - return { - tables: Array.from(new Set(tables)), - enums, - schemas, - sequences, - views, - matViews, - roles, - policies, - relations, - }; -}; diff --git a/drizzle-kit/src/serializer/pgSerializer.ts b/drizzle-kit/src/serializer/pgSerializer.ts deleted file mode 100644 index a98ba34825..0000000000 --- a/drizzle-kit/src/serializer/pgSerializer.ts +++ /dev/null @@ -1,2128 +0,0 @@ -import chalk from 'chalk'; -import { getTableName, is, SQL } from 'drizzle-orm'; -import { - AnyPgTable, - getMaterializedViewConfig, - getTableConfig, - getViewConfig, - IndexedColumn, - PgArray, - PgColumn, - PgDialect, - PgEnum, - PgEnumColumn, - PgMaterializedView, - PgPolicy, - PgRole, - PgSchema, - PgSequence, - PgView, - uniqueKeyName, -} from 'drizzle-orm/pg-core'; -import { CasingType } from 'src/cli/validations/common'; -import { vectorOps } from 'src/extensions/vector'; -import { withStyle } from '../cli/validations/outputs'; -import type { IntrospectStage, IntrospectStatus } from '../cli/views'; -import type { - CheckConstraint, - Column, - Enum, - ForeignKey, - Index, - IndexColumnType, - PgKitInternals, - PgSchemaInternal, - Policy, - PrimaryKey, - Role, - Sequence, - Table, - UniqueConstraint, - View, -} from '../serializer/pgSchema'; -import { type DB, escapeSingleQuotes, isPgArrayType } from '../utils'; -import { getColumnCasing, sqlToStr } from './utils'; - -export const indexName = (tableName: string, columns: string[]) => { - return `${tableName}_${columns.join('_')}_index`; -}; - -function stringFromIdentityProperty(field: string | number | undefined): string | undefined { - return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : String(field); -} - -function maxRangeForIdentityBasedOn(columnType: string) { - return columnType === 'integer' ? '2147483647' : columnType === 'bigint' ? '9223372036854775807' : '32767'; -} - -function minRangeForIdentityBasedOn(columnType: string) { - return columnType === 'integer' ? '-2147483648' : columnType === 'bigint' ? '-9223372036854775808' : '-32768'; -} - -function stringFromDatabaseIdentityProperty(field: any): string | undefined { - return typeof field === 'string' - ? (field as string) - : typeof field === 'undefined' - ? undefined - : typeof field === 'bigint' - ? field.toString() - : String(field); -} - -export function buildArrayString(array: any[], sqlType: string): string { - sqlType = sqlType.split('[')[0]; - const values = array - .map((value) => { - if (typeof value === 'number' || typeof value === 'bigint') { - return value.toString(); - } else if (typeof value === 'boolean') { - return value ? 'true' : 'false'; - } else if (Array.isArray(value)) { - return buildArrayString(value, sqlType); - } else if (value instanceof Date) { - if (sqlType === 'date') { - return `"${value.toISOString().split('T')[0]}"`; - } else if (sqlType === 'timestamp') { - return `"${value.toISOString().replace('T', ' ').slice(0, 23)}"`; - } else { - return `"${value.toISOString()}"`; - } - } else if (typeof value === 'object') { - return `"${JSON.stringify(value).replaceAll('"', '\\"')}"`; - } - - return `"${value}"`; - }) - .join(','); - - return `{${values}}`; -} - -export const generatePgSnapshot = ( - tables: AnyPgTable[], - enums: PgEnum[], - schemas: PgSchema[], - sequences: PgSequence[], - roles: PgRole[], - policies: PgPolicy[], - views: PgView[], - matViews: PgMaterializedView[], - casing: CasingType | undefined, - schemaFilter?: string[], -): PgSchemaInternal => { - const dialect = new PgDialect({ casing }); - const result: Record = {}; - const resultViews: Record = {}; - const sequencesToReturn: Record = {}; - const rolesToReturn: Record = {}; - // this policies are a separate objects that were linked to a table outside of it - const policiesToReturn: Record = {}; - - // This object stores unique names for indexes and will be used to detect if you have the same names for indexes - // within the same PostgreSQL schema - - const indexesInSchema: Record = {}; - - for (const table of tables) { - // This object stores unique names for checks and will be used to detect if you have the same names for checks - // within the same PostgreSQL table - const checksInTable: Record = {}; - - const { - name: tableName, - columns, - indexes, - foreignKeys, - checks, - schema, - primaryKeys, - uniqueConstraints, - policies, - enableRLS, - } = getTableConfig(table); - - if (schemaFilter && !schemaFilter.includes(schema ?? 'public')) { - continue; - } - - const columnsObject: Record = {}; - const indexesObject: Record = {}; - const checksObject: Record = {}; - const foreignKeysObject: Record = {}; - const primaryKeysObject: Record = {}; - const uniqueConstraintObject: Record = {}; - const policiesObject: Record = {}; - - columns.forEach((column) => { - const name = getColumnCasing(column, casing); - const notNull: boolean = column.notNull; - const primaryKey: boolean = column.primary; - const sqlTypeLowered = column.getSQLType().toLowerCase(); - - const getEnumSchema = (column: PgColumn) => { - while (is(column, PgArray)) { - column = column.baseColumn; - } - return is(column, PgEnumColumn) ? column.enum.schema || 'public' : undefined; - }; - const typeSchema: string | undefined = getEnumSchema(column); - - const generated = column.generated; - const identity = column.generatedIdentity; - - const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; - const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) - ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); - const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) - ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); - const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) - ?? (parseFloat(increment) < 0 ? maxValue : minValue); - const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; - - const columnToSet: Column = { - name, - type: column.getSQLType(), - typeSchema: typeSchema, - primaryKey, - notNull, - generated: generated - ? { - as: is(generated.as, SQL) - ? dialect.sqlToQuery(generated.as as SQL).sql - : typeof generated.as === 'function' - ? dialect.sqlToQuery(generated.as() as SQL).sql - : (generated.as as any), - type: 'stored', - } - : undefined, - identity: identity - ? { - type: identity.type, - name: identity.sequenceName ?? `${tableName}_${name}_seq`, - schema: schema ?? 'public', - increment, - startWith, - minValue, - maxValue, - cache, - cycle: identity?.sequenceOptions?.cycle ?? false, - } - : undefined, - }; - - if (column.isUnique) { - const existingUnique = uniqueConstraintObject[column.uniqueName!]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. - The unique constraint ${ - chalk.underline.blue( - column.uniqueName, - ) - } on the ${ - chalk.underline.blue( - name, - ) - } column is conflicting with a unique constraint name already defined for ${ - chalk.underline.blue( - existingUnique.columns.join(','), - ) - } columns\n`) - }`, - ); - process.exit(1); - } - uniqueConstraintObject[column.uniqueName!] = { - name: column.uniqueName!, - nullsNotDistinct: column.uniqueType === 'not distinct', - columns: [columnToSet.name], - }; - } - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - if (typeof column.default === 'string') { - columnToSet.default = `'${escapeSingleQuotes(column.default)}'`; - } else { - if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { - columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; - } else if (column.default instanceof Date) { - if (sqlTypeLowered === 'date') { - columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; - } else if (sqlTypeLowered === 'timestamp') { - columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; - } else { - columnToSet.default = `'${column.default.toISOString()}'`; - } - } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { - columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; - } else { - // Should do for all types - // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; - columnToSet.default = column.default; - } - } - } - } - columnsObject[name] = columnToSet; - }); - - primaryKeys.map((pk) => { - const originalColumnNames = pk.columns.map((c) => c.name); - const columnNames = pk.columns.map((c) => getColumnCasing(c, casing)); - - let name = pk.getName(); - if (casing !== undefined) { - for (let i = 0; i < originalColumnNames.length; i++) { - name = name.replace(originalColumnNames[i], columnNames[i]); - } - } - - primaryKeysObject[name] = { - name, - columns: columnNames, - }; - }); - - uniqueConstraints?.map((unq) => { - const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); - - const name = unq.name ?? uniqueKeyName(table, columnNames); - - const existingUnique = uniqueConstraintObject[name]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(tableName)} table. - The unique constraint ${chalk.underline.blue(name)} on the ${ - chalk.underline.blue( - columnNames.join(','), - ) - } columns is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue(existingUnique.columns.join(',')) - } columns\n`, - ) - }`, - ); - process.exit(1); - } - - uniqueConstraintObject[name] = { - name: unq.name!, - nullsNotDistinct: unq.nullsNotDistinct, - columns: columnNames, - }; - }); - - const fks: ForeignKey[] = foreignKeys.map((fk) => { - const tableFrom = tableName; - const onDelete = fk.onDelete; - const onUpdate = fk.onUpdate; - const reference = fk.reference(); - - const tableTo = getTableName(reference.foreignTable); - // TODO: resolve issue with schema undefined/public for db push(or squasher) - // getTableConfig(reference.foreignTable).schema || "public"; - const schemaTo = getTableConfig(reference.foreignTable).schema; - - const originalColumnsFrom = reference.columns.map((it) => it.name); - const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); - const originalColumnsTo = reference.foreignColumns.map((it) => it.name); - const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); - - let name = fk.getName(); - if (casing !== undefined) { - for (let i = 0; i < originalColumnsFrom.length; i++) { - name = name.replace(originalColumnsFrom[i], columnsFrom[i]); - } - for (let i = 0; i < originalColumnsTo.length; i++) { - name = name.replace(originalColumnsTo[i], columnsTo[i]); - } - } - - return { - name, - tableFrom, - tableTo, - schemaTo, - columnsFrom, - columnsTo, - onDelete, - onUpdate, - } as ForeignKey; - }); - - fks.forEach((it) => { - foreignKeysObject[it.name] = it; - }); - - indexes.forEach((value) => { - const columns = value.config.columns; - - let indexColumnNames: string[] = []; - columns.forEach((it) => { - if (is(it, SQL)) { - if (typeof value.config.name === 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `Please specify an index name in ${getTableName(value.config.table)} table that has "${ - dialect.sqlToQuery(it).sql - }" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`, - ) - }`, - ); - process.exit(1); - } - } - it = it as IndexedColumn; - const name = getColumnCasing(it as IndexedColumn, casing); - if ( - !is(it, SQL) - && it.type! === 'PgVector' - && typeof it.indexConfig!.opClass === 'undefined' - ) { - console.log( - `\n${ - withStyle.errorWarning( - `You are specifying an index on the ${ - chalk.blueBright( - name, - ) - } column inside the ${ - chalk.blueBright( - tableName, - ) - } table with the ${ - chalk.blueBright( - 'vector', - ) - } type without specifying an operator class. Vector extension doesn't have a default operator class, so you need to specify one of the available options. Here is a list of available op classes for the vector extension: [${ - vectorOps - .map((it) => `${chalk.underline(`${it}`)}`) - .join(', ') - }].\n\nYou can specify it using current syntax: ${ - chalk.underline( - `index("${value.config.name}").using("${value.config.method}", table.${name}.op("${ - vectorOps[0] - }"))`, - ) - }\n\nYou can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing\n`, - ) - }`, - ); - process.exit(1); - } - indexColumnNames.push(name); - }); - - const name = value.config.name ? value.config.name : indexName(tableName, indexColumnNames); - - let indexColumns: IndexColumnType[] = columns.map( - (it): IndexColumnType => { - if (is(it, SQL)) { - return { - expression: dialect.sqlToQuery(it, 'indexes').sql, - asc: true, - isExpression: true, - nulls: 'last', - }; - } else { - it = it as IndexedColumn; - return { - expression: getColumnCasing(it as IndexedColumn, casing), - isExpression: false, - asc: it.indexConfig?.order === 'asc', - nulls: it.indexConfig?.nulls - ? it.indexConfig?.nulls - : it.indexConfig?.order === 'desc' - ? 'first' - : 'last', - opclass: it.indexConfig?.opClass, - }; - } - }, - ); - - // check for index names duplicates - if (typeof indexesInSchema[schema ?? 'public'] !== 'undefined') { - if (indexesInSchema[schema ?? 'public'].includes(name)) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated index name across ${ - chalk.underline.blue(schema ?? 'public') - } schema. Please rename your index in either the ${ - chalk.underline.blue( - tableName, - ) - } table or the table with the duplicated index name`, - ) - }`, - ); - process.exit(1); - } - indexesInSchema[schema ?? 'public'].push(name); - } else { - indexesInSchema[schema ?? 'public'] = [name]; - } - - indexesObject[name] = { - name, - columns: indexColumns, - isUnique: value.config.unique ?? false, - where: value.config.where ? dialect.sqlToQuery(value.config.where).sql : undefined, - concurrently: value.config.concurrently ?? false, - method: value.config.method ?? 'btree', - with: value.config.with ?? {}, - }; - }); - - policies.forEach((policy) => { - const mappedTo = []; - - if (!policy.to) { - mappedTo.push('public'); - } else { - if (policy.to && typeof policy.to === 'string') { - mappedTo.push(policy.to); - } else if (policy.to && is(policy.to, PgRole)) { - mappedTo.push(policy.to.name); - } else if (policy.to && Array.isArray(policy.to)) { - policy.to.forEach((it) => { - if (typeof it === 'string') { - mappedTo.push(it); - } else if (is(it, PgRole)) { - mappedTo.push(it.name); - } - }); - } - } - - if (policiesObject[policy.name] !== undefined) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated policy name across ${ - chalk.underline.blue(tableKey) - } table. Please rename one of the policies with ${ - chalk.underline.blue( - policy.name, - ) - } name`, - ) - }`, - ); - process.exit(1); - } - - policiesObject[policy.name] = { - name: policy.name, - as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', - for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', - to: mappedTo.sort(), - using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, - withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, - }; - }); - - checks.forEach((check) => { - const checkName = check.name; - - if (typeof checksInTable[`"${schema ?? 'public'}"."${tableName}"`] !== 'undefined') { - if (checksInTable[`"${schema ?? 'public'}"."${tableName}"`].includes(check.name)) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated check constraint name across ${ - chalk.underline.blue( - schema ?? 'public', - ) - } schema in ${ - chalk.underline.blue( - tableName, - ) - }. Please rename your check constraint in either the ${ - chalk.underline.blue( - tableName, - ) - } table or the table with the duplicated check contraint name`, - ) - }`, - ); - process.exit(1); - } - checksInTable[`"${schema ?? 'public'}"."${tableName}"`].push(checkName); - } else { - checksInTable[`"${schema ?? 'public'}"."${tableName}"`] = [check.name]; - } - - checksObject[checkName] = { - name: checkName, - value: dialect.sqlToQuery(check.value).sql, - }; - }); - - const tableKey = `${schema ?? 'public'}.${tableName}`; - - result[tableKey] = { - name: tableName, - schema: schema ?? '', - columns: columnsObject, - indexes: indexesObject, - foreignKeys: foreignKeysObject, - compositePrimaryKeys: primaryKeysObject, - uniqueConstraints: uniqueConstraintObject, - policies: policiesObject, - checkConstraints: checksObject, - isRLSEnabled: enableRLS, - }; - } - - for (const policy of policies) { - // @ts-ignore - if (!policy._linkedTable) { - console.log( - `\n${ - withStyle.errorWarning( - `"Policy ${policy.name} was skipped because it was not linked to any table. You should either include the policy in a table or use .link() on the policy to link it to any table you have. For more information, please check:`, - ) - }`, - ); - continue; - } - - // @ts-ignore - const tableConfig = getTableConfig(policy._linkedTable); - - const tableKey = `${tableConfig.schema ?? 'public'}.${tableConfig.name}`; - - const mappedTo = []; - - if (!policy.to) { - mappedTo.push('public'); - } else { - if (policy.to && typeof policy.to === 'string') { - mappedTo.push(policy.to); - } else if (policy.to && is(policy.to, PgRole)) { - mappedTo.push(policy.to.name); - } else if (policy.to && Array.isArray(policy.to)) { - policy.to.forEach((it) => { - if (typeof it === 'string') { - mappedTo.push(it); - } else if (is(it, PgRole)) { - mappedTo.push(it.name); - } - }); - } - } - - // add separate policies object, that will be only responsible for policy creation - // but we would need to track if a policy was enabled for a specific table or not - // enable only if jsonStatements for enable rls was not already there + filter it - - if (result[tableKey]?.policies[policy.name] !== undefined || policiesToReturn[policy.name] !== undefined) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated policy name across ${ - chalk.underline.blue(tableKey) - } table. Please rename one of the policies with ${ - chalk.underline.blue( - policy.name, - ) - } name`, - ) - }`, - ); - process.exit(1); - } - - const mappedPolicy = { - name: policy.name, - as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', - for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', - to: mappedTo.sort(), - using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, - withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, - }; - - if (result[tableKey]) { - result[tableKey].policies[policy.name] = mappedPolicy; - } else { - policiesToReturn[policy.name] = { - ...mappedPolicy, - schema: tableConfig.schema ?? 'public', - on: `"${tableConfig.schema ?? 'public'}"."${tableConfig.name}"`, - }; - } - } - - for (const sequence of sequences) { - const name = sequence.seqName!; - if (typeof sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] === 'undefined') { - const increment = stringFromIdentityProperty(sequence?.seqOptions?.increment) ?? '1'; - const minValue = stringFromIdentityProperty(sequence?.seqOptions?.minValue) - ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); - const maxValue = stringFromIdentityProperty(sequence?.seqOptions?.maxValue) - ?? (parseFloat(increment) < 0 ? '-1' : '9223372036854775807'); - const startWith = stringFromIdentityProperty(sequence?.seqOptions?.startWith) - ?? (parseFloat(increment) < 0 ? maxValue : minValue); - const cache = stringFromIdentityProperty(sequence?.seqOptions?.cache) ?? '1'; - - sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] = { - name, - schema: sequence.schema ?? 'public', - increment, - startWith, - minValue, - maxValue, - cache, - cycle: sequence.seqOptions?.cycle ?? false, - }; - } else { - // duplicate seq error - } - } - - for (const role of roles) { - if (!(role as any)._existing) { - rolesToReturn[role.name] = { - name: role.name, - createDb: (role as any).createDb === undefined ? false : (role as any).createDb, - createRole: (role as any).createRole === undefined ? false : (role as any).createRole, - inherit: (role as any).inherit === undefined ? true : (role as any).inherit, - }; - } - } - const combinedViews = [...views, ...matViews]; - for (const view of combinedViews) { - let viewName; - let schema; - let query; - let selectedFields; - let isExisting; - let withOption; - let tablespace; - let using; - let withNoData; - let materialized: boolean = false; - - if (is(view, PgView)) { - ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption } = getViewConfig(view)); - } else { - ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption, tablespace, using, withNoData } = - getMaterializedViewConfig(view)); - - materialized = true; - } - - const viewSchema = schema ?? 'public'; - - const viewKey = `${viewSchema}.${viewName}`; - - const columnsObject: Record = {}; - const uniqueConstraintObject: Record = {}; - - const existingView = resultViews[viewKey]; - if (typeof existingView !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated view name across ${ - chalk.underline.blue(schema ?? 'public') - } schema. Please rename your view`, - ) - }`, - ); - process.exit(1); - } - - for (const key in selectedFields) { - if (is(selectedFields[key], PgColumn)) { - const column = selectedFields[key]; - - const notNull: boolean = column.notNull; - const primaryKey: boolean = column.primary; - const sqlTypeLowered = column.getSQLType().toLowerCase(); - - const typeSchema = is(column, PgEnumColumn) ? column.enum.schema || 'public' : undefined; - const generated = column.generated; - const identity = column.generatedIdentity; - - const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; - const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) - ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); - const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) - ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); - const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) - ?? (parseFloat(increment) < 0 ? maxValue : minValue); - const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; - - const columnToSet: Column = { - name: column.name, - type: column.getSQLType(), - typeSchema: typeSchema, - primaryKey, - notNull, - generated: generated - ? { - as: is(generated.as, SQL) - ? dialect.sqlToQuery(generated.as as SQL).sql - : typeof generated.as === 'function' - ? dialect.sqlToQuery(generated.as() as SQL).sql - : (generated.as as any), - type: 'stored', - } - : undefined, - identity: identity - ? { - type: identity.type, - name: identity.sequenceName ?? `${viewName}_${column.name}_seq`, - schema: schema ?? 'public', - increment, - startWith, - minValue, - maxValue, - cache, - cycle: identity?.sequenceOptions?.cycle ?? false, - } - : undefined, - }; - - if (column.isUnique) { - const existingUnique = uniqueConstraintObject[column.uniqueName!]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(viewName)} table. - The unique constraint ${chalk.underline.blue(column.uniqueName)} on the ${ - chalk.underline.blue( - column.name, - ) - } column is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue(existingUnique.columns.join(',')) - } columns\n`, - ) - }`, - ); - process.exit(1); - } - uniqueConstraintObject[column.uniqueName!] = { - name: column.uniqueName!, - nullsNotDistinct: column.uniqueType === 'not distinct', - columns: [columnToSet.name], - }; - } - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - if (typeof column.default === 'string') { - columnToSet.default = `'${column.default}'`; - } else { - if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { - columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; - } else if (column.default instanceof Date) { - if (sqlTypeLowered === 'date') { - columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; - } else if (sqlTypeLowered === 'timestamp') { - columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; - } else { - columnToSet.default = `'${column.default.toISOString()}'`; - } - } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { - columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; - } else { - // Should do for all types - // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; - columnToSet.default = column.default; - } - } - } - } - columnsObject[column.name] = columnToSet; - } - } - - resultViews[viewKey] = { - columns: columnsObject, - definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, - name: viewName, - schema: viewSchema, - isExisting, - with: withOption, - withNoData, - materialized, - tablespace, - using, - }; - } - - const enumsToReturn: Record = enums.reduce<{ - [key: string]: Enum; - }>((map, obj) => { - const enumSchema = obj.schema || 'public'; - const key = `${enumSchema}.${obj.enumName}`; - map[key] = { - name: obj.enumName, - schema: enumSchema, - values: obj.enumValues, - }; - return map; - }, {}); - - const schemasObject = Object.fromEntries( - schemas - .filter((it) => { - if (schemaFilter) { - return schemaFilter.includes(it.schemaName) && it.schemaName !== 'public'; - } else { - return it.schemaName !== 'public'; - } - }) - .map((it) => [it.schemaName, it.schemaName]), - ); - - return { - version: '7', - dialect: 'postgresql', - tables: result, - enums: enumsToReturn, - schemas: schemasObject, - sequences: sequencesToReturn, - roles: rolesToReturn, - policies: policiesToReturn, - views: resultViews, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - }; -}; - -const trimChar = (str: string, char: string) => { - let start = 0; - let end = str.length; - - while (start < end && str[start] === char) ++start; - while (end > start && str[end - 1] === char) --end; - - // this.toString() due to ava deep equal issue with String { "value" } - return start > 0 || end < str.length ? str.substring(start, end) : str.toString(); -}; - -function prepareRoles(entities?: { - roles: boolean | { - provider?: string | undefined; - include?: string[] | undefined; - exclude?: string[] | undefined; - }; -}) { - let useRoles: boolean = false; - const includeRoles: string[] = []; - const excludeRoles: string[] = []; - - if (entities && entities.roles) { - if (typeof entities.roles === 'object') { - if (entities.roles.provider) { - if (entities.roles.provider === 'supabase') { - excludeRoles.push(...[ - 'anon', - 'authenticator', - 'authenticated', - 'service_role', - 'supabase_auth_admin', - 'supabase_storage_admin', - 'dashboard_user', - 'supabase_admin', - ]); - } else if (entities.roles.provider === 'neon') { - excludeRoles.push(...['authenticated', 'anonymous']); - } - } - if (entities.roles.include) { - includeRoles.push(...entities.roles.include); - } - if (entities.roles.exclude) { - excludeRoles.push(...entities.roles.exclude); - } - } else { - useRoles = entities.roles; - } - } - return { useRoles, includeRoles, excludeRoles }; -} - -function parsePgArray(input: string): string[] { - // Remove surrounding braces - const content = input.slice(1, -1); - - const result = []; - let current = ''; - let inQuotes = false; - - for (let i = 0; i < content.length; i++) { - const char = content[i]; - - if (char === '"') { - if (inQuotes && content[i + 1] === '"') { - // Escaped quote inside quoted string - current += '"'; - i++; // skip next quote - } else { - inQuotes = !inQuotes; - } - } else if (char === ',' && !inQuotes) { - result.push(current); - current = ''; - } else { - current += char; - } - } - result.push(current); - - return result.map((item) => item.trim()); -} - -export const fromDatabase = async ( - db: DB, - tablesFilter: (table: string) => boolean = () => true, - schemaFilters: string[], - entities?: { - roles: boolean | { - provider?: string | undefined; - include?: string[] | undefined; - exclude?: string[] | undefined; - }; - }, - progressCallback?: ( - stage: IntrospectStage, - count: number, - status: IntrospectStatus, - ) => void, - tsSchema?: PgSchemaInternal, -): Promise => { - const result: Record = {}; - const views: Record = {}; - const policies: Record = {}; - const internals: PgKitInternals = { tables: {} }; - - const where = schemaFilters.map((t) => `n.nspname = '${t}'`).join(' or '); - - const allTables = await db.query<{ table_schema: string; table_name: string; type: string; rls_enabled: boolean }>( - `SELECT - n.nspname AS table_schema, - c.relname AS table_name, - CASE - WHEN c.relkind = 'r' THEN 'table' - WHEN c.relkind = 'v' THEN 'view' - WHEN c.relkind = 'm' THEN 'materialized_view' - END AS type, - c.relrowsecurity AS rls_enabled -FROM - pg_catalog.pg_class c -JOIN - pg_catalog.pg_namespace n ON n.oid = c.relnamespace -WHERE - c.relkind IN ('r', 'v', 'm') - ${where === '' ? '' : ` AND ${where}`};`, - ); - - const schemas = new Set(allTables.map((it) => it.table_schema)); - schemas.delete('public'); - - const allSchemas = await db.query<{ - table_schema: string; - }>(`select s.nspname as table_schema - from pg_catalog.pg_namespace s - join pg_catalog.pg_user u on u.usesysid = s.nspowner - where nspname not in ('information_schema', 'pg_catalog', 'public') - and nspname not like 'pg_toast%' - and nspname not like 'pg_temp_%' - order by table_schema;`); - - allSchemas.forEach((item) => { - if (schemaFilters.includes(item.table_schema)) { - schemas.add(item.table_schema); - } - }); - - let columnsCount = 0; - let indexesCount = 0; - let foreignKeysCount = 0; - let tableCount = 0; - let checksCount = 0; - let viewsCount = 0; - - const sequencesToReturn: Record = {}; - - const seqWhere = schemaFilters.map((t) => `schemaname = '${t}'`).join(' or '); - - const allSequences = await db.query( - `select schemaname, sequencename, start_value, min_value, max_value, increment_by, cycle, cache_size from pg_sequences as seq${ - seqWhere === '' ? '' : ` WHERE ${seqWhere}` - };`, - ); - - for (const dbSeq of allSequences) { - const schemaName = dbSeq.schemaname; - const sequenceName = dbSeq.sequencename; - const startValue = stringFromDatabaseIdentityProperty(dbSeq.start_value); - const minValue = stringFromDatabaseIdentityProperty(dbSeq.min_value); - const maxValue = stringFromDatabaseIdentityProperty(dbSeq.max_value); - const incrementBy = stringFromDatabaseIdentityProperty(dbSeq.increment_by); - const cycle = dbSeq.cycle; - const cacheSize = stringFromDatabaseIdentityProperty(dbSeq.cache_size); - const key = `${schemaName}.${sequenceName}`; - - sequencesToReturn[key] = { - name: sequenceName, - schema: schemaName, - startWith: startValue, - minValue, - maxValue, - increment: incrementBy, - cycle, - cache: cacheSize, - }; - } - - const whereEnums = schemaFilters.map((t) => `n.nspname = '${t}'`).join(' or '); - - const allEnums = await db.query( - `select n.nspname as enum_schema, - t.typname as enum_name, - e.enumlabel as enum_value, - e.enumsortorder as sort_order - from pg_type t - join pg_enum e on t.oid = e.enumtypid - join pg_catalog.pg_namespace n ON n.oid = t.typnamespace - ${whereEnums === '' ? '' : ` WHERE ${whereEnums}`} - order by enum_schema, enum_name, sort_order;`, - ); - - const enumsToReturn: Record = {}; - - for (const dbEnum of allEnums) { - const enumName = dbEnum.enum_name; - const enumValue = dbEnum.enum_value as string; - const enumSchema: string = dbEnum.enum_schema || 'public'; - const key = `${enumSchema}.${enumName}`; - - if (enumsToReturn[key] !== undefined && enumsToReturn[key] !== null) { - enumsToReturn[key].values.push(enumValue); - } else { - enumsToReturn[key] = { - name: enumName, - values: [enumValue], - schema: enumSchema, - }; - } - } - if (progressCallback) { - progressCallback('enums', Object.keys(enumsToReturn).length, 'done'); - } - - const allRoles = await db.query< - { rolname: string; rolinherit: boolean; rolcreatedb: boolean; rolcreaterole: boolean } - >( - `SELECT rolname, rolinherit, rolcreatedb, rolcreaterole FROM pg_roles;`, - ); - - const rolesToReturn: Record = {}; - - const preparedRoles = prepareRoles(entities); - - if ( - preparedRoles.useRoles || !(preparedRoles.includeRoles.length === 0 && preparedRoles.excludeRoles.length === 0) - ) { - for (const dbRole of allRoles) { - if ( - preparedRoles.useRoles - ) { - rolesToReturn[dbRole.rolname] = { - createDb: dbRole.rolcreatedb, - createRole: dbRole.rolcreatedb, - inherit: dbRole.rolinherit, - name: dbRole.rolname, - }; - } else { - if (preparedRoles.includeRoles.length === 0 && preparedRoles.excludeRoles.length === 0) continue; - if ( - preparedRoles.includeRoles.includes(dbRole.rolname) && preparedRoles.excludeRoles.includes(dbRole.rolname) - ) continue; - if (preparedRoles.excludeRoles.includes(dbRole.rolname)) continue; - if (!preparedRoles.includeRoles.includes(dbRole.rolname)) continue; - - rolesToReturn[dbRole.rolname] = { - createDb: dbRole.rolcreatedb, - createRole: dbRole.rolcreaterole, - inherit: dbRole.rolinherit, - name: dbRole.rolname, - }; - } - } - } - - const schemasForLinkedPoliciesInSchema = Object.values(tsSchema?.policies ?? {}).map((it) => it.schema!); - - const wherePolicies = [...schemaFilters, ...schemasForLinkedPoliciesInSchema] - .map((t) => `schemaname = '${t}'`) - .join(' or '); - - const policiesByTable: Record> = {}; - - const allPolicies = await db.query< - { - schemaname: string; - tablename: string; - name: string; - as: string; - to: string; - for: string; - using: string; - withCheck: string; - } - >(`SELECT schemaname, tablename, policyname as name, permissive as "as", roles as to, cmd as for, qual as using, with_check as "withCheck" FROM pg_policies${ - wherePolicies === '' ? '' : ` WHERE ${wherePolicies}` - };`); - - for (const dbPolicy of allPolicies) { - const { tablename, schemaname, to, withCheck, using, ...rest } = dbPolicy; - const tableForPolicy = policiesByTable[`${schemaname}.${tablename}`]; - - const parsedTo = typeof to === 'string' ? to.slice(1, -1).split(',') : to; - - const parsedWithCheck = withCheck === null ? undefined : withCheck; - const parsedUsing = using === null ? undefined : using; - - if (tableForPolicy) { - tableForPolicy[dbPolicy.name] = { ...rest, to: parsedTo } as Policy; - } else { - policiesByTable[`${schemaname}.${tablename}`] = { - [dbPolicy.name]: { ...rest, to: parsedTo, withCheck: parsedWithCheck, using: parsedUsing } as Policy, - }; - } - - if (tsSchema?.policies[dbPolicy.name]) { - policies[dbPolicy.name] = { - ...rest, - to: parsedTo, - withCheck: parsedWithCheck, - using: parsedUsing, - on: tsSchema?.policies[dbPolicy.name].on, - } as Policy; - } - } - - if (progressCallback) { - progressCallback( - 'policies', - Object.values(policiesByTable).reduce((total, innerRecord) => { - return total + Object.keys(innerRecord).length; - }, 0), - 'done', - ); - } - - const sequencesInColumns: string[] = []; - - const all = allTables - .filter((it) => it.type === 'table') - .map((row) => { - return new Promise(async (res, rej) => { - const tableName = row.table_name as string; - if (!tablesFilter(tableName)) return res(''); - tableCount += 1; - const tableSchema = row.table_schema; - - try { - const columnToReturn: Record = {}; - const indexToReturn: Record = {}; - const foreignKeysToReturn: Record = {}; - const primaryKeys: Record = {}; - const uniqueConstrains: Record = {}; - const checkConstraints: Record = {}; - - const tableResponse = await getColumnsInfoQuery({ schema: tableSchema, table: tableName, db }); - - const tableConstraints = await db.query( - `SELECT c.column_name, c.data_type, constraint_type, constraint_name, constraint_schema - FROM information_schema.table_constraints tc - JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) - JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema - AND tc.table_name = c.table_name AND ccu.column_name = c.column_name - WHERE tc.table_name = '${tableName}' and constraint_schema = '${tableSchema}';`, - ); - - const tableChecks = await db.query(`SELECT - tc.constraint_name, - tc.constraint_type, - pg_get_constraintdef(con.oid) AS constraint_definition - FROM - information_schema.table_constraints AS tc - JOIN pg_constraint AS con - ON tc.constraint_name = con.conname - AND con.conrelid = ( - SELECT oid - FROM pg_class - WHERE relname = tc.table_name - AND relnamespace = ( - SELECT oid - FROM pg_namespace - WHERE nspname = tc.constraint_schema - ) - ) - WHERE - tc.table_name = '${tableName}' - AND tc.constraint_schema = '${tableSchema}' - AND tc.constraint_type = 'CHECK' - AND con.contype = 'c';`); - - columnsCount += tableResponse.length; - if (progressCallback) { - progressCallback('columns', columnsCount, 'fetching'); - } - - const tableForeignKeys = await db.query( - `SELECT - con.contype::text AS constraint_type, - nsp.nspname AS constraint_schema, - con.conname AS constraint_name, - rel.relname AS table_name, - pg_get_constraintdef(con.oid) AS expression, - fnsp.nspname AS foreign_table_schema, - frel.relname AS foreign_table_name, - -- Aggregate the local column names in order - array_agg(att.attname ORDER BY gs.n) AS column_names, - -- Aggregate the column order numbers (which will be 1,2,...) - array_agg(gs.n ORDER BY gs.n) AS column_positions, - -- Aggregate the foreign (referenced) column names in order - array_agg(fatt.attname ORDER BY gs.n) AS foreign_column_names, - CASE con.confupdtype - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'n' THEN 'SET NULL' - WHEN 'c' THEN 'CASCADE' - WHEN 'd' THEN 'SET DEFAULT' - END AS update_rule, - CASE con.confdeltype - WHEN 'a' THEN 'NO ACTION' - WHEN 'r' THEN 'RESTRICT' - WHEN 'n' THEN 'SET NULL' - WHEN 'c' THEN 'CASCADE' - WHEN 'd' THEN 'SET DEFAULT' - END AS delete_rule -FROM pg_constraint con -JOIN pg_namespace nsp ON nsp.oid = con.connamespace -JOIN pg_class rel ON rel.oid = con.conrelid --- Unnest the array of referencing column numbers with ordinality -JOIN LATERAL unnest(con.conkey) WITH ORDINALITY AS gs(attnum, n) ON true -JOIN pg_attribute att - ON att.attrelid = con.conrelid - AND att.attnum = gs.attnum -LEFT JOIN pg_class frel ON frel.oid = con.confrelid -LEFT JOIN pg_namespace fnsp ON fnsp.oid = frel.relnamespace --- Unnest the array of referenced column numbers in the same order -JOIN LATERAL unnest(con.confkey) WITH ORDINALITY AS gs2(attnum, n) ON gs.n = gs2.n -JOIN pg_attribute fatt - ON fatt.attrelid = con.confrelid - AND fatt.attnum = gs2.attnum -WHERE con.contype = 'f' - AND rel.relname = '${tableName}' - AND nsp.nspname = '${tableSchema}' -GROUP BY con.oid, nsp.nspname, con.conname, rel.relname, fnsp.nspname, frel.relname, - con.contype, con.confupdtype, con.confdeltype -ORDER BY con.conname;`, - ); - - foreignKeysCount += tableForeignKeys.length; - if (progressCallback) { - progressCallback('fks', foreignKeysCount, 'fetching'); - } - for (const fk of tableForeignKeys) { - const columnsFrom: string[] = parsePgArray(fk.column_names); - const tableTo = fk.foreign_table_name; - const columnsTo: string[] = parsePgArray(fk.foreign_column_names); - const schemaTo: string = fk.foreign_table_schema; - const foreignKeyName = fk.constraint_name; - const onUpdate = fk.update_rule?.toLowerCase(); - const onDelete = fk.delete_rule?.toLowerCase(); - - foreignKeysToReturn[foreignKeyName] = { - name: foreignKeyName, - tableFrom: tableName, - tableTo, - schemaTo, - columnsFrom: columnsFrom, - columnsTo: columnsTo, - onDelete, - onUpdate, - }; - } - - const uniqueConstrainsRows = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'UNIQUE'); - - for (const unqs of uniqueConstrainsRows) { - // const tableFrom = fk.table_name; - const columnName: string = unqs.column_name; - const constraintName: string = unqs.constraint_name; - - if (typeof uniqueConstrains[constraintName] !== 'undefined') { - uniqueConstrains[constraintName].columns.push(columnName); - } else { - uniqueConstrains[constraintName] = { - columns: [columnName], - nullsNotDistinct: false, - name: constraintName, - }; - } - } - - checksCount += tableChecks.length; - if (progressCallback) { - progressCallback('checks', checksCount, 'fetching'); - } - for (const checks of tableChecks) { - // CHECK (((email)::text <> 'test@gmail.com'::text)) - // Where (email) is column in table - let checkValue: string = checks.constraint_definition; - const constraintName: string = checks.constraint_name; - - checkValue = checkValue.replace(/^CHECK\s*\(\(/, '').replace(/\)\)\s*$/, ''); - - checkConstraints[constraintName] = { - name: constraintName, - value: checkValue, - }; - } - - for (const columnResponse of tableResponse) { - const columnName = columnResponse.column_name; - const columnAdditionalDT = columnResponse.additional_dt; - const columnDimensions = columnResponse.array_dimensions; - const enumType: string = columnResponse.enum_name; - let columnType: string = columnResponse.data_type; - const typeSchema = columnResponse.type_schema; - const defaultValueRes: string = columnResponse.column_default; - - const isGenerated = columnResponse.is_generated === 'ALWAYS'; - const generationExpression = columnResponse.generation_expression; - const isIdentity = columnResponse.is_identity === 'YES'; - const identityGeneration = columnResponse.identity_generation === 'ALWAYS' ? 'always' : 'byDefault'; - const identityStart = columnResponse.identity_start; - const identityIncrement = columnResponse.identity_increment; - const identityMaximum = columnResponse.identity_maximum; - const identityMinimum = columnResponse.identity_minimum; - const identityCycle = columnResponse.identity_cycle === 'YES'; - const identityName = columnResponse.seq_name; - - const primaryKey = tableConstraints.filter((mapRow) => - columnName === mapRow.column_name && mapRow.constraint_type === 'PRIMARY KEY' - ); - - const cprimaryKey = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'PRIMARY KEY'); - - if (cprimaryKey.length > 1) { - const tableCompositePkName = await db.query( - `SELECT conname AS primary_key - FROM pg_constraint join pg_class on (pg_class.oid = conrelid) - WHERE contype = 'p' - AND connamespace = $1::regnamespace - AND pg_class.relname = $2;`, - [tableSchema, tableName], - ); - primaryKeys[tableCompositePkName[0].primary_key] = { - name: tableCompositePkName[0].primary_key, - columns: cprimaryKey.map((c: any) => c.column_name), - }; - } - - let columnTypeMapped = columnType; - - // Set default to internal object - if (columnAdditionalDT === 'ARRAY') { - if (typeof internals.tables[tableName] === 'undefined') { - internals.tables[tableName] = { - columns: { - [columnName]: { - isArray: true, - dimensions: columnDimensions, - rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), - }, - }, - }; - } else { - if (typeof internals.tables[tableName]!.columns[columnName] === 'undefined') { - internals.tables[tableName]!.columns[columnName] = { - isArray: true, - dimensions: columnDimensions, - rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), - }; - } - } - } - - const defaultValue = defaultForColumn(columnResponse, internals, tableName); - if ( - defaultValue === 'NULL' - || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) - ) { - if (typeof internals!.tables![tableName] === 'undefined') { - internals!.tables![tableName] = { - columns: { - [columnName]: { - isDefaultAnExpression: true, - }, - }, - }; - } else { - if (typeof internals!.tables![tableName]!.columns[columnName] === 'undefined') { - internals!.tables![tableName]!.columns[columnName] = { - isDefaultAnExpression: true, - }; - } else { - internals!.tables![tableName]!.columns[columnName]!.isDefaultAnExpression = true; - } - } - } - - const isSerial = columnType === 'serial'; - - if (columnTypeMapped.startsWith('numeric(')) { - columnTypeMapped = columnTypeMapped.replace(',', ', '); - } - - if (columnAdditionalDT === 'ARRAY') { - for (let i = 1; i < Number(columnDimensions); i++) { - columnTypeMapped += '[]'; - } - } - - columnTypeMapped = columnTypeMapped - .replace('character varying', 'varchar') - .replace(' without time zone', '') - // .replace("timestamp without time zone", "timestamp") - .replace('character', 'char'); - - columnTypeMapped = trimChar(columnTypeMapped, '"'); - - columnToReturn[columnName] = { - name: columnName, - type: - // filter vectors, but in future we should filter any extension that was installed by user - columnAdditionalDT === 'USER-DEFINED' - && !['vector', 'geometry', 'halfvec', 'sparsevec', 'bit'].includes(enumType) - ? enumType - : columnTypeMapped, - typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined - ? enumsToReturn[`${typeSchema}.${enumType}`].schema - : undefined, - primaryKey: primaryKey.length === 1 && cprimaryKey.length < 2, - // default: isSerial ? undefined : defaultValue, - notNull: columnResponse.is_nullable === 'NO', - generated: isGenerated - ? { as: generationExpression, type: 'stored' } - : undefined, - identity: isIdentity - ? { - type: identityGeneration, - name: identityName, - increment: stringFromDatabaseIdentityProperty(identityIncrement), - minValue: stringFromDatabaseIdentityProperty(identityMinimum), - maxValue: stringFromDatabaseIdentityProperty(identityMaximum), - startWith: stringFromDatabaseIdentityProperty(identityStart), - cache: sequencesToReturn[identityName]?.cache - ? sequencesToReturn[identityName]?.cache - : sequencesToReturn[`${tableSchema}.${identityName}`]?.cache - ? sequencesToReturn[`${tableSchema}.${identityName}`]?.cache - : undefined, - cycle: identityCycle, - schema: tableSchema, - } - : undefined, - }; - - if (identityName && typeof identityName === 'string') { - // remove "" from sequence name - delete sequencesToReturn[ - `${tableSchema}.${ - identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName - }` - ]; - delete sequencesToReturn[identityName]; - } - - if (!isSerial && typeof defaultValue !== 'undefined') { - columnToReturn[columnName].default = defaultValue; - } - } - - const dbIndexes = await db.query( - `SELECT DISTINCT ON (t.relname, ic.relname, k.i) t.relname as table_name, ic.relname AS indexname, - k.i AS index_order, - i.indisunique as is_unique, - am.amname as method, - ic.reloptions as with, - coalesce(a.attname, pg_get_indexdef(i.indexrelid, k.i, false)) AS column_name, - CASE - WHEN pg_get_expr(i.indexprs, i.indrelid) IS NOT NULL THEN 1 - ELSE 0 - END AS is_expression, - i.indoption[k.i-1] & 1 = 1 AS descending, - i.indoption[k.i-1] & 2 = 2 AS nulls_first, - pg_get_expr( - i.indpred, - i.indrelid - ) as where, - opc.opcname - FROM pg_class t - LEFT JOIN pg_index i ON t.oid = i.indrelid - LEFT JOIN pg_class ic ON ic.oid = i.indexrelid - CROSS JOIN LATERAL (SELECT unnest(i.indkey), generate_subscripts(i.indkey, 1) + 1) AS k(attnum, i) - LEFT JOIN pg_attribute AS a - ON i.indrelid = a.attrelid AND k.attnum = a.attnum - JOIN pg_namespace c on c.oid = t.relnamespace - LEFT JOIN pg_am AS am ON ic.relam = am.oid - JOIN pg_opclass opc ON opc.oid = ANY(i.indclass) - WHERE - c.nspname = '${tableSchema}' AND - t.relname = '${tableName}';`, - ); - - const dbIndexFromConstraint = await db.query( - `SELECT - idx.indexrelname AS index_name, - idx.relname AS table_name, - schemaname, - CASE WHEN con.conname IS NOT NULL THEN 1 ELSE 0 END AS generated_by_constraint - FROM - pg_stat_user_indexes idx - LEFT JOIN - pg_constraint con ON con.conindid = idx.indexrelid - WHERE idx.relname = '${tableName}' and schemaname = '${tableSchema}' - group by index_name, table_name,schemaname, generated_by_constraint;`, - ); - - const idxsInConsteraint = dbIndexFromConstraint.filter((it) => it.generated_by_constraint === 1).map((it) => - it.index_name - ); - - for (const dbIndex of dbIndexes) { - const indexName: string = dbIndex.indexname; - const indexColumnName: string = dbIndex.column_name; - const indexIsUnique = dbIndex.is_unique; - const indexMethod = dbIndex.method; - const indexWith: string[] = dbIndex.with; - const indexWhere: string = dbIndex.where; - const opclass: string = dbIndex.opcname; - const isExpression = dbIndex.is_expression === 1; - - const desc: boolean = dbIndex.descending; - const nullsFirst: boolean = dbIndex.nulls_first; - - const mappedWith: Record = {}; - - if (indexWith !== null) { - indexWith - // .slice(1, indexWith.length - 1) - // .split(",") - .forEach((it) => { - const splitted = it.split('='); - mappedWith[splitted[0]] = splitted[1]; - }); - } - - if (idxsInConsteraint.includes(indexName)) continue; - - if (typeof indexToReturn[indexName] !== 'undefined') { - indexToReturn[indexName].columns.push({ - expression: indexColumnName, - asc: !desc, - nulls: nullsFirst ? 'first' : 'last', - opclass, - isExpression, - }); - } else { - indexToReturn[indexName] = { - name: indexName, - columns: [ - { - expression: indexColumnName, - asc: !desc, - nulls: nullsFirst ? 'first' : 'last', - opclass, - isExpression, - }, - ], - isUnique: indexIsUnique, - // should not be a part of diff detects - concurrently: false, - method: indexMethod, - where: indexWhere === null ? undefined : indexWhere, - with: mappedWith, - }; - } - } - - indexesCount += Object.keys(indexToReturn).length; - if (progressCallback) { - progressCallback('indexes', indexesCount, 'fetching'); - } - result[`${tableSchema}.${tableName}`] = { - name: tableName, - schema: tableSchema !== 'public' ? tableSchema : '', - columns: columnToReturn, - indexes: indexToReturn, - foreignKeys: foreignKeysToReturn, - compositePrimaryKeys: primaryKeys, - uniqueConstraints: uniqueConstrains, - checkConstraints: checkConstraints, - policies: policiesByTable[`${tableSchema}.${tableName}`] ?? {}, - isRLSEnabled: row.rls_enabled, - }; - } catch (e) { - rej(e); - return; - } - res(''); - }); - }); - - if (progressCallback) { - progressCallback('tables', tableCount, 'done'); - } - - for await (const _ of all) { - } - - const allViews = allTables - .filter((it) => it.type === 'view' || it.type === 'materialized_view') - .map((row) => { - return new Promise(async (res, rej) => { - const viewName = row.table_name as string; - if (!tablesFilter(viewName)) return res(''); - tableCount += 1; - const viewSchema = row.table_schema; - - try { - const columnToReturn: Record = {}; - - const viewResponses = await getColumnsInfoQuery({ schema: viewSchema, table: viewName, db }); - - for (const viewResponse of viewResponses) { - const columnName = viewResponse.column_name; - const columnAdditionalDT = viewResponse.additional_dt; - const columnDimensions = viewResponse.array_dimensions; - const enumType: string = viewResponse.enum_name; - let columnType: string = viewResponse.data_type; - const typeSchema = viewResponse.type_schema; - // const defaultValueRes: string = viewResponse.column_default; - - const isGenerated = viewResponse.is_generated === 'ALWAYS'; - const generationExpression = viewResponse.generation_expression; - const isIdentity = viewResponse.is_identity === 'YES'; - const identityGeneration = viewResponse.identity_generation === 'ALWAYS' ? 'always' : 'byDefault'; - const identityStart = viewResponse.identity_start; - const identityIncrement = viewResponse.identity_increment; - const identityMaximum = viewResponse.identity_maximum; - const identityMinimum = viewResponse.identity_minimum; - const identityCycle = viewResponse.identity_cycle === 'YES'; - const identityName = viewResponse.seq_name; - const defaultValueRes = viewResponse.column_default; - - const primaryKey = viewResponse.constraint_type === 'PRIMARY KEY'; - - let columnTypeMapped = columnType; - - // Set default to internal object - if (columnAdditionalDT === 'ARRAY') { - if (typeof internals.tables[viewName] === 'undefined') { - internals.tables[viewName] = { - columns: { - [columnName]: { - isArray: true, - dimensions: columnDimensions, - rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), - }, - }, - }; - } else { - if (typeof internals.tables[viewName]!.columns[columnName] === 'undefined') { - internals.tables[viewName]!.columns[columnName] = { - isArray: true, - dimensions: columnDimensions, - rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), - }; - } - } - } - - const defaultValue = defaultForColumn(viewResponse, internals, viewName); - if ( - defaultValue === 'NULL' - || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) - ) { - if (typeof internals!.tables![viewName] === 'undefined') { - internals!.tables![viewName] = { - columns: { - [columnName]: { - isDefaultAnExpression: true, - }, - }, - }; - } else { - if (typeof internals!.tables![viewName]!.columns[columnName] === 'undefined') { - internals!.tables![viewName]!.columns[columnName] = { - isDefaultAnExpression: true, - }; - } else { - internals!.tables![viewName]!.columns[columnName]!.isDefaultAnExpression = true; - } - } - } - - const isSerial = columnType === 'serial'; - - if (columnTypeMapped.startsWith('numeric(')) { - columnTypeMapped = columnTypeMapped.replace(',', ', '); - } - - if (columnAdditionalDT === 'ARRAY') { - for (let i = 1; i < Number(columnDimensions); i++) { - columnTypeMapped += '[]'; - } - } - - columnTypeMapped = columnTypeMapped - .replace('character varying', 'varchar') - .replace(' without time zone', '') - // .replace("timestamp without time zone", "timestamp") - .replace('character', 'char'); - - columnTypeMapped = trimChar(columnTypeMapped, '"'); - - columnToReturn[columnName] = { - name: columnName, - type: - // filter vectors, but in future we should filter any extension that was installed by user - columnAdditionalDT === 'USER-DEFINED' - && !['vector', 'geometry', 'halfvec', 'sparsevec', 'bit'].includes(enumType) - ? enumType - : columnTypeMapped, - typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined - ? enumsToReturn[`${typeSchema}.${enumType}`].schema - : undefined, - primaryKey: primaryKey, - notNull: viewResponse.is_nullable === 'NO', - generated: isGenerated ? { as: generationExpression, type: 'stored' } : undefined, - identity: isIdentity - ? { - type: identityGeneration, - name: identityName, - increment: stringFromDatabaseIdentityProperty(identityIncrement), - minValue: stringFromDatabaseIdentityProperty(identityMinimum), - maxValue: stringFromDatabaseIdentityProperty(identityMaximum), - startWith: stringFromDatabaseIdentityProperty(identityStart), - cache: sequencesToReturn[identityName]?.cache - ? sequencesToReturn[identityName]?.cache - : sequencesToReturn[`${viewSchema}.${identityName}`]?.cache - ? sequencesToReturn[`${viewSchema}.${identityName}`]?.cache - : undefined, - cycle: identityCycle, - schema: viewSchema, - } - : undefined, - }; - - if (identityName) { - // remove "" from sequence name - delete sequencesToReturn[ - `${viewSchema}.${ - identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName - }` - ]; - delete sequencesToReturn[identityName]; - } - - if (!isSerial && typeof defaultValue !== 'undefined') { - columnToReturn[columnName].default = defaultValue; - } - } - - const [viewInfo] = await db.query<{ - view_name: string; - schema_name: string; - definition: string; - tablespace_name: string | null; - options: string[] | null; - location: string | null; - }>(` - SELECT - c.relname AS view_name, - n.nspname AS schema_name, - pg_get_viewdef(c.oid, true) AS definition, - ts.spcname AS tablespace_name, - c.reloptions AS options, - pg_tablespace_location(ts.oid) AS location -FROM - pg_class c -JOIN - pg_namespace n ON c.relnamespace = n.oid -LEFT JOIN - pg_tablespace ts ON c.reltablespace = ts.oid -WHERE - (c.relkind = 'm' OR c.relkind = 'v') - AND n.nspname = '${viewSchema}' - AND c.relname = '${viewName}';`); - - const resultWith: { [key: string]: string | boolean | number } = {}; - if (viewInfo.options) { - viewInfo.options.forEach((pair) => { - const splitted = pair.split('='); - const key = splitted[0]; - const value = splitted[1]; - - if (value === 'true') { - resultWith[key] = true; - } else if (value === 'false') { - resultWith[key] = false; - } else if (!isNaN(Number(value))) { - resultWith[key] = Number(value); - } else { - resultWith[key] = value; - } - }); - } - - const definition = viewInfo.definition.replace(/\s+/g, ' ').replace(';', '').trim(); - // { "check_option":"cascaded","security_barrier":true} -> // { "checkOption":"cascaded","securityBarrier":true} - const withOption = Object.values(resultWith).length - ? Object.fromEntries(Object.entries(resultWith).map(([key, value]) => [key.camelCase(), value])) - : undefined; - - const materialized = row.type === 'materialized_view'; - - views[`${viewSchema}.${viewName}`] = { - name: viewName, - schema: viewSchema, - columns: columnToReturn, - isExisting: false, - definition: definition, - materialized: materialized, - with: withOption, - tablespace: viewInfo.tablespace_name ?? undefined, - }; - } catch (e) { - rej(e); - return; - } - res(''); - }); - }); - - viewsCount = allViews.length; - - for await (const _ of allViews) { - } - - if (progressCallback) { - progressCallback('columns', columnsCount, 'done'); - progressCallback('indexes', indexesCount, 'done'); - progressCallback('fks', foreignKeysCount, 'done'); - progressCallback('checks', checksCount, 'done'); - progressCallback('views', viewsCount, 'done'); - } - - const schemasObject = Object.fromEntries([...schemas].map((it) => [it, it])); - - return { - version: '7', - dialect: 'postgresql', - tables: result, - enums: enumsToReturn, - schemas: schemasObject, - sequences: sequencesToReturn, - roles: rolesToReturn, - policies, - views: views, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - internal: internals, - }; -}; - -const defaultForColumn = (column: any, internals: PgKitInternals, tableName: string) => { - const columnName = column.column_name; - const isArray = internals?.tables[tableName]?.columns[columnName]?.isArray ?? false; - - if ( - column.column_default === null - || column.column_default === undefined - || column.data_type === 'serial' - || column.data_type === 'smallserial' - || column.data_type === 'bigserial' - ) { - return undefined; - } - - if (column.column_default.endsWith('[]')) { - column.column_default = column.column_default.slice(0, -2); - } - - // if ( - // !['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type) - // ) { - column.column_default = column.column_default.replace(/::(.*?)(? { - if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type.slice(0, -2))) { - return value; - } else if (column.data_type.startsWith('timestamp')) { - return `${value}`; - } else if (column.data_type.slice(0, -2) === 'interval') { - return value.replaceAll('"', `\"`); - } else if (column.data_type.slice(0, -2) === 'boolean') { - return value === 't' ? 'true' : 'false'; - } else if (['json', 'jsonb'].includes(column.data_type.slice(0, -2))) { - return JSON.stringify(JSON.stringify(JSON.parse(JSON.parse(value)), null, 0)); - } else { - return `\"${value}\"`; - } - }) - .join(',') - }}'`; - } - - if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type)) { - if (/^-?[\d.]+(?:e-?\d+)?$/.test(columnDefaultAsString)) { - return Number(columnDefaultAsString); - } else { - if (typeof internals!.tables![tableName] === 'undefined') { - internals!.tables![tableName] = { - columns: { - [columnName]: { - isDefaultAnExpression: true, - }, - }, - }; - } else { - if (typeof internals!.tables![tableName]!.columns[columnName] === 'undefined') { - internals!.tables![tableName]!.columns[columnName] = { - isDefaultAnExpression: true, - }; - } else { - internals!.tables![tableName]!.columns[columnName]!.isDefaultAnExpression = true; - } - } - return columnDefaultAsString; - } - } else if (column.data_type.includes('numeric')) { - // if numeric(1,1) and used '99' -> psql stores like '99'::numeric - return columnDefaultAsString.includes("'") ? columnDefaultAsString : `'${columnDefaultAsString}'`; - } else if (column.data_type === 'json' || column.data_type === 'jsonb') { - const jsonWithoutSpaces = JSON.stringify(JSON.parse(columnDefaultAsString.slice(1, -1))); - return `'${jsonWithoutSpaces}'::${column.data_type}`; - } else if (column.data_type === 'boolean') { - return column.column_default === 'true'; - } else if (columnDefaultAsString === 'NULL') { - return `NULL`; - } else if (columnDefaultAsString.startsWith("'") && columnDefaultAsString.endsWith("'")) { - return columnDefaultAsString; - } else { - return `${columnDefaultAsString.replace(/\\/g, '`\\')}`; - } -}; - -const getColumnsInfoQuery = ({ schema, table, db }: { schema: string; table: string; db: DB }) => { - return db.query( - `SELECT - a.attrelid::regclass::text AS table_name, -- Table, view, or materialized view name - a.attname AS column_name, -- Column name - CASE - WHEN NOT a.attisdropped THEN - CASE - WHEN a.attnotnull THEN 'NO' - ELSE 'YES' - END - ELSE NULL - END AS is_nullable, -- NULL or NOT NULL constraint - a.attndims AS array_dimensions, -- Array dimensions - CASE - WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) - AND EXISTS ( - SELECT FROM pg_attrdef ad - WHERE ad.adrelid = a.attrelid - AND ad.adnum = a.attnum - AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' - || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)' - ) - THEN CASE a.atttypid - WHEN 'int'::regtype THEN 'serial' - WHEN 'int8'::regtype THEN 'bigserial' - WHEN 'int2'::regtype THEN 'smallserial' - END - ELSE format_type(a.atttypid, a.atttypmod) - END AS data_type, -- Column data type --- ns.nspname AS type_schema, -- Schema name - pg_get_serial_sequence('"${schema}"."${table}"', a.attname)::regclass AS seq_name, -- Serial sequence (if any) - c.column_default, -- Column default value - c.data_type AS additional_dt, -- Data type from information_schema - c.udt_name AS enum_name, -- Enum type (if applicable) - c.is_generated, -- Is it a generated column? - c.generation_expression, -- Generation expression (if generated) - c.is_identity, -- Is it an identity column? - c.identity_generation, -- Identity generation strategy (ALWAYS or BY DEFAULT) - c.identity_start, -- Start value of identity column - c.identity_increment, -- Increment for identity column - c.identity_maximum, -- Maximum value for identity column - c.identity_minimum, -- Minimum value for identity column - c.identity_cycle, -- Does the identity column cycle? - enum_ns.nspname AS type_schema -- Schema of the enum type -FROM - pg_attribute a -JOIN - pg_class cls ON cls.oid = a.attrelid -- Join pg_class to get table/view/materialized view info -JOIN - pg_namespace ns ON ns.oid = cls.relnamespace -- Join namespace to get schema info -LEFT JOIN - information_schema.columns c ON c.column_name = a.attname - AND c.table_schema = ns.nspname - AND c.table_name = cls.relname -- Match schema and table/view name -LEFT JOIN - pg_type enum_t ON enum_t.oid = a.atttypid -- Join to get the type info -LEFT JOIN - pg_namespace enum_ns ON enum_ns.oid = enum_t.typnamespace -- Join to get the enum schema -WHERE - a.attnum > 0 -- Valid column numbers only - AND NOT a.attisdropped -- Skip dropped columns - AND cls.relkind IN ('r', 'v', 'm') -- Include regular tables ('r'), views ('v'), and materialized views ('m') - AND ns.nspname = '${schema}' -- Filter by schema - AND cls.relname = '${table}' -- Filter by table name -ORDER BY - a.attnum; -- Order by column number`, - ); -}; diff --git a/drizzle-kit/src/serializer/singlestoreImports.ts b/drizzle-kit/src/serializer/singlestoreImports.ts deleted file mode 100644 index 23c2d66a95..0000000000 --- a/drizzle-kit/src/serializer/singlestoreImports.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { is } from 'drizzle-orm'; -import { AnySingleStoreTable, SingleStoreTable } from 'drizzle-orm/singlestore-core'; -import { safeRegister } from '../cli/commands/utils'; - -export const prepareFromExports = (exports: Record) => { - const tables: AnySingleStoreTable[] = []; - /* const views: SingleStoreView[] = []; */ - - const i0values = Object.values(exports); - i0values.forEach((t) => { - if (is(t, SingleStoreTable)) { - tables.push(t); - } - - /* if (is(t, SingleStoreView)) { - views.push(t); - } */ - }); - - return { tables /* views */ }; -}; - -export const prepareFromSingleStoreImports = async (imports: string[]) => { - const tables: AnySingleStoreTable[] = []; - /* const views: SingleStoreView[] = []; */ - - const { unregister } = await safeRegister(); - for (let i = 0; i < imports.length; i++) { - const it = imports[i]; - const i0: Record = require(`${it}`); - const prepared = prepareFromExports(i0); - - tables.push(...prepared.tables); - /* views.push(...prepared.views); */ - } - unregister(); - return { tables: Array.from(new Set(tables)) /* , views */ }; -}; diff --git a/drizzle-kit/src/serializer/singlestoreSchema.ts b/drizzle-kit/src/serializer/singlestoreSchema.ts deleted file mode 100644 index 9ff45ef5a7..0000000000 --- a/drizzle-kit/src/serializer/singlestoreSchema.ts +++ /dev/null @@ -1,257 +0,0 @@ -import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; -import { mapValues, originUUID, snapshotVersion } from '../global'; - -// ------- V3 -------- -const index = object({ - name: string(), - columns: string().array(), - isUnique: boolean(), - using: enumType(['btree', 'hash']).optional(), - algorithm: enumType(['default', 'inplace', 'copy']).optional(), - lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(), -}).strict(); - -const column = object({ - name: string(), - type: string(), - primaryKey: boolean(), - notNull: boolean(), - autoincrement: boolean().optional(), - default: any().optional(), - onUpdate: any().optional(), - generated: object({ - type: enumType(['stored', 'virtual']), - as: string(), - }).optional(), -}).strict(); - -const compositePK = object({ - name: string(), - columns: string().array(), -}).strict(); - -const uniqueConstraint = object({ - name: string(), - columns: string().array(), -}).strict(); - -const table = object({ - name: string(), - columns: record(string(), column), - indexes: record(string(), index), - compositePrimaryKeys: record(string(), compositePK), - uniqueConstraints: record(string(), uniqueConstraint).default({}), -}).strict(); - -const viewMeta = object({ - algorithm: enumType(['undefined', 'merge', 'temptable']), - sqlSecurity: enumType(['definer', 'invoker']), - withCheckOption: enumType(['local', 'cascaded']).optional(), -}).strict(); - -/* export const view = object({ - name: string(), - columns: record(string(), column), - definition: string().optional(), - isExisting: boolean(), -}).strict().merge(viewMeta); -type SquasherViewMeta = Omit, 'definer'>; */ - -export const kitInternals = object({ - tables: record( - string(), - object({ - columns: record( - string(), - object({ isDefaultAnExpression: boolean().optional() }).optional(), - ), - }).optional(), - ).optional(), - indexes: record( - string(), - object({ - columns: record( - string(), - object({ isExpression: boolean().optional() }).optional(), - ), - }).optional(), - ).optional(), -}).optional(); - -// use main dialect -const dialect = literal('singlestore'); - -const schemaHash = object({ - id: string(), - prevId: string(), -}); - -export const schemaInternal = object({ - version: literal('1'), - dialect: dialect, - tables: record(string(), table), - /* views: record(string(), view).default({}), */ - _meta: object({ - tables: record(string(), string()), - columns: record(string(), string()), - }), - internal: kitInternals, -}).strict(); - -export const schema = schemaInternal.merge(schemaHash); - -const tableSquashed = object({ - name: string(), - columns: record(string(), column), - indexes: record(string(), string()), - compositePrimaryKeys: record(string(), string()), - uniqueConstraints: record(string(), string()).default({}), -}).strict(); - -/* const viewSquashed = view.omit({ - algorithm: true, - sqlSecurity: true, - withCheckOption: true, -}).extend({ meta: string() }); */ - -export const schemaSquashed = object({ - version: literal('1'), - dialect: dialect, - tables: record(string(), tableSquashed), - /* views: record(string(), viewSquashed), */ -}).strict(); - -export type Dialect = TypeOf; -export type Column = TypeOf; -export type Table = TypeOf; -export type SingleStoreSchema = TypeOf; -export type SingleStoreSchemaInternal = TypeOf; -export type SingleStoreKitInternals = TypeOf; -export type SingleStoreSchemaSquashed = TypeOf; -export type Index = TypeOf; -export type PrimaryKey = TypeOf; -export type UniqueConstraint = TypeOf; -/* export type View = TypeOf; */ -/* export type ViewSquashed = TypeOf; */ - -export const SingleStoreSquasher = { - squashIdx: (idx: Index) => { - index.parse(idx); - return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.using ?? ''};${idx.algorithm ?? ''};${ - idx.lock ?? '' - }`; - }, - unsquashIdx: (input: string): Index => { - const [name, columnsString, isUnique, using, algorithm, lock] = input.split(';'); - const destructed = { - name, - columns: columnsString.split(','), - isUnique: isUnique === 'true', - using: using ? using : undefined, - algorithm: algorithm ? algorithm : undefined, - lock: lock ? lock : undefined, - }; - return index.parse(destructed); - }, - squashPK: (pk: PrimaryKey) => { - return `${pk.name};${pk.columns.join(',')}`; - }, - unsquashPK: (pk: string): PrimaryKey => { - const splitted = pk.split(';'); - return { name: splitted[0], columns: splitted[1].split(',') }; - }, - squashUnique: (unq: UniqueConstraint) => { - return `${unq.name};${unq.columns.join(',')}`; - }, - unsquashUnique: (unq: string): UniqueConstraint => { - const [name, columns] = unq.split(';'); - return { name, columns: columns.split(',') }; - }, - /* squashView: (view: View): string => { - return `${view.algorithm};${view.sqlSecurity};${view.withCheckOption}`; - }, - unsquashView: (meta: string): SquasherViewMeta => { - const [algorithm, sqlSecurity, withCheckOption] = meta.split(';'); - const toReturn = { - algorithm: algorithm, - sqlSecurity: sqlSecurity, - withCheckOption: withCheckOption !== 'undefined' ? withCheckOption : undefined, - }; - - return viewMeta.parse(toReturn); - }, */ -}; - -export const squashSingleStoreScheme = (json: SingleStoreSchema): SingleStoreSchemaSquashed => { - const mappedTables = Object.fromEntries( - Object.entries(json.tables).map((it) => { - const squashedIndexes = mapValues(it[1].indexes, (index) => { - return SingleStoreSquasher.squashIdx(index); - }); - - const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { - return SingleStoreSquasher.squashPK(pk); - }); - - const squashedUniqueConstraints = mapValues( - it[1].uniqueConstraints, - (unq) => { - return SingleStoreSquasher.squashUnique(unq); - }, - ); - - return [ - it[0], - { - name: it[1].name, - columns: it[1].columns, - indexes: squashedIndexes, - compositePrimaryKeys: squashedPKs, - uniqueConstraints: squashedUniqueConstraints, - }, - ]; - }), - ); - - /* const mappedViews = Object.fromEntries( - Object.entries(json.views).map(([key, value]) => { - const meta = SingleStoreSquasher.squashView(value); - - return [key, { - name: value.name, - isExisting: value.isExisting, - columns: value.columns, - definition: value.definition, - meta, - }]; - }), - ); */ - - return { - version: '1', - dialect: json.dialect, - tables: mappedTables, - /* views: mappedViews, */ - }; -}; - -export const singlestoreSchema = schema; -export const singlestoreSchemaSquashed = schemaSquashed; - -// no prev version -export const backwardCompatibleSingleStoreSchema = union([singlestoreSchema, schema]); - -export const drySingleStore = singlestoreSchema.parse({ - version: '1', - dialect: 'singlestore', - id: originUUID, - prevId: '', - tables: {}, - schemas: {}, - /* views: {}, */ - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, -}); diff --git a/drizzle-kit/src/serializer/singlestoreSerializer.ts b/drizzle-kit/src/serializer/singlestoreSerializer.ts deleted file mode 100644 index edd09ec671..0000000000 --- a/drizzle-kit/src/serializer/singlestoreSerializer.ts +++ /dev/null @@ -1,767 +0,0 @@ -import chalk from 'chalk'; -import { is, SQL } from 'drizzle-orm'; -import { - AnySingleStoreTable, - getTableConfig, - type PrimaryKey as PrimaryKeyORM, - SingleStoreDialect, - uniqueKeyName, -} from 'drizzle-orm/singlestore-core'; -import { RowDataPacket } from 'mysql2/promise'; -import { withStyle } from '../cli/validations/outputs'; -import { IntrospectStage, IntrospectStatus } from '../cli/views'; - -import { CasingType } from 'src/cli/validations/common'; -import type { DB } from '../utils'; -import { - Column, - Index, - PrimaryKey, - SingleStoreKitInternals, - SingleStoreSchemaInternal, - Table, - UniqueConstraint, -} from './singlestoreSchema'; -import { sqlToStr } from './utils'; - -const dialect = new SingleStoreDialect(); - -export const indexName = (tableName: string, columns: string[]) => { - return `${tableName}_${columns.join('_')}_index`; -}; - -export const generateSingleStoreSnapshot = ( - tables: AnySingleStoreTable[], - /* views: SingleStoreView[], */ - casing: CasingType | undefined, -): SingleStoreSchemaInternal => { - const dialect = new SingleStoreDialect({ casing }); - const result: Record = {}; - /* const resultViews: Record = {}; */ - const internal: SingleStoreKitInternals = { tables: {}, indexes: {} }; - for (const table of tables) { - const { - name: tableName, - columns, - indexes, - schema, - primaryKeys, - uniqueConstraints, - } = getTableConfig(table); - const columnsObject: Record = {}; - const indexesObject: Record = {}; - const primaryKeysObject: Record = {}; - const uniqueConstraintObject: Record = {}; - - columns.forEach((column) => { - const notNull: boolean = column.notNull; - const sqlTypeLowered = column.getSQLType().toLowerCase(); - const autoIncrement = typeof (column as any).autoIncrement === 'undefined' - ? false - : (column as any).autoIncrement; - - const generated = column.generated; - - const columnToSet: Column = { - name: column.name, - type: column.getSQLType(), - primaryKey: false, - // If field is autoincrement it's notNull by default - // notNull: autoIncrement ? true : notNull, - notNull, - autoincrement: autoIncrement, - onUpdate: (column as any).hasOnUpdateNow, - generated: generated - ? { - as: is(generated.as, SQL) - ? dialect.sqlToQuery(generated.as as SQL).sql - : typeof generated.as === 'function' - ? dialect.sqlToQuery(generated.as() as SQL).sql - : (generated.as as any), - type: generated.mode ?? 'stored', - } - : undefined, - }; - - if (column.primary) { - primaryKeysObject[`${tableName}_${column.name}`] = { - name: `${tableName}_${column.name}`, - columns: [column.name], - }; - } - - if (column.isUnique) { - const existingUnique = uniqueConstraintObject[column.uniqueName!]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. - The unique constraint ${ - chalk.underline.blue( - column.uniqueName, - ) - } on the ${ - chalk.underline.blue( - column.name, - ) - } column is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - existingUnique.columns.join(','), - ) - } columns\n`) - }`, - ); - process.exit(1); - } - uniqueConstraintObject[column.uniqueName!] = { - name: column.uniqueName!, - columns: [columnToSet.name], - }; - } - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - if (typeof column.default === 'string') { - columnToSet.default = `'${column.default}'`; - } else { - if (sqlTypeLowered === 'json' || Array.isArray(column.default)) { - columnToSet.default = `'${JSON.stringify(column.default)}'`; - } else if (column.default instanceof Date) { - if (sqlTypeLowered === 'date') { - columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; - } else if ( - sqlTypeLowered.startsWith('datetime') - || sqlTypeLowered.startsWith('timestamp') - ) { - columnToSet.default = `'${ - column.default - .toISOString() - .replace('T', ' ') - .slice(0, 23) - }'`; - } - } else { - columnToSet.default = column.default; - } - } - // if (['blob', 'text', 'json'].includes(column.getSQLType())) { - // columnToSet.default = `(${columnToSet.default})`; - // } - } - } - columnsObject[column.name] = columnToSet; - }); - - primaryKeys.map((pk: PrimaryKeyORM) => { - const columnNames = pk.columns.map((c: any) => c.name); - primaryKeysObject[pk.getName()] = { - name: pk.getName(), - columns: columnNames, - }; - - // all composite pk's should be treated as notNull - for (const column of pk.columns) { - columnsObject[column.name].notNull = true; - } - }); - - uniqueConstraints?.map((unq) => { - const columnNames = unq.columns.map((c) => c.name); - - const name = unq.name ?? uniqueKeyName(table, columnNames); - - const existingUnique = uniqueConstraintObject[name]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. \nThe unique constraint ${ - chalk.underline.blue( - name, - ) - } on the ${ - chalk.underline.blue( - columnNames.join(','), - ) - } columns is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - existingUnique.columns.join(','), - ) - } columns\n`, - ) - }`, - ); - process.exit(1); - } - - uniqueConstraintObject[name] = { - name: unq.name!, - columns: columnNames, - }; - }); - - indexes.forEach((value) => { - const columns = value.config.columns; - const name = value.config.name; - - let indexColumns = columns.map((it) => { - if (is(it, SQL)) { - const sql = dialect.sqlToQuery(it, 'indexes').sql; - if (typeof internal!.indexes![name] === 'undefined') { - internal!.indexes![name] = { - columns: { - [sql]: { - isExpression: true, - }, - }, - }; - } else { - if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { - internal!.indexes![name]!.columns[sql] = { - isExpression: true, - }; - } else { - internal!.indexes![name]!.columns[sql]!.isExpression = true; - } - } - return sql; - } else { - return `${it.name}`; - } - }); - - if (value.config.unique) { - if (typeof uniqueConstraintObject[name] !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. \nThe unique index ${ - chalk.underline.blue( - name, - ) - } on the ${ - chalk.underline.blue( - indexColumns.join(','), - ) - } columns is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - uniqueConstraintObject[name].columns.join(','), - ) - } columns\n`, - ) - }`, - ); - process.exit(1); - } - } - - indexesObject[name] = { - name, - columns: indexColumns, - isUnique: value.config.unique ?? false, - using: value.config.using, - algorithm: value.config.algorithm, - lock: value.config.lock, - }; - }); - - // only handle tables without schemas - if (!schema) { - result[tableName] = { - name: tableName, - columns: columnsObject, - indexes: indexesObject, - compositePrimaryKeys: primaryKeysObject, - uniqueConstraints: uniqueConstraintObject, - }; - } - } - - /* for (const view of views) { - const { - isExisting, - name, - query, - schema, - selectedFields, - algorithm, - sqlSecurity, - withCheckOption, - } = getViewConfig(view); - - const columnsObject: Record = {}; - - const existingView = resultViews[name]; - if (typeof existingView !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated view name across ${ - chalk.underline.blue( - schema ?? 'public', - ) - } schema. Please rename your view`, - ) - }`, - ); - process.exit(1); - } - for (const key in selectedFields) { - if (is(selectedFields[key], SingleStoreColumn)) { - const column = selectedFields[key]; - - const notNull: boolean = column.notNull; - const sqlTypeLowered = column.getSQLType().toLowerCase(); - const autoIncrement = typeof (column as any).autoIncrement === 'undefined' - ? false - : (column as any).autoIncrement; - - const generated = column.generated; - - const columnToSet: Column = { - name: column.name, - type: column.getSQLType(), - primaryKey: false, - // If field is autoincrement it's notNull by default - // notNull: autoIncrement ? true : notNull, - notNull, - autoincrement: autoIncrement, - onUpdate: (column as any).hasOnUpdateNow, - generated: generated - ? { - as: is(generated.as, SQL) - ? dialect.sqlToQuery(generated.as as SQL).sql - : typeof generated.as === 'function' - ? dialect.sqlToQuery(generated.as() as SQL).sql - : (generated.as as any), - type: generated.mode ?? 'stored', - } - : undefined, - }; - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - if (typeof column.default === 'string') { - columnToSet.default = `'${column.default}'`; - } else { - if (sqlTypeLowered === 'json') { - columnToSet.default = `'${JSON.stringify(column.default)}'`; - } else if (column.default instanceof Date) { - if (sqlTypeLowered === 'date') { - columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; - } else if ( - sqlTypeLowered.startsWith('datetime') - || sqlTypeLowered.startsWith('timestamp') - ) { - columnToSet.default = `'${ - column.default - .toISOString() - .replace('T', ' ') - .slice(0, 23) - }'`; - } - } else { - columnToSet.default = column.default; - } - } - } - } - columnsObject[column.name] = columnToSet; - } - } - - resultViews[name] = { - columns: columnsObject, - name, - isExisting, - definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, - withCheckOption, - algorithm: algorithm ?? 'undefined', // set default values - sqlSecurity: sqlSecurity ?? 'definer', // set default values - }; - } */ - - return { - version: '1', - dialect: 'singlestore', - tables: result, - /* views: resultViews, */ - _meta: { - tables: {}, - columns: {}, - }, - internal, - }; -}; - -function clearDefaults(defaultValue: any, collate: string) { - if (typeof collate === 'undefined' || collate === null) { - collate = `utf8mb4`; - } - - let resultDefault = defaultValue; - collate = `_${collate}`; - if (defaultValue.startsWith(collate)) { - resultDefault = resultDefault - .substring(collate.length, defaultValue.length) - .replace(/\\/g, ''); - if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) { - return `('${resultDefault.substring(1, resultDefault.length - 1)}')`; - } else { - return `'${resultDefault}'`; - } - } else { - return `(${resultDefault})`; - } -} - -export const fromDatabase = async ( - db: DB, - inputSchema: string, - tablesFilter: (table: string) => boolean = (table) => true, - progressCallback?: ( - stage: IntrospectStage, - count: number, - status: IntrospectStatus, - ) => void, -): Promise => { - const result: Record = {}; - const internals: SingleStoreKitInternals = { tables: {}, indexes: {} }; - - const columns = await db.query(`select * from information_schema.columns - where table_schema = '${inputSchema}' and table_name != '__drizzle_migrations' - order by table_name, ordinal_position;`); - - const response = columns as RowDataPacket[]; - - const schemas: string[] = []; - - let columnsCount = 0; - let tablesCount = new Set(); - let indexesCount = 0; - /* let viewsCount = 0; */ - - const idxs = await db.query( - `select * from INFORMATION_SCHEMA.STATISTICS - WHERE INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = '${inputSchema}' and INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY';`, - ); - - const idxRows = idxs as RowDataPacket[]; - - for (const column of response) { - if (!tablesFilter(column['TABLE_NAME'] as string)) continue; - - columnsCount += 1; - if (progressCallback) { - progressCallback('columns', columnsCount, 'fetching'); - } - const schema: string = column['TABLE_SCHEMA']; - const tableName = column['TABLE_NAME']; - - tablesCount.add(`${schema}.${tableName}`); - if (progressCallback) { - progressCallback('columns', tablesCount.size, 'fetching'); - } - const columnName: string = column['COLUMN_NAME']; - const isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO' - const dataType = column['DATA_TYPE']; // varchar - const columnType = column['COLUMN_TYPE']; // varchar(256) - // const columnType = column["DATA_TYPE"]; - const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', '' - let columnDefault: string | null = column['COLUMN_DEFAULT']; - const collation: string = column['CHARACTER_SET_NAME']; - const geenratedExpression: string = column['GENERATION_EXPRESSION']; - - let columnExtra = column['EXTRA']; - let isAutoincrement = false; // 'auto_increment', '' - let isDefaultAnExpression = false; // 'auto_increment', '' - - if (typeof column['EXTRA'] !== 'undefined') { - columnExtra = column['EXTRA']; - isAutoincrement = column['EXTRA'] === 'auto_increment'; // 'auto_increment', '' - isDefaultAnExpression = column['EXTRA'].includes('DEFAULT_GENERATED'); // 'auto_increment', '' - } - - // if (isPrimary) { - // if (typeof tableToPk[tableName] === "undefined") { - // tableToPk[tableName] = [columnName]; - // } else { - // tableToPk[tableName].push(columnName); - // } - // } - - if (schema !== inputSchema) { - schemas.push(schema); - } - - const table = result[tableName]; - - // let changedType = columnType.replace("bigint unsigned", "serial") - let changedType = columnType; - - if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) { - // check unique here - const uniqueIdx = idxRows.filter( - (it) => - it['COLUMN_NAME'] === columnName - && it['TABLE_NAME'] === tableName - && it['NON_UNIQUE'] === 0, - ); - if (uniqueIdx && uniqueIdx.length === 1) { - changedType = columnType.replace('bigint unsigned', 'serial'); - } - } - - if ( - columnType.startsWith('bigint(') - || columnType.startsWith('tinyint(') - || columnType.startsWith('date(') - || columnType.startsWith('int(') - || columnType.startsWith('mediumint(') - || columnType.startsWith('smallint(') - || columnType.startsWith('text(') - || columnType.startsWith('time(') - || columnType.startsWith('year(') - ) { - changedType = columnType.replace(/\(\s*[^)]*\)$/, ''); - } - - if (columnType.includes('decimal(10,0)')) { - changedType = columnType.replace('decimal(10,0)', 'decimal'); - } - - if (columnDefault?.endsWith('.')) { - columnDefault = columnDefault.slice(0, -1); - } - - let onUpdate: boolean | undefined = undefined; - if ( - columnType.startsWith('timestamp') - && typeof columnExtra !== 'undefined' - && columnExtra.includes('on update CURRENT_TIMESTAMP') - ) { - onUpdate = true; - } - - const newColumn: Column = { - default: columnDefault === null - ? undefined - : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) - && !['decimal', 'char', 'varchar'].some((type) => columnType.startsWith(type)) - ? Number(columnDefault) - : isDefaultAnExpression - ? clearDefaults(columnDefault, collation) - : columnDefault.startsWith('CURRENT_TIMESTAMP') - ? 'CURRENT_TIMESTAMP' - : `'${columnDefault}'`, - autoincrement: isAutoincrement, - name: columnName, - type: changedType, - primaryKey: false, - notNull: !isNullable, - onUpdate, - generated: geenratedExpression - ? { - as: geenratedExpression, - type: columnExtra === 'VIRTUAL GENERATED' ? 'virtual' : 'stored', - } - : undefined, - }; - - // Set default to internal object - if (isDefaultAnExpression) { - if (typeof internals!.tables![tableName] === 'undefined') { - internals!.tables![tableName] = { - columns: { - [columnName]: { - isDefaultAnExpression: true, - }, - }, - }; - } else { - if ( - typeof internals!.tables![tableName]!.columns[columnName] - === 'undefined' - ) { - internals!.tables![tableName]!.columns[columnName] = { - isDefaultAnExpression: true, - }; - } else { - internals!.tables![tableName]!.columns[ - columnName - ]!.isDefaultAnExpression = true; - } - } - } - - if (!table) { - result[tableName] = { - name: tableName, - columns: { - [columnName]: newColumn, - }, - compositePrimaryKeys: {}, - indexes: {}, - uniqueConstraints: {}, - }; - } else { - result[tableName]!.columns[columnName] = newColumn; - } - } - - const tablePks = await db.query( - `SELECT table_name, column_name, ordinal_position - FROM information_schema.table_constraints t - LEFT JOIN information_schema.key_column_usage k - USING(constraint_name,table_schema,table_name) - WHERE t.constraint_type='UNIQUE' - and table_name != '__drizzle_migrations' - AND t.table_schema = '${inputSchema}' - ORDER BY ordinal_position`, - ); - - const tableToPk: { [tname: string]: string[] } = {}; - - const tableToPkRows = tablePks as RowDataPacket[]; - for (const tableToPkRow of tableToPkRows) { - const tableName: string = tableToPkRow['table_name']; - const columnName: string = tableToPkRow['column_name']; - const position: string = tableToPkRow['ordinal_position']; - - if (typeof result[tableName] === 'undefined') { - continue; - } - - if (typeof tableToPk[tableName] === 'undefined') { - tableToPk[tableName] = [columnName]; - } else { - tableToPk[tableName].push(columnName); - } - } - - for (const [key, value] of Object.entries(tableToPk)) { - // if (value.length > 1) { - result[key].compositePrimaryKeys = { - [`${key}_${value.join('_')}`]: { - name: `${key}_${value.join('_')}`, - columns: value, - }, - }; - // } else if (value.length === 1) { - // result[key].columns[value[0]].primaryKey = true; - // } else { - // } - } - if (progressCallback) { - progressCallback('columns', columnsCount, 'done'); - progressCallback('tables', tablesCount.size, 'done'); - } - - for (const idxRow of idxRows) { - const tableSchema = idxRow['TABLE_SCHEMA']; - const tableName = idxRow['TABLE_NAME']; - const constraintName = idxRow['INDEX_NAME']; - const columnName: string = idxRow['COLUMN_NAME']; - const isUnique = idxRow['NON_UNIQUE'] === 0; - - const tableInResult = result[tableName]; - if (typeof tableInResult === 'undefined') continue; - - // if (tableInResult.columns[columnName].type === "serial") continue; - - indexesCount += 1; - if (progressCallback) { - progressCallback('indexes', indexesCount, 'fetching'); - } - - if (isUnique) { - if ( - typeof tableInResult.uniqueConstraints[constraintName] !== 'undefined' - ) { - tableInResult.uniqueConstraints[constraintName]!.columns.push( - columnName, - ); - } else { - tableInResult.uniqueConstraints[constraintName] = { - name: constraintName, - columns: [columnName], - }; - } - } - } - - /* const views = await db.query( - `select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema = '${inputSchema}';`, - ); */ - - /* const resultViews: Record = {}; */ - - /* viewsCount = views.length; - if (progressCallback) { - progressCallback('views', viewsCount, 'fetching'); - } - for await (const view of views) { - const viewName = view['TABLE_NAME']; - const definition = view['VIEW_DEFINITION']; - - const withCheckOption = view['CHECK_OPTION'] === 'NONE' - ? undefined - : view['CHECK_OPTION'].toLowerCase(); - const sqlSecurity = view['SECURITY_TYPE'].toLowerCase(); - - const [createSqlStatement] = await db.query( - `SHOW CREATE VIEW \`${viewName}\`;`, - ); - const algorithmMatch = createSqlStatement['Create View'].match(/ALGORITHM=([^ ]+)/); - const algorithm = algorithmMatch - ? algorithmMatch[1].toLowerCase() - : undefined; - - const columns = result[viewName].columns; - delete result[viewName]; - - resultViews[viewName] = { - columns: columns, - isExisting: false, - name: viewName, - algorithm, - definition, - sqlSecurity, - withCheckOption, - }; - } */ - - if (progressCallback) { - progressCallback('indexes', indexesCount, 'done'); - // progressCallback("enums", 0, "fetching"); - progressCallback('enums', 0, 'done'); - } - - return { - version: '1', - dialect: 'singlestore', - tables: result, - /* views: resultViews, */ - _meta: { - tables: {}, - columns: {}, - }, - internal: internals, - }; -}; diff --git a/drizzle-kit/src/serializer/sqliteImports.ts b/drizzle-kit/src/serializer/sqliteImports.ts deleted file mode 100644 index 0164604d11..0000000000 --- a/drizzle-kit/src/serializer/sqliteImports.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { is } from 'drizzle-orm'; -import { AnySQLiteTable, SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; -import { safeRegister } from '../cli/commands/utils'; - -export const prepareFromExports = (exports: Record) => { - const tables: AnySQLiteTable[] = []; - const views: SQLiteView[] = []; - - const i0values = Object.values(exports); - i0values.forEach((t) => { - if (is(t, SQLiteTable)) { - tables.push(t); - } - - if (is(t, SQLiteView)) { - views.push(t); - } - }); - - return { tables, views }; -}; - -export const prepareFromSqliteImports = async (imports: string[]) => { - const tables: AnySQLiteTable[] = []; - const views: SQLiteView[] = []; - - const { unregister } = await safeRegister(); - for (let i = 0; i < imports.length; i++) { - const it = imports[i]; - - const i0: Record = require(`${it}`); - const prepared = prepareFromExports(i0); - - tables.push(...prepared.tables); - views.push(...prepared.views); - } - - unregister(); - - return { tables: Array.from(new Set(tables)), views }; -}; diff --git a/drizzle-kit/src/serializer/sqliteSchema.ts b/drizzle-kit/src/serializer/sqliteSchema.ts deleted file mode 100644 index 8fd98d99d5..0000000000 --- a/drizzle-kit/src/serializer/sqliteSchema.ts +++ /dev/null @@ -1,352 +0,0 @@ -import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; -import { customMapEntries, mapValues, originUUID } from '../global'; - -// ------- V3 -------- -const index = object({ - name: string(), - columns: string().array(), - where: string().optional(), - isUnique: boolean(), -}).strict(); - -const fk = object({ - name: string(), - tableFrom: string(), - columnsFrom: string().array(), - tableTo: string(), - columnsTo: string().array(), - onUpdate: string().optional(), - onDelete: string().optional(), -}).strict(); - -const compositePK = object({ - columns: string().array(), - name: string().optional(), -}).strict(); - -const column = object({ - name: string(), - type: string(), - primaryKey: boolean(), - notNull: boolean(), - autoincrement: boolean().optional(), - default: any().optional(), - generated: object({ - type: enumType(['stored', 'virtual']), - as: string(), - }).optional(), -}).strict(); - -const tableV3 = object({ - name: string(), - columns: record(string(), column), - indexes: record(string(), index), - foreignKeys: record(string(), fk), -}).strict(); - -const uniqueConstraint = object({ - name: string(), - columns: string().array(), -}).strict(); - -const checkConstraint = object({ - name: string(), - value: string(), -}).strict(); - -const table = object({ - name: string(), - columns: record(string(), column), - indexes: record(string(), index), - foreignKeys: record(string(), fk), - compositePrimaryKeys: record(string(), compositePK), - uniqueConstraints: record(string(), uniqueConstraint).default({}), - checkConstraints: record(string(), checkConstraint).default({}), -}).strict(); - -export const view = object({ - name: string(), - columns: record(string(), column), - definition: string().optional(), - isExisting: boolean(), -}).strict(); - -// use main dialect -const dialect = enumType(['sqlite']); - -const schemaHash = object({ - id: string(), - prevId: string(), -}).strict(); - -export const schemaInternalV3 = object({ - version: literal('3'), - dialect: dialect, - tables: record(string(), tableV3), - enums: object({}), -}).strict(); - -export const schemaInternalV4 = object({ - version: literal('4'), - dialect: dialect, - tables: record(string(), table), - views: record(string(), view).default({}), - enums: object({}), -}).strict(); - -export const schemaInternalV5 = object({ - version: literal('5'), - dialect: dialect, - tables: record(string(), table), - enums: object({}), - _meta: object({ - tables: record(string(), string()), - columns: record(string(), string()), - }), -}).strict(); - -export const kitInternals = object({ - indexes: record( - string(), - object({ - columns: record( - string(), - object({ isExpression: boolean().optional() }).optional(), - ), - }).optional(), - ).optional(), -}).optional(); - -const latestVersion = literal('6'); -export const schemaInternal = object({ - version: latestVersion, - dialect: dialect, - tables: record(string(), table), - views: record(string(), view).default({}), - enums: object({}), - _meta: object({ - tables: record(string(), string()), - columns: record(string(), string()), - }), - internal: kitInternals, -}).strict(); - -export const schemaV3 = schemaInternalV3.merge(schemaHash).strict(); -export const schemaV4 = schemaInternalV4.merge(schemaHash).strict(); -export const schemaV5 = schemaInternalV5.merge(schemaHash).strict(); -export const schema = schemaInternal.merge(schemaHash).strict(); - -const tableSquashed = object({ - name: string(), - columns: record(string(), column), - indexes: record(string(), string()), - foreignKeys: record(string(), string()), - compositePrimaryKeys: record(string(), string()), - uniqueConstraints: record(string(), string()).default({}), - checkConstraints: record(string(), string()).default({}), -}).strict(); - -export const schemaSquashed = object({ - version: latestVersion, - dialect: dialect, - tables: record(string(), tableSquashed), - views: record(string(), view), - enums: any(), -}).strict(); - -export type Dialect = TypeOf; -export type Column = TypeOf; -export type Table = TypeOf; -export type SQLiteSchema = TypeOf; -export type SQLiteSchemaV3 = TypeOf; -export type SQLiteSchemaV4 = TypeOf; -export type SQLiteSchemaInternal = TypeOf; -export type SQLiteSchemaSquashed = TypeOf; -export type SQLiteKitInternals = TypeOf; -export type Index = TypeOf; -export type ForeignKey = TypeOf; -export type PrimaryKey = TypeOf; -export type UniqueConstraint = TypeOf; -export type CheckConstraint = TypeOf; -export type View = TypeOf; - -export const SQLiteSquasher = { - squashIdx: (idx: Index) => { - index.parse(idx); - return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.where ?? ''}`; - }, - unsquashIdx: (input: string): Index => { - const [name, columnsString, isUnique, where] = input.split(';'); - - const result: Index = index.parse({ - name, - columns: columnsString.split(','), - isUnique: isUnique === 'true', - where: where ?? undefined, - }); - return result; - }, - squashUnique: (unq: UniqueConstraint) => { - return `${unq.name};${unq.columns.join(',')}`; - }, - unsquashUnique: (unq: string): UniqueConstraint => { - const [name, columns] = unq.split(';'); - return { name, columns: columns.split(',') }; - }, - squashFK: (fk: ForeignKey) => { - return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ - fk.onUpdate ?? '' - };${fk.onDelete ?? ''}`; - }, - unsquashFK: (input: string): ForeignKey => { - const [ - name, - tableFrom, - columnsFromStr, - tableTo, - columnsToStr, - onUpdate, - onDelete, - ] = input.split(';'); - - const result: ForeignKey = fk.parse({ - name, - tableFrom, - columnsFrom: columnsFromStr.split(','), - tableTo, - columnsTo: columnsToStr.split(','), - onUpdate, - onDelete, - }); - return result; - }, - squashPushFK: (fk: ForeignKey) => { - return `${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${fk.onUpdate ?? ''};${ - fk.onDelete ?? '' - }`; - }, - unsquashPushFK: (input: string): ForeignKey => { - const [ - tableFrom, - columnsFromStr, - tableTo, - columnsToStr, - onUpdate, - onDelete, - ] = input.split(';'); - - const result: ForeignKey = fk.parse({ - name: '', - tableFrom, - columnsFrom: columnsFromStr.split(','), - tableTo, - columnsTo: columnsToStr.split(','), - onUpdate, - onDelete, - }); - return result; - }, - squashPK: (pk: PrimaryKey) => { - return pk.columns.join(','); - }, - unsquashPK: (pk: string) => { - return pk.split(','); - }, - squashCheck: (check: CheckConstraint) => { - return `${check.name};${check.value}`; - }, - unsquashCheck: (input: string): CheckConstraint => { - const [ - name, - value, - ] = input.split(';'); - - return { name, value }; - }, -}; - -export const squashSqliteScheme = ( - json: SQLiteSchema | SQLiteSchemaV4, - action?: 'push' | undefined, -): SQLiteSchemaSquashed => { - const mappedTables = Object.fromEntries( - Object.entries(json.tables).map((it) => { - const squashedIndexes = mapValues(it[1].indexes, (index: Index) => { - return SQLiteSquasher.squashIdx(index); - }); - - const squashedFKs = customMapEntries( - it[1].foreignKeys, - (key, value) => { - return action === 'push' - ? [ - SQLiteSquasher.squashPushFK(value), - SQLiteSquasher.squashPushFK(value), - ] - : [key, SQLiteSquasher.squashFK(value)]; - }, - ); - - const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { - return SQLiteSquasher.squashPK(pk); - }); - - const squashedUniqueConstraints = mapValues( - it[1].uniqueConstraints, - (unq) => { - return SQLiteSquasher.squashUnique(unq); - }, - ); - - const squashedCheckConstraints = mapValues( - it[1].checkConstraints, - (check) => { - return SQLiteSquasher.squashCheck(check); - }, - ); - - return [ - it[0], - { - name: it[1].name, - columns: it[1].columns, - indexes: squashedIndexes, - foreignKeys: squashedFKs, - compositePrimaryKeys: squashedPKs, - uniqueConstraints: squashedUniqueConstraints, - checkConstraints: squashedCheckConstraints, - }, - ]; - }), - ); - - return { - version: '6', - dialect: json.dialect, - tables: mappedTables, - views: json.views, - enums: json.enums, - }; -}; - -export const drySQLite = schema.parse({ - version: '6', - dialect: 'sqlite', - id: originUUID, - prevId: '', - tables: {}, - views: {}, - enums: {}, - _meta: { - tables: {}, - columns: {}, - }, -}); - -export const sqliteSchemaV3 = schemaV3; -export const sqliteSchemaV4 = schemaV4; -export const sqliteSchemaV5 = schemaV5; -export const sqliteSchema = schema; -export const SQLiteSchemaSquashed = schemaSquashed; - -export const backwardCompatibleSqliteSchema = union([sqliteSchemaV5, schema]); diff --git a/drizzle-kit/src/serializer/sqliteSerializer.ts b/drizzle-kit/src/serializer/sqliteSerializer.ts deleted file mode 100644 index 87b44fa1b4..0000000000 --- a/drizzle-kit/src/serializer/sqliteSerializer.ts +++ /dev/null @@ -1,953 +0,0 @@ -import chalk from 'chalk'; -import { getTableName, is, SQL } from 'drizzle-orm'; -import { - AnySQLiteTable, - getTableConfig, - getViewConfig, - SQLiteBaseInteger, - SQLiteColumn, - SQLiteSyncDialect, - SQLiteView, - uniqueKeyName, -} from 'drizzle-orm/sqlite-core'; -import { CasingType } from 'src/cli/validations/common'; -import { withStyle } from '../cli/validations/outputs'; -import type { IntrospectStage, IntrospectStatus } from '../cli/views'; -import type { - CheckConstraint, - Column, - ForeignKey, - Index, - PrimaryKey, - SQLiteKitInternals, - SQLiteSchemaInternal, - Table, - UniqueConstraint, - View, -} from '../serializer/sqliteSchema'; -import { escapeSingleQuotes, type SQLiteDB } from '../utils'; -import { getColumnCasing, sqlToStr } from './utils'; - -export const generateSqliteSnapshot = ( - tables: AnySQLiteTable[], - views: SQLiteView[], - casing: CasingType | undefined, -): SQLiteSchemaInternal => { - const dialect = new SQLiteSyncDialect({ casing }); - const result: Record = {}; - const resultViews: Record = {}; - - const internal: SQLiteKitInternals = { indexes: {} }; - for (const table of tables) { - // const tableName = getTableName(table); - const columnsObject: Record = {}; - const indexesObject: Record = {}; - const foreignKeysObject: Record = {}; - const primaryKeysObject: Record = {}; - const uniqueConstraintObject: Record = {}; - const checkConstraintObject: Record = {}; - - const checksInTable: Record = {}; - - const { - name: tableName, - columns, - indexes, - checks, - foreignKeys: tableForeignKeys, - primaryKeys, - uniqueConstraints, - } = getTableConfig(table); - - columns.forEach((column) => { - const name = getColumnCasing(column, casing); - const notNull: boolean = column.notNull; - const primaryKey: boolean = column.primary; - const generated = column.generated; - - const columnToSet: Column = { - name, - type: column.getSQLType(), - primaryKey, - notNull, - autoincrement: is(column, SQLiteBaseInteger) - ? column.autoIncrement - : false, - generated: generated - ? { - as: is(generated.as, SQL) - ? `(${dialect.sqlToQuery(generated.as as SQL, 'indexes').sql})` - : typeof generated.as === 'function' - ? `(${dialect.sqlToQuery(generated.as() as SQL, 'indexes').sql})` - : `(${generated.as as any})`, - type: generated.mode ?? 'virtual', - } - : undefined, - }; - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - columnToSet.default = typeof column.default === 'string' - ? `'${escapeSingleQuotes(column.default)}'` - : typeof column.default === 'object' - || Array.isArray(column.default) - ? `'${JSON.stringify(column.default)}'` - : column.default; - } - } - columnsObject[name] = columnToSet; - - if (column.isUnique) { - const existingUnique = indexesObject[column.uniqueName!]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. - The unique constraint ${ - chalk.underline.blue( - column.uniqueName, - ) - } on the ${ - chalk.underline.blue( - name, - ) - } column is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - existingUnique.columns.join(','), - ) - } columns\n`) - }`, - ); - process.exit(1); - } - indexesObject[column.uniqueName!] = { - name: column.uniqueName!, - columns: [columnToSet.name], - isUnique: true, - }; - } - }); - - const foreignKeys: ForeignKey[] = tableForeignKeys.map((fk) => { - const tableFrom = tableName; - const onDelete = fk.onDelete ?? 'no action'; - const onUpdate = fk.onUpdate ?? 'no action'; - const reference = fk.reference(); - - const referenceFT = reference.foreignTable; - - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - const tableTo = getTableName(referenceFT); - - const originalColumnsFrom = reference.columns.map((it) => it.name); - const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); - const originalColumnsTo = reference.foreignColumns.map((it) => it.name); - const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); - - let name = fk.getName(); - if (casing !== undefined) { - for (let i = 0; i < originalColumnsFrom.length; i++) { - name = name.replace(originalColumnsFrom[i], columnsFrom[i]); - } - for (let i = 0; i < originalColumnsTo.length; i++) { - name = name.replace(originalColumnsTo[i], columnsTo[i]); - } - } - - return { - name, - tableFrom, - tableTo, - columnsFrom, - columnsTo, - onDelete, - onUpdate, - } as ForeignKey; - }); - - foreignKeys.forEach((it) => { - foreignKeysObject[it.name] = it; - }); - - indexes.forEach((value) => { - const columns = value.config.columns; - const name = value.config.name; - - let indexColumns = columns.map((it) => { - if (is(it, SQL)) { - const sql = dialect.sqlToQuery(it, 'indexes').sql; - if (typeof internal!.indexes![name] === 'undefined') { - internal!.indexes![name] = { - columns: { - [sql]: { - isExpression: true, - }, - }, - }; - } else { - if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { - internal!.indexes![name]!.columns[sql] = { - isExpression: true, - }; - } else { - internal!.indexes![name]!.columns[sql]!.isExpression = true; - } - } - return sql; - } else { - return getColumnCasing(it, casing); - } - }); - - let where: string | undefined = undefined; - if (value.config.where !== undefined) { - if (is(value.config.where, SQL)) { - where = dialect.sqlToQuery(value.config.where).sql; - } - } - - indexesObject[name] = { - name, - columns: indexColumns, - isUnique: value.config.unique ?? false, - where, - }; - }); - - uniqueConstraints?.map((unq) => { - const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); - - const name = unq.name ?? uniqueKeyName(table, columnNames); - - const existingUnique = indexesObject[name]; - if (typeof existingUnique !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated unique constraint names in ${ - chalk.underline.blue( - tableName, - ) - } table. \nThe unique constraint ${ - chalk.underline.blue( - name, - ) - } on the ${ - chalk.underline.blue( - columnNames.join(','), - ) - } columns is confilcting with a unique constraint name already defined for ${ - chalk.underline.blue( - existingUnique.columns.join(','), - ) - } columns\n`, - ) - }`, - ); - process.exit(1); - } - - indexesObject[name] = { - name: unq.name!, - columns: columnNames, - isUnique: true, - }; - }); - - primaryKeys.forEach((it) => { - if (it.columns.length > 1) { - const originalColumnNames = it.columns.map((c) => c.name); - const columnNames = it.columns.map((c) => getColumnCasing(c, casing)); - - let name = it.getName(); - if (casing !== undefined) { - for (let i = 0; i < originalColumnNames.length; i++) { - name = name.replace(originalColumnNames[i], columnNames[i]); - } - } - - primaryKeysObject[name] = { - columns: columnNames, - name, - }; - } else { - columnsObject[getColumnCasing(it.columns[0], casing)].primaryKey = true; - } - }); - - checks.forEach((check) => { - const checkName = check.name; - if (typeof checksInTable[tableName] !== 'undefined') { - if (checksInTable[tableName].includes(check.name)) { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated check constraint name in ${ - chalk.underline.blue( - tableName, - ) - }. Please rename your check constraint in the ${ - chalk.underline.blue( - tableName, - ) - } table`, - ) - }`, - ); - process.exit(1); - } - checksInTable[tableName].push(checkName); - } else { - checksInTable[tableName] = [check.name]; - } - - checkConstraintObject[checkName] = { - name: checkName, - value: dialect.sqlToQuery(check.value).sql, - }; - }); - - result[tableName] = { - name: tableName, - columns: columnsObject, - indexes: indexesObject, - foreignKeys: foreignKeysObject, - compositePrimaryKeys: primaryKeysObject, - uniqueConstraints: uniqueConstraintObject, - checkConstraints: checkConstraintObject, - }; - } - - for (const view of views) { - const { name, isExisting, selectedFields, query, schema } = getViewConfig(view); - - const columnsObject: Record = {}; - - const existingView = resultViews[name]; - if (typeof existingView !== 'undefined') { - console.log( - `\n${ - withStyle.errorWarning( - `We\'ve found duplicated view name across ${ - chalk.underline.blue( - schema ?? 'public', - ) - } schema. Please rename your view`, - ) - }`, - ); - process.exit(1); - } - - for (const key in selectedFields) { - if (is(selectedFields[key], SQLiteColumn)) { - const column = selectedFields[key]; - const notNull: boolean = column.notNull; - const primaryKey: boolean = column.primary; - const generated = column.generated; - - const columnToSet: Column = { - name: column.name, - type: column.getSQLType(), - primaryKey, - notNull, - autoincrement: is(column, SQLiteBaseInteger) - ? column.autoIncrement - : false, - generated: generated - ? { - as: is(generated.as, SQL) - ? `(${dialect.sqlToQuery(generated.as as SQL, 'indexes').sql})` - : typeof generated.as === 'function' - ? `(${dialect.sqlToQuery(generated.as() as SQL, 'indexes').sql})` - : `(${generated.as as any})`, - type: generated.mode ?? 'virtual', - } - : undefined, - }; - - if (column.default !== undefined) { - if (is(column.default, SQL)) { - columnToSet.default = sqlToStr(column.default, casing); - } else { - columnToSet.default = typeof column.default === 'string' - ? `'${column.default}'` - : typeof column.default === 'object' - || Array.isArray(column.default) - ? `'${JSON.stringify(column.default)}'` - : column.default; - } - } - columnsObject[column.name] = columnToSet; - } - } - - resultViews[name] = { - columns: columnsObject, - name, - isExisting, - definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, - }; - } - - return { - version: '6', - dialect: 'sqlite', - tables: result, - views: resultViews, - enums: {}, - _meta: { - tables: {}, - columns: {}, - }, - internal, - }; -}; - -function mapSqlToSqliteType(sqlType: string): string { - const lowered = sqlType.toLowerCase(); - if ( - [ - 'int', - 'integer', - 'integer auto_increment', - 'tinyint', - 'smallint', - 'mediumint', - 'bigint', - 'unsigned big int', - 'int2', - 'int8', - ].some((it) => lowered.startsWith(it)) - ) { - return 'integer'; - } else if ( - [ - 'character', - 'varchar', - 'varying character', - 'national varying character', - 'nchar', - 'native character', - 'nvarchar', - 'text', - 'clob', - ].some((it) => lowered.startsWith(it)) - ) { - const match = lowered.match(/\d+/); - - if (match) { - return `text(${match[0]})`; - } - - return 'text'; - } else if (lowered.startsWith('blob')) { - return 'blob'; - } else if ( - ['real', 'double', 'double precision', 'float'].some((it) => lowered.startsWith(it)) - ) { - return 'real'; - } else { - return 'numeric'; - } -} - -interface ColumnInfo { - columnName: string; - expression: string; - type: 'stored' | 'virtual'; -} - -function extractGeneratedColumns(input: string): Record { - const columns: Record = {}; - const lines = input.split(/,\s*(?![^()]*\))/); // Split by commas outside parentheses - - for (const line of lines) { - if (line.includes('GENERATED ALWAYS AS')) { - const parts = line.trim().split(/\s+/); - const columnName = parts[0].replace(/[`'"]/g, ''); // Remove quotes around the column name - const expression = line - .substring(line.indexOf('('), line.indexOf(')') + 1) - .trim(); - - // Extract type ensuring to remove any trailing characters like ')' - const typeIndex = parts.findIndex((part) => part.match(/(stored|virtual)/i)); - let type: ColumnInfo['type'] = 'virtual'; - if (typeIndex !== -1) { - type = parts[typeIndex] - .replace(/[^a-z]/gi, '') - .toLowerCase() as ColumnInfo['type']; - } - - columns[columnName] = { - columnName: columnName, - expression: expression, - type, - }; - } - } - return columns; -} - -function filterIgnoredTablesByField(fieldName: string) { - // _cf_ is a prefix for internal Cloudflare D1 tables (e.g. _cf_KV, _cf_METADATA) - // _litestream_ is a prefix for internal Litestream tables (e.g. _litestream_seq, _litestream_lock) - // libsql_ is a prefix for internal libSQL tables (e.g. libsql_wasm_func_table) - // sqlite_ is a prefix for internal SQLite tables (e.g. sqlite_sequence, sqlite_stat1) - return `${fieldName} != '__drizzle_migrations' - AND ${fieldName} NOT LIKE '\\_cf\\_%' ESCAPE '\\' - AND ${fieldName} NOT LIKE '\\_litestream\\_%' ESCAPE '\\' - AND ${fieldName} NOT LIKE 'libsql\\_%' ESCAPE '\\' - AND ${fieldName} NOT LIKE 'sqlite\\_%' ESCAPE '\\'`; -} - -export const fromDatabase = async ( - db: SQLiteDB, - tablesFilter: (table: string) => boolean = (table) => true, - progressCallback?: ( - stage: IntrospectStage, - count: number, - status: IntrospectStatus, - ) => void, -): Promise => { - const result: Record = {}; - const resultViews: Record = {}; - - const columns = await db.query<{ - tableName: string; - columnName: string; - columnType: string; - notNull: number; - defaultValue: string; - pk: number; - seq: number; - hidden: number; - sql: string; - type: 'view' | 'table'; - }>(`SELECT - m.name as "tableName", - p.name as "columnName", - p.type as "columnType", - p."notnull" as "notNull", - p.dflt_value as "defaultValue", - p.pk as pk, - p.hidden as hidden, - m.sql, - m.type as type - FROM sqlite_master AS m - JOIN pragma_table_xinfo(m.name) AS p - WHERE (m.type = 'table' OR m.type = 'view') - AND ${filterIgnoredTablesByField('m.tbl_name')};`); - - const tablesWithSeq: string[] = []; - - const seq = await db.query<{ - name: string; - }>(`SELECT - * - FROM sqlite_master - WHERE sql GLOB '*[ *' || CHAR(9) || CHAR(10) || CHAR(13) || ']AUTOINCREMENT[^'']*' - AND ${filterIgnoredTablesByField('tbl_name')};`); - - for (const s of seq) { - tablesWithSeq.push(s.name); - } - - let columnsCount = 0; - let tablesCount = new Set(); - let indexesCount = 0; - let foreignKeysCount = 0; - let checksCount = 0; - let viewsCount = 0; - - // append primaryKeys by table - const tableToPk: { [tname: string]: string[] } = {}; - - let tableToGeneratedColumnsInfo: Record< - string, - Record - > = {}; - - for (const column of columns) { - if (!tablesFilter(column.tableName)) continue; - - // TODO - if (column.type !== 'view') { - columnsCount += 1; - } - if (progressCallback) { - progressCallback('columns', columnsCount, 'fetching'); - } - const tableName = column.tableName; - - tablesCount.add(tableName); - if (progressCallback) { - progressCallback('tables', tablesCount.size, 'fetching'); - } - const columnName = column.columnName; - const isNotNull = column.notNull === 1; // 'YES', 'NO' - const columnType = column.columnType; // varchar(256) - const isPrimary = column.pk !== 0; // 'PRI', '' - const columnDefault: string = column.defaultValue; - - const isAutoincrement = isPrimary && tablesWithSeq.includes(tableName); - - if (isPrimary) { - if (typeof tableToPk[tableName] === 'undefined') { - tableToPk[tableName] = [columnName]; - } else { - tableToPk[tableName].push(columnName); - } - } - - const table = result[tableName]; - - if (column.hidden === 2 || column.hidden === 3) { - if ( - typeof tableToGeneratedColumnsInfo[column.tableName] === 'undefined' - ) { - tableToGeneratedColumnsInfo[column.tableName] = extractGeneratedColumns( - column.sql, - ); - } - } - - const newColumn: Column = { - default: columnDefault === null - ? undefined - : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) - ? Number(columnDefault) - : ['CURRENT_TIME', 'CURRENT_DATE', 'CURRENT_TIMESTAMP'].includes( - columnDefault, - ) - ? `(${columnDefault})` - : columnDefault === 'false' - ? false - : columnDefault === 'true' - ? true - : columnDefault.startsWith("'") && columnDefault.endsWith("'") - ? columnDefault - // ? columnDefault.substring(1, columnDefault.length - 1) - : `(${columnDefault})`, - autoincrement: isAutoincrement, - name: columnName, - type: mapSqlToSqliteType(columnType), - primaryKey: false, - notNull: isNotNull, - generated: tableToGeneratedColumnsInfo[tableName] - && tableToGeneratedColumnsInfo[tableName][columnName] - ? { - type: tableToGeneratedColumnsInfo[tableName][columnName].type, - as: tableToGeneratedColumnsInfo[tableName][columnName].expression, - } - : undefined, - }; - - if (!table) { - result[tableName] = { - name: tableName, - columns: { - [columnName]: newColumn, - }, - compositePrimaryKeys: {}, - indexes: {}, - foreignKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }; - } else { - result[tableName]!.columns[columnName] = newColumn; - } - } - - for (const [key, value] of Object.entries(tableToPk)) { - if (value.length > 1) { - result[key].compositePrimaryKeys = { - [`${key}_${value.join('_')}_pk`]: { - columns: value, - name: `${key}_${value.join('_')}_pk`, - }, - }; - } else if (value.length === 1) { - result[key].columns[value[0]].primaryKey = true; - } else { - } - } - - if (progressCallback) { - progressCallback('columns', columnsCount, 'done'); - progressCallback('tables', tablesCount.size, 'done'); - } - try { - const fks = await db.query<{ - tableFrom: string; - tableTo: string; - from: string; - to: string; - onUpdate: string; - onDelete: string; - seq: number; - id: number; - }>(`SELECT - m.name as "tableFrom", - f.id as "id", - f."table" as "tableTo", - f."from", - f."to", - f."on_update" as "onUpdate", - f."on_delete" as "onDelete", - f.seq as "seq" - FROM - sqlite_master m, - pragma_foreign_key_list(m.name) as f - WHERE ${filterIgnoredTablesByField('m.tbl_name')};`); - - const fkByTableName: Record = {}; - - for (const fkRow of fks) { - foreignKeysCount += 1; - if (progressCallback) { - progressCallback('fks', foreignKeysCount, 'fetching'); - } - const tableName: string = fkRow.tableFrom; - const columnName: string = fkRow.from; - const refTableName = fkRow.tableTo; - const refColumnName: string = fkRow.to; - const updateRule: string = fkRow.onUpdate; - const deleteRule = fkRow.onDelete; - const sequence = fkRow.seq; - const id = fkRow.id; - - const tableInResult = result[tableName]; - if (typeof tableInResult === 'undefined') continue; - - if (typeof fkByTableName[`${tableName}_${id}`] !== 'undefined') { - fkByTableName[`${tableName}_${id}`]!.columnsFrom.push(columnName); - fkByTableName[`${tableName}_${id}`]!.columnsTo.push(refColumnName); - } else { - fkByTableName[`${tableName}_${id}`] = { - name: '', - tableFrom: tableName, - tableTo: refTableName, - columnsFrom: [columnName], - columnsTo: [refColumnName], - onDelete: deleteRule?.toLowerCase(), - onUpdate: updateRule?.toLowerCase(), - }; - } - - const columnsFrom = fkByTableName[`${tableName}_${id}`].columnsFrom; - const columnsTo = fkByTableName[`${tableName}_${id}`].columnsTo; - fkByTableName[ - `${tableName}_${id}` - ].name = `${tableName}_${ - columnsFrom.join( - '_', - ) - }_${refTableName}_${columnsTo.join('_')}_fk`; - } - - for (const idx of Object.keys(fkByTableName)) { - const value = fkByTableName[idx]; - result[value.tableFrom].foreignKeys[value.name] = value; - } - } catch (e) { - // console.log(`Can't proccess foreign keys`); - } - if (progressCallback) { - progressCallback('fks', foreignKeysCount, 'done'); - } - const idxs = await db.query<{ - tableName: string; - indexName: string; - columnName: string; - isUnique: number; - seq: string; - }>(`SELECT - m.tbl_name as tableName, - il.name as indexName, - ii.name as columnName, - il.[unique] as isUnique, - il.seq as seq - FROM - sqlite_master AS m, - pragma_index_list(m.name) AS il, - pragma_index_info(il.name) AS ii - WHERE - m.type = 'table' - AND il.name NOT LIKE 'sqlite\\_autoindex\\_%' ESCAPE '\\' - AND ${filterIgnoredTablesByField('m.tbl_name')};`); - - for (const idxRow of idxs) { - const tableName = idxRow.tableName; - const constraintName = idxRow.indexName; - const columnName: string = idxRow.columnName; - const isUnique = idxRow.isUnique === 1; - - const tableInResult = result[tableName]; - if (typeof tableInResult === 'undefined') continue; - - indexesCount += 1; - if (progressCallback) { - progressCallback('indexes', indexesCount, 'fetching'); - } - - if ( - typeof tableInResult.indexes[constraintName] !== 'undefined' - && columnName - ) { - tableInResult.indexes[constraintName]!.columns.push(columnName); - } else { - tableInResult.indexes[constraintName] = { - name: constraintName, - columns: columnName ? [columnName] : [], - isUnique: isUnique, - }; - } - // if (isUnique) { - // if (typeof tableInResult.uniqueConstraints[constraintName] !== "undefined") { - // tableInResult.uniqueConstraints[constraintName]!.columns.push(columnName); - // } else { - // tableInResult.uniqueConstraints[constraintName] = { - // name: constraintName, - // columns: [columnName], - // }; - // } - // } else { - // if (typeof tableInResult.indexes[constraintName] !== "undefined") { - // tableInResult.indexes[constraintName]!.columns.push(columnName); - // } else { - // tableInResult.indexes[constraintName] = { - // name: constraintName, - // columns: [columnName], - // isUnique: isUnique, - // }; - // } - // } - } - if (progressCallback) { - progressCallback('indexes', indexesCount, 'done'); - // progressCallback("enums", 0, "fetching"); - progressCallback('enums', 0, 'done'); - } - - const views = await db.query( - `SELECT name AS view_name, sql AS sql FROM sqlite_master WHERE type = 'view';`, - ); - - viewsCount = views.length; - - if (progressCallback) { - progressCallback('views', viewsCount, 'fetching'); - } - for (const view of views) { - const viewName = view['view_name']; - const sql = view['sql']; - - const regex = new RegExp(`\\bAS\\b\\s+(SELECT.+)$`, 'i'); - const match = sql.match(regex); - - if (!match) { - console.log('Could not process view'); - process.exit(1); - } - - const viewDefinition = match[1] as string; - - const columns = result[viewName].columns; - delete result[viewName]; - - resultViews[viewName] = { - columns: columns, - isExisting: false, - name: viewName, - definition: viewDefinition, - }; - } - if (progressCallback) { - progressCallback('views', viewsCount, 'done'); - } - - const namedCheckPattern = /CONSTRAINT\s*["']?(\w+)["']?\s*CHECK\s*\((.*?)\)/gi; - const unnamedCheckPattern = /CHECK\s*\((.*?)\)/gi; - let checkCounter = 0; - const checkConstraints: Record = {}; - const checks = await db.query<{ - tableName: string; - sql: string; - }>(`SELECT - name as "tableName", - sql as "sql" - FROM sqlite_master - WHERE type = 'table' - AND ${filterIgnoredTablesByField('tbl_name')};`); - for (const check of checks) { - if (!tablesFilter(check.tableName)) continue; - - const { tableName, sql } = check; - - // Find named CHECK constraints - let namedChecks = [...sql.matchAll(namedCheckPattern)]; - if (namedChecks.length > 0) { - namedChecks.forEach(([_, checkName, checkValue]) => { - checkConstraints[checkName] = { - name: checkName, - value: checkValue.trim(), - }; - }); - } else { - // If no named constraints, find unnamed CHECK constraints and assign names - let unnamedChecks = [...sql.matchAll(unnamedCheckPattern)]; - unnamedChecks.forEach(([_, checkValue]) => { - let checkName = `${tableName}_check_${++checkCounter}`; - checkConstraints[checkName] = { - name: checkName, - value: checkValue.trim(), - }; - }); - } - - checksCount += Object.values(checkConstraints).length; - if (progressCallback) { - progressCallback('checks', checksCount, 'fetching'); - } - - const table = result[tableName]; - - if (!table) { - result[tableName] = { - name: tableName, - columns: {}, - compositePrimaryKeys: {}, - indexes: {}, - foreignKeys: {}, - uniqueConstraints: {}, - checkConstraints: checkConstraints, - }; - } else { - result[tableName]!.checkConstraints = checkConstraints; - } - } - - if (progressCallback) { - progressCallback('checks', checksCount, 'done'); - } - - return { - version: '6', - dialect: 'sqlite', - tables: result, - views: resultViews, - enums: {}, - _meta: { - tables: {}, - columns: {}, - }, - }; -}; diff --git a/drizzle-kit/src/serializer/utils.ts b/drizzle-kit/src/serializer/utils.ts deleted file mode 100644 index 18d5bb9ad8..0000000000 --- a/drizzle-kit/src/serializer/utils.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { SQL } from 'drizzle-orm'; -import { CasingCache, toCamelCase, toSnakeCase } from 'drizzle-orm/casing'; -import { CasingType } from '../cli/validations/common'; - -export function getColumnCasing( - column: { keyAsName: boolean; name: string | undefined }, - casing: CasingType | undefined, -) { - if (!column.name) return ''; - return !column.keyAsName || casing === undefined - ? column.name - : casing === 'camelCase' - ? toCamelCase(column.name) - : toSnakeCase(column.name); -} - -export const sqlToStr = (sql: SQL, casing: CasingType | undefined) => { - return sql.toQuery({ - escapeName: () => { - throw new Error("we don't support params for `sql` default values"); - }, - escapeParam: () => { - throw new Error("we don't support params for `sql` default values"); - }, - escapeString: () => { - throw new Error("we don't support params for `sql` default values"); - }, - casing: new CasingCache(casing), - }).sql; -}; - -export const sqlToStrGenerated = (sql: SQL, casing: CasingType | undefined) => { - return sql.toQuery({ - escapeName: () => { - throw new Error("we don't support params for `sql` default values"); - }, - escapeParam: () => { - throw new Error("we don't support params for `sql` default values"); - }, - escapeString: () => { - throw new Error("we don't support params for `sql` default values"); - }, - casing: new CasingCache(casing), - }).sql; -}; diff --git a/drizzle-kit/src/simulator.ts b/drizzle-kit/src/simulator.ts deleted file mode 100644 index 71dbac1aad..0000000000 --- a/drizzle-kit/src/simulator.ts +++ /dev/null @@ -1,157 +0,0 @@ -declare global { - interface Array { - exactlyOne(): T; - } -} - -Array.prototype.exactlyOne = function() { - if (this.length !== 1) { - return undefined; - } - return this[0]; -}; - -interface TablesHandler { - can(added: T[], removed: T[]): boolean; - handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] }; -} - -interface ColumnsHandler { - can(tableName: string, added: T[], removed: T[]): boolean; - handle( - tableName: string, - added: T[], - removed: T[], - ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] }; -} - -class DryRun implements TablesHandler { - can(added: T[], removed: T[]): boolean { - return added.length === 0 && removed.length === 0; - } - handle(added: T[], _: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { - return { created: added, deleted: [], renamed: [] }; - } -} - -// class Fallback implements Handler { -// can(_: Table[], __: Table[]): boolean { -// return true -// } -// handle(added: Table[], _: Table[]): { created: Table[]; deleted: Table[]; renamed: { from: Table; to: Table; }[]; } { -// return { created: added, deleted: , renamed: [] } -// } -// } - -class Case1 implements TablesHandler { - can(_: T[], removed: T[]): boolean { - return removed.length === 1 && removed[0].name === 'citiess'; - } - - handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { - return { created: added, deleted: removed, renamed: [] }; - } -} -class Case2 implements TablesHandler { - // authOtp, deleted, users -> authOtp renamed, cities added, deleted deleted - can(_: T[], removed: T[]): boolean { - return removed.length === 3 && removed[0].name === 'auth_otp'; - } - - handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { - return { created: added.slice(1), deleted: removed.slice(1), renamed: [{ from: removed[0], to: added[0] }] }; - } -} - -type Named = { name: string }; - -const handlers: TablesHandler[] = []; -handlers.push(new Case1()); -handlers.push(new Case2()); -handlers.push(new DryRun()); - -export const resolveTables = (added: T[], removed: T[]) => { - const handler = handlers.filter((it) => { - return it.can(added, removed); - }).exactlyOne(); - - if (!handler) { - console.log('added', added.map((it) => it.name).join()); - console.log('removed', removed.map((it) => it.name).join()); - throw new Error('No handler'); - } - - console.log(`Simluated by ${handler.constructor.name}`); - return handler.handle(added, removed); -}; -class LehaColumnsHandler implements ColumnsHandler { - can(tableName: string, _: T[], __: T[]): boolean { - return tableName === 'users'; - } - - handle( - tableName: string, - added: T[], - removed: T[], - ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { - return { tableName, created: [], deleted: [], renamed: [{ from: removed[0], to: added[0] }] }; - } -} - -class DryRunColumnsHandler implements ColumnsHandler { - can(tableName: string, _: T[], __: T[]): boolean { - return true; - } - - handle( - tableName: string, - added: T[], - removed: T[], - ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { - return { tableName, created: added, deleted: removed, renamed: [] }; - } -} - -class V1V2AuthOtpColumnsHandler implements ColumnsHandler { - can(tableName: string, _: T[], __: T[]): boolean { - return tableName === 'auth_otp'; - } - - handle( - tableName: string, - added: T[], - removed: T[], - ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { - const phonePrev = removed.filter((it) => it.name === 'phone')[0]; - const phoneNew = added.filter((it) => it.name === 'phone1')[0]; - - const newAdded = added.filter((it) => it.name !== 'phone1'); - const newRemoved = removed.filter((it) => it.name !== 'phone'); - - return { tableName, created: newAdded, deleted: newRemoved, renamed: [{ from: phonePrev, to: phoneNew }] }; - } - - // handle(tableName:string, added: T[], _: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T; }[]; } { - // return { created: added, deleted: [], renamed: [] } - // } -} - -const columnsHandlers: ColumnsHandler[] = []; -columnsHandlers.push(new V1V2AuthOtpColumnsHandler()); -columnsHandlers.push(new LehaColumnsHandler()); -columnsHandlers.push(new DryRunColumnsHandler()); - -export const resolveColumns = (tableName: string, added: T[], removed: T[]) => { - const handler = columnsHandlers.filter((it) => { - return it.can(tableName, added, removed); - })[0]; - - if (!handler) { - console.log('added', added.map((it) => it.name).join()); - console.log('removed', removed.map((it) => it.name).join()); - throw new Error('No columns handler for table: ' + tableName); - } - - console.log(`${tableName} columns simluated by ${handler.constructor.name}`); - return handler.handle(tableName, added, removed); -}; diff --git a/drizzle-kit/src/snapshotsDiffer.ts b/drizzle-kit/src/snapshotsDiffer.ts deleted file mode 100644 index 3a77c97623..0000000000 --- a/drizzle-kit/src/snapshotsDiffer.ts +++ /dev/null @@ -1,4331 +0,0 @@ -import { - any, - array, - boolean, - enum as enumType, - literal, - never, - object, - record, - string, - TypeOf, - union, - ZodTypeAny, -} from 'zod'; -import { applyJsonDiff, diffColumns, diffIndPolicies, diffPolicies, diffSchemasOrTables } from './jsonDiffer'; -import { fromJson } from './sqlgenerator'; - -import { - _prepareAddColumns, - _prepareDropColumns, - _prepareSqliteAddColumns, - JsonAddColumnStatement, - JsonAlterCompositePK, - JsonAlterIndPolicyStatement, - JsonAlterMySqlViewStatement, - JsonAlterPolicyStatement, - JsonAlterTableSetSchema, - JsonAlterUniqueConstraint, - JsonAlterViewStatement, - JsonCreateCheckConstraint, - JsonCreateCompositePK, - JsonCreateIndPolicyStatement, - JsonCreateMySqlViewStatement, - JsonCreatePgViewStatement, - JsonCreatePolicyStatement, - JsonCreateReferenceStatement, - JsonCreateSqliteViewStatement, - JsonCreateUniqueConstraint, - JsonDeleteCheckConstraint, - JsonDeleteCompositePK, - JsonDeleteUniqueConstraint, - JsonDisableRLSStatement, - JsonDropColumnStatement, - JsonDropIndPolicyStatement, - JsonDropPolicyStatement, - JsonDropViewStatement, - JsonEnableRLSStatement, - JsonIndRenamePolicyStatement, - JsonReferenceStatement, - JsonRenameColumnStatement, - JsonRenamePolicyStatement, - JsonRenameRoleStatement, - JsonRenameViewStatement, - JsonSqliteAddColumnStatement, - JsonStatement, - prepareAddCheckConstraint, - prepareAddCompositePrimaryKeyMySql, - prepareAddCompositePrimaryKeyPg, - prepareAddCompositePrimaryKeySqlite, - prepareAddUniqueConstraintPg as prepareAddUniqueConstraint, - prepareAddValuesToEnumJson, - prepareAlterColumnsMysql, - prepareAlterCompositePrimaryKeyMySql, - prepareAlterCompositePrimaryKeyPg, - prepareAlterCompositePrimaryKeySqlite, - prepareAlterIndPolicyJson, - prepareAlterPolicyJson, - prepareAlterReferencesJson, - prepareAlterRoleJson, - prepareAlterSequenceJson, - prepareCreateEnumJson, - prepareCreateIndexesJson, - prepareCreateIndPolicyJsons, - prepareCreatePolicyJsons, - prepareCreateReferencesJson, - prepareCreateRoleJson, - prepareCreateSchemasJson, - prepareCreateSequenceJson, - prepareDeleteCheckConstraint, - prepareDeleteCompositePrimaryKeyMySql, - prepareDeleteCompositePrimaryKeyPg, - prepareDeleteCompositePrimaryKeySqlite, - prepareDeleteSchemasJson as prepareDropSchemasJson, - prepareDeleteUniqueConstraintPg as prepareDeleteUniqueConstraint, - prepareDropEnumJson, - prepareDropEnumValues, - prepareDropIndexesJson, - prepareDropIndPolicyJsons, - prepareDropPolicyJsons, - prepareDropReferencesJson, - prepareDropRoleJson, - prepareDropSequenceJson, - prepareDropTableJson, - prepareDropViewJson, - prepareLibSQLCreateReferencesJson, - prepareLibSQLDropReferencesJson, - prepareMoveEnumJson, - prepareMoveSequenceJson, - prepareMySqlAlterView, - prepareMySqlCreateTableJson, - prepareMySqlCreateViewJson, - preparePgAlterColumns, - preparePgAlterViewAddWithOptionJson, - preparePgAlterViewAlterSchemaJson, - preparePgAlterViewAlterTablespaceJson, - preparePgAlterViewAlterUsingJson, - preparePgAlterViewDropWithOptionJson, - preparePgCreateIndexesJson, - preparePgCreateTableJson, - preparePgCreateViewJson, - prepareRenameColumns, - prepareRenameEnumJson, - prepareRenameIndPolicyJsons, - prepareRenamePolicyJsons, - prepareRenameRoleJson, - prepareRenameSchemasJson, - prepareRenameSequenceJson, - prepareRenameTableJson, - prepareRenameViewJson, - prepareSingleStoreCreateTableJson, - prepareSqliteAlterColumns, - prepareSQLiteCreateTable, - prepareSqliteCreateViewJson, -} from './jsonStatements'; - -import { Named, NamedWithSchema } from './cli/commands/migrate'; -import { mapEntries, mapKeys, mapValues } from './global'; -import { MySqlSchema, MySqlSchemaSquashed, MySqlSquasher, ViewSquashed } from './serializer/mysqlSchema'; -import { - mergedViewWithOption, - PgSchema, - PgSchemaSquashed, - PgSquasher, - Policy, - policy, - policySquashed, - Role, - roleSchema, - sequenceSquashed, - View, -} from './serializer/pgSchema'; -import { SingleStoreSchema, SingleStoreSchemaSquashed, SingleStoreSquasher } from './serializer/singlestoreSchema'; -import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher, View as SqliteView } from './serializer/sqliteSchema'; -import { libSQLCombineStatements, singleStoreCombineStatements, sqliteCombineStatements } from './statementCombiner'; -import { copy, prepareMigrationMeta } from './utils'; - -const makeChanged = (schema: T) => { - return object({ - type: enumType(['changed']), - old: schema, - new: schema, - }); -}; - -const makeSelfOrChanged = (schema: T) => { - return union([ - schema, - object({ - type: enumType(['changed']), - old: schema, - new: schema, - }), - ]); -}; - -export const makePatched = (schema: T) => { - return union([ - object({ - type: literal('added'), - value: schema, - }), - object({ - type: literal('deleted'), - value: schema, - }), - object({ - type: literal('changed'), - old: schema, - new: schema, - }), - ]); -}; - -export const makeSelfOrPatched = (schema: T) => { - return union([ - object({ - type: literal('none'), - value: schema, - }), - object({ - type: literal('added'), - value: schema, - }), - object({ - type: literal('deleted'), - value: schema, - }), - object({ - type: literal('changed'), - old: schema, - new: schema, - }), - ]); -}; - -const columnSchema = object({ - name: string(), - type: string(), - typeSchema: string().optional(), - primaryKey: boolean().optional(), - default: any().optional(), - notNull: boolean().optional(), - // should it be optional? should if be here? - autoincrement: boolean().optional(), - onUpdate: boolean().optional(), - isUnique: any().optional(), - uniqueName: string().optional(), - nullsNotDistinct: boolean().optional(), - generated: object({ - as: string(), - type: enumType(['stored', 'virtual']).default('stored'), - }).optional(), - identity: string().optional(), -}).strict(); - -const alteredColumnSchema = object({ - name: makeSelfOrChanged(string()), - type: makeChanged(string()).optional(), - default: makePatched(any()).optional(), - primaryKey: makePatched(boolean()).optional(), - notNull: makePatched(boolean()).optional(), - typeSchema: makePatched(string()).optional(), - onUpdate: makePatched(boolean()).optional(), - autoincrement: makePatched(boolean()).optional(), - generated: makePatched( - object({ - as: string(), - type: enumType(['stored', 'virtual']).default('stored'), - }), - ).optional(), - - identity: makePatched(string()).optional(), -}).strict(); - -const enumSchema = object({ - name: string(), - schema: string(), - values: array(string()), -}).strict(); - -const changedEnumSchema = object({ - name: string(), - schema: string(), - addedValues: object({ - before: string(), - value: string(), - }).array(), - deletedValues: array(string()), -}).strict(); - -const tableScheme = object({ - name: string(), - schema: string().default(''), - columns: record(string(), columnSchema), - indexes: record(string(), string()), - foreignKeys: record(string(), string()), - compositePrimaryKeys: record(string(), string()).default({}), - uniqueConstraints: record(string(), string()).default({}), - policies: record(string(), string()).default({}), - checkConstraints: record(string(), string()).default({}), - isRLSEnabled: boolean().default(false), -}).strict(); - -export const alteredTableScheme = object({ - name: string(), - schema: string(), - altered: alteredColumnSchema.array(), - addedIndexes: record(string(), string()), - deletedIndexes: record(string(), string()), - alteredIndexes: record( - string(), - object({ - __new: string(), - __old: string(), - }).strict(), - ), - addedForeignKeys: record(string(), string()), - deletedForeignKeys: record(string(), string()), - alteredForeignKeys: record( - string(), - object({ - __new: string(), - __old: string(), - }).strict(), - ), - addedCompositePKs: record(string(), string()), - deletedCompositePKs: record(string(), string()), - alteredCompositePKs: record( - string(), - object({ - __new: string(), - __old: string(), - }), - ), - addedUniqueConstraints: record(string(), string()), - deletedUniqueConstraints: record(string(), string()), - alteredUniqueConstraints: record( - string(), - object({ - __new: string(), - __old: string(), - }), - ), - addedPolicies: record(string(), string()), - deletedPolicies: record(string(), string()), - alteredPolicies: record( - string(), - object({ - __new: string(), - __old: string(), - }), - ), - addedCheckConstraints: record( - string(), - string(), - ), - deletedCheckConstraints: record( - string(), - string(), - ), - alteredCheckConstraints: record( - string(), - object({ - __new: string(), - __old: string(), - }), - ), -}).strict(); - -const alteredViewCommon = object({ - name: string(), - alteredDefinition: object({ - __old: string(), - __new: string(), - }).strict().optional(), - alteredExisting: object({ - __old: boolean(), - __new: boolean(), - }).strict().optional(), -}); - -export const alteredPgViewSchema = alteredViewCommon.merge( - object({ - schema: string(), - deletedWithOption: mergedViewWithOption.optional(), - addedWithOption: mergedViewWithOption.optional(), - addedWith: mergedViewWithOption.optional(), - deletedWith: mergedViewWithOption.optional(), - alteredWith: mergedViewWithOption.optional(), - alteredSchema: object({ - __old: string(), - __new: string(), - }).strict().optional(), - alteredTablespace: object({ - __old: string(), - __new: string(), - }).strict().optional(), - alteredUsing: object({ - __old: string(), - __new: string(), - }).strict().optional(), - }).strict(), -); - -const alteredMySqlViewSchema = alteredViewCommon.merge( - object({ - alteredMeta: object({ - __old: string(), - __new: string(), - }).strict().optional(), - }).strict(), -); - -export const diffResultScheme = object({ - alteredTablesWithColumns: alteredTableScheme.array(), - alteredEnums: changedEnumSchema.array(), - alteredSequences: sequenceSquashed.array(), - alteredRoles: roleSchema.array(), - alteredPolicies: policySquashed.array(), - alteredViews: alteredPgViewSchema.array(), -}).strict(); - -export const diffResultSchemeMysql = object({ - alteredTablesWithColumns: alteredTableScheme.array(), - alteredEnums: never().array(), - alteredViews: alteredMySqlViewSchema.array(), -}); - -export const diffResultSchemeSingleStore = object({ - alteredTablesWithColumns: alteredTableScheme.array(), - alteredEnums: never().array(), -}); - -export const diffResultSchemeSQLite = object({ - alteredTablesWithColumns: alteredTableScheme.array(), - alteredEnums: never().array(), - alteredViews: alteredViewCommon.array(), -}); - -export type Column = TypeOf; -export type AlteredColumn = TypeOf; -export type Enum = TypeOf; -export type Sequence = TypeOf; -export type Table = TypeOf; -export type AlteredTable = TypeOf; -export type DiffResult = TypeOf; -export type DiffResultMysql = TypeOf; -export type DiffResultSingleStore = TypeOf; -export type DiffResultSQLite = TypeOf; - -export interface ResolverInput { - created: T[]; - deleted: T[]; -} - -export interface ResolverOutput { - created: T[]; - renamed: { from: T; to: T }[]; - deleted: T[]; -} - -export interface ResolverOutputWithMoved { - created: T[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: T; to: T }[]; - deleted: T[]; -} - -export interface ColumnsResolverInput { - tableName: string; - schema: string; - created: T[]; - deleted: T[]; -} - -export interface TablePolicyResolverInput { - tableName: string; - schema: string; - created: T[]; - deleted: T[]; -} - -export interface TablePolicyResolverOutput { - tableName: string; - schema: string; - created: T[]; - renamed: { from: T; to: T }[]; - deleted: T[]; -} - -export interface PolicyResolverInput { - created: T[]; - deleted: T[]; -} - -export interface PolicyResolverOutput { - created: T[]; - renamed: { from: T; to: T }[]; - deleted: T[]; -} - -export interface RolesResolverInput { - created: T[]; - deleted: T[]; -} - -export interface RolesResolverOutput { - created: T[]; - renamed: { from: T; to: T }[]; - deleted: T[]; -} - -export interface ColumnsResolverOutput { - tableName: string; - schema: string; - created: T[]; - renamed: { from: T; to: T }[]; - deleted: T[]; -} - -const schemaChangeFor = ( - table: NamedWithSchema, - renamedSchemas: { from: Named; to: Named }[], -) => { - for (let ren of renamedSchemas) { - if (table.schema === ren.from.name) { - return { key: `${ren.to.name}.${table.name}`, schema: ren.to.name }; - } - } - - return { - key: `${table.schema || 'public'}.${table.name}`, - schema: table.schema, - }; -}; - -const nameChangeFor = (table: Named, renamed: { from: Named; to: Named }[]) => { - for (let ren of renamed) { - if (table.name === ren.from.name) { - return { name: ren.to.name }; - } - } - - return { - name: table.name, - }; -}; - -const nameSchemaChangeFor = ( - table: NamedWithSchema, - renamedTables: { from: NamedWithSchema; to: NamedWithSchema }[], -) => { - for (let ren of renamedTables) { - if (table.name === ren.from.name && table.schema === ren.from.schema) { - return { - key: `${ren.to.schema || 'public'}.${ren.to.name}`, - name: ren.to.name, - schema: ren.to.schema, - }; - } - } - - return { - key: `${table.schema || 'public'}.${table.name}`, - name: table.name, - schema: table.schema, - }; -}; - -const columnChangeFor = ( - column: string, - renamedColumns: { from: Named; to: Named }[], -) => { - for (let ren of renamedColumns) { - if (column === ren.from.name) { - return ren.to.name; - } - } - - return column; -}; - -// resolve roles same as enums -// create new json statements -// sql generators - -// tests everything! - -export const applyPgSnapshotsDiff = async ( - json1: PgSchemaSquashed, - json2: PgSchemaSquashed, - schemasResolver: ( - input: ResolverInput, - ) => Promise>, - enumsResolver: ( - input: ResolverInput, - ) => Promise>, - sequencesResolver: ( - input: ResolverInput, - ) => Promise>, - policyResolver: ( - input: TablePolicyResolverInput, - ) => Promise>, - indPolicyResolver: ( - input: PolicyResolverInput, - ) => Promise>, - roleResolver: ( - input: RolesResolverInput, - ) => Promise>, - tablesResolver: ( - input: ResolverInput
, - ) => Promise>, - columnsResolver: ( - input: ColumnsResolverInput, - ) => Promise>, - viewsResolver: ( - input: ResolverInput, - ) => Promise>, - prevFull: PgSchema, - curFull: PgSchema, - action?: 'push' | undefined, -): Promise<{ - statements: JsonStatement[]; - sqlStatements: string[]; - _meta: - | { - schemas: {}; - tables: {}; - columns: {}; - } - | undefined; -}> => { - const schemasDiff = diffSchemasOrTables(json1.schemas, json2.schemas); - - const { - created: createdSchemas, - deleted: deletedSchemas, - renamed: renamedSchemas, - } = await schemasResolver({ - created: schemasDiff.added.map((it) => ({ name: it })), - deleted: schemasDiff.deleted.map((it) => ({ name: it })), - }); - - const schemasPatchedSnap1 = copy(json1); - schemasPatchedSnap1.tables = mapEntries( - schemasPatchedSnap1.tables, - (_, it) => { - const { key, schema } = schemaChangeFor(it, renamedSchemas); - it.schema = schema; - return [key, it]; - }, - ); - - schemasPatchedSnap1.enums = mapEntries(schemasPatchedSnap1.enums, (_, it) => { - const { key, schema } = schemaChangeFor(it, renamedSchemas); - it.schema = schema; - return [key, it]; - }); - - const enumsDiff = diffSchemasOrTables(schemasPatchedSnap1.enums, json2.enums); - - const { - created: createdEnums, - deleted: deletedEnums, - renamed: renamedEnums, - moved: movedEnums, - } = await enumsResolver({ - created: enumsDiff.added, - deleted: enumsDiff.deleted, - }); - - schemasPatchedSnap1.enums = mapEntries(schemasPatchedSnap1.enums, (_, it) => { - const { key, name, schema } = nameSchemaChangeFor(it, renamedEnums); - it.name = name; - it.schema = schema; - return [key, it]; - }); - - const columnTypesChangeMap = renamedEnums.reduce( - (acc, it) => { - acc[`${it.from.schema}.${it.from.name}`] = { - nameFrom: it.from.name, - nameTo: it.to.name, - schemaFrom: it.from.schema, - schemaTo: it.to.schema, - }; - return acc; - }, - {} as Record< - string, - { - nameFrom: string; - nameTo: string; - schemaFrom: string; - schemaTo: string; - } - >, - ); - - const columnTypesMovesMap = movedEnums.reduce( - (acc, it) => { - acc[`${it.schemaFrom}.${it.name}`] = { - nameFrom: it.name, - nameTo: it.name, - schemaFrom: it.schemaFrom, - schemaTo: it.schemaTo, - }; - return acc; - }, - {} as Record< - string, - { - nameFrom: string; - nameTo: string; - schemaFrom: string; - schemaTo: string; - } - >, - ); - - schemasPatchedSnap1.tables = mapEntries( - schemasPatchedSnap1.tables, - (tableKey, tableValue) => { - const patchedColumns = mapValues(tableValue.columns, (column) => { - const key = `${column.typeSchema || 'public'}.${column.type}`; - const change = columnTypesChangeMap[key] || columnTypesMovesMap[key]; - - if (change) { - column.type = change.nameTo; - column.typeSchema = change.schemaTo; - } - - return column; - }); - - tableValue.columns = patchedColumns; - return [tableKey, tableValue]; - }, - ); - - schemasPatchedSnap1.sequences = mapEntries( - schemasPatchedSnap1.sequences, - (_, it) => { - const { key, schema } = schemaChangeFor(it, renamedSchemas); - it.schema = schema; - return [key, it]; - }, - ); - - const sequencesDiff = diffSchemasOrTables( - schemasPatchedSnap1.sequences, - json2.sequences, - ); - - const { - created: createdSequences, - deleted: deletedSequences, - renamed: renamedSequences, - moved: movedSequences, - } = await sequencesResolver({ - created: sequencesDiff.added, - deleted: sequencesDiff.deleted, - }); - - schemasPatchedSnap1.sequences = mapEntries( - schemasPatchedSnap1.sequences, - (_, it) => { - const { key, name, schema } = nameSchemaChangeFor(it, renamedSequences); - it.name = name; - it.schema = schema; - return [key, it]; - }, - ); - - const sequencesChangeMap = renamedSequences.reduce( - (acc, it) => { - acc[`${it.from.schema}.${it.from.name}`] = { - nameFrom: it.from.name, - nameTo: it.to.name, - schemaFrom: it.from.schema, - schemaTo: it.to.schema, - }; - return acc; - }, - {} as Record< - string, - { - nameFrom: string; - nameTo: string; - schemaFrom: string; - schemaTo: string; - } - >, - ); - - const sequencesMovesMap = movedSequences.reduce( - (acc, it) => { - acc[`${it.schemaFrom}.${it.name}`] = { - nameFrom: it.name, - nameTo: it.name, - schemaFrom: it.schemaFrom, - schemaTo: it.schemaTo, - }; - return acc; - }, - {} as Record< - string, - { - nameFrom: string; - nameTo: string; - schemaFrom: string; - schemaTo: string; - } - >, - ); - - schemasPatchedSnap1.tables = mapEntries( - schemasPatchedSnap1.tables, - (tableKey, tableValue) => { - const patchedColumns = mapValues(tableValue.columns, (column) => { - const key = `${column.typeSchema || 'public'}.${column.type}`; - const change = sequencesChangeMap[key] || sequencesMovesMap[key]; - - if (change) { - column.type = change.nameTo; - column.typeSchema = change.schemaTo; - } - - return column; - }); - - tableValue.columns = patchedColumns; - return [tableKey, tableValue]; - }, - ); - - const rolesDiff = diffSchemasOrTables( - schemasPatchedSnap1.roles, - json2.roles, - ); - - const { - created: createdRoles, - deleted: deletedRoles, - renamed: renamedRoles, - } = await roleResolver({ - created: rolesDiff.added, - deleted: rolesDiff.deleted, - }); - - schemasPatchedSnap1.roles = mapEntries( - schemasPatchedSnap1.roles, - (_, it) => { - const { name } = nameChangeFor(it, renamedRoles); - it.name = name; - return [name, it]; - }, - ); - - const rolesChangeMap = renamedRoles.reduce( - (acc, it) => { - acc[it.from.name] = { - nameFrom: it.from.name, - nameTo: it.to.name, - }; - return acc; - }, - {} as Record< - string, - { - nameFrom: string; - nameTo: string; - } - >, - ); - - schemasPatchedSnap1.roles = mapEntries( - schemasPatchedSnap1.roles, - (roleKey, roleValue) => { - const key = roleKey; - const change = rolesChangeMap[key]; - - if (change) { - roleValue.name = change.nameTo; - } - - return [roleKey, roleValue]; - }, - ); - - const tablesDiff = diffSchemasOrTables( - schemasPatchedSnap1.tables as Record, - json2.tables, - ); - - const { - created: createdTables, - deleted: deletedTables, - moved: movedTables, - renamed: renamedTables, // renamed or moved - } = await tablesResolver({ - created: tablesDiff.added, - deleted: tablesDiff.deleted, - }); - - const tablesPatchedSnap1 = copy(schemasPatchedSnap1); - tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { - const { key, name, schema } = nameSchemaChangeFor(it, renamedTables); - it.name = name; - it.schema = schema; - return [key, it]; - }); - - const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); - - const columnRenames = [] as { - table: string; - schema: string; - renames: { from: Column; to: Column }[]; - }[]; - - const columnCreates = [] as { - table: string; - schema: string; - columns: Column[]; - }[]; - - const columnDeletes = [] as { - table: string; - schema: string; - columns: Column[]; - }[]; - - for (let entry of Object.values(res)) { - const { renamed, created, deleted } = await columnsResolver({ - tableName: entry.name, - schema: entry.schema, - deleted: entry.columns.deleted, - created: entry.columns.added, - }); - - if (created.length > 0) { - columnCreates.push({ - table: entry.name, - schema: entry.schema, - columns: created, - }); - } - - if (deleted.length > 0) { - columnDeletes.push({ - table: entry.name, - schema: entry.schema, - columns: deleted, - }); - } - - if (renamed.length > 0) { - columnRenames.push({ - table: entry.name, - schema: entry.schema, - renames: renamed, - }); - } - } - - const columnRenamesDict = columnRenames.reduce( - (acc, it) => { - acc[`${it.schema || 'public'}.${it.table}`] = it.renames; - return acc; - }, - {} as Record< - string, - { - from: Named; - to: Named; - }[] - >, - ); - - const columnsPatchedSnap1 = copy(tablesPatchedSnap1); - columnsPatchedSnap1.tables = mapEntries( - columnsPatchedSnap1.tables, - (tableKey, tableValue) => { - const patchedColumns = mapKeys( - tableValue.columns, - (columnKey, column) => { - const rens = columnRenamesDict[ - `${tableValue.schema || 'public'}.${tableValue.name}` - ] || []; - - const newName = columnChangeFor(columnKey, rens); - column.name = newName; - return newName; - }, - ); - - tableValue.columns = patchedColumns; - return [tableKey, tableValue]; - }, - ); - - //// Policies - - const policyRes = diffPolicies(tablesPatchedSnap1.tables, json2.tables); - - const policyRenames = [] as { - table: string; - schema: string; - renames: { from: Policy; to: Policy }[]; - }[]; - - const policyCreates = [] as { - table: string; - schema: string; - columns: Policy[]; - }[]; - - const policyDeletes = [] as { - table: string; - schema: string; - columns: Policy[]; - }[]; - - for (let entry of Object.values(policyRes)) { - const { renamed, created, deleted } = await policyResolver({ - tableName: entry.name, - schema: entry.schema, - deleted: entry.policies.deleted.map( - action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy, - ), - created: entry.policies.added.map(action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy), - }); - - if (created.length > 0) { - policyCreates.push({ - table: entry.name, - schema: entry.schema, - columns: created, - }); - } - - if (deleted.length > 0) { - policyDeletes.push({ - table: entry.name, - schema: entry.schema, - columns: deleted, - }); - } - - if (renamed.length > 0) { - policyRenames.push({ - table: entry.name, - schema: entry.schema, - renames: renamed, - }); - } - } - - const policyRenamesDict = columnRenames.reduce( - (acc, it) => { - acc[`${it.schema || 'public'}.${it.table}`] = it.renames; - return acc; - }, - {} as Record< - string, - { - from: Named; - to: Named; - }[] - >, - ); - - const policyPatchedSnap1 = copy(tablesPatchedSnap1); - policyPatchedSnap1.tables = mapEntries( - policyPatchedSnap1.tables, - (tableKey, tableValue) => { - const patchedPolicies = mapKeys( - tableValue.policies, - (policyKey, policy) => { - const rens = policyRenamesDict[ - `${tableValue.schema || 'public'}.${tableValue.name}` - ] || []; - - const newName = columnChangeFor(policyKey, rens); - const unsquashedPolicy = action === 'push' - ? PgSquasher.unsquashPolicyPush(policy) - : PgSquasher.unsquashPolicy(policy); - unsquashedPolicy.name = newName; - policy = PgSquasher.squashPolicy(unsquashedPolicy); - return newName; - }, - ); - - tableValue.policies = patchedPolicies; - return [tableKey, tableValue]; - }, - ); - - //// Individual policies - - const indPolicyRes = diffIndPolicies(policyPatchedSnap1.policies, json2.policies); - - const indPolicyCreates = [] as { - policies: Policy[]; - }[]; - - const indPolicyDeletes = [] as { - policies: Policy[]; - }[]; - - const { renamed: indPolicyRenames, created, deleted } = await indPolicyResolver({ - deleted: indPolicyRes.deleted.map((t) => - action === 'push' ? PgSquasher.unsquashPolicyPush(t.values) : PgSquasher.unsquashPolicy(t.values) - ), - created: indPolicyRes.added.map((t) => - action === 'push' ? PgSquasher.unsquashPolicyPush(t.values) : PgSquasher.unsquashPolicy(t.values) - ), - }); - - if (created.length > 0) { - indPolicyCreates.push({ - policies: created, - }); - } - - if (deleted.length > 0) { - indPolicyDeletes.push({ - policies: deleted, - }); - } - - const indPolicyRenamesDict = indPolicyRenames.reduce( - (acc, it) => { - acc[it.from.name] = { - nameFrom: it.from.name, - nameTo: it.to.name, - }; - return acc; - }, - {} as Record< - string, - { - nameFrom: string; - nameTo: string; - } - >, - ); - - const indPolicyPatchedSnap1 = copy(policyPatchedSnap1); - indPolicyPatchedSnap1.policies = mapEntries( - indPolicyPatchedSnap1.policies, - (policyKey, policyValue) => { - const key = policyKey; - const change = indPolicyRenamesDict[key]; - - if (change) { - policyValue.name = change.nameTo; - } - - return [policyKey, policyValue]; - }, - ); - - //// - const viewsDiff = diffSchemasOrTables(indPolicyPatchedSnap1.views, json2.views); - - const { - created: createdViews, - deleted: deletedViews, - renamed: renamedViews, - moved: movedViews, - } = await viewsResolver({ - created: viewsDiff.added, - deleted: viewsDiff.deleted, - }); - - const renamesViewDic: Record = {}; - renamedViews.forEach((it) => { - renamesViewDic[`${it.from.schema}.${it.from.name}`] = { to: it.to.name, from: it.from.name }; - }); - - const movedViewDic: Record = {}; - movedViews.forEach((it) => { - movedViewDic[`${it.schemaFrom}.${it.name}`] = { to: it.schemaTo, from: it.schemaFrom }; - }); - - const viewsPatchedSnap1 = copy(policyPatchedSnap1); - viewsPatchedSnap1.views = mapEntries( - viewsPatchedSnap1.views, - (viewKey, viewValue) => { - const rename = renamesViewDic[`${viewValue.schema}.${viewValue.name}`]; - const moved = movedViewDic[`${viewValue.schema}.${viewValue.name}`]; - - if (rename) { - viewValue.name = rename.to; - viewKey = `${viewValue.schema}.${viewValue.name}`; - } - - if (moved) viewKey = `${moved.to}.${viewValue.name}`; - - return [viewKey, viewValue]; - }, - ); - - const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); - - const typedResult: DiffResult = diffResultScheme.parse(diffResult); - - const jsonStatements: JsonStatement[] = []; - - const jsonCreateIndexesForCreatedTables = createdTables - .map((it) => { - return preparePgCreateIndexesJson( - it.name, - it.schema, - it.indexes, - curFull, - action, - ); - }) - .flat(); - - const jsonDropTables = deletedTables.map((it) => { - return prepareDropTableJson(it); - }); - - const jsonRenameTables = renamedTables.map((it) => { - return prepareRenameTableJson(it.from, it.to); - }); - - const alteredTables = typedResult.alteredTablesWithColumns; - - const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = []; - const jsonDropColumnsStatemets: JsonDropColumnStatement[] = []; - const jsonAddColumnsStatemets: JsonAddColumnStatement[] = []; - - for (let it of columnRenames) { - jsonRenameColumnsStatements.push( - ...prepareRenameColumns(it.table, it.schema, it.renames), - ); - } - - for (let it of columnDeletes) { - jsonDropColumnsStatemets.push( - ..._prepareDropColumns(it.table, it.schema, it.columns), - ); - } - - for (let it of columnCreates) { - jsonAddColumnsStatemets.push( - ..._prepareAddColumns(it.table, it.schema, it.columns), - ); - } - - const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; - const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; - const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; - - const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - const jsonSetTableSchemas: JsonAlterTableSetSchema[] = []; - - if (movedTables) { - for (let it of movedTables) { - jsonSetTableSchemas.push({ - type: 'alter_table_set_schema', - tableName: it.name, - schemaFrom: it.schemaFrom || 'public', - schemaTo: it.schemaTo || 'public', - }); - } - } - - const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; - const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; - - for (let it of alteredTables) { - // This part is needed to make sure that same columns in a table are not triggered for change - // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name - // We double-check that pk with same set of columns are both in added and deleted diffs - let addedColumns: { name: string; columns: string[] } | undefined; - for (const addedPkName of Object.keys(it.addedCompositePKs)) { - const addedPkColumns = it.addedCompositePKs[addedPkName]; - addedColumns = PgSquasher.unsquashPK(addedPkColumns); - } - - let deletedColumns: { name: string; columns: string[] } | undefined; - for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { - const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; - deletedColumns = PgSquasher.unsquashPK(deletedPkColumns); - } - - // Don't need to sort, but need to add tests for it - // addedColumns.sort(); - // deletedColumns.sort(); - const doPerformDeleteAndCreate = JSON.stringify(addedColumns ?? {}) !== JSON.stringify(deletedColumns ?? {}); - - let addedCompositePKs: JsonCreateCompositePK[] = []; - let deletedCompositePKs: JsonDeleteCompositePK[] = []; - let alteredCompositePKs: JsonAlterCompositePK[] = []; - if (doPerformDeleteAndCreate) { - addedCompositePKs = prepareAddCompositePrimaryKeyPg( - it.name, - it.schema, - it.addedCompositePKs, - curFull as PgSchema, - ); - deletedCompositePKs = prepareDeleteCompositePrimaryKeyPg( - it.name, - it.schema, - it.deletedCompositePKs, - prevFull as PgSchema, - ); - } - alteredCompositePKs = prepareAlterCompositePrimaryKeyPg( - it.name, - it.schema, - it.alteredCompositePKs, - prevFull as PgSchema, - curFull as PgSchema, - ); - - // add logic for unique constraints - let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - let createCheckConstraints: JsonCreateCheckConstraint[] = []; - let deleteCheckConstraints: JsonDeleteCheckConstraint[] = []; - - addedUniqueConstraints = prepareAddUniqueConstraint( - it.name, - it.schema, - it.addedUniqueConstraints, - ); - deletedUniqueConstraints = prepareDeleteUniqueConstraint( - it.name, - it.schema, - it.deletedUniqueConstraints, - ); - if (it.alteredUniqueConstraints) { - const added: Record = {}; - const deleted: Record = {}; - for (const k of Object.keys(it.alteredUniqueConstraints)) { - added[k] = it.alteredUniqueConstraints[k].__new; - deleted[k] = it.alteredUniqueConstraints[k].__old; - } - addedUniqueConstraints.push( - ...prepareAddUniqueConstraint(it.name, it.schema, added), - ); - deletedUniqueConstraints.push( - ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), - ); - } - - createCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); - deleteCheckConstraints = prepareDeleteCheckConstraint( - it.name, - it.schema, - it.deletedCheckConstraints, - ); - - if (it.alteredCheckConstraints && action !== 'push') { - const added: Record = {}; - const deleted: Record = {}; - - for (const k of Object.keys(it.alteredCheckConstraints)) { - added[k] = it.alteredCheckConstraints[k].__new; - deleted[k] = it.alteredCheckConstraints[k].__old; - } - createCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); - deleteCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); - } - - jsonCreatedCheckConstraints.push(...createCheckConstraints); - jsonDeletedCheckConstraints.push(...deleteCheckConstraints); - - jsonAddedCompositePKs.push(...addedCompositePKs); - jsonDeletedCompositePKs.push(...deletedCompositePKs); - jsonAlteredCompositePKs.push(...alteredCompositePKs); - - jsonAddedUniqueConstraints.push(...addedUniqueConstraints); - jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); - jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); - } - - const rColumns = jsonRenameColumnsStatements.map((it) => { - const tableName = it.tableName; - const schema = it.schema; - return { - from: { schema, table: tableName, column: it.oldColumnName }, - to: { schema, table: tableName, column: it.newColumnName }, - }; - }); - - const jsonTableAlternations = alteredTables - .map((it) => { - return preparePgAlterColumns( - it.name, - it.schema, - it.altered, - json2, - json1, - action, - ); - }) - .flat(); - - const jsonCreateIndexesFoAlteredTables = alteredTables - .map((it) => { - return preparePgCreateIndexesJson( - it.name, - it.schema, - it.addedIndexes || {}, - curFull, - action, - ); - }) - .flat(); - - const jsonDropIndexesForAllAlteredTables = alteredTables - .map((it) => { - return prepareDropIndexesJson( - it.name, - it.schema, - it.deletedIndexes || {}, - ); - }) - .flat(); - - const jsonCreatePoliciesStatements: JsonCreatePolicyStatement[] = []; - const jsonDropPoliciesStatements: JsonDropPolicyStatement[] = []; - const jsonAlterPoliciesStatements: JsonAlterPolicyStatement[] = []; - const jsonRenamePoliciesStatements: JsonRenamePolicyStatement[] = []; - - const jsonRenameIndPoliciesStatements: JsonIndRenamePolicyStatement[] = []; - const jsonCreateIndPoliciesStatements: JsonCreateIndPolicyStatement[] = []; - const jsonDropIndPoliciesStatements: JsonDropIndPolicyStatement[] = []; - const jsonAlterIndPoliciesStatements: JsonAlterIndPolicyStatement[] = []; - - const jsonEnableRLSStatements: JsonEnableRLSStatement[] = []; - const jsonDisableRLSStatements: JsonDisableRLSStatement[] = []; - - for (let it of indPolicyRenames) { - jsonRenameIndPoliciesStatements.push( - ...prepareRenameIndPolicyJsons([it]), - ); - } - - for (const it of indPolicyCreates) { - jsonCreateIndPoliciesStatements.push( - ...prepareCreateIndPolicyJsons( - it.policies, - ), - ); - } - - for (const it of indPolicyDeletes) { - jsonDropIndPoliciesStatements.push( - ...prepareDropIndPolicyJsons( - it.policies, - ), - ); - } - - typedResult.alteredPolicies.forEach(({ values }) => { - // return prepareAlterIndPolicyJson(json1.policies[it.name], json2.policies[it.name]); - - const policy = action === 'push' ? PgSquasher.unsquashPolicyPush(values) : PgSquasher.unsquashPolicy(values); - - const newPolicy = action === 'push' - ? PgSquasher.unsquashPolicyPush(json2.policies[policy.name].values) - : PgSquasher.unsquashPolicy(json2.policies[policy.name].values); - const oldPolicy = action === 'push' - ? PgSquasher.unsquashPolicyPush(json2.policies[policy.name].values) - : PgSquasher.unsquashPolicy(json1.policies[policy.name].values); - - if (newPolicy.as !== oldPolicy.as) { - jsonDropIndPoliciesStatements.push( - ...prepareDropIndPolicyJsons( - [oldPolicy], - ), - ); - - jsonCreateIndPoliciesStatements.push( - ...prepareCreateIndPolicyJsons( - [newPolicy], - ), - ); - return; - } - - if (newPolicy.for !== oldPolicy.for) { - jsonDropIndPoliciesStatements.push( - ...prepareDropIndPolicyJsons( - [oldPolicy], - ), - ); - - jsonCreateIndPoliciesStatements.push( - ...prepareCreateIndPolicyJsons( - [newPolicy], - ), - ); - return; - } - - // alter - jsonAlterIndPoliciesStatements.push( - prepareAlterIndPolicyJson( - oldPolicy, - newPolicy, - ), - ); - }); - - for (let it of policyRenames) { - jsonRenamePoliciesStatements.push( - ...prepareRenamePolicyJsons(it.table, it.schema, it.renames), - ); - } - - for (const it of policyCreates) { - jsonCreatePoliciesStatements.push( - ...prepareCreatePolicyJsons( - it.table, - it.schema, - it.columns, - ), - ); - } - - for (const it of policyDeletes) { - jsonDropPoliciesStatements.push( - ...prepareDropPolicyJsons( - it.table, - it.schema, - it.columns, - ), - ); - } - - alteredTables.forEach((it) => { - // handle policies - Object.keys(it.alteredPolicies).forEach((policyName: string) => { - const newPolicy = action === 'push' - ? PgSquasher.unsquashPolicyPush(it.alteredPolicies[policyName].__new) - : PgSquasher.unsquashPolicy(it.alteredPolicies[policyName].__new); - const oldPolicy = action === 'push' - ? PgSquasher.unsquashPolicyPush(it.alteredPolicies[policyName].__old) - : PgSquasher.unsquashPolicy(it.alteredPolicies[policyName].__old); - - if (newPolicy.as !== oldPolicy.as) { - jsonDropPoliciesStatements.push( - ...prepareDropPolicyJsons( - it.name, - it.schema, - [oldPolicy], - ), - ); - - jsonCreatePoliciesStatements.push( - ...prepareCreatePolicyJsons( - it.name, - it.schema, - [newPolicy], - ), - ); - return; - } - - if (newPolicy.for !== oldPolicy.for) { - jsonDropPoliciesStatements.push( - ...prepareDropPolicyJsons( - it.name, - it.schema, - [oldPolicy], - ), - ); - - jsonCreatePoliciesStatements.push( - ...prepareCreatePolicyJsons( - it.name, - it.schema, - [newPolicy], - ), - ); - return; - } - - // alter - jsonAlterPoliciesStatements.push( - prepareAlterPolicyJson( - it.name, - it.schema, - it.alteredPolicies[policyName].__old, - it.alteredPolicies[policyName].__new, - ), - ); - }); - - // Handle enabling and disabling RLS - for (const table of Object.values(json2.tables)) { - const policiesInCurrentState = Object.keys(table.policies); - const tableInPreviousState = - columnsPatchedSnap1.tables[`${table.schema === '' ? 'public' : table.schema}.${table.name}`]; - const policiesInPreviousState = tableInPreviousState ? Object.keys(tableInPreviousState.policies) : []; - - // const indPoliciesInCurrentState = Object.keys(table.policies); - // const indPoliciesInPreviousState = Object.keys(columnsPatchedSnap1.policies); - - if ( - (policiesInPreviousState.length === 0 && policiesInCurrentState.length > 0) && !table.isRLSEnabled - ) { - jsonEnableRLSStatements.push({ type: 'enable_rls', tableName: table.name, schema: table.schema }); - } - - if ( - (policiesInPreviousState.length > 0 && policiesInCurrentState.length === 0) && !table.isRLSEnabled - ) { - jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); - } - - // handle table.isRLSEnabled - const wasRlsEnabled = tableInPreviousState ? tableInPreviousState.isRLSEnabled : false; - if (table.isRLSEnabled !== wasRlsEnabled) { - if (table.isRLSEnabled) { - // was force enabled - jsonEnableRLSStatements.push({ type: 'enable_rls', tableName: table.name, schema: table.schema }); - } else if ( - !table.isRLSEnabled && policiesInCurrentState.length === 0 - ) { - // was force disabled - jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); - } - } - } - - for (const table of Object.values(columnsPatchedSnap1.tables)) { - const tableInCurrentState = json2.tables[`${table.schema === '' ? 'public' : table.schema}.${table.name}`]; - - if (tableInCurrentState === undefined && !table.isRLSEnabled) { - jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); - } - } - - // handle indexes - const droppedIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__old; - return current; - }, - {} as Record, - ); - const createdIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__new; - return current; - }, - {} as Record, - ); - - jsonCreateIndexesFoAlteredTables.push( - ...preparePgCreateIndexesJson( - it.name, - it.schema, - createdIndexes || {}, - curFull, - action, - ), - ); - jsonDropIndexesForAllAlteredTables.push( - ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), - ); - }); - - const jsonCreateReferencesForCreatedTables: JsonCreateReferenceStatement[] = createdTables - .map((it) => { - return prepareCreateReferencesJson(it.name, it.schema, it.foreignKeys); - }) - .flat(); - - const jsonReferencesForAlteredTables: JsonReferenceStatement[] = alteredTables - .map((it) => { - const forAdded = prepareCreateReferencesJson( - it.name, - it.schema, - it.addedForeignKeys, - ); - - const forAltered = prepareDropReferencesJson( - it.name, - it.schema, - it.deletedForeignKeys, - ); - - const alteredFKs = prepareAlterReferencesJson( - it.name, - it.schema, - it.alteredForeignKeys, - ); - - return [...forAdded, ...forAltered, ...alteredFKs]; - }) - .flat(); - - const jsonCreatedReferencesForAlteredTables = jsonReferencesForAlteredTables.filter((t) => - t.type === 'create_reference' - ); - - const jsonDroppedReferencesForAlteredTables = jsonReferencesForAlteredTables.filter((t) => - t.type === 'delete_reference' - ); - - // Sequences - // - create sequence ✅ - // - create sequence inside schema ✅ - // - rename sequence ✅ - // - change sequence schema ✅ - // - change sequence schema + name ✅ - // - drop sequence - check if sequence is in use. If yes - ??? - // - change sequence values ✅ - - // Generated columns - // - add generated - // - drop generated - // - create table with generated - // - alter - should be not triggered, but should get warning - - const createEnums = createdEnums.map((it) => { - return prepareCreateEnumJson(it.name, it.schema, it.values); - }) ?? []; - - const dropEnums = deletedEnums.map((it) => { - return prepareDropEnumJson(it.name, it.schema); - }); - - const moveEnums = movedEnums.map((it) => { - return prepareMoveEnumJson(it.name, it.schemaFrom, it.schemaTo); - }); - - const renameEnums = renamedEnums.map((it) => { - return prepareRenameEnumJson(it.from.name, it.to.name, it.to.schema); - }); - - const jsonAlterEnumsWithAddedValues = typedResult.alteredEnums - .map((it) => { - return prepareAddValuesToEnumJson(it.name, it.schema, it.addedValues); - }) - .flat() ?? []; - - const jsonAlterEnumsWithDroppedValues = typedResult.alteredEnums - .map((it) => { - return prepareDropEnumValues(it.name, it.schema, it.deletedValues, curFull); - }) - .flat() ?? []; - - const createSequences = createdSequences.map((it) => { - return prepareCreateSequenceJson(it); - }) ?? []; - - const dropSequences = deletedSequences.map((it) => { - return prepareDropSequenceJson(it.name, it.schema); - }); - - const moveSequences = movedSequences.map((it) => { - return prepareMoveSequenceJson(it.name, it.schemaFrom, it.schemaTo); - }); - - const renameSequences = renamedSequences.map((it) => { - return prepareRenameSequenceJson(it.from.name, it.to.name, it.to.schema); - }); - - const jsonAlterSequences = typedResult.alteredSequences - .map((it) => { - return prepareAlterSequenceJson(it); - }) - .flat() ?? []; - - //////////// - - const createRoles = createdRoles.map((it) => { - return prepareCreateRoleJson(it); - }) ?? []; - - const dropRoles = deletedRoles.map((it) => { - return prepareDropRoleJson(it.name); - }); - - const renameRoles = renamedRoles.map((it) => { - return prepareRenameRoleJson(it.from.name, it.to.name); - }); - - const jsonAlterRoles = typedResult.alteredRoles - .map((it) => { - return prepareAlterRoleJson(it); - }) - .flat() ?? []; - - //////////// - const createSchemas = prepareCreateSchemasJson( - createdSchemas.map((it) => it.name), - ); - - const renameSchemas = prepareRenameSchemasJson( - renamedSchemas.map((it) => ({ from: it.from.name, to: it.to.name })), - ); - - const dropSchemas = prepareDropSchemasJson( - deletedSchemas.map((it) => it.name), - ); - - const createTables = createdTables.map((it) => { - return preparePgCreateTableJson(it, curFull); - }); - - jsonCreatePoliciesStatements.push(...([] as JsonCreatePolicyStatement[]).concat( - ...(createdTables.map((it) => - prepareCreatePolicyJsons( - it.name, - it.schema, - Object.values(it.policies).map(action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy), - ) - )), - )); - const createViews: JsonCreatePgViewStatement[] = []; - const dropViews: JsonDropViewStatement[] = []; - const renameViews: JsonRenameViewStatement[] = []; - const alterViews: JsonAlterViewStatement[] = []; - - createViews.push( - ...createdViews.filter((it) => !it.isExisting).map((it) => { - return preparePgCreateViewJson( - it.name, - it.schema, - it.definition!, - it.materialized, - it.withNoData, - it.with, - it.using, - it.tablespace, - ); - }), - ); - - dropViews.push( - ...deletedViews.filter((it) => !it.isExisting).map((it) => { - return prepareDropViewJson(it.name, it.schema, it.materialized); - }), - ); - - renameViews.push( - ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[`${it.from.schema}.${it.from.name}`].isExisting) - .map((it) => { - return prepareRenameViewJson(it.to.name, it.from.name, it.to.schema, it.to.materialized); - }), - ); - - alterViews.push( - ...movedViews.filter((it) => - !json2.views[`${it.schemaTo}.${it.name}`].isExisting && !json1.views[`${it.schemaFrom}.${it.name}`].isExisting - ).map((it) => { - return preparePgAlterViewAlterSchemaJson( - it.schemaTo, - it.schemaFrom, - it.name, - json2.views[`${it.schemaTo}.${it.name}`].materialized, - ); - }), - ); - - const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[`${it.schema}.${it.name}`].isExisting); - - for (const alteredView of alteredViews) { - const viewKey = `${alteredView.schema}.${alteredView.name}`; - - const { materialized, with: withOption, definition, withNoData, using, tablespace } = json2.views[viewKey]; - - if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { - dropViews.push(prepareDropViewJson(alteredView.name, alteredView.schema, materialized)); - - createViews.push( - preparePgCreateViewJson( - alteredView.name, - alteredView.schema, - definition!, - materialized, - withNoData, - withOption, - using, - tablespace, - ), - ); - - continue; - } - - if (alteredView.addedWithOption) { - alterViews.push( - preparePgAlterViewAddWithOptionJson( - alteredView.name, - alteredView.schema, - materialized, - alteredView.addedWithOption, - ), - ); - } - - if (alteredView.deletedWithOption) { - alterViews.push( - preparePgAlterViewDropWithOptionJson( - alteredView.name, - alteredView.schema, - materialized, - alteredView.deletedWithOption, - ), - ); - } - - if (alteredView.addedWith) { - alterViews.push( - preparePgAlterViewAddWithOptionJson( - alteredView.name, - alteredView.schema, - materialized, - alteredView.addedWith, - ), - ); - } - - if (alteredView.deletedWith) { - alterViews.push( - preparePgAlterViewDropWithOptionJson( - alteredView.name, - alteredView.schema, - materialized, - alteredView.deletedWith, - ), - ); - } - - if (alteredView.alteredWith) { - alterViews.push( - preparePgAlterViewAddWithOptionJson( - alteredView.name, - alteredView.schema, - materialized, - alteredView.alteredWith, - ), - ); - } - - if (alteredView.alteredTablespace) { - alterViews.push( - preparePgAlterViewAlterTablespaceJson( - alteredView.name, - alteredView.schema, - materialized, - alteredView.alteredTablespace.__new, - ), - ); - } - - if (alteredView.alteredUsing) { - alterViews.push( - preparePgAlterViewAlterUsingJson( - alteredView.name, - alteredView.schema, - materialized, - alteredView.alteredUsing.__new, - ), - ); - } - } - - jsonStatements.push(...createSchemas); - jsonStatements.push(...renameSchemas); - jsonStatements.push(...createEnums); - jsonStatements.push(...moveEnums); - jsonStatements.push(...renameEnums); - jsonStatements.push(...jsonAlterEnumsWithAddedValues); - - jsonStatements.push(...createSequences); - jsonStatements.push(...moveSequences); - jsonStatements.push(...renameSequences); - jsonStatements.push(...jsonAlterSequences); - - jsonStatements.push(...renameRoles); - jsonStatements.push(...dropRoles); - jsonStatements.push(...createRoles); - jsonStatements.push(...jsonAlterRoles); - - jsonStatements.push(...createTables); - - jsonStatements.push(...jsonEnableRLSStatements); - jsonStatements.push(...jsonDisableRLSStatements); - jsonStatements.push(...dropViews); - jsonStatements.push(...renameViews); - jsonStatements.push(...alterViews); - - jsonStatements.push(...jsonDropTables); - jsonStatements.push(...jsonSetTableSchemas); - jsonStatements.push(...jsonRenameTables); - jsonStatements.push(...jsonRenameColumnsStatements); - - jsonStatements.push(...jsonDeletedUniqueConstraints); - jsonStatements.push(...jsonDeletedCheckConstraints); - - jsonStatements.push(...jsonDroppedReferencesForAlteredTables); - - jsonStatements.push(...jsonAlterEnumsWithDroppedValues); - - // Will need to drop indexes before changing any columns in table - // Then should go column alternations and then index creation - jsonStatements.push(...jsonDropIndexesForAllAlteredTables); - - jsonStatements.push(...jsonDeletedCompositePKs); - jsonStatements.push(...jsonTableAlternations); - jsonStatements.push(...jsonAddedCompositePKs); - jsonStatements.push(...jsonAddColumnsStatemets); - - jsonStatements.push(...jsonCreateReferencesForCreatedTables); - jsonStatements.push(...jsonCreateIndexesForCreatedTables); - - jsonStatements.push(...jsonCreatedReferencesForAlteredTables); - jsonStatements.push(...jsonCreateIndexesFoAlteredTables); - - jsonStatements.push(...jsonDropColumnsStatemets); - jsonStatements.push(...jsonAlteredCompositePKs); - - jsonStatements.push(...jsonAddedUniqueConstraints); - jsonStatements.push(...jsonCreatedCheckConstraints); - - jsonStatements.push(...jsonAlteredUniqueConstraints); - - jsonStatements.push(...createViews); - - jsonStatements.push(...jsonRenamePoliciesStatements); - jsonStatements.push(...jsonDropPoliciesStatements); - jsonStatements.push(...jsonCreatePoliciesStatements); - jsonStatements.push(...jsonAlterPoliciesStatements); - - jsonStatements.push(...jsonRenameIndPoliciesStatements); - jsonStatements.push(...jsonDropIndPoliciesStatements); - jsonStatements.push(...jsonCreateIndPoliciesStatements); - jsonStatements.push(...jsonAlterIndPoliciesStatements); - - jsonStatements.push(...dropEnums); - jsonStatements.push(...dropSequences); - jsonStatements.push(...dropSchemas); - - // generate filters - const filteredJsonStatements = jsonStatements.filter((st) => { - if (st.type === 'alter_table_alter_column_drop_notnull') { - if ( - jsonStatements.find( - (it) => - it.type === 'alter_table_alter_column_drop_identity' - && it.tableName === st.tableName - && it.schema === st.schema, - ) - ) { - return false; - } - } - if (st.type === 'alter_table_alter_column_set_notnull') { - if ( - jsonStatements.find( - (it) => - it.type === 'alter_table_alter_column_set_identity' - && it.tableName === st.tableName - && it.schema === st.schema, - ) - ) { - return false; - } - } - return true; - }); - - // enum filters - // Need to find add and drop enum values in same enum and remove add values - const filteredEnumsJsonStatements = filteredJsonStatements.filter((st) => { - if (st.type === 'alter_type_add_value') { - if ( - filteredJsonStatements.find( - (it) => - it.type === 'alter_type_drop_value' - && it.name === st.name - && it.enumSchema === st.schema, - ) - ) { - return false; - } - } - return true; - }); - - // This is needed because in sql generator on type pg_alter_table_alter_column_set_type and alter_type_drop_value - // drizzle kit checks whether column has defaults to cast them to new types properly - const filteredEnums2JsonStatements = filteredEnumsJsonStatements.filter((st) => { - if (st.type === 'alter_table_alter_column_set_default') { - if ( - filteredEnumsJsonStatements.find( - (it) => - it.type === 'pg_alter_table_alter_column_set_type' - && it.columnDefault === st.newDefaultValue - && it.columnName === st.columnName - && it.tableName === st.tableName - && it.schema === st.schema, - ) - ) { - return false; - } - - if ( - filteredEnumsJsonStatements.find( - (it) => - it.type === 'alter_type_drop_value' - && it.columnsWithEnum.find((column) => - column.default === st.newDefaultValue - && column.column === st.columnName - && column.table === st.tableName - && column.tableSchema === st.schema - ), - ) - ) { - return false; - } - } - return true; - }); - - const sqlStatements = fromJson(filteredEnums2JsonStatements, 'postgresql', action); - - const uniqueSqlStatements: string[] = []; - sqlStatements.forEach((ss) => { - if (!uniqueSqlStatements.includes(ss)) { - uniqueSqlStatements.push(ss); - } - }); - - const rSchemas = renamedSchemas.map((it) => ({ - from: it.from.name, - to: it.to.name, - })); - - const rTables = renamedTables.map((it) => { - return { from: it.from, to: it.to }; - }); - - const _meta = prepareMigrationMeta(rSchemas, rTables, rColumns); - - return { - statements: filteredEnums2JsonStatements, - sqlStatements: uniqueSqlStatements, - _meta, - }; -}; - -export const applyMysqlSnapshotsDiff = async ( - json1: MySqlSchemaSquashed, - json2: MySqlSchemaSquashed, - tablesResolver: ( - input: ResolverInput
, - ) => Promise>, - columnsResolver: ( - input: ColumnsResolverInput, - ) => Promise>, - viewsResolver: ( - input: ResolverInput, - ) => Promise>, - prevFull: MySqlSchema, - curFull: MySqlSchema, - action?: 'push' | undefined, -): Promise<{ - statements: JsonStatement[]; - sqlStatements: string[]; - _meta: - | { - schemas: {}; - tables: {}; - columns: {}; - } - | undefined; -}> => { - // squash indexes and fks - - // squash uniqueIndexes and uniqueConstraint into constraints object - // it should be done for mysql only because it has no diffs for it - - // TODO: @AndriiSherman - // Add an upgrade to v6 and move all snaphosts to this strcutre - // After that we can generate mysql in 1 object directly(same as sqlite) - for (const tableName in json1.tables) { - const table = json1.tables[tableName]; - for (const indexName in table.indexes) { - const index = MySqlSquasher.unsquashIdx(table.indexes[indexName]); - if (index.isUnique) { - table.uniqueConstraints[indexName] = MySqlSquasher.squashUnique({ - name: index.name, - columns: index.columns, - }); - delete json1.tables[tableName].indexes[index.name]; - } - } - } - - for (const tableName in json2.tables) { - const table = json2.tables[tableName]; - for (const indexName in table.indexes) { - const index = MySqlSquasher.unsquashIdx(table.indexes[indexName]); - if (index.isUnique) { - table.uniqueConstraints[indexName] = MySqlSquasher.squashUnique({ - name: index.name, - columns: index.columns, - }); - delete json2.tables[tableName].indexes[index.name]; - } - } - } - - const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); - - const { - created: createdTables, - deleted: deletedTables, - renamed: renamedTables, // renamed or moved - } = await tablesResolver({ - created: tablesDiff.added, - deleted: tablesDiff.deleted, - }); - - const tablesPatchedSnap1 = copy(json1); - tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { - const { name } = nameChangeFor(it, renamedTables); - it.name = name; - return [name, it]; - }); - - const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); - const columnRenames = [] as { - table: string; - renames: { from: Column; to: Column }[]; - }[]; - - const columnCreates = [] as { - table: string; - columns: Column[]; - }[]; - - const columnDeletes = [] as { - table: string; - columns: Column[]; - }[]; - - for (let entry of Object.values(res)) { - const { renamed, created, deleted } = await columnsResolver({ - tableName: entry.name, - schema: entry.schema, - deleted: entry.columns.deleted, - created: entry.columns.added, - }); - - if (created.length > 0) { - columnCreates.push({ - table: entry.name, - columns: created, - }); - } - - if (deleted.length > 0) { - columnDeletes.push({ - table: entry.name, - columns: deleted, - }); - } - - if (renamed.length > 0) { - columnRenames.push({ - table: entry.name, - renames: renamed, - }); - } - } - - const columnRenamesDict = columnRenames.reduce( - (acc, it) => { - acc[it.table] = it.renames; - return acc; - }, - {} as Record< - string, - { - from: Named; - to: Named; - }[] - >, - ); - - const columnsPatchedSnap1 = copy(tablesPatchedSnap1); - columnsPatchedSnap1.tables = mapEntries( - columnsPatchedSnap1.tables, - (tableKey, tableValue) => { - const patchedColumns = mapKeys( - tableValue.columns, - (columnKey, column) => { - const rens = columnRenamesDict[tableValue.name] || []; - const newName = columnChangeFor(columnKey, rens); - column.name = newName; - return newName; - }, - ); - - tableValue.columns = patchedColumns; - return [tableKey, tableValue]; - }, - ); - - const viewsDiff = diffSchemasOrTables(json1.views, json2.views); - - const { - created: createdViews, - deleted: deletedViews, - renamed: renamedViews, // renamed or moved - } = await viewsResolver({ - created: viewsDiff.added, - deleted: viewsDiff.deleted, - }); - - const renamesViewDic: Record = {}; - renamedViews.forEach((it) => { - renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; - }); - - const viewsPatchedSnap1 = copy(columnsPatchedSnap1); - viewsPatchedSnap1.views = mapEntries( - viewsPatchedSnap1.views, - (viewKey, viewValue) => { - const rename = renamesViewDic[viewValue.name]; - - if (rename) { - viewValue.name = rename.to; - viewKey = rename.to; - } - - return [viewKey, viewValue]; - }, - ); - - const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); - - const typedResult: DiffResultMysql = diffResultSchemeMysql.parse(diffResult); - - const jsonStatements: JsonStatement[] = []; - - const jsonCreateIndexesForCreatedTables = createdTables - .map((it) => { - return prepareCreateIndexesJson( - it.name, - it.schema, - it.indexes, - curFull.internal, - ); - }) - .flat(); - - const jsonDropTables = deletedTables.map((it) => { - return prepareDropTableJson(it); - }); - - const jsonRenameTables = renamedTables.map((it) => { - return prepareRenameTableJson(it.from, it.to); - }); - - const alteredTables = typedResult.alteredTablesWithColumns; - - const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; - const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; - const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; - - const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; - const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; - - const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames - .map((it) => prepareRenameColumns(it.table, '', it.renames)) - .flat(); - - const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates - .map((it) => _prepareAddColumns(it.table, '', it.columns)) - .flat(); - - const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes - .map((it) => _prepareDropColumns(it.table, '', it.columns)) - .flat(); - - alteredTables.forEach((it) => { - // This part is needed to make sure that same columns in a table are not triggered for change - // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name - // We double-check that pk with same set of columns are both in added and deleted diffs - let addedColumns: string[] = []; - for (const addedPkName of Object.keys(it.addedCompositePKs)) { - const addedPkColumns = it.addedCompositePKs[addedPkName]; - addedColumns = MySqlSquasher.unsquashPK(addedPkColumns).columns; - } - - let deletedColumns: string[] = []; - for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { - const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; - deletedColumns = MySqlSquasher.unsquashPK(deletedPkColumns).columns; - } - - // Don't need to sort, but need to add tests for it - // addedColumns.sort(); - // deletedColumns.sort(); - const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); - - let addedCompositePKs: JsonCreateCompositePK[] = []; - let deletedCompositePKs: JsonDeleteCompositePK[] = []; - let alteredCompositePKs: JsonAlterCompositePK[] = []; - - addedCompositePKs = prepareAddCompositePrimaryKeyMySql( - it.name, - it.addedCompositePKs, - prevFull, - curFull, - ); - deletedCompositePKs = prepareDeleteCompositePrimaryKeyMySql( - it.name, - it.deletedCompositePKs, - prevFull, - ); - // } - alteredCompositePKs = prepareAlterCompositePrimaryKeyMySql( - it.name, - it.alteredCompositePKs, - prevFull, - curFull, - ); - - // add logic for unique constraints - let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - let createdCheckConstraints: JsonCreateCheckConstraint[] = []; - let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; - - addedUniqueConstraints = prepareAddUniqueConstraint( - it.name, - it.schema, - it.addedUniqueConstraints, - ); - deletedUniqueConstraints = prepareDeleteUniqueConstraint( - it.name, - it.schema, - it.deletedUniqueConstraints, - ); - if (it.alteredUniqueConstraints) { - const added: Record = {}; - const deleted: Record = {}; - for (const k of Object.keys(it.alteredUniqueConstraints)) { - added[k] = it.alteredUniqueConstraints[k].__new; - deleted[k] = it.alteredUniqueConstraints[k].__old; - } - addedUniqueConstraints.push( - ...prepareAddUniqueConstraint(it.name, it.schema, added), - ); - deletedUniqueConstraints.push( - ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), - ); - } - - createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); - deletedCheckConstraints = prepareDeleteCheckConstraint( - it.name, - it.schema, - it.deletedCheckConstraints, - ); - - // skip for push - if (it.alteredCheckConstraints && action !== 'push') { - const added: Record = {}; - const deleted: Record = {}; - - for (const k of Object.keys(it.alteredCheckConstraints)) { - added[k] = it.alteredCheckConstraints[k].__new; - deleted[k] = it.alteredCheckConstraints[k].__old; - } - createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); - deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); - } - - jsonAddedCompositePKs.push(...addedCompositePKs); - jsonDeletedCompositePKs.push(...deletedCompositePKs); - jsonAlteredCompositePKs.push(...alteredCompositePKs); - - jsonAddedUniqueConstraints.push(...addedUniqueConstraints); - jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); - jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); - - jsonCreatedCheckConstraints.push(...createdCheckConstraints); - jsonDeletedCheckConstraints.push(...deletedCheckConstraints); - }); - - const rColumns = jsonRenameColumnsStatements.map((it) => { - const tableName = it.tableName; - const schema = it.schema; - return { - from: { schema, table: tableName, column: it.oldColumnName }, - to: { schema, table: tableName, column: it.newColumnName }, - }; - }); - - const jsonTableAlternations = alteredTables - .map((it) => { - return prepareAlterColumnsMysql( - it.name, - it.schema, - it.altered, - json1, - json2, - action, - ); - }) - .flat(); - - const jsonCreateIndexesForAllAlteredTables = alteredTables - .map((it) => { - return prepareCreateIndexesJson( - it.name, - it.schema, - it.addedIndexes || {}, - curFull.internal, - ); - }) - .flat(); - - const jsonDropIndexesForAllAlteredTables = alteredTables - .map((it) => { - return prepareDropIndexesJson( - it.name, - it.schema, - it.deletedIndexes || {}, - ); - }) - .flat(); - - alteredTables.forEach((it) => { - const droppedIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__old; - return current; - }, - {} as Record, - ); - const createdIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__new; - return current; - }, - {} as Record, - ); - - jsonCreateIndexesForAllAlteredTables.push( - ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}), - ); - jsonDropIndexesForAllAlteredTables.push( - ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), - ); - }); - - const jsonCreateReferencesForCreatedTables: JsonCreateReferenceStatement[] = createdTables - .map((it) => { - return prepareCreateReferencesJson(it.name, it.schema, it.foreignKeys); - }) - .flat(); - - const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = alteredTables - .map((it) => { - const forAdded = prepareCreateReferencesJson( - it.name, - it.schema, - it.addedForeignKeys, - ); - - const forAltered = prepareDropReferencesJson( - it.name, - it.schema, - it.deletedForeignKeys, - ); - - const alteredFKs = prepareAlterReferencesJson( - it.name, - it.schema, - it.alteredForeignKeys, - ); - - return [...forAdded, ...forAltered, ...alteredFKs]; - }) - .flat(); - - const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( - (t) => t.type === 'create_reference', - ); - const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( - (t) => t.type === 'delete_reference', - ); - - const jsonMySqlCreateTables = createdTables.map((it) => { - return prepareMySqlCreateTableJson( - it, - curFull as MySqlSchema, - curFull.internal, - ); - }); - - const createViews: JsonCreateMySqlViewStatement[] = []; - const dropViews: JsonDropViewStatement[] = []; - const renameViews: JsonRenameViewStatement[] = []; - const alterViews: JsonAlterMySqlViewStatement[] = []; - - createViews.push( - ...createdViews.filter((it) => !it.isExisting).map((it) => { - return prepareMySqlCreateViewJson( - it.name, - it.definition!, - it.meta, - ); - }), - ); - - dropViews.push( - ...deletedViews.filter((it) => !it.isExisting).map((it) => { - return prepareDropViewJson(it.name); - }), - ); - - renameViews.push( - ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[it.from.name].isExisting).map((it) => { - return prepareRenameViewJson(it.to.name, it.from.name); - }), - ); - - const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); - - for (const alteredView of alteredViews) { - const { definition, meta } = json2.views[alteredView.name]; - - if (alteredView.alteredExisting) { - dropViews.push(prepareDropViewJson(alteredView.name)); - - createViews.push( - prepareMySqlCreateViewJson( - alteredView.name, - definition!, - meta, - ), - ); - - continue; - } - - if (alteredView.alteredDefinition && action !== 'push') { - createViews.push( - prepareMySqlCreateViewJson( - alteredView.name, - definition!, - meta, - true, - ), - ); - continue; - } - - if (alteredView.alteredMeta) { - const view = curFull['views'][alteredView.name]; - alterViews.push( - prepareMySqlAlterView(view), - ); - } - } - - jsonStatements.push(...jsonMySqlCreateTables); - - jsonStatements.push(...jsonDropTables); - jsonStatements.push(...jsonRenameTables); - jsonStatements.push(...jsonRenameColumnsStatements); - - jsonStatements.push(...dropViews); - jsonStatements.push(...renameViews); - jsonStatements.push(...alterViews); - - jsonStatements.push(...jsonDeletedUniqueConstraints); - jsonStatements.push(...jsonDeletedCheckConstraints); - - jsonStatements.push(...jsonDroppedReferencesForAlteredTables); - - // Will need to drop indexes before changing any columns in table - // Then should go column alternations and then index creation - jsonStatements.push(...jsonDropIndexesForAllAlteredTables); - - jsonStatements.push(...jsonDeletedCompositePKs); - jsonStatements.push(...jsonTableAlternations); - jsonStatements.push(...jsonAddedCompositePKs); - jsonStatements.push(...jsonAddColumnsStatemets); - - jsonStatements.push(...jsonAddedUniqueConstraints); - jsonStatements.push(...jsonDeletedUniqueConstraints); - - jsonStatements.push(...jsonCreateReferencesForCreatedTables); - jsonStatements.push(...jsonCreateIndexesForCreatedTables); - jsonStatements.push(...jsonCreatedCheckConstraints); - - jsonStatements.push(...jsonCreatedReferencesForAlteredTables); - jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); - - jsonStatements.push(...jsonDropColumnsStatemets); - - // jsonStatements.push(...jsonDeletedCompositePKs); - // jsonStatements.push(...jsonAddedCompositePKs); - jsonStatements.push(...jsonAlteredCompositePKs); - - jsonStatements.push(...createViews); - - jsonStatements.push(...jsonAlteredUniqueConstraints); - - const sqlStatements = fromJson(jsonStatements, 'mysql'); - - const uniqueSqlStatements: string[] = []; - sqlStatements.forEach((ss) => { - if (!uniqueSqlStatements.includes(ss)) { - uniqueSqlStatements.push(ss); - } - }); - - const rTables = renamedTables.map((it) => { - return { from: it.from, to: it.to }; - }); - - const _meta = prepareMigrationMeta([], rTables, rColumns); - - return { - statements: jsonStatements, - sqlStatements: uniqueSqlStatements, - _meta, - }; -}; - -export const applySingleStoreSnapshotsDiff = async ( - json1: SingleStoreSchemaSquashed, - json2: SingleStoreSchemaSquashed, - tablesResolver: ( - input: ResolverInput
, - ) => Promise>, - columnsResolver: ( - input: ColumnsResolverInput, - ) => Promise>, - /* viewsResolver: ( - input: ResolverInput, - ) => Promise>, */ - prevFull: SingleStoreSchema, - curFull: SingleStoreSchema, - action?: 'push' | undefined, -): Promise<{ - statements: JsonStatement[]; - sqlStatements: string[]; - _meta: - | { - schemas: {}; - tables: {}; - columns: {}; - } - | undefined; -}> => { - // squash indexes and fks - - // squash uniqueIndexes and uniqueConstraint into constraints object - // it should be done for singlestore only because it has no diffs for it - - // TODO: @AndriiSherman - // Add an upgrade to v6 and move all snaphosts to this strcutre - // After that we can generate singlestore in 1 object directly(same as sqlite) - for (const tableName in json1.tables) { - const table = json1.tables[tableName]; - for (const indexName in table.indexes) { - const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]); - if (index.isUnique) { - table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({ - name: index.name, - columns: index.columns, - }); - delete json1.tables[tableName].indexes[index.name]; - } - } - } - - for (const tableName in json2.tables) { - const table = json2.tables[tableName]; - for (const indexName in table.indexes) { - const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]); - if (index.isUnique) { - table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({ - name: index.name, - columns: index.columns, - }); - delete json2.tables[tableName].indexes[index.name]; - } - } - } - - const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); - - const { - created: createdTables, - deleted: deletedTables, - renamed: renamedTables, // renamed or moved - } = await tablesResolver({ - created: tablesDiff.added, - deleted: tablesDiff.deleted, - }); - - const tablesPatchedSnap1 = copy(json1); - tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { - const { name } = nameChangeFor(it, renamedTables); - it.name = name; - return [name, it]; - }); - - const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); - const columnRenames = [] as { - table: string; - renames: { from: Column; to: Column }[]; - }[]; - - const columnCreates = [] as { - table: string; - columns: Column[]; - }[]; - - const columnDeletes = [] as { - table: string; - columns: Column[]; - }[]; - - for (let entry of Object.values(res)) { - const { renamed, created, deleted } = await columnsResolver({ - tableName: entry.name, - schema: entry.schema, - deleted: entry.columns.deleted, - created: entry.columns.added, - }); - - if (created.length > 0) { - columnCreates.push({ - table: entry.name, - columns: created, - }); - } - - if (deleted.length > 0) { - columnDeletes.push({ - table: entry.name, - columns: deleted, - }); - } - - if (renamed.length > 0) { - columnRenames.push({ - table: entry.name, - renames: renamed, - }); - } - } - - const columnRenamesDict = columnRenames.reduce( - (acc, it) => { - acc[it.table] = it.renames; - return acc; - }, - {} as Record< - string, - { - from: Named; - to: Named; - }[] - >, - ); - - const columnsPatchedSnap1 = copy(tablesPatchedSnap1); - columnsPatchedSnap1.tables = mapEntries( - columnsPatchedSnap1.tables, - (tableKey, tableValue) => { - const patchedColumns = mapKeys( - tableValue.columns, - (columnKey, column) => { - const rens = columnRenamesDict[tableValue.name] || []; - const newName = columnChangeFor(columnKey, rens); - column.name = newName; - return newName; - }, - ); - - tableValue.columns = patchedColumns; - return [tableKey, tableValue]; - }, - ); - - /* const viewsDiff = diffSchemasOrTables(json1.views, json2.views); - - const { - created: createdViews, - deleted: deletedViews, - renamed: renamedViews, // renamed or moved - } = await viewsResolver({ - created: viewsDiff.added, - deleted: viewsDiff.deleted, - }); - - const renamesViewDic: Record = {}; - renamedViews.forEach((it) => { - renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; - }); - - const viewsPatchedSnap1 = copy(columnsPatchedSnap1); - viewsPatchedSnap1.views = mapEntries( - viewsPatchedSnap1.views, - (viewKey, viewValue) => { - const rename = renamesViewDic[viewValue.name]; - - if (rename) { - viewValue.name = rename.to; - viewKey = rename.to; - } - - return [viewKey, viewValue]; - }, - ); - */ - const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); // replace columnsPatchedSnap1 with viewsPatchedSnap1 - - const typedResult: DiffResultSingleStore = diffResultSchemeSingleStore.parse(diffResult); - - const jsonStatements: JsonStatement[] = []; - - const jsonCreateIndexesForCreatedTables = createdTables - .map((it) => { - return prepareCreateIndexesJson( - it.name, - it.schema, - it.indexes, - curFull.internal, - ); - }) - .flat(); - - const jsonDropTables = deletedTables.map((it) => { - return prepareDropTableJson(it); - }); - - const jsonRenameTables = renamedTables.map((it) => { - return prepareRenameTableJson(it.from, it.to); - }); - - const alteredTables = typedResult.alteredTablesWithColumns; - - const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; - - const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames - .map((it) => prepareRenameColumns(it.table, '', it.renames)) - .flat(); - - const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates - .map((it) => _prepareAddColumns(it.table, '', it.columns)) - .flat(); - - const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes - .map((it) => _prepareDropColumns(it.table, '', it.columns)) - .flat(); - - alteredTables.forEach((it) => { - // This part is needed to make sure that same columns in a table are not triggered for change - // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name - // We double-check that pk with same set of columns are both in added and deleted diffs - let addedColumns: string[] = []; - for (const addedPkName of Object.keys(it.addedCompositePKs)) { - const addedPkColumns = it.addedCompositePKs[addedPkName]; - addedColumns = SingleStoreSquasher.unsquashPK(addedPkColumns).columns; - } - - let deletedColumns: string[] = []; - for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { - const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; - deletedColumns = SingleStoreSquasher.unsquashPK(deletedPkColumns).columns; - } - - // Don't need to sort, but need to add tests for it - // addedColumns.sort(); - // deletedColumns.sort(); - const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); - - // add logic for unique constraints - let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - let createdCheckConstraints: JsonCreateCheckConstraint[] = []; - let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; - - addedUniqueConstraints = prepareAddUniqueConstraint( - it.name, - it.schema, - it.addedUniqueConstraints, - ); - deletedUniqueConstraints = prepareDeleteUniqueConstraint( - it.name, - it.schema, - it.deletedUniqueConstraints, - ); - if (it.alteredUniqueConstraints) { - const added: Record = {}; - const deleted: Record = {}; - for (const k of Object.keys(it.alteredUniqueConstraints)) { - added[k] = it.alteredUniqueConstraints[k].__new; - deleted[k] = it.alteredUniqueConstraints[k].__old; - } - addedUniqueConstraints.push( - ...prepareAddUniqueConstraint(it.name, it.schema, added), - ); - deletedUniqueConstraints.push( - ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), - ); - } - - createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); - deletedCheckConstraints = prepareDeleteCheckConstraint( - it.name, - it.schema, - it.deletedCheckConstraints, - ); - - // skip for push - if (it.alteredCheckConstraints && action !== 'push') { - const added: Record = {}; - const deleted: Record = {}; - - for (const k of Object.keys(it.alteredCheckConstraints)) { - added[k] = it.alteredCheckConstraints[k].__new; - deleted[k] = it.alteredCheckConstraints[k].__old; - } - createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); - deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); - } - - jsonAddedUniqueConstraints.push(...addedUniqueConstraints); - jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); - jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); - }); - - const rColumns = jsonRenameColumnsStatements.map((it) => { - const tableName = it.tableName; - const schema = it.schema; - return { - from: { schema, table: tableName, column: it.oldColumnName }, - to: { schema, table: tableName, column: it.newColumnName }, - }; - }); - - const jsonTableAlternations = alteredTables - .map((it) => { - return prepareAlterColumnsMysql( - it.name, - it.schema, - it.altered, - json1, - json2, - action, - ); - }) - .flat(); - - const jsonCreateIndexesForAllAlteredTables = alteredTables - .map((it) => { - return prepareCreateIndexesJson( - it.name, - it.schema, - it.addedIndexes || {}, - curFull.internal, - ); - }) - .flat(); - - const jsonDropIndexesForAllAlteredTables = alteredTables - .map((it) => { - return prepareDropIndexesJson( - it.name, - it.schema, - it.deletedIndexes || {}, - ); - }) - .flat(); - - alteredTables.forEach((it) => { - const droppedIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__old; - return current; - }, - {} as Record, - ); - const createdIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__new; - return current; - }, - {} as Record, - ); - - jsonCreateIndexesForAllAlteredTables.push( - ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}), - ); - jsonDropIndexesForAllAlteredTables.push( - ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), - ); - }); - - const jsonSingleStoreCreateTables = createdTables.map((it) => { - return prepareSingleStoreCreateTableJson( - it, - curFull as SingleStoreSchema, - curFull.internal, - ); - }); - - /* const createViews: JsonCreateSingleStoreViewStatement[] = []; - const dropViews: JsonDropViewStatement[] = []; - const renameViews: JsonRenameViewStatement[] = []; - const alterViews: JsonAlterSingleStoreViewStatement[] = []; - - createViews.push( - ...createdViews.filter((it) => !it.isExisting).map((it) => { - return prepareSingleStoreCreateViewJson( - it.name, - it.definition!, - it.meta, - ); - }), - ); - - dropViews.push( - ...deletedViews.filter((it) => !it.isExisting).map((it) => { - return prepareDropViewJson(it.name); - }), - ); - - renameViews.push( - ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[it.from.name].isExisting).map((it) => { - return prepareRenameViewJson(it.to.name, it.from.name); - }), - ); - - const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); - - for (const alteredView of alteredViews) { - const { definition, meta } = json2.views[alteredView.name]; - - if (alteredView.alteredExisting) { - dropViews.push(prepareDropViewJson(alteredView.name)); - - createViews.push( - prepareSingleStoreCreateViewJson( - alteredView.name, - definition!, - meta, - ), - ); - - continue; - } - - if (alteredView.alteredDefinition && action !== 'push') { - createViews.push( - prepareSingleStoreCreateViewJson( - alteredView.name, - definition!, - meta, - true, - ), - ); - continue; - } - - if (alteredView.alteredMeta) { - const view = curFull['views'][alteredView.name]; - alterViews.push( - prepareSingleStoreAlterView(view), - ); - } - } */ - - jsonStatements.push(...jsonSingleStoreCreateTables); - - jsonStatements.push(...jsonDropTables); - jsonStatements.push(...jsonRenameTables); - jsonStatements.push(...jsonRenameColumnsStatements); - - /*jsonStatements.push(...createViews); - jsonStatements.push(...dropViews); - jsonStatements.push(...renameViews); - jsonStatements.push(...alterViews); - */ - jsonStatements.push(...jsonDeletedUniqueConstraints); - - // Will need to drop indexes before changing any columns in table - // Then should go column alternations and then index creation - jsonStatements.push(...jsonDropIndexesForAllAlteredTables); - - jsonStatements.push(...jsonTableAlternations); - jsonStatements.push(...jsonAddedCompositePKs); - - jsonStatements.push(...jsonAddedUniqueConstraints); - jsonStatements.push(...jsonDeletedUniqueConstraints); - - jsonStatements.push(...jsonAddColumnsStatemets); - - jsonStatements.push(...jsonCreateIndexesForCreatedTables); - - jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); - - jsonStatements.push(...jsonDropColumnsStatemets); - - jsonStatements.push(...jsonAddedCompositePKs); - - jsonStatements.push(...jsonAlteredUniqueConstraints); - - const combinedJsonStatements = singleStoreCombineStatements(jsonStatements, json2); - const sqlStatements = fromJson(combinedJsonStatements, 'singlestore'); - - const uniqueSqlStatements: string[] = []; - sqlStatements.forEach((ss) => { - if (!uniqueSqlStatements.includes(ss)) { - uniqueSqlStatements.push(ss); - } - }); - - const rTables = renamedTables.map((it) => { - return { from: it.from, to: it.to }; - }); - - const _meta = prepareMigrationMeta([], rTables, rColumns); - - return { - statements: combinedJsonStatements, - sqlStatements: uniqueSqlStatements, - _meta, - }; -}; - -export const applySqliteSnapshotsDiff = async ( - json1: SQLiteSchemaSquashed, - json2: SQLiteSchemaSquashed, - tablesResolver: ( - input: ResolverInput
, - ) => Promise>, - columnsResolver: ( - input: ColumnsResolverInput, - ) => Promise>, - viewsResolver: ( - input: ResolverInput, - ) => Promise>, - prevFull: SQLiteSchema, - curFull: SQLiteSchema, - action?: 'push' | undefined, -): Promise<{ - statements: JsonStatement[]; - sqlStatements: string[]; - _meta: - | { - schemas: {}; - tables: {}; - columns: {}; - } - | undefined; -}> => { - const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); - - const { - created: createdTables, - deleted: deletedTables, - renamed: renamedTables, - } = await tablesResolver({ - created: tablesDiff.added, - deleted: tablesDiff.deleted, - }); - - const tablesPatchedSnap1 = copy(json1); - tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { - const { name } = nameChangeFor(it, renamedTables); - it.name = name; - return [name, it]; - }); - - const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); - - const columnRenames = [] as { - table: string; - renames: { from: Column; to: Column }[]; - }[]; - - const columnCreates = [] as { - table: string; - columns: Column[]; - }[]; - - const columnDeletes = [] as { - table: string; - columns: Column[]; - }[]; - - for (let entry of Object.values(res)) { - const { renamed, created, deleted } = await columnsResolver({ - tableName: entry.name, - schema: entry.schema, - deleted: entry.columns.deleted, - created: entry.columns.added, - }); - - if (created.length > 0) { - columnCreates.push({ - table: entry.name, - columns: created, - }); - } - - if (deleted.length > 0) { - columnDeletes.push({ - table: entry.name, - columns: deleted, - }); - } - - if (renamed.length > 0) { - columnRenames.push({ - table: entry.name, - renames: renamed, - }); - } - } - - const columnRenamesDict = columnRenames.reduce( - (acc, it) => { - acc[it.table] = it.renames; - return acc; - }, - {} as Record< - string, - { - from: Named; - to: Named; - }[] - >, - ); - - const columnsPatchedSnap1 = copy(tablesPatchedSnap1); - columnsPatchedSnap1.tables = mapEntries( - columnsPatchedSnap1.tables, - (tableKey, tableValue) => { - const patchedColumns = mapKeys( - tableValue.columns, - (columnKey, column) => { - const rens = columnRenamesDict[tableValue.name] || []; - const newName = columnChangeFor(columnKey, rens); - column.name = newName; - return newName; - }, - ); - - tableValue.columns = patchedColumns; - return [tableKey, tableValue]; - }, - ); - - const viewsDiff = diffSchemasOrTables(json1.views, json2.views); - - const { - created: createdViews, - deleted: deletedViews, - renamed: renamedViews, // renamed or moved - } = await viewsResolver({ - created: viewsDiff.added, - deleted: viewsDiff.deleted, - }); - - const renamesViewDic: Record = {}; - renamedViews.forEach((it) => { - renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; - }); - - const viewsPatchedSnap1 = copy(columnsPatchedSnap1); - viewsPatchedSnap1.views = mapEntries( - viewsPatchedSnap1.views, - (viewKey, viewValue) => { - const rename = renamesViewDic[viewValue.name]; - - if (rename) { - viewValue.name = rename.to; - } - - return [viewKey, viewValue]; - }, - ); - - const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); - - const typedResult = diffResultSchemeSQLite.parse(diffResult); - - // Map array of objects to map - const tablesMap: { - [key: string]: (typeof typedResult.alteredTablesWithColumns)[number]; - } = {}; - - typedResult.alteredTablesWithColumns.forEach((obj) => { - tablesMap[obj.name] = obj; - }); - - const jsonCreateTables = createdTables.map((it) => { - return prepareSQLiteCreateTable(it, action); - }); - - const jsonCreateIndexesForCreatedTables = createdTables - .map((it) => { - return prepareCreateIndexesJson( - it.name, - it.schema, - it.indexes, - curFull.internal, - ); - }) - .flat(); - - const jsonDropTables = deletedTables.map((it) => { - return prepareDropTableJson(it); - }); - - const jsonRenameTables = renamedTables.map((it) => { - return prepareRenameTableJson(it.from, it.to); - }); - - const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames - .map((it) => prepareRenameColumns(it.table, '', it.renames)) - .flat(); - - const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes - .map((it) => _prepareDropColumns(it.table, '', it.columns)) - .flat(); - - const jsonAddColumnsStatemets: JsonSqliteAddColumnStatement[] = columnCreates - .map((it) => { - return _prepareSqliteAddColumns( - it.table, - it.columns, - tablesMap[it.table] && tablesMap[it.table].addedForeignKeys - ? Object.values(tablesMap[it.table].addedForeignKeys) - : [], - ); - }) - .flat(); - - const allAltered = typedResult.alteredTablesWithColumns; - - const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; - const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; - const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; - - const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; - const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; - - allAltered.forEach((it) => { - // This part is needed to make sure that same columns in a table are not triggered for change - // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name - // We double-check that pk with same set of columns are both in added and deleted diffs - let addedColumns: string[] = []; - for (const addedPkName of Object.keys(it.addedCompositePKs)) { - const addedPkColumns = it.addedCompositePKs[addedPkName]; - addedColumns = SQLiteSquasher.unsquashPK(addedPkColumns); - } - - let deletedColumns: string[] = []; - for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { - const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; - deletedColumns = SQLiteSquasher.unsquashPK(deletedPkColumns); - } - - // Don't need to sort, but need to add tests for it - // addedColumns.sort(); - // deletedColumns.sort(); - - const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); - - let addedCompositePKs: JsonCreateCompositePK[] = []; - let deletedCompositePKs: JsonDeleteCompositePK[] = []; - let alteredCompositePKs: JsonAlterCompositePK[] = []; - if (doPerformDeleteAndCreate) { - addedCompositePKs = prepareAddCompositePrimaryKeySqlite( - it.name, - it.addedCompositePKs, - ); - deletedCompositePKs = prepareDeleteCompositePrimaryKeySqlite( - it.name, - it.deletedCompositePKs, - ); - } - alteredCompositePKs = prepareAlterCompositePrimaryKeySqlite( - it.name, - it.alteredCompositePKs, - ); - - // add logic for unique constraints - let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - addedUniqueConstraints = prepareAddUniqueConstraint( - it.name, - it.schema, - it.addedUniqueConstraints, - ); - deletedUniqueConstraints = prepareDeleteUniqueConstraint( - it.name, - it.schema, - it.deletedUniqueConstraints, - ); - if (it.alteredUniqueConstraints) { - const added: Record = {}; - const deleted: Record = {}; - for (const k of Object.keys(it.alteredUniqueConstraints)) { - added[k] = it.alteredUniqueConstraints[k].__new; - deleted[k] = it.alteredUniqueConstraints[k].__old; - } - addedUniqueConstraints.push( - ...prepareAddUniqueConstraint(it.name, it.schema, added), - ); - deletedUniqueConstraints.push( - ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), - ); - } - - let createdCheckConstraints: JsonCreateCheckConstraint[] = []; - let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; - - addedUniqueConstraints = prepareAddUniqueConstraint( - it.name, - it.schema, - it.addedUniqueConstraints, - ); - deletedUniqueConstraints = prepareDeleteUniqueConstraint( - it.name, - it.schema, - it.deletedUniqueConstraints, - ); - if (it.alteredUniqueConstraints) { - const added: Record = {}; - const deleted: Record = {}; - for (const k of Object.keys(it.alteredUniqueConstraints)) { - added[k] = it.alteredUniqueConstraints[k].__new; - deleted[k] = it.alteredUniqueConstraints[k].__old; - } - addedUniqueConstraints.push( - ...prepareAddUniqueConstraint(it.name, it.schema, added), - ); - deletedUniqueConstraints.push( - ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), - ); - } - - createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); - deletedCheckConstraints = prepareDeleteCheckConstraint( - it.name, - it.schema, - it.deletedCheckConstraints, - ); - - // skip for push - if (it.alteredCheckConstraints && action !== 'push') { - const added: Record = {}; - const deleted: Record = {}; - - for (const k of Object.keys(it.alteredCheckConstraints)) { - added[k] = it.alteredCheckConstraints[k].__new; - deleted[k] = it.alteredCheckConstraints[k].__old; - } - createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); - deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); - } - - jsonAddedCompositePKs.push(...addedCompositePKs); - jsonDeletedCompositePKs.push(...deletedCompositePKs); - jsonAlteredCompositePKs.push(...alteredCompositePKs); - - jsonAddedUniqueConstraints.push(...addedUniqueConstraints); - jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); - jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); - - jsonCreatedCheckConstraints.push(...createdCheckConstraints); - jsonDeletedCheckConstraints.push(...deletedCheckConstraints); - }); - - const rColumns = jsonRenameColumnsStatements.map((it) => { - const tableName = it.tableName; - const schema = it.schema; - return { - from: { schema, table: tableName, column: it.oldColumnName }, - to: { schema, table: tableName, column: it.newColumnName }, - }; - }); - - const jsonTableAlternations = allAltered - .map((it) => { - return prepareSqliteAlterColumns(it.name, it.schema, it.altered, json2); - }) - .flat(); - - const jsonCreateIndexesForAllAlteredTables = allAltered - .map((it) => { - return prepareCreateIndexesJson( - it.name, - it.schema, - it.addedIndexes || {}, - curFull.internal, - ); - }) - .flat(); - - const jsonDropIndexesForAllAlteredTables = allAltered - .map((it) => { - return prepareDropIndexesJson( - it.name, - it.schema, - it.deletedIndexes || {}, - ); - }) - .flat(); - - allAltered.forEach((it) => { - const droppedIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__old; - return current; - }, - {} as Record, - ); - const createdIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__new; - return current; - }, - {} as Record, - ); - - jsonCreateIndexesForAllAlteredTables.push( - ...prepareCreateIndexesJson( - it.name, - it.schema, - createdIndexes || {}, - curFull.internal, - ), - ); - jsonDropIndexesForAllAlteredTables.push( - ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), - ); - }); - - const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = allAltered - .map((it) => { - const forAdded = prepareCreateReferencesJson( - it.name, - it.schema, - it.addedForeignKeys, - ); - - const forAltered = prepareDropReferencesJson( - it.name, - it.schema, - it.deletedForeignKeys, - ); - - const alteredFKs = prepareAlterReferencesJson( - it.name, - it.schema, - it.alteredForeignKeys, - ); - - return [...forAdded, ...forAltered, ...alteredFKs]; - }) - .flat(); - - const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( - (t) => t.type === 'create_reference', - ); - const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( - (t) => t.type === 'delete_reference', - ); - - const createViews: JsonCreateSqliteViewStatement[] = []; - const dropViews: JsonDropViewStatement[] = []; - - createViews.push( - ...createdViews.filter((it) => !it.isExisting).map((it) => { - return prepareSqliteCreateViewJson( - it.name, - it.definition!, - ); - }), - ); - - dropViews.push( - ...deletedViews.filter((it) => !it.isExisting).map((it) => { - return prepareDropViewJson(it.name); - }), - ); - - dropViews.push( - ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { - return prepareDropViewJson(it.from.name); - }), - ); - createViews.push( - ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { - return prepareSqliteCreateViewJson(it.to.name, it.to.definition!); - }), - ); - - const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); - - for (const alteredView of alteredViews) { - const { definition } = json2.views[alteredView.name]; - - if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { - dropViews.push(prepareDropViewJson(alteredView.name)); - - createViews.push( - prepareSqliteCreateViewJson( - alteredView.name, - definition!, - ), - ); - } - } - - const jsonStatements: JsonStatement[] = []; - jsonStatements.push(...jsonCreateTables); - - jsonStatements.push(...jsonDropTables); - jsonStatements.push(...jsonRenameTables); - jsonStatements.push(...jsonRenameColumnsStatements); - - jsonStatements.push(...jsonDroppedReferencesForAlteredTables); - jsonStatements.push(...jsonDeletedCheckConstraints); - - // Will need to drop indexes before changing any columns in table - // Then should go column alternations and then index creation - jsonStatements.push(...jsonDropIndexesForAllAlteredTables); - - jsonStatements.push(...jsonDeletedCompositePKs); - jsonStatements.push(...jsonTableAlternations); - jsonStatements.push(...jsonAddedCompositePKs); - jsonStatements.push(...jsonAddColumnsStatemets); - - jsonStatements.push(...jsonCreateIndexesForCreatedTables); - jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); - - jsonStatements.push(...jsonCreatedCheckConstraints); - - jsonStatements.push(...jsonCreatedReferencesForAlteredTables); - - jsonStatements.push(...jsonDropColumnsStatemets); - - // jsonStatements.push(...jsonDeletedCompositePKs); - // jsonStatements.push(...jsonAddedCompositePKs); - jsonStatements.push(...jsonAlteredCompositePKs); - - jsonStatements.push(...jsonAlteredUniqueConstraints); - - jsonStatements.push(...dropViews); - jsonStatements.push(...createViews); - - const combinedJsonStatements = sqliteCombineStatements(jsonStatements, json2, action); - const sqlStatements = fromJson(combinedJsonStatements, 'sqlite'); - - const uniqueSqlStatements: string[] = []; - sqlStatements.forEach((ss) => { - if (!uniqueSqlStatements.includes(ss)) { - uniqueSqlStatements.push(ss); - } - }); - - const rTables = renamedTables.map((it) => { - return { from: it.from, to: it.to }; - }); - - const _meta = prepareMigrationMeta([], rTables, rColumns); - - return { - statements: combinedJsonStatements, - sqlStatements: uniqueSqlStatements, - _meta, - }; -}; - -export const applyLibSQLSnapshotsDiff = async ( - json1: SQLiteSchemaSquashed, - json2: SQLiteSchemaSquashed, - tablesResolver: ( - input: ResolverInput
, - ) => Promise>, - columnsResolver: ( - input: ColumnsResolverInput, - ) => Promise>, - viewsResolver: ( - input: ResolverInput, - ) => Promise>, - prevFull: SQLiteSchema, - curFull: SQLiteSchema, - action?: 'push', -): Promise<{ - statements: JsonStatement[]; - sqlStatements: string[]; - _meta: - | { - schemas: {}; - tables: {}; - columns: {}; - } - | undefined; -}> => { - const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); - const { - created: createdTables, - deleted: deletedTables, - renamed: renamedTables, - } = await tablesResolver({ - created: tablesDiff.added, - deleted: tablesDiff.deleted, - }); - - const tablesPatchedSnap1 = copy(json1); - tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { - const { name } = nameChangeFor(it, renamedTables); - it.name = name; - return [name, it]; - }); - - const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); - - const columnRenames = [] as { - table: string; - renames: { from: Column; to: Column }[]; - }[]; - - const columnCreates = [] as { - table: string; - columns: Column[]; - }[]; - - const columnDeletes = [] as { - table: string; - columns: Column[]; - }[]; - - for (let entry of Object.values(res)) { - const { renamed, created, deleted } = await columnsResolver({ - tableName: entry.name, - schema: entry.schema, - deleted: entry.columns.deleted, - created: entry.columns.added, - }); - - if (created.length > 0) { - columnCreates.push({ - table: entry.name, - columns: created, - }); - } - - if (deleted.length > 0) { - columnDeletes.push({ - table: entry.name, - columns: deleted, - }); - } - - if (renamed.length > 0) { - columnRenames.push({ - table: entry.name, - renames: renamed, - }); - } - } - - const columnRenamesDict = columnRenames.reduce( - (acc, it) => { - acc[it.table] = it.renames; - return acc; - }, - {} as Record< - string, - { - from: Named; - to: Named; - }[] - >, - ); - - const columnsPatchedSnap1 = copy(tablesPatchedSnap1); - columnsPatchedSnap1.tables = mapEntries( - columnsPatchedSnap1.tables, - (tableKey, tableValue) => { - const patchedColumns = mapKeys( - tableValue.columns, - (columnKey, column) => { - const rens = columnRenamesDict[tableValue.name] || []; - const newName = columnChangeFor(columnKey, rens); - column.name = newName; - return newName; - }, - ); - - tableValue.columns = patchedColumns; - return [tableKey, tableValue]; - }, - ); - - const viewsDiff = diffSchemasOrTables(json1.views, json2.views); - - const { - created: createdViews, - deleted: deletedViews, - renamed: renamedViews, // renamed or moved - } = await viewsResolver({ - created: viewsDiff.added, - deleted: viewsDiff.deleted, - }); - - const renamesViewDic: Record = {}; - renamedViews.forEach((it) => { - renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; - }); - - const viewsPatchedSnap1 = copy(columnsPatchedSnap1); - viewsPatchedSnap1.views = mapEntries( - viewsPatchedSnap1.views, - (viewKey, viewValue) => { - const rename = renamesViewDic[viewValue.name]; - - if (rename) { - viewValue.name = rename.to; - } - - return [viewKey, viewValue]; - }, - ); - - const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); - - const typedResult = diffResultSchemeSQLite.parse(diffResult); - - // Map array of objects to map - const tablesMap: { - [key: string]: (typeof typedResult.alteredTablesWithColumns)[number]; - } = {}; - - typedResult.alteredTablesWithColumns.forEach((obj) => { - tablesMap[obj.name] = obj; - }); - - const jsonCreateTables = createdTables.map((it) => { - return prepareSQLiteCreateTable(it, action); - }); - - const jsonCreateIndexesForCreatedTables = createdTables - .map((it) => { - return prepareCreateIndexesJson( - it.name, - it.schema, - it.indexes, - curFull.internal, - ); - }) - .flat(); - - const jsonDropTables = deletedTables.map((it) => { - return prepareDropTableJson(it); - }); - - const jsonRenameTables = renamedTables.map((it) => { - return prepareRenameTableJson(it.from, it.to); - }); - - const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames - .map((it) => prepareRenameColumns(it.table, '', it.renames)) - .flat(); - - const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes - .map((it) => _prepareDropColumns(it.table, '', it.columns)) - .flat(); - - const jsonAddColumnsStatemets: JsonSqliteAddColumnStatement[] = columnCreates - .map((it) => { - return _prepareSqliteAddColumns( - it.table, - it.columns, - tablesMap[it.table] && tablesMap[it.table].addedForeignKeys - ? Object.values(tablesMap[it.table].addedForeignKeys) - : [], - ); - }) - .flat(); - - const rColumns = jsonRenameColumnsStatements.map((it) => { - const tableName = it.tableName; - const schema = it.schema; - return { - from: { schema, table: tableName, column: it.oldColumnName }, - to: { schema, table: tableName, column: it.newColumnName }, - }; - }); - - const rTables = renamedTables.map((it) => { - return { from: it.from, to: it.to }; - }); - - const _meta = prepareMigrationMeta([], rTables, rColumns); - - const allAltered = typedResult.alteredTablesWithColumns; - - const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; - const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; - const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; - - const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; - const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; - - allAltered.forEach((it) => { - // This part is needed to make sure that same columns in a table are not triggered for change - // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name - // We double-check that pk with same set of columns are both in added and deleted diffs - let addedColumns: string[] = []; - for (const addedPkName of Object.keys(it.addedCompositePKs)) { - const addedPkColumns = it.addedCompositePKs[addedPkName]; - addedColumns = SQLiteSquasher.unsquashPK(addedPkColumns); - } - - let deletedColumns: string[] = []; - for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { - const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; - deletedColumns = SQLiteSquasher.unsquashPK(deletedPkColumns); - } - - // Don't need to sort, but need to add tests for it - // addedColumns.sort(); - // deletedColumns.sort(); - - const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); - - let addedCompositePKs: JsonCreateCompositePK[] = []; - let deletedCompositePKs: JsonDeleteCompositePK[] = []; - let alteredCompositePKs: JsonAlterCompositePK[] = []; - if (doPerformDeleteAndCreate) { - addedCompositePKs = prepareAddCompositePrimaryKeySqlite( - it.name, - it.addedCompositePKs, - ); - deletedCompositePKs = prepareDeleteCompositePrimaryKeySqlite( - it.name, - it.deletedCompositePKs, - ); - } - alteredCompositePKs = prepareAlterCompositePrimaryKeySqlite( - it.name, - it.alteredCompositePKs, - ); - - // add logic for unique constraints - let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; - let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; - let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; - - let createdCheckConstraints: JsonCreateCheckConstraint[] = []; - let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; - - addedUniqueConstraints = prepareAddUniqueConstraint( - it.name, - it.schema, - it.addedUniqueConstraints, - ); - - deletedUniqueConstraints = prepareDeleteUniqueConstraint( - it.name, - it.schema, - it.deletedUniqueConstraints, - ); - if (it.alteredUniqueConstraints) { - const added: Record = {}; - const deleted: Record = {}; - for (const k of Object.keys(it.alteredUniqueConstraints)) { - added[k] = it.alteredUniqueConstraints[k].__new; - deleted[k] = it.alteredUniqueConstraints[k].__old; - } - addedUniqueConstraints.push( - ...prepareAddUniqueConstraint(it.name, it.schema, added), - ); - deletedUniqueConstraints.push( - ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), - ); - } - - createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); - deletedCheckConstraints = prepareDeleteCheckConstraint( - it.name, - it.schema, - it.deletedCheckConstraints, - ); - - // skip for push - if (it.alteredCheckConstraints && action !== 'push') { - const added: Record = {}; - const deleted: Record = {}; - - for (const k of Object.keys(it.alteredCheckConstraints)) { - added[k] = it.alteredCheckConstraints[k].__new; - deleted[k] = it.alteredCheckConstraints[k].__old; - } - createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); - deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); - } - - jsonAddedCompositePKs.push(...addedCompositePKs); - jsonDeletedCompositePKs.push(...deletedCompositePKs); - jsonAlteredCompositePKs.push(...alteredCompositePKs); - - jsonAddedUniqueConstraints.push(...addedUniqueConstraints); - jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); - jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); - - jsonCreatedCheckConstraints.push(...createdCheckConstraints); - jsonDeletedCheckConstraints.push(...deletedCheckConstraints); - }); - - const jsonTableAlternations = allAltered - .map((it) => { - return prepareSqliteAlterColumns(it.name, it.schema, it.altered, json2); - }) - .flat(); - - const jsonCreateIndexesForAllAlteredTables = allAltered - .map((it) => { - return prepareCreateIndexesJson( - it.name, - it.schema, - it.addedIndexes || {}, - curFull.internal, - ); - }) - .flat(); - - const jsonDropIndexesForAllAlteredTables = allAltered - .map((it) => { - return prepareDropIndexesJson( - it.name, - it.schema, - it.deletedIndexes || {}, - ); - }) - .flat(); - - allAltered.forEach((it) => { - const droppedIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__old; - return current; - }, - {} as Record, - ); - const createdIndexes = Object.keys(it.alteredIndexes).reduce( - (current, item: string) => { - current[item] = it.alteredIndexes[item].__new; - return current; - }, - {} as Record, - ); - - jsonCreateIndexesForAllAlteredTables.push( - ...prepareCreateIndexesJson( - it.name, - it.schema, - createdIndexes || {}, - curFull.internal, - ), - ); - jsonDropIndexesForAllAlteredTables.push( - ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), - ); - }); - - const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = allAltered - .map((it) => { - const forAdded = prepareLibSQLCreateReferencesJson( - it.name, - it.schema, - it.addedForeignKeys, - json2, - action, - ); - - const forAltered = prepareLibSQLDropReferencesJson( - it.name, - it.schema, - it.deletedForeignKeys, - json2, - _meta, - action, - ); - - const alteredFKs = prepareAlterReferencesJson(it.name, it.schema, it.alteredForeignKeys); - - return [...forAdded, ...forAltered, ...alteredFKs]; - }) - .flat(); - - const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( - (t) => t.type === 'create_reference', - ); - const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( - (t) => t.type === 'delete_reference', - ); - - const createViews: JsonCreateSqliteViewStatement[] = []; - const dropViews: JsonDropViewStatement[] = []; - - createViews.push( - ...createdViews.filter((it) => !it.isExisting).map((it) => { - return prepareSqliteCreateViewJson( - it.name, - it.definition!, - ); - }), - ); - - dropViews.push( - ...deletedViews.filter((it) => !it.isExisting).map((it) => { - return prepareDropViewJson(it.name); - }), - ); - - // renames - dropViews.push( - ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { - return prepareDropViewJson(it.from.name); - }), - ); - createViews.push( - ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { - return prepareSqliteCreateViewJson(it.to.name, it.to.definition!); - }), - ); - - const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); - - for (const alteredView of alteredViews) { - const { definition } = json2.views[alteredView.name]; - - if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { - dropViews.push(prepareDropViewJson(alteredView.name)); - - createViews.push( - prepareSqliteCreateViewJson( - alteredView.name, - definition!, - ), - ); - } - } - - const jsonStatements: JsonStatement[] = []; - jsonStatements.push(...jsonCreateTables); - - jsonStatements.push(...jsonDropTables); - jsonStatements.push(...jsonRenameTables); - jsonStatements.push(...jsonRenameColumnsStatements); - - jsonStatements.push(...jsonDroppedReferencesForAlteredTables); - - jsonStatements.push(...jsonDeletedCheckConstraints); - - // Will need to drop indexes before changing any columns in table - // Then should go column alternations and then index creation - jsonStatements.push(...jsonDropIndexesForAllAlteredTables); - - jsonStatements.push(...jsonDeletedCompositePKs); - jsonStatements.push(...jsonTableAlternations); - jsonStatements.push(...jsonAddedCompositePKs); - jsonStatements.push(...jsonAddColumnsStatemets); - - jsonStatements.push(...jsonCreateIndexesForCreatedTables); - jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); - jsonStatements.push(...jsonCreatedCheckConstraints); - - jsonStatements.push(...dropViews); - jsonStatements.push(...createViews); - - jsonStatements.push(...jsonCreatedReferencesForAlteredTables); - - jsonStatements.push(...jsonDropColumnsStatemets); - - jsonStatements.push(...jsonAlteredCompositePKs); - - jsonStatements.push(...jsonAlteredUniqueConstraints); - - const combinedJsonStatements = libSQLCombineStatements(jsonStatements, json2, action); - - const sqlStatements = fromJson( - combinedJsonStatements, - 'turso', - action, - json2, - ); - - const uniqueSqlStatements: string[] = []; - sqlStatements.forEach((ss) => { - if (!uniqueSqlStatements.includes(ss)) { - uniqueSqlStatements.push(ss); - } - }); - - return { - statements: combinedJsonStatements, - sqlStatements: uniqueSqlStatements, - _meta, - }; -}; - -// explicitely ask if tables were renamed, if yes - add those to altered tables, otherwise - deleted -// double check if user wants to delete particular table and warn him on data loss diff --git a/drizzle-kit/src/statementCombiner.ts b/drizzle-kit/src/statementCombiner.ts deleted file mode 100644 index 7d84a2aa84..0000000000 --- a/drizzle-kit/src/statementCombiner.ts +++ /dev/null @@ -1,597 +0,0 @@ -import { - JsonCreateIndexStatement, - JsonRecreateTableStatement, - JsonStatement, - prepareCreateIndexesJson, -} from './jsonStatements'; -import { SingleStoreSchemaSquashed } from './serializer/singlestoreSchema'; -import { SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema'; - -export const prepareLibSQLRecreateTable = ( - table: SQLiteSchemaSquashed['tables'][keyof SQLiteSchemaSquashed['tables']], - action?: 'push', -): (JsonRecreateTableStatement | JsonCreateIndexStatement)[] => { - const { name, columns, uniqueConstraints, indexes, checkConstraints } = table; - - const composites: string[][] = Object.values(table.compositePrimaryKeys).map( - (it) => SQLiteSquasher.unsquashPK(it), - ); - - const references: string[] = Object.values(table.foreignKeys); - const fks = references.map((it) => - action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it) - ); - - const statements: (JsonRecreateTableStatement | JsonCreateIndexStatement)[] = [ - { - type: 'recreate_table', - tableName: name, - columns: Object.values(columns), - compositePKs: composites, - referenceData: fks, - uniqueConstraints: Object.values(uniqueConstraints), - checkConstraints: Object.values(checkConstraints), - }, - ]; - - if (Object.keys(indexes).length) { - statements.push(...prepareCreateIndexesJson(name, '', indexes)); - } - return statements; -}; - -export const prepareSQLiteRecreateTable = ( - table: SQLiteSchemaSquashed['tables'][keyof SQLiteSchemaSquashed['tables']], - action?: 'push', -): JsonStatement[] => { - const { name, columns, uniqueConstraints, indexes, checkConstraints } = table; - - const composites: string[][] = Object.values(table.compositePrimaryKeys).map( - (it) => SQLiteSquasher.unsquashPK(it), - ); - - const references: string[] = Object.values(table.foreignKeys); - const fks = references.map((it) => - action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it) - ); - - const statements: JsonStatement[] = [ - { - type: 'recreate_table', - tableName: name, - columns: Object.values(columns), - compositePKs: composites, - referenceData: fks, - uniqueConstraints: Object.values(uniqueConstraints), - checkConstraints: Object.values(checkConstraints), - }, - ]; - - if (Object.keys(indexes).length) { - statements.push(...prepareCreateIndexesJson(name, '', indexes)); - } - return statements; -}; - -export const libSQLCombineStatements = ( - statements: JsonStatement[], - json2: SQLiteSchemaSquashed, - action?: 'push', -) => { - // const tablesContext: Record = {}; - const newStatements: Record = {}; - for (const statement of statements) { - if ( - statement.type === 'alter_table_alter_column_drop_autoincrement' - || statement.type === 'alter_table_alter_column_set_autoincrement' - || statement.type === 'alter_table_alter_column_drop_pk' - || statement.type === 'alter_table_alter_column_set_pk' - || statement.type === 'create_composite_pk' - || statement.type === 'alter_composite_pk' - || statement.type === 'delete_composite_pk' - || statement.type === 'create_check_constraint' - || statement.type === 'delete_check_constraint' - ) { - const tableName = statement.tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); - - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - if ( - statement.type === 'alter_table_alter_column_set_type' - || statement.type === 'alter_table_alter_column_drop_notnull' - || statement.type === 'alter_table_alter_column_set_notnull' - || statement.type === 'alter_table_alter_column_set_default' - || statement.type === 'alter_table_alter_column_drop_default' - ) { - const { tableName, columnName, columnPk } = statement; - - const columnIsPartOfForeignKey = Object.values( - json2.tables[tableName].foreignKeys, - ).some((it) => { - const unsquashFk = action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it); - - return ( - unsquashFk.columnsFrom.includes(columnName) - ); - }); - - const statementsForTable = newStatements[tableName]; - - if ( - !statementsForTable && (columnIsPartOfForeignKey || columnPk) - ) { - newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); - continue; - } - - if ( - statementsForTable && (columnIsPartOfForeignKey || columnPk) - ) { - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - } - continue; - } - if ( - statementsForTable && !(columnIsPartOfForeignKey || columnPk) - ) { - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - newStatements[tableName].push(statement); - } - continue; - } - - newStatements[tableName] = [statement]; - - continue; - } - - if (statement.type === 'create_reference') { - const tableName = statement.tableName; - - const data = action === 'push' - ? SQLiteSquasher.unsquashPushFK(statement.data) - : SQLiteSquasher.unsquashFK(statement.data); - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = statement.isMulticolumn - ? prepareLibSQLRecreateTable(json2.tables[tableName], action) - : [statement]; - - continue; - } - - // if add column with reference -> skip create_reference statement - if ( - !statement.isMulticolumn - && statementsForTable.some((st) => - st.type === 'sqlite_alter_table_add_column' && st.column.name === data.columnsFrom[0] - ) - ) { - continue; - } - - if (statement.isMulticolumn) { - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - newStatements[tableName].push(statement); - } - - continue; - } - - if (statement.type === 'delete_reference') { - const tableName = statement.tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - if (statement.type === 'sqlite_alter_table_add_column' && statement.column.primaryKey) { - const tableName = statement.tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - const tableName = statement.type === 'rename_table' - ? statement.tableNameTo - : (statement as { tableName: string }).tableName; - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = [statement]; - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - newStatements[tableName].push(statement); - } - } - - const combinedStatements = Object.values(newStatements).flat(); - const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table'); - const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column'); - - const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column'); - - return [...renamedTables, ...renamedColumns, ...rest]; -}; - -export const sqliteCombineStatements = ( - statements: JsonStatement[], - json2: SQLiteSchemaSquashed, - action?: 'push', -) => { - // const tablesContext: Record = {}; - const newStatements: Record = {}; - for (const statement of statements) { - if ( - statement.type === 'alter_table_alter_column_set_type' - || statement.type === 'alter_table_alter_column_set_default' - || statement.type === 'alter_table_alter_column_drop_default' - || statement.type === 'alter_table_alter_column_set_notnull' - || statement.type === 'alter_table_alter_column_drop_notnull' - || statement.type === 'alter_table_alter_column_drop_autoincrement' - || statement.type === 'alter_table_alter_column_set_autoincrement' - || statement.type === 'alter_table_alter_column_drop_pk' - || statement.type === 'alter_table_alter_column_set_pk' - || statement.type === 'delete_reference' - || statement.type === 'alter_reference' - || statement.type === 'create_composite_pk' - || statement.type === 'alter_composite_pk' - || statement.type === 'delete_composite_pk' - || statement.type === 'create_unique_constraint' - || statement.type === 'delete_unique_constraint' - || statement.type === 'create_check_constraint' - || statement.type === 'delete_check_constraint' - ) { - const tableName = statement.tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action); - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - if (statement.type === 'sqlite_alter_table_add_column' && statement.column.primaryKey) { - const tableName = statement.tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action); - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - if (statement.type === 'create_reference') { - const tableName = statement.tableName; - - const data = action === 'push' - ? SQLiteSquasher.unsquashPushFK(statement.data) - : SQLiteSquasher.unsquashFK(statement.data); - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action); - continue; - } - - // if add column with reference -> skip create_reference statement - if ( - data.columnsFrom.length === 1 - && statementsForTable.some((st) => - st.type === 'sqlite_alter_table_add_column' && st.column.name === data.columnsFrom[0] - ) - ) { - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - const tableName = statement.type === 'rename_table' - ? statement.tableNameTo - : (statement as { tableName: string }).tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = [statement]; - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - newStatements[tableName].push(statement); - } - } - - const combinedStatements = Object.values(newStatements).flat(); - - const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table'); - const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column'); - - const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column'); - - return [...renamedTables, ...renamedColumns, ...rest]; -}; - -export const prepareSingleStoreRecreateTable = ( - table: SingleStoreSchemaSquashed['tables'][keyof SingleStoreSchemaSquashed['tables']], -): JsonStatement[] => { - const { name, columns, uniqueConstraints, indexes, compositePrimaryKeys } = table; - - const composites: string[] = Object.values(compositePrimaryKeys); - - const statements: JsonStatement[] = [ - { - type: 'singlestore_recreate_table', - tableName: name, - columns: Object.values(columns), - compositePKs: composites, - uniqueConstraints: Object.values(uniqueConstraints), - }, - ]; - - if (Object.keys(indexes).length) { - statements.push(...prepareCreateIndexesJson(name, '', indexes)); - } - return statements; -}; - -export const singleStoreCombineStatements = ( - statements: JsonStatement[], - json2: SingleStoreSchemaSquashed, -) => { - const newStatements: Record = {}; - - for (const statement of statements) { - if ( - statement.type === 'alter_table_alter_column_set_type' - || statement.type === 'alter_table_alter_column_set_notnull' - || statement.type === 'alter_table_alter_column_drop_notnull' - || statement.type === 'alter_table_alter_column_drop_autoincrement' - || statement.type === 'alter_table_alter_column_set_autoincrement' - || statement.type === 'alter_table_alter_column_drop_pk' - || statement.type === 'alter_table_alter_column_set_pk' - || statement.type === 'create_composite_pk' - || statement.type === 'alter_composite_pk' - || statement.type === 'delete_composite_pk' - ) { - const tableName = statement.tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => - type === 'rename_table' || type === 'alter_table_rename_column' - ); - const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - if ( - (statement.type === 'alter_table_alter_column_drop_default' - || statement.type === 'alter_table_alter_column_set_default') && statement.columnNotNull - ) { - const tableName = statement.tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - if (statement.type === 'alter_table_add_column' && statement.column.primaryKey) { - const tableName = statement.tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { - const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); - const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); - - if (wasRename) { - newStatements[tableName].push(...preparedStatements); - } else { - newStatements[tableName] = preparedStatements; - } - - continue; - } - - continue; - } - - const tableName = statement.type === 'rename_table' - ? statement.tableNameTo - : (statement as { tableName: string }).tableName; - - const statementsForTable = newStatements[tableName]; - - if (!statementsForTable) { - newStatements[tableName] = [statement]; - continue; - } - - if (!statementsForTable.some(({ type }) => type === 'singlestore_recreate_table')) { - newStatements[tableName].push(statement); - } - } - - const combinedStatements = Object.values(newStatements).flat(); - - const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table'); - const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column'); - - const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column'); - - return [...renamedTables, ...renamedColumns, ...rest]; -}; diff --git a/drizzle-kit/src/utils.ts b/drizzle-kit/src/utils.ts deleted file mode 100644 index ce34341399..0000000000 --- a/drizzle-kit/src/utils.ts +++ /dev/null @@ -1,374 +0,0 @@ -import type { RunResult } from 'better-sqlite3'; -import chalk from 'chalk'; -import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'fs'; -import { join } from 'path'; -import { parse } from 'url'; -import type { NamedWithSchema } from './cli/commands/migrate'; -import { info } from './cli/views'; -import { assertUnreachable, snapshotVersion } from './global'; -import type { Dialect } from './schemaValidator'; -import { backwardCompatibleGelSchema } from './serializer/gelSchema'; -import { backwardCompatibleMysqlSchema } from './serializer/mysqlSchema'; -import { backwardCompatiblePgSchema } from './serializer/pgSchema'; -import { backwardCompatibleSingleStoreSchema } from './serializer/singlestoreSchema'; -import { backwardCompatibleSqliteSchema } from './serializer/sqliteSchema'; -import type { ProxyParams } from './serializer/studio'; - -export type Proxy = (params: ProxyParams) => Promise; - -export type TransactionProxy = (queries: { sql: string; method?: ProxyParams['method'] }[]) => Promise; - -export type DB = { - query: (sql: string, params?: any[]) => Promise; -}; - -export type SQLiteDB = { - query: (sql: string, params?: any[]) => Promise; - run(query: string): Promise; -}; - -export type LibSQLDB = { - query: (sql: string, params?: any[]) => Promise; - run(query: string): Promise; - batchWithPragma?(queries: string[]): Promise; -}; - -export const copy = (it: T): T => { - return JSON.parse(JSON.stringify(it)); -}; - -export const objectValues = (obj: T): Array => { - return Object.values(obj); -}; - -export const assertV1OutFolder = (out: string) => { - if (!existsSync(out)) return; - - const oldMigrationFolders = readdirSync(out).filter( - (it) => it.length === 14 && /^\d+$/.test(it), - ); - - if (oldMigrationFolders.length > 0) { - console.log( - `Your migrations folder format is outdated, please run ${ - chalk.green.bold( - `drizzle-kit up`, - ) - }`, - ); - process.exit(1); - } -}; - -export type Journal = { - version: string; - dialect: Dialect; - entries: { - idx: number; - version: string; - when: number; - tag: string; - breakpoints: boolean; - }[]; -}; - -export const dryJournal = (dialect: Dialect): Journal => { - return { - version: snapshotVersion, - dialect, - entries: [], - }; -}; - -// export const preparePushFolder = (dialect: Dialect) => { -// const out = ".drizzle"; -// let snapshot: string = ""; -// if (!existsSync(join(out))) { -// mkdirSync(out); -// snapshot = JSON.stringify(dryJournal(dialect)); -// } else { -// snapshot = readdirSync(out)[0]; -// } - -// return { snapshot }; -// }; - -export const prepareOutFolder = (out: string, dialect: Dialect) => { - const meta = join(out, 'meta'); - const journalPath = join(meta, '_journal.json'); - - if (!existsSync(join(out, 'meta'))) { - mkdirSync(meta, { recursive: true }); - writeFileSync(journalPath, JSON.stringify(dryJournal(dialect))); - } - - const journal = JSON.parse(readFileSync(journalPath).toString()); - - const snapshots = readdirSync(meta) - .filter((it) => !it.startsWith('_')) - .map((it) => join(meta, it)); - - snapshots.sort(); - return { meta, snapshots, journal }; -}; - -const validatorForDialect = (dialect: Dialect) => { - switch (dialect) { - case 'postgresql': - return { validator: backwardCompatiblePgSchema, version: 7 }; - case 'sqlite': - return { validator: backwardCompatibleSqliteSchema, version: 6 }; - case 'turso': - return { validator: backwardCompatibleSqliteSchema, version: 6 }; - case 'mysql': - return { validator: backwardCompatibleMysqlSchema, version: 5 }; - case 'singlestore': - return { validator: backwardCompatibleSingleStoreSchema, version: 1 }; - case 'gel': - return { validator: backwardCompatibleGelSchema, version: 1 }; - } -}; - -export const validateWithReport = (snapshots: string[], dialect: Dialect) => { - // ✅ check if drizzle-kit can handle snapshot version - // ✅ check if snapshot is of the last version - // ✅ check if id of the snapshot is valid - // ✅ collect {} of prev id -> snapshotName[], if there's more than one - tell about collision - const { validator, version } = validatorForDialect(dialect); - - const result = snapshots.reduce( - (accum, it) => { - const raw = JSON.parse(readFileSync(`./${it}`).toString()); - - accum.rawMap[it] = raw; - - if (raw['version'] && Number(raw['version']) > version) { - console.log( - info( - `${it} snapshot is of unsupported version, please update drizzle-kit`, - ), - ); - process.exit(0); - } - - const result = validator.safeParse(raw); - if (!result.success) { - accum.malformed.push(it); - return accum; - } - - const snapshot = result.data; - if (snapshot.version !== String(version)) { - accum.nonLatest.push(it); - return accum; - } - - // only if latest version here - const idEntry = accum.idsMap[snapshot['prevId']] ?? { - parent: it, - snapshots: [], - }; - idEntry.snapshots.push(it); - accum.idsMap[snapshot['prevId']] = idEntry; - - return accum; - }, - { - malformed: [], - nonLatest: [], - idToNameMap: {}, - idsMap: {}, - rawMap: {}, - } as { - malformed: string[]; - nonLatest: string[]; - idsMap: Record; - rawMap: Record; - }, - ); - - return result; -}; - -export const prepareMigrationFolder = ( - outFolder: string = 'drizzle', - dialect: Dialect, -) => { - const { snapshots, journal } = prepareOutFolder(outFolder, dialect); - const report = validateWithReport(snapshots, dialect); - if (report.nonLatest.length > 0) { - console.log( - report.nonLatest - .map((it) => { - return `${it}/snapshot.json is not of the latest version`; - }) - .concat(`Run ${chalk.green.bold(`drizzle-kit up`)}`) - .join('\n'), - ); - process.exit(0); - } - - if (report.malformed.length) { - const message = report.malformed - .map((it) => { - return `${it} data is malformed`; - }) - .join('\n'); - console.log(message); - } - - const collisionEntries = Object.entries(report.idsMap).filter( - (it) => it[1].snapshots.length > 1, - ); - - const message = collisionEntries - .map((it) => { - const data = it[1]; - return `[${ - data.snapshots.join( - ', ', - ) - }] are pointing to a parent snapshot: ${data.parent}/snapshot.json which is a collision.`; - }) - .join('\n') - .trim(); - if (message) { - console.log(chalk.red.bold('Error:'), message); - } - - const abort = report.malformed.length!! || collisionEntries.length > 0; - - if (abort) { - process.exit(0); - } - - return { snapshots, journal }; -}; - -export const prepareMigrationMeta = ( - schemas: { from: string; to: string }[], - tables: { from: NamedWithSchema; to: NamedWithSchema }[], - columns: { - from: { table: string; schema: string; column: string }; - to: { table: string; schema: string; column: string }; - }[], -) => { - const _meta = { - schemas: {} as Record, - tables: {} as Record, - columns: {} as Record, - }; - - schemas.forEach((it) => { - const from = schemaRenameKey(it.from); - const to = schemaRenameKey(it.to); - _meta.schemas[from] = to; - }); - tables.forEach((it) => { - const from = tableRenameKey(it.from); - const to = tableRenameKey(it.to); - _meta.tables[from] = to; - }); - - columns.forEach((it) => { - const from = columnRenameKey(it.from.table, it.from.schema, it.from.column); - const to = columnRenameKey(it.to.table, it.to.schema, it.to.column); - _meta.columns[from] = to; - }); - - return _meta; -}; - -export const schemaRenameKey = (it: string) => { - return it; -}; - -export const tableRenameKey = (it: NamedWithSchema) => { - const out = it.schema ? `"${it.schema}"."${it.name}"` : `"${it.name}"`; - return out; -}; - -export const columnRenameKey = ( - table: string, - schema: string, - column: string, -) => { - const out = schema - ? `"${schema}"."${table}"."${column}"` - : `"${table}"."${column}"`; - return out; -}; - -export const kloudMeta = () => { - return { - pg: [5], - mysql: [] as number[], - sqlite: [] as number[], - }; -}; - -export const normaliseSQLiteUrl = ( - it: string, - type: 'libsql' | 'better-sqlite' | '@tursodatabase/database' | 'bun', -) => { - if (type === 'libsql') { - if (it.startsWith('file:')) { - return it; - } - try { - const url = parse(it); - if (url.protocol === null) { - return `file:${it}`; - } - return it; - } catch (e) { - return `file:${it}`; - } - } - - if (type === 'better-sqlite' || type === '@tursodatabase/database' || type === 'bun') { - if (it.startsWith('file:')) { - return it.substring(5); - } - - return it; - } - - assertUnreachable(type); -}; - -export const normalisePGliteUrl = ( - it: string, -) => { - if (it.startsWith('file:')) { - return it.substring(5); - } - - return it; -}; - -export function isPgArrayType(sqlType: string) { - return sqlType.match(/.*\[\d*\].*|.*\[\].*/g) !== null; -} - -export function findAddedAndRemoved(columnNames1: string[], columnNames2: string[]) { - const set1 = new Set(columnNames1); - const set2 = new Set(columnNames2); - - const addedColumns = columnNames2.filter((it) => !set1.has(it)); - const removedColumns = columnNames1.filter((it) => !set2.has(it)); - - return { addedColumns, removedColumns }; -} - -export function escapeSingleQuotes(str: string) { - return str.replace(/'/g, "''"); -} - -export function unescapeSingleQuotes(str: string, ignoreFirstAndLastChar: boolean) { - if (str === "''") { - return str; - } - const regex = ignoreFirstAndLastChar ? /(? { try { // check if the files exist await Promise.all([access(keyPath), access(certPath)]); - } catch (e) { + } catch { // if not create them await runCommand(`mkcert localhost`, { cwd: p.data }); } diff --git a/drizzle-kit/src/utils/commutativity.ts b/drizzle-kit/src/utils/commutativity.ts new file mode 100644 index 0000000000..78eb672d60 --- /dev/null +++ b/drizzle-kit/src/utils/commutativity.ts @@ -0,0 +1,19 @@ +import type { Dialect } from './schemaValidator'; + +export const detectNonCommutative = async ( + snapshots: string[], + dialect: Dialect, +) => { + if (dialect === 'postgresql') { + const { detectNonCommutative } = await import('../dialects/postgres/commutativity'); + return detectNonCommutative(snapshots); + } else if (dialect === 'mysql') { + const { detectNonCommutative } = await import('../dialects/mysql/commutativity'); + return detectNonCommutative(snapshots); + } else { + // assertUnreachable(dialect); + } + + // temp + return {} as any; +}; diff --git a/drizzle-kit/src/utils/index.ts b/drizzle-kit/src/utils/index.ts new file mode 100644 index 0000000000..ed6ee4b55f --- /dev/null +++ b/drizzle-kit/src/utils/index.ts @@ -0,0 +1,322 @@ +import type { ProxyParams } from '../cli/commands/studio'; +import type { Dialect } from './schemaValidator'; + +export const originUUID = '00000000-0000-0000-0000-000000000000'; +export const BREAKPOINT = '--> statement-breakpoint\n'; + +export function assertUnreachable(_x: never | undefined): never { + throw new Error("Didn't expect to get here"); +} + +// don't fail in runtime, types only +export function softAssertUnreachable(_x: never) { + return null as never; +} + +export const mapEntries = ( + obj: Record, + map: (key: string, value: T) => [string, T], +): Record => { + const result = Object.fromEntries( + Object.entries(obj).map(([key, val]) => { + const [newKey, newVal] = map(key, val); + return [newKey, newVal]; + }), + ); + return result; +}; + +export type Proxy = (params: ProxyParams) => Promise; +export type TransactionProxy = (queries: { sql: string; method?: ProxyParams['method'] }[]) => Promise; + +export type DB = { + query: (sql: string, params?: any[]) => Promise; +}; + +export type SQLiteDB = { + query: (sql: string, params?: any[]) => Promise; + run(query: string): Promise; +}; + +export type LibSQLDB = { + query: (sql: string, params?: any[]) => Promise; + run(query: string): Promise; + batchWithPragma?(queries: string[]): Promise; +}; + +export type Simplify = + & { + [K in keyof T]: T[K]; + } + & {}; + +export type Journal = { + version: string; + dialect: Dialect; + entries: { + idx: number; + version: string; + when: number; + tag: string; + breakpoints: boolean; + }[]; +}; + +export const kloudMeta = () => { + return { + pg: [5], + mysql: [] as number[], + sqlite: [] as number[], + }; +}; + +export function escapeSingleQuotes(str: string) { + return str.replace(/'/g, "''"); +} + +export function unescapeSingleQuotes(str: string, ignoreFirstAndLastChar: boolean) { + const regex = ignoreFirstAndLastChar ? /(? { + return renames.map((it) => { + const schema1 = it.from.schema ? `${it.from.schema}.` : ''; + const schema2 = it.to.schema ? `${it.to.schema}.` : ''; + + const table1 = it.from.table ? `${it.from.table}.` : ''; + const table2 = it.to.table ? `${it.to.table}.` : ''; + + return `${schema1}${table1}${it.from.name}->${schema2}${table2}${it.to.name}`; + }); +}; + +export type ArrayValue = unknown | null | ArrayValue[]; + +export function stringifyArray( + value: ArrayValue, + mode: 'sql' | 'ts' | 'geometry-sql', + mapCallback: (v: any | null, depth: number) => string, + depth: number = 0, +): string { + if (!Array.isArray(value)) return mapCallback(value, depth); + depth += 1; + const res = value.map((e) => { + if (Array.isArray(e)) return stringifyArray(e, mode, mapCallback, depth); + return mapCallback(e, depth); + }).join(','); + return mode === 'ts' ? `[${res}]` : mode === 'geometry-sql' ? `ARRAY[${res}]` : `{${res}}`; +} + +export function stringifyTuplesArray( + array: ArrayValue[], + mode: 'sql' | 'ts' | 'geometry-sql', + mapCallback: (v: ArrayValue, depth: number) => string, + depth: number = 0, +): string { + if (!array.find((n) => Array.isArray(n))) return mapCallback(array, depth); + + depth += 1; + const res = array.map((e) => { + if (Array.isArray(e) && e.find((n) => Array.isArray(n))) { + return stringifyTuplesArray(e, mode, mapCallback, depth); + } + return mapCallback(e, depth); + }).join(','); + return mode === 'ts' ? `[${res}]` : mode === 'geometry-sql' ? `ARRAY[${res}]` : `{${res}}`; +} + +export const trimChar = (str: string, char: string | [string, string]) => { + if (str.length < 2) return str; + if (typeof char === 'string' && str.startsWith(char) && str.endsWith(char)) return str.substring(1, str.length - 1); + if (Array.isArray(char) && str.startsWith(char[0]) && str.endsWith(char[1])) return str.substring(1, str.length - 1); + + return str; +}; + +export const splitExpressions = (input: string | null): string[] => { + if (!input) return []; + + const expressions: string[] = []; + let parenDepth = 0; + let inSingleQuotes = false; + let inDoubleQuotes = false; + let currentExpressionStart = 0; + + for (let i = 0; i < input.length; i++) { + const char = input[i]; + + if (char === "'" && input[i + 1] === "'") { + i++; + continue; + } + + if (char === '"' && input[i + 1] === '"') { + i++; + continue; + } + + if (char === "'") { + if (!inDoubleQuotes) { + inSingleQuotes = !inSingleQuotes; + } + continue; + } + if (char === '"') { + if (!inSingleQuotes) { + inDoubleQuotes = !inDoubleQuotes; + } + continue; + } + + if (!inSingleQuotes && !inDoubleQuotes) { + if (char === '(') { + parenDepth++; + } else if (char === ')') { + parenDepth = Math.max(0, parenDepth - 1); + } else if (char === ',' && parenDepth === 0) { + expressions.push(input.substring(currentExpressionStart, i).trim()); + currentExpressionStart = i + 1; + } + } + } + + if (currentExpressionStart < input.length) { + expressions.push(input.substring(currentExpressionStart).trim()); + } + + return expressions.filter((s) => s.length > 0); +}; + +export const wrapWith = (it: string, char: string) => { + if (!it.startsWith(char) || !it.endsWith(char)) return `${char}${it}${char}`; + return it; +}; + +export const timeTzRegex = /\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}(?::?\d{2})?)?/; +export const isTime = (it: string) => { + return timeTzRegex.test(it); +}; + +export const dateExtractRegex = /^\d{4}-\d{2}-\d{2}/; +export const isDate = (it: string) => { + return dateExtractRegex.test(it); +}; + +const timestampRegexp = + /^(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}(?::?\d{2})?)?|\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}(?::?\d{2})?)?)$/; +export const isTimestamp = (it: string) => { + return timestampRegexp.test(it); +}; + +export const timezoneSuffixRegexp = /([+-]\d{2}(:\d{2})?|Z)$/i; +export function hasTimeZoneSuffix(s: string): boolean { + return timezoneSuffixRegexp.test(s); +} + +export const possibleIntervals = [ + 'year', + 'month', + 'day', + 'hour', + 'minute', + 'second', + 'year to month', + 'day to hour', + 'day to minute', + 'day to second', + 'hour to minute', + 'hour to second', + 'minute to second', +]; +export function parseIntervalFields(type: string): { fields?: typeof possibleIntervals[number]; precision?: number } { + const options: { precision?: number; fields?: typeof possibleIntervals[number] } = {}; + // incoming: interval day to second(3) + + // [interval, day, to, second(3)] + const splitted = type.split(' '); + if (splitted.length === 1) { + return options; + } + + // [day, to, second(3)] + // day to second(3) + const rest = splitted.slice(1, splitted.length).join(' '); + if (possibleIntervals.includes(rest)) return { ...options, fields: rest }; + + // day to second(3) + for (const s of possibleIntervals) { + if (rest.startsWith(`${s}(`)) return { ...options, fields: s }; + } + + return options; +} + +export function parseEWKB(hex: string): { srid: number | undefined; point: [number, number] } { + const hexToBytes = (hex: string): Uint8Array => { + const bytes: number[] = []; + for (let c = 0; c < hex.length; c += 2) { + bytes.push(Number.parseInt(hex.slice(c, c + 2), 16)); + } + return new Uint8Array(bytes); + }; + const bytesToFloat64 = (bytes: Uint8Array, offset: number): number => { + const buffer = new ArrayBuffer(8); + const view = new DataView(buffer); + for (let i = 0; i < 8; i++) { + view.setUint8(i, bytes[offset + i]!); + } + return view.getFloat64(0, true); + }; + + const bytes = hexToBytes(hex); + + let offset = 0; + + // Byte order: 1 is little-endian, 0 is big-endian + const byteOrder = bytes[offset]; + offset += 1; + + const view = new DataView(bytes.buffer); + const geomType = view.getUint32(offset, byteOrder === 1); + offset += 4; + + let srid: number | undefined; + if (geomType & 0x20000000) { // SRID flag + srid = view.getUint32(offset, byteOrder === 1); + offset += 4; + } + + if ((geomType & 0xFFFF) === 1) { + const x = bytesToFloat64(bytes, offset); + offset += 8; + const y = bytesToFloat64(bytes, offset); + offset += 8; + + return { srid, point: [x, y] }; + } + + throw new Error('Unsupported geometry type'); +} + +export function areStringArraysEqual(arr1: string[], arr2: string[]) { + if (arr1.length !== arr2.length) { + return false; + } + + const sorted1 = [...arr1].sort(); + const sorted2 = [...arr2].sort(); + + for (let i = 0; i < sorted1.length; i++) { + if (sorted1[i] !== sorted2[i]) { + return false; + } + } + + return true; +} diff --git a/drizzle-kit/src/utils/mocks.ts b/drizzle-kit/src/utils/mocks.ts new file mode 100644 index 0000000000..b7fc42d0e1 --- /dev/null +++ b/drizzle-kit/src/utils/mocks.ts @@ -0,0 +1,50 @@ +export const mockResolver = + (renames: Set) => + async (it: { + created: T[]; + deleted: T[]; + }): Promise<{ created: T[]; deleted: T[]; renamedOrMoved: { from: T; to: T }[] }> => { + const { created, deleted } = it; + + if (created.length === 0 || deleted.length === 0 || renames.size === 0) { + return { created, deleted, renamedOrMoved: [] }; + } + + let createdItems = [...created]; + let deletedItems = [...deleted]; + + const renamedOrMoved: { from: T; to: T }[] = []; + for (let rename of renames) { + const [from, to] = rename.split('->'); + + const idxFrom = deletedItems.findIndex((it) => { + const schema = it.schema ? `${it.schema}.` : ''; + const table = it.table ? `${it.table}.` : ''; + const key = `${schema}${table}${it.name}`; + return key === from; + }); + + if (idxFrom >= 0) { + const idxTo = createdItems.findIndex((it) => { + const schema = it.schema ? `${it.schema}.` : ''; + const table = it.table ? `${it.table}.` : ''; + const key = `${schema}${table}${it.name}`; + return key === to; + }); + + if (idxTo < 0) throw new Error(`unexpected`); + + renamedOrMoved.push({ + from: deletedItems[idxFrom], + to: createdItems[idxTo], + }); + + delete createdItems[idxTo]; + delete deletedItems[idxFrom]; + + createdItems = createdItems.filter(Boolean); + deletedItems = deletedItems.filter(Boolean); + } + } + return { created: createdItems, deleted: deletedItems, renamedOrMoved }; + }; diff --git a/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm b/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm new file mode 100644 index 0000000000..9e0eed3511 --- /dev/null +++ b/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm @@ -0,0 +1,31 @@ +PGArrayExpression { + Array = "ARRAY[" ListOf "]" + + ArrayItem = stringLiteral | Array | quotelessString | nullLiteral + + stringLiteral = "\"" ((~("\"" | escapedSymbol) any) | escapedSymbol)* "\"" + + quotelessString = (~forbiddenSymbolForQuoteless any)+ + + escapedSymbol = "\\" any + + nullLiteral = "NULL" + + forbiddenSymbolForQuoteless = "[" | "]" | " , " | "\"" | nullLiteral +} + +PGArrayLiteral { + Array = "{" ListOf "}" + + ArrayItem = stringLiteral | quotelessString | nullLiteral | Array + + stringLiteral = "\"" ((~("\"" | escapedSymbol) any) | escapedSymbol)* "\"" + + quotelessString = (~forbiddenSymbolForQuoteless any)+ + + escapedSymbol = "\\" any + + nullLiteral = "NULL" + + forbiddenSymbolForQuoteless = "{" | "}" | "," | "\"" | nullLiteral +} \ No newline at end of file diff --git a/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm-bundle.d.ts b/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm-bundle.d.ts new file mode 100644 index 0000000000..41ce7ff617 --- /dev/null +++ b/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm-bundle.d.ts @@ -0,0 +1,63 @@ +// AUTOGENERATED FILE +// This file was generated from grammar.ohm by `ohm generateBundles`. + +import { + BaseActionDict, + Grammar, + IterationNode, + Namespace, + Node, + NonterminalNode, + Semantics, + TerminalNode, +} from 'ohm-js'; + +export interface PGArrayExpressionActionDict extends BaseActionDict { + Array?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: TerminalNode) => T; + ArrayItem?: (this: NonterminalNode, arg0: NonterminalNode) => T; + stringLiteral?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: TerminalNode) => T; + quotelessString?: (this: NonterminalNode, arg0: IterationNode) => T; + escapedSymbol?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; + nullLiteral?: (this: NonterminalNode, arg0: TerminalNode) => T; + forbiddenSymbolForQuoteless?: (this: NonterminalNode, arg0: NonterminalNode | TerminalNode) => T; +} + +export interface PGArrayExpressionSemantics extends Semantics { + addOperation(name: string, actionDict: PGArrayExpressionActionDict): this; + extendOperation(name: string, actionDict: PGArrayExpressionActionDict): this; + addAttribute(name: string, actionDict: PGArrayExpressionActionDict): this; + extendAttribute(name: string, actionDict: PGArrayExpressionActionDict): this; +} + +export interface PGArrayExpressionGrammar extends Grammar { + createSemantics(): PGArrayExpressionSemantics; + extendSemantics(superSemantics: PGArrayExpressionSemantics): PGArrayExpressionSemantics; +} + +export interface PGArrayLiteralActionDict extends BaseActionDict { + Array?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: TerminalNode) => T; + ArrayItem?: (this: NonterminalNode, arg0: NonterminalNode) => T; + stringLiteral?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: TerminalNode) => T; + quotelessString?: (this: NonterminalNode, arg0: IterationNode) => T; + escapedSymbol?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; + nullLiteral?: (this: NonterminalNode, arg0: TerminalNode) => T; + forbiddenSymbolForQuoteless?: (this: NonterminalNode, arg0: NonterminalNode | TerminalNode) => T; +} + +export interface PGArrayLiteralSemantics extends Semantics { + addOperation(name: string, actionDict: PGArrayLiteralActionDict): this; + extendOperation(name: string, actionDict: PGArrayLiteralActionDict): this; + addAttribute(name: string, actionDict: PGArrayLiteralActionDict): this; + extendAttribute(name: string, actionDict: PGArrayLiteralActionDict): this; +} + +export interface PGArrayLiteralGrammar extends Grammar { + createSemantics(): PGArrayLiteralSemantics; + extendSemantics(superSemantics: PGArrayLiteralSemantics): PGArrayLiteralSemantics; +} + +declare const ns: { + PGArrayExpression: PGArrayExpressionGrammar; + PGArrayLiteral: PGArrayLiteralGrammar; +}; +export default ns; diff --git a/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm-bundle.js b/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm-bundle.js new file mode 100644 index 0000000000..04f8ef5773 --- /dev/null +++ b/drizzle-kit/src/utils/parse-pgarray/grammar/grammar.ohm-bundle.js @@ -0,0 +1,276 @@ +import { makeRecipe } from 'ohm-js'; + +const result = {}; +result.PGArrayExpression = makeRecipe([ + 'grammar', + { + source: + 'PGArrayExpression { \n Array = "ARRAY[" ListOf "]"\n\n ArrayItem = stringLiteral | Array | quotelessString | nullLiteral\n\n stringLiteral = "\\"" ((~("\\"" | escapedSymbol) any) | escapedSymbol)* "\\""\n \n quotelessString = (~forbiddenSymbolForQuoteless any)+\n\n\tescapedSymbol = "\\\\" any\n\n nullLiteral = "NULL"\n\n\tforbiddenSymbolForQuoteless = "[" | "]" | " , " | "\\"" | nullLiteral\n}', + }, + 'PGArrayExpression', + null, + 'Array', + { + Array: [ + 'define', + { sourceInterval: [28, 71] }, + null, + [], + [ + 'seq', + { sourceInterval: [36, 71] }, + ['terminal', { sourceInterval: [36, 44] }, 'ARRAY['], + [ + 'app', + { sourceInterval: [45, 67] }, + 'ListOf', + [ + ['app', { sourceInterval: [52, 61] }, 'ArrayItem', []], + ['terminal', { sourceInterval: [63, 66] }, ','], + ], + ], + ['terminal', { sourceInterval: [68, 71] }, ']'], + ], + ], + ArrayItem: [ + 'define', + { sourceInterval: [77, 143] }, + null, + [], + [ + 'alt', + { sourceInterval: [89, 143] }, + ['app', { sourceInterval: [89, 102] }, 'stringLiteral', []], + ['app', { sourceInterval: [105, 110] }, 'Array', []], + ['app', { sourceInterval: [113, 128] }, 'quotelessString', []], + ['app', { sourceInterval: [132, 143] }, 'nullLiteral', []], + ], + ], + stringLiteral: [ + 'define', + { sourceInterval: [149, 223] }, + null, + [], + [ + 'seq', + { sourceInterval: [165, 223] }, + ['terminal', { sourceInterval: [165, 169] }, '"'], + [ + 'star', + { sourceInterval: [170, 218] }, + [ + 'alt', + { sourceInterval: [171, 216] }, + [ + 'seq', + { sourceInterval: [171, 200] }, + [ + 'not', + { sourceInterval: [172, 195] }, + ['alt', { sourceInterval: [174, 194] }, ['terminal', { sourceInterval: [174, 178] }, '"'], [ + 'app', + { sourceInterval: [181, 194] }, + 'escapedSymbol', + [], + ]], + ], + ['app', { sourceInterval: [196, 199] }, 'any', []], + ], + ['app', { sourceInterval: [203, 216] }, 'escapedSymbol', []], + ], + ], + ['terminal', { sourceInterval: [219, 223] }, '"'], + ], + ], + quotelessString: [ + 'define', + { sourceInterval: [233, 286] }, + null, + [], + [ + 'plus', + { sourceInterval: [251, 286] }, + [ + 'seq', + { sourceInterval: [252, 284] }, + ['not', { sourceInterval: [252, 280] }, [ + 'app', + { sourceInterval: [253, 280] }, + 'forbiddenSymbolForQuoteless', + [], + ]], + ['app', { sourceInterval: [281, 284] }, 'any', []], + ], + ], + ], + escapedSymbol: [ + 'define', + { sourceInterval: [289, 313] }, + null, + [], + ['seq', { sourceInterval: [305, 313] }, ['terminal', { sourceInterval: [305, 309] }, '\\'], [ + 'app', + { sourceInterval: [310, 313] }, + 'any', + [], + ]], + ], + nullLiteral: ['define', { sourceInterval: [319, 339] }, null, [], [ + 'terminal', + { sourceInterval: [333, 339] }, + 'NULL', + ]], + forbiddenSymbolForQuoteless: [ + 'define', + { sourceInterval: [342, 411] }, + null, + [], + [ + 'alt', + { sourceInterval: [372, 411] }, + ['terminal', { sourceInterval: [372, 375] }, '['], + ['terminal', { sourceInterval: [378, 381] }, ']'], + ['terminal', { sourceInterval: [384, 389] }, ' , '], + ['terminal', { sourceInterval: [392, 396] }, '"'], + ['app', { sourceInterval: [400, 411] }, 'nullLiteral', []], + ], + ], + }, +]); +result.PGArrayLiteral = makeRecipe([ + 'grammar', + { + source: + 'PGArrayLiteral { \n Array = "{" ListOf "}"\n\n ArrayItem = stringLiteral | quotelessString | nullLiteral | Array\n\n stringLiteral = "\\"" ((~("\\"" | escapedSymbol) any) | escapedSymbol)* "\\""\n \n quotelessString = (~forbiddenSymbolForQuoteless any)+\n\n\tescapedSymbol = "\\\\" any \n\n nullLiteral = "NULL"\n\n\tforbiddenSymbolForQuoteless = "{" | "}" | "," | "\\"" | nullLiteral\n}', + }, + 'PGArrayLiteral', + null, + 'Array', + { + Array: [ + 'define', + { sourceInterval: [25, 63] }, + null, + [], + [ + 'seq', + { sourceInterval: [33, 63] }, + ['terminal', { sourceInterval: [33, 36] }, '{'], + [ + 'app', + { sourceInterval: [37, 59] }, + 'ListOf', + [ + ['app', { sourceInterval: [44, 53] }, 'ArrayItem', []], + ['terminal', { sourceInterval: [55, 58] }, ','], + ], + ], + ['terminal', { sourceInterval: [60, 63] }, '}'], + ], + ], + ArrayItem: [ + 'define', + { sourceInterval: [69, 134] }, + null, + [], + [ + 'alt', + { sourceInterval: [81, 134] }, + ['app', { sourceInterval: [81, 94] }, 'stringLiteral', []], + ['app', { sourceInterval: [97, 112] }, 'quotelessString', []], + ['app', { sourceInterval: [115, 126] }, 'nullLiteral', []], + ['app', { sourceInterval: [129, 134] }, 'Array', []], + ], + ], + stringLiteral: [ + 'define', + { sourceInterval: [140, 214] }, + null, + [], + [ + 'seq', + { sourceInterval: [156, 214] }, + ['terminal', { sourceInterval: [156, 160] }, '"'], + [ + 'star', + { sourceInterval: [161, 209] }, + [ + 'alt', + { sourceInterval: [162, 207] }, + [ + 'seq', + { sourceInterval: [162, 191] }, + [ + 'not', + { sourceInterval: [163, 186] }, + ['alt', { sourceInterval: [165, 185] }, ['terminal', { sourceInterval: [165, 169] }, '"'], [ + 'app', + { sourceInterval: [172, 185] }, + 'escapedSymbol', + [], + ]], + ], + ['app', { sourceInterval: [187, 190] }, 'any', []], + ], + ['app', { sourceInterval: [194, 207] }, 'escapedSymbol', []], + ], + ], + ['terminal', { sourceInterval: [210, 214] }, '"'], + ], + ], + quotelessString: [ + 'define', + { sourceInterval: [224, 277] }, + null, + [], + [ + 'plus', + { sourceInterval: [242, 277] }, + [ + 'seq', + { sourceInterval: [243, 275] }, + ['not', { sourceInterval: [243, 271] }, [ + 'app', + { sourceInterval: [244, 271] }, + 'forbiddenSymbolForQuoteless', + [], + ]], + ['app', { sourceInterval: [272, 275] }, 'any', []], + ], + ], + ], + escapedSymbol: [ + 'define', + { sourceInterval: [280, 304] }, + null, + [], + ['seq', { sourceInterval: [296, 304] }, ['terminal', { sourceInterval: [296, 300] }, '\\'], [ + 'app', + { sourceInterval: [301, 304] }, + 'any', + [], + ]], + ], + nullLiteral: ['define', { sourceInterval: [311, 331] }, null, [], [ + 'terminal', + { sourceInterval: [325, 331] }, + 'NULL', + ]], + forbiddenSymbolForQuoteless: [ + 'define', + { sourceInterval: [334, 401] }, + null, + [], + [ + 'alt', + { sourceInterval: [364, 401] }, + ['terminal', { sourceInterval: [364, 367] }, '{'], + ['terminal', { sourceInterval: [370, 373] }, '}'], + ['terminal', { sourceInterval: [376, 379] }, ','], + ['terminal', { sourceInterval: [382, 386] }, '"'], + ['app', { sourceInterval: [390, 401] }, 'nullLiteral', []], + ], + ], + }, +]); +export default result; diff --git a/drizzle-kit/src/utils/parse-pgarray/index.ts b/drizzle-kit/src/utils/parse-pgarray/index.ts new file mode 100644 index 0000000000..8cf346eb24 --- /dev/null +++ b/drizzle-kit/src/utils/parse-pgarray/index.ts @@ -0,0 +1,103 @@ +import PGArray from './grammar/grammar.ohm-bundle'; + +const literalArraySemantics = PGArray.PGArrayLiteral.createSemantics(); +literalArraySemantics.addOperation('parseArray', { + Array(_lBracket, argList, _rBracket) { + return argList['parseArray'](); + }, + + ArrayItem(arg0) { + return arg0['parseArray'](); + }, + + NonemptyListOf(arg0, arg1, arg2) { + return [arg0['parseArray'](), ...arg1['parseArray'](), ...arg2['parseArray']()]; + }, + + EmptyListOf() { + return []; + }, + + _iter(...children) { + return children.map((c) => c['parseArray']()).filter((e) => e !== undefined); + }, + + _terminal() { + return; + }, + + stringLiteral(_lQuote, string, _rQuote) { + return JSON.parse('"' + string.sourceString.replaceAll("''", "'") + '"'); + }, + + quotelessString(string) { + return string.sourceString.replaceAll("''", "'"); + }, + + nullLiteral(_) { + return null; + }, +}); + +const expressionArraySemantics = PGArray.PGArrayExpression.createSemantics(); +expressionArraySemantics.addOperation('parseExpressionArray', { + Array(_lBracket, argList, _rBracket) { + return argList['parseExpressionArray'](); + }, + + ArrayItem(arg0) { + return arg0['parseExpressionArray'](); + }, + + NonemptyListOf(arg0, arg1, arg2) { + return [arg0['parseExpressionArray'](), ...arg1['parseExpressionArray'](), ...arg2['parseExpressionArray']()]; + }, + + EmptyListOf() { + return []; + }, + + _iter(...children) { + return children.map((c) => c['parseExpressionArray']()).filter((e) => e !== undefined); + }, + + _terminal() { + return; + }, + + stringLiteral(_lQuote, string, _rQuote) { + return JSON.parse('"' + string.sourceString.replaceAll("''", "'") + '"'); + }, + + quotelessString(string) { + return string.sourceString.replaceAll("''", "'"); + }, + + nullLiteral(_) { + return null; + }, +}); + +export type ArrayValue = string | null | ArrayValue[]; + +// '{}' +// every value will be a string +export function parseArray(array: string) { + const match = PGArray.PGArrayLiteral.match(array, 'Array'); + + if (match.failed()) throw new Error(`Failed to parse array: '${array}'`); + + const res = literalArraySemantics(match)['parseArray'](); + return res as ArrayValue[]; +} + +// ARRAY[] +// every value will be a string +export function parseExpressionArray(array: string) { + const match = PGArray.PGArrayExpression.match(array, 'Array'); + + if (match.failed()) throw new Error(`Failed to parse array: '${array}'`); + + const res = expressionArraySemantics(match)['parseExpressionArray'](); + return res as ArrayValue[]; +} diff --git a/drizzle-kit/src/utils/readme.md b/drizzle-kit/src/utils/readme.md new file mode 100644 index 0000000000..68fe2a0c83 --- /dev/null +++ b/drizzle-kit/src/utils/readme.md @@ -0,0 +1,186 @@ +# How commutativity works + +`detectNonCommutative` function accepts an array of snapshots paths from a drizzle folder and a dialect we should use it for. Dialect is a param to dicsus, maybe we will have just different commutative function + +It outputs an array of conflicts with a full info about each conflict + +Hot this function works: + +Input we will go through, 3 migrations, where 2 and 3 where creating the same table in different branches, which will cause a conflict + +First migration +```json +{ + version: "8", + dialect: "postgres", + id: "p1", + prevId: "00000000-0000-0000-0000-000000000000", + ddl: [], + renames: [] +} +``` + +Second migration(done in branch1) +```json +{ + version: "8", + dialect: "postgres", + id: "a1", + prevId: "p1", + ddl: [ + { + isRlsEnabled: false, + name: "users", + schema: "public", + entityType: "tables" + }, + { + type: "varchar", + options: null, + typeSchema: "pg_catalog", + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + name: "email", + schema: "public", + table: "users", + entityType: "columns" + } + ], + renames: [] +} +``` + +Third migration(done in branch2) +```json +{ + version: "8", + dialect: "postgres", + id: "a1", + prevId: "p1", + ddl: [ + { + isRlsEnabled: false, + name: "users", + schema: "public", + entityType: "tables" + }, + { + type: "varchar", + options: null, + typeSchema: "pg_catalog", + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + name: "email", + schema: "public", + table: "users", + entityType: "columns" + } + ], + renames: [] +} +``` + + + +1. We are building a snapshots grapgh possible multi-child node in our migration tree + +`buildSnapshotGraph` accepts all the snapshots as array and transform them to a Map and references between nodes: + +```ts +nodes { + p1: { + id: 'p1', + prevId: '00000000-0000-0000-0000-000000000000', + path: '...', + folderPath: '...', + raw: {...} // raw snapshot json + }, + a1: { + id: 'a1', + prevId: 'p1', + path: '...', + folderPath: '...', + raw: {...} // raw snapshot json + }, + b1: { + id: 'b1', + prevId: 'p1', + path: '...', + folderPath: '...', + raw: {...} // raw snapshot json + } +} +``` + +2. Next we need to actually map those nodes to a map of parent id to child ids to find if we have an array of >1 childs for each branch + +`prevToChildren` is the exact map logic for it, it will output this: + +```ts +prevToChildren { + '00000000-0000-0000-0000-000000000000': [ 'p1' ], + // Conflict! + p1: [ 'a1', 'b1' ] +} +``` + +3. For each case where a key has >1 childs we need to check collisions + +- We need to collect all the leaves for each branch child, so we can find a head and check each of branch node for collisions, if at least one is found - add a conflict with explanation + +For our case we have both childs as a head, so we will have +```ts +{ a1: [ 'a1' ], b1: [ 'b1' ] } +``` + +4. Conflicts cases are separated into several steps steps + +- Firstly we identify same resources changes and same changes, then we identify same resources with different actions(create, drop, etc.) +- Then we identify conflicts when table drops and anything that is realted to this table was changed +- The same then will be done foe schemas(for dialects that supports schemas) +- Finally we will respond with an array of condlicts + +Example, +```ts +[ + { + parentId: 'p1', + parentPath: '...', // path to parent + branchA: { + headId: 'a1', // snapshot id + path: '...', // path to snapshot json + statements: [Array] // raw json statements + }, + branchB: { + headId: 'b1', + path: '...', + statements: [Array] + }, + reasons: [ + 'Two identical operations on the same resource conflict (e.g., duplicate changes): table:public.users (create) vs table:public.users (create)' + ] + } +] +``` + +extra cases handled +``` +--- case 1 --- +P1 - empty + +A1 - create.users B1 - create posts +A2 - create.posts B2 - alter posts + B3 - create media +--- case 2 --- +P1 - users table + +A1 - alter.users B1 - create posts +A2 - alter.users B2 - alter posts + B3 - drop users +``` diff --git a/drizzle-kit/src/utils/schemaValidator.ts b/drizzle-kit/src/utils/schemaValidator.ts new file mode 100644 index 0000000000..a435c1ce1d --- /dev/null +++ b/drizzle-kit/src/utils/schemaValidator.ts @@ -0,0 +1,17 @@ +import type { TypeOf } from 'zod'; +import { enum as enumType } from 'zod'; + +export const dialects = [ + 'postgresql', + 'mysql', + 'sqlite', + 'turso', + 'singlestore', + 'gel', + 'mssql', + 'cockroach', +] as const; +export const dialect = enumType(dialects); + +export type Dialect = (typeof dialects)[number]; +const _: Dialect = '' as TypeOf; diff --git a/drizzle-kit/src/utils/sequence-matcher.ts b/drizzle-kit/src/utils/sequence-matcher.ts new file mode 100644 index 0000000000..28022271b8 --- /dev/null +++ b/drizzle-kit/src/utils/sequence-matcher.ts @@ -0,0 +1,261 @@ +/** + * A sequence matcher for string arrays that finds differences + * and tracks positions of added elements. + */ +export function diffStringArrays(oldArr: string[], newArr: string[]): { + type: 'same' | 'removed' | 'added'; + value: string; + beforeValue?: string; +}[] { + // Get edit operations + const opcodes = getOpcodes(oldArr, newArr); + + // Convert to the requested format + return formatResult(opcodes, oldArr, newArr); +} + +/** + * Get edit operations between two arrays + */ +function getOpcodes( + oldArray: string[], + newArray: string[], +): Array<['equal' | 'delete' | 'insert' | 'replace', number, number, number, number]> { + // Get matching blocks + const matchingBlocks = getMatchingBlocks(oldArray, newArray); + + // Convert to opcodes + const opcodes: Array<['equal' | 'delete' | 'insert' | 'replace', number, number, number, number]> = []; + let oldIndex = 0; + let newIndex = 0; + + for (const [oldBlockStart, newBlockStart, matchLength] of matchingBlocks) { + // Handle differences before this match + if (oldIndex < oldBlockStart || newIndex < newBlockStart) { + const tag: 'delete' | 'insert' | 'replace' = oldIndex < oldBlockStart && newIndex < newBlockStart + ? 'replace' + : oldIndex < oldBlockStart + ? 'delete' + : 'insert'; + opcodes.push([tag, oldIndex, oldBlockStart, newIndex, newBlockStart]); + } + + // Handle the match itself + if (matchLength > 0) { + opcodes.push(['equal', oldBlockStart, oldBlockStart + matchLength, newBlockStart, newBlockStart + matchLength]); + } + + // Update positions + oldIndex = oldBlockStart + matchLength; + newIndex = newBlockStart + matchLength; + } + + return opcodes; +} + +/** + * Get matching blocks between two arrays + */ +function getMatchingBlocks(oldArray: string[], newArray: string[]): Array<[number, number, number]> { + // Special case for empty arrays + if (oldArray.length === 0 && newArray.length === 0) { + return [[0, 0, 0]]; + } + + // Find matching blocks recursively + const matchQueue: Array<[number, number, number, number]> = [[0, oldArray.length, 0, newArray.length]]; + const matches: Array<[number, number, number]> = []; + + while (matchQueue.length > 0) { + const [oldStart, oldEnd, newStart, newEnd] = matchQueue.pop()!; + + // Find longest match in this range + const [oldMatchStart, newMatchStart, matchLength] = findLongestMatch( + oldArray, + newArray, + oldStart, + oldEnd, + newStart, + newEnd, + ); + + if (matchLength > 0) { + matches.push([oldMatchStart, newMatchStart, matchLength]); + + // Add regions before the match to the queue + if (oldStart < oldMatchStart && newStart < newMatchStart) { + matchQueue.push([oldStart, oldMatchStart, newStart, newMatchStart]); + } + + // Add regions after the match to the queue + if (oldMatchStart + matchLength < oldEnd && newMatchStart + matchLength < newEnd) { + matchQueue.push([oldMatchStart + matchLength, oldEnd, newMatchStart + matchLength, newEnd]); + } + } + } + + // Sort matches and add sentinel + matches.sort((a, b) => a[0] - b[0]); + matches.push([oldArray.length, newArray.length, 0]); + + return matches; +} + +/** + * Find the longest matching block in oldArray[oldStart:oldEnd] and newArray[newStart:newEnd] + */ +function findLongestMatch( + oldArray: string[], + newArray: string[], + oldStart: number, + oldEnd: number, + newStart: number, + newEnd: number, +): [number, number, number] { + let bestOldStart = oldStart; + let bestNewStart = newStart; + let bestMatchLength = 0; + + // Create a map of elements in newArray to their positions + const newElementPositions: Map = new Map(); + for (let newIndex = newStart; newIndex < newEnd; newIndex++) { + const element = newArray[newIndex]; + if (!newElementPositions.has(element)) { + newElementPositions.set(element, []); + } + newElementPositions.get(element)!.push(newIndex); + } + + // For each element in oldArray, check for matches in newArray + for (let oldIndex = oldStart; oldIndex < oldEnd; oldIndex++) { + const element = oldArray[oldIndex]; + if (!newElementPositions.has(element)) continue; + + for (const newIndex of newElementPositions.get(element)!) { + // Skip if we're past the end + if (newIndex >= newEnd) continue; + + // Count how many consecutive elements match + let currentMatchLength = 1; + while ( + oldIndex + currentMatchLength < oldEnd + && newIndex + currentMatchLength < newEnd + && oldArray[oldIndex + currentMatchLength] === newArray[newIndex + currentMatchLength] + ) { + currentMatchLength++; + } + + if (currentMatchLength > bestMatchLength) { + bestOldStart = oldIndex; + bestNewStart = newIndex; + bestMatchLength = currentMatchLength; + } + } + } + + return [bestOldStart, bestNewStart, bestMatchLength]; +} + +/** + * Format the opcodes into the requested result format + */ +function formatResult( + opcodes: Array<['equal' | 'delete' | 'insert' | 'replace', number, number, number, number]>, + oldArray: string[], + newArray: string[], +): { + type: 'same' | 'removed' | 'added'; + value: string; + beforeValue?: string; + isAtEnd?: boolean; +}[] { + const result: { + type: 'same' | 'removed' | 'added'; + value: string; + beforeValue?: string; + isAtEnd?: boolean; + }[] = []; + + for (const [tag, oldStart, oldEnd, newStart, newEnd] of opcodes) { + if (tag === 'equal') { + // Same elements in both arrays + for (let oldIndex = oldStart; oldIndex < oldEnd; oldIndex++) { + result.push({ + type: 'same', + value: oldArray[oldIndex], + }); + } + continue; + } + + if (tag === 'delete') { + // Elements removed from oldArray + for (let oldIndex = oldStart; oldIndex < oldEnd; oldIndex++) { + result.push({ + type: 'removed', + value: oldArray[oldIndex], + }); + } + continue; + } + + if (tag === 'insert') { + // Elements added in newArray + for (let newIndex = newStart; newIndex < newEnd; newIndex++) { + addWithPosition(newArray[newIndex], newIndex, newArray, oldArray, result); + } + continue; + } + + if (tag === 'replace') { + // Both removal and addition + // First, handle removals + for (let oldIndex = oldStart; oldIndex < oldEnd; oldIndex++) { + result.push({ + type: 'removed', + value: oldArray[oldIndex], + }); + } + + // Then, handle additions + for (let newIndex = newStart; newIndex < newEnd; newIndex++) { + addWithPosition(newArray[newIndex], newIndex, newArray, oldArray, result); + } + continue; + } + } + + return result; +} + +/** + * Helper function to add an element with position information + */ +function addWithPosition( + value: string, + currentIndex: number, + newArray: string[], + oldElementSet: string[], + result: { + type: 'same' | 'removed' | 'added'; + value: string; + beforeValue?: string; + }[], +): void { + // Find what this added element comes before + let beforeValue: string | undefined = undefined; + + // Look ahead to find the next element that exists in oldArray + for (let lookAheadIndex = currentIndex + 1; lookAheadIndex < newArray.length; lookAheadIndex++) { + if (oldElementSet.indexOf(newArray[lookAheadIndex]) >= 0) { + beforeValue = newArray[lookAheadIndex]; + break; + } + } + + result.push({ + type: 'added', + value, + beforeValue, + }); +} diff --git a/drizzle-kit/src/utils/utils-node.ts b/drizzle-kit/src/utils/utils-node.ts new file mode 100644 index 0000000000..ce043fdd9c --- /dev/null +++ b/drizzle-kit/src/utils/utils-node.ts @@ -0,0 +1,398 @@ +import chalk from 'chalk'; +import { existsSync, lstatSync, mkdirSync, readdirSync, readFileSync } from 'fs'; +import { sync as globSync } from 'glob'; +import { join, resolve } from 'path'; +import { snapshotValidator as mysqlSnapshotValidator } from 'src/dialects/mysql/snapshot'; +import { snapshotValidator as singlestoreSnapshotValidator } from 'src/dialects/singlestore/snapshot'; +import { parse } from 'url'; +import { error, info } from '../cli/views'; +import { snapshotValidator as cockroachValidator } from '../dialects/cockroach/snapshot'; +import { snapshotValidator as mssqlValidatorSnapshot } from '../dialects/mssql/snapshot'; +import { snapshotValidator as pgSnapshotValidator } from '../dialects/postgres/snapshot'; +import { snapshotValidator as sqliteStapshotValidator } from '../dialects/sqlite/snapshot'; +import { assertUnreachable } from '.'; +import type { Journal } from '.'; +import type { Dialect } from './schemaValidator'; + +export const prepareFilenames = (path: string | string[]) => { + if (typeof path === 'string') { + path = [path]; + } + + const prefix = process.env.TEST_CONFIG_PATH_PREFIX || ''; + + const result = path.reduce((result, cur) => { + const globbed = globSync(`${prefix}${cur}`); + + for (const it of globbed) { + const fileName = lstatSync(it).isDirectory() ? null : resolve(it); + + const filenames = fileName + ? [fileName!] + : readdirSync(it).map((file) => join(resolve(it), file)); + + for (const file of filenames.filter((file) => !lstatSync(file).isDirectory())) { + result.add(file); + } + } + + return result; + }, new Set()); + const res = [...result]; + + // TODO: properly handle and test + // const errors = res.filter((it) => { + // return !( + // it.endsWith('.ts') + // || it.endsWith('.js') + // || it.endsWith('.cjs') + // || it.endsWith('.mjs') + // || it.endsWith('.mts') + // || it.endsWith('.cts') + // ); + // }); + + // when schema: "./schema" and not "./schema.ts" + if (res.length === 0) { + console.log( + error( + `No schema files found for path config [${ + path + .map((it) => `'${it}'`) + .join(', ') + }]`, + ), + ); + console.log( + error( + `If path represents a file - please make sure to use .ts or other extension in the path`, + ), + ); + process.exit(1); + } + + return res; +}; + +export const assertV1OutFolder = (out: string) => { + if (!existsSync(out)) return; + + const oldMigrationFolders = readdirSync(out).filter( + (it) => it.length === 14 && /^\d+$/.test(it), + ); + + if (oldMigrationFolders.length > 0) { + console.log( + `Your migrations folder format is outdated, please run ${ + chalk.green.bold( + `drizzle-kit up`, + ) + }`, + ); + process.exit(1); + } +}; + +export const assertV3OutFolder = (out: string) => { + if (!existsSync(out)) return; + + if (existsSync(join(out, 'meta/_journal.json'))) { + console.log( + `Your migrations folder format is outdated, please run ${ + chalk.green.bold( + `drizzle-kit up`, + ) + }`, + ); + process.exit(1); + } +}; + +export const dryJournal = (dialect: Dialect): Journal => { + return { + version: '7', + dialect, + entries: [], + }; +}; + +export const prepareOutFolder = (out: string) => { + if (!existsSync(out)) { + mkdirSync(out, { recursive: true }); + } + + const snapshots = readdirSync(out) + .map((subdir) => join(out, subdir, 'snapshot.json')) + .filter((filePath) => existsSync(filePath)); + + snapshots.sort(); + + return { snapshots }; +}; + +type ValidationResult = { status: 'valid' | 'unsupported' | 'nonLatest' } | { status: 'malformed'; errors: string[] }; + +const assertVersion = (obj: object, current: number): 'unsupported' | 'nonLatest' | null => { + const version = 'version' in obj ? Number(obj['version']) : undefined; + if (!version) return 'unsupported'; + if (version > current) return 'unsupported'; + if (version < current) return 'nonLatest'; + + return null; +}; + +const postgresValidator = (snapshot: object): ValidationResult => { + const versionError = assertVersion(snapshot, 8); + if (versionError) return { status: versionError }; + + const res = pgSnapshotValidator.parse(snapshot); + if (!res.success) { + return { status: 'malformed', errors: res.errors ?? [] }; + } + + return { status: 'valid' }; +}; + +const cockroachSnapshotValidator = (snapshot: object): ValidationResult => { + const versionError = assertVersion(snapshot, 1); + if (versionError) return { status: versionError }; + + const res = cockroachValidator.parse(snapshot); + if (!res.success) { + return { status: 'malformed', errors: res.errors ?? [] }; + } + + return { status: 'valid' }; +}; + +const mysqlValidator = ( + snapshot: object, +): ValidationResult => { + const versionError = assertVersion(snapshot, 6); + if (versionError) return { status: versionError }; + + const { success } = mysqlSnapshotValidator.parse(snapshot); + if (!success) return { status: 'malformed', errors: [] }; + + return { status: 'valid' }; +}; + +const mssqlSnapshotValidator = ( + snapshot: object, +): ValidationResult => { + const versionError = assertVersion(snapshot, 1); + if (versionError) return { status: versionError }; + + const res = mssqlValidatorSnapshot.parse(snapshot); + if (!res.success) return { status: 'malformed', errors: res.errors ?? [] }; + + return { status: 'valid' }; +}; + +const sqliteValidator = ( + snapshot: object, +): ValidationResult => { + const versionError = assertVersion(snapshot, 7); + if (versionError) return { status: versionError }; + + const { success } = sqliteStapshotValidator.parse(snapshot); + if (!success) { + return { status: 'malformed', errors: [] }; + } + + return { status: 'valid' }; +}; + +const singlestoreValidator = ( + snapshot: object, +): ValidationResult => { + const versionError = assertVersion(snapshot, 2); + if (versionError) return { status: versionError }; + + const { success } = singlestoreSnapshotValidator.parse(snapshot); + if (!success) { + return { status: 'malformed', errors: [] }; + } + return { status: 'valid' }; +}; + +export const validatorForDialect = (dialect: Dialect): (snapshot: object) => ValidationResult => { + switch (dialect) { + case 'postgresql': + return postgresValidator; + case 'sqlite': + return sqliteValidator; + case 'turso': + return sqliteValidator; + case 'mysql': + return mysqlValidator; + case 'singlestore': + return singlestoreValidator; + case 'mssql': + return mssqlSnapshotValidator; + case 'cockroach': + return cockroachSnapshotValidator; + case 'gel': + throw Error('gel validator is not implemented yet'); // TODO + default: + assertUnreachable(dialect); + } +}; + +export const validateWithReport = (snapshots: string[], dialect: Dialect) => { + // ✅ check if drizzle-kit can handle snapshot version + // ✅ check if snapshot is of the last version + // ✅ check if id of the snapshot is valid + // ✅ collect {} of prev id -> snapshotName[], if there's more than one - tell about collision + const validator = validatorForDialect(dialect); + + const result = snapshots.reduce( + (accum, it) => { + const raw = JSON.parse(readFileSync(`./${it}`).toString()); + + accum.rawMap[it] = raw; + + const res = validator(raw); + if (res.status === 'unsupported') { + console.log( + info( + `${it} snapshot is of unsupported version, please update drizzle-kit`, + ), + ); + process.exit(0); + } + if (res.status === 'malformed') { + accum.malformed.push(it); + return accum; + } + + if (res.status === 'nonLatest') { + accum.nonLatest.push(it); + return accum; + } + + // only if latest version here + const idEntry = accum.idsMap[raw['prevId']] ?? { + parent: it, + snapshots: [], + }; + + idEntry.snapshots.push(it); + accum.idsMap[raw['prevId']] = idEntry; + return accum; + }, + { + malformed: [], + nonLatest: [], + idToNameMap: {}, + idsMap: {}, + rawMap: {}, + } as { + malformed: string[]; + nonLatest: string[]; + idsMap: Record; + rawMap: Record; + }, + ); + + return result; +}; + +export const normaliseSQLiteUrl = ( + it: string, + type: 'libsql' | 'better-sqlite' | '@tursodatabase/database' | 'bun', +) => { + if (type === 'libsql') { + if (it.startsWith('file:')) { + return it; + } + try { + const url = parse(it); + if (url.protocol === null) { + return `file:${it}`; + } + return it; + } catch { + return `file:${it}`; + } + } + + if (type === 'better-sqlite' || type === '@tursodatabase/database' || type === 'bun') { + if (it.startsWith('file:')) { + return it.substring(5); + } + + return it; + } + + assertUnreachable(type); +}; + +// NextJs default config is target: es5, which esbuild-register can't consume +const assertES5 = async () => { + try { + await import('./_es5'); + } catch (e: any) { + if ('errors' in e && Array.isArray(e.errors) && e.errors.length > 0) { + const es5Error = (e.errors as any[]).filter((it) => it.text?.includes(`("es5") is not supported yet`)).length > 0; + if (es5Error) { + console.log( + error( + `Please change compilerOptions.target from 'es5' to 'es6' or above in your tsconfig.json`, + ), + ); + process.exit(1); + } + } + console.error(e); + process.exit(1); + } +}; + +export class InMemoryMutex { + private lockPromise: Promise | null = null; + + async withLock(fn: () => Promise): Promise { + // Wait for any existing lock + while (this.lockPromise) { + await this.lockPromise; + } + + let resolveLock: (() => void) | undefined; + this.lockPromise = new Promise((resolve) => { + resolveLock = resolve; + }); + + try { + return await fn(); + } finally { + this.lockPromise = null; + resolveLock!(); // non-null assertion: TS now knows it's definitely assigned + } + } +} + +const registerMutex = new InMemoryMutex(); + +export const safeRegister = async (fn: () => Promise) => { + return registerMutex.withLock(async () => { + const { register } = await import('esbuild-register/dist/node'); + let res: { unregister: () => void }; + try { + const { unregister } = register(); + res = { unregister }; + } catch { + // tsx fallback + res = { + unregister: () => {}, + }; + } + // has to be outside try catch to be able to run with tsx + await assertES5(); + + const result = await fn(); + res.unregister(); + + return result; + }); +}; diff --git a/drizzle-kit/src/utils/when-json-met-bigint/index.ts b/drizzle-kit/src/utils/when-json-met-bigint/index.ts new file mode 100644 index 0000000000..8d043e45b9 --- /dev/null +++ b/drizzle-kit/src/utils/when-json-met-bigint/index.ts @@ -0,0 +1,19 @@ +// Copied from https://github.com/haoadoreorange/when-json-met-bigint +// Author: @haoadoresorange + +import type { JsonBigIntOptions } from './lib'; +import { newParse } from './parse'; +import { stringify } from './stringify'; + +const parse = newParse(); +export const JSONB = Object.assign( + (options?: JsonBigIntOptions) => { + return { + parse: newParse(options), + stringify, + }; + }, + // default options + { parse, stringify }, +); +export { parse, stringify }; diff --git a/drizzle-kit/src/utils/when-json-met-bigint/lib.ts b/drizzle-kit/src/utils/when-json-met-bigint/lib.ts new file mode 100644 index 0000000000..b382cdc8af --- /dev/null +++ b/drizzle-kit/src/utils/when-json-met-bigint/lib.ts @@ -0,0 +1,73 @@ +export const error = `error`; +export const ignore = `ignore`; +export const preserve = `preserve`; +export const CONSTRUCTOR_ACTIONS = [error, ignore, preserve] as const; +export const PROTO_ACTIONS = CONSTRUCTOR_ACTIONS; +export type JsonBigIntOptions = { + /** + * @default false + */ + errorOnBigIntDecimalOrScientific?: boolean; + /** + * @default false + */ + errorOnDuplicatedKeys?: boolean; + /** + * @default false + */ + strict?: boolean; + /** + * @default false + */ + parseBigIntAsString?: boolean; + /** + * @default false + */ + alwaysParseAsBigInt?: boolean; + /** + * @default 'preserve' + */ + protoAction?: (typeof PROTO_ACTIONS)[number]; + /** + * @default 'preserve' + */ + constructorAction?: (typeof CONSTRUCTOR_ACTIONS)[number]; +}; + +export const isNonNullObject = ( + o: unknown, +): o is Record | unknown[] => { + return o !== null && typeof o === `object`; +}; + +export class Cache { + private _cache = {} as Record; + private _size = 0; + private _old = {} as Record; + + constructor(private readonly _max = 1e6 / 2) {} + + get(key: K): V | undefined { + return this.has(key) ? this._cache[key] : undefined; + } + + set(key: K, value: V): V { + if (this._size >= this._max) { + this._old = this._cache; + this._cache = {} as Record; + this._size = 0; + } + this._cache[key] = value; + this._size++; + return value; + } + + has(key: K): boolean { + if (Object.prototype.hasOwnProperty.call(this._cache, key)) return true; + if (Object.prototype.hasOwnProperty.call(this._old, key)) { + this._cache[key] = this._old[key]; + return true; + } + return false; + } +} diff --git a/drizzle-kit/src/utils/when-json-met-bigint/parse.ts b/drizzle-kit/src/utils/when-json-met-bigint/parse.ts new file mode 100644 index 0000000000..fef3500061 --- /dev/null +++ b/drizzle-kit/src/utils/when-json-met-bigint/parse.ts @@ -0,0 +1,536 @@ +import type { JsonBigIntOptions } from './lib'; +import { Cache, CONSTRUCTOR_ACTIONS, error, ignore, isNonNullObject, preserve, PROTO_ACTIONS } from './lib'; + +const bigint = `bigint`; +const number = `number`; + +// regexpxs extracted from +// (c) BSD-3-Clause +// https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors +const SUSPECT_PROTO_RX = + /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; +const SUSPECT_CONSTRUCTOR_RX = + /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; + +const ESCAPEE = { + '"': `"`, + '\\': `\\`, + '/': `/`, + b: `\b`, + f: `\f`, + n: `\n`, + r: `\r`, + t: `\t`, +} as const; + +type StringOrNumberOrSymbol = string | number | symbol; +type SimpleSchema = + | `number` + | `bigint` + | ((n: number | bigint) => `number` | `bigint`); +type InternalSchema = + | SimpleSchema + | (InternalSchema | null)[] + | { [key: StringOrNumberOrSymbol]: InternalSchema | undefined }; +export type Schema = unknown extends T ? InternalSchema + : T extends number | number | bigint ? SimpleSchema + : T extends (infer E)[] ? (Schema | null)[] + // unknown wouldn't work for interface, have to be any, see https://github.com/microsoft/TypeScript/issues/42825 + : T extends Record ? { + [ + K in keyof T as K extends symbol ? never + // This is originally to filter out the keys that don't need + // schema, but somehow mysteriously make the compiler always omit + // keys that have generic type itself, for example: + // const f = () => { + // const sch: Schema<{ a: T, b: string }> + // } + // gives sch type {} + // It is not the type of sch extends Record. + // When trying something like this + // : Schema extends Record + // ? K | symbol + // K | symbol]?: Schema; + // the type of sch is still { b?: undefined } only. + // Meaning the key 'a' is always removed for some reason. + + // : Schema extends Record + // ? never + : K | symbol + ]?: Schema; + } + : never; + +// TODO: Infer parsed type when schema generic parameter is known +// type Parsed = S extends SchemaNumberOrBigIntOrFn +// ? number | bigint | string +// : S extends (infer E | null)[] +// ? Parsed[] +// : S extends Record +// ? { [K in keyof S as K extends symbol ? string : K]: Parsed } & Record< +// string | number | symbol, +// unknown +// > +// : any; +type JsonValue = + | { [key: string]: JsonValue } + | JsonValue[] + | string + | number + | bigint + | boolean + | null; +// Closure for internal state variables. +// Parser's internal state variables are prefixed with p_, methods are prefixed with p +export const newParse = ( + p_user_options?: JsonBigIntOptions, +): ( + text: string, + reviver?: Parameters[1] | null, + schema?: Schema, +) => ReturnType => { + // This returns a function that can parse a JSON text, producing a JavaScript + // data structure. It is a simple, recursive descent parser. It does not use + // eval or regular expressions, so it can be used as a model for implementing + // a JSON parser in other languages. + + let p_current_char_index: number, // Index of current character + p_current_char: string, // Current character + p_text: string; // Text being parsed + + // Default options. + const p_options: JsonBigIntOptions = { + errorOnBigIntDecimalOrScientific: false, + errorOnDuplicatedKeys: false, + parseBigIntAsString: false, + alwaysParseAsBigInt: false, // Toggles whether all numbers should be BigInt + protoAction: preserve, + constructorAction: preserve, + }; + + // If there are options, then use them to override the default options. + // These checks are for JS users with no type checking. + if (p_user_options) { + if ( + p_user_options.strict === true + || p_user_options.errorOnBigIntDecimalOrScientific === true + ) { + p_options.errorOnBigIntDecimalOrScientific = true; + } + if ( + p_user_options.strict === true + || p_user_options.errorOnDuplicatedKeys === true + ) { + p_options.errorOnDuplicatedKeys = true; + } + if (p_user_options.parseBigIntAsString === true) { + p_options.parseBigIntAsString = true; + } + if (p_user_options.alwaysParseAsBigInt === true) { + p_options.alwaysParseAsBigInt = true; + } + + if (p_user_options.protoAction) { + if (PROTO_ACTIONS.includes(p_user_options.protoAction)) { + p_options.protoAction = p_user_options.protoAction; + } else { + throw new Error( + // This case is possible in JS but not TS. + `Incorrect value for protoAction option, must be ${ + PROTO_ACTIONS.map( + (a) => `"${a}"`, + ).join(` or `) + } but passed ${p_user_options.protoAction}`, + ); + } + } + if (p_user_options.constructorAction) { + if (CONSTRUCTOR_ACTIONS.includes(p_user_options.constructorAction)) { + p_options.constructorAction = p_user_options.constructorAction; + } else { + throw new Error( + // This case is possible in JS but not TS. + `Incorrect value for constructorAction option, must be ${ + CONSTRUCTOR_ACTIONS.map( + (a) => `"${a}"`, + ).join(` or `) + } but passed ${p_user_options.constructorAction}`, + ); + } + } + } + + const pError = (m: string) => { + // Call error when something is wrong. + throw { + name: `SyntaxError`, + message: m, + at: p_current_char_index, + text: p_text, + }; + }; + const pCurrentCharIs = (c: string) => { + // Verify that it matches the current character. + if (c !== p_current_char) { + return pError(`Expected '` + c + `' instead of '` + p_current_char + `'`); + } + }; + const pNext = (c?: string) => { + // Get the next character. When there are no more characters, + // return the empty string. + p_current_char = p_text.charAt(++p_current_char_index); + // If a c parameter is provided, verify that it matches the next character. + if (c) pCurrentCharIs(c); + return p_current_char; + }; + const pSkipWhite = () => { + // Skip whitespace. + while (p_current_char && p_current_char <= ` `) { + pNext(); + } + }; + + const pObject = (schema?: InternalSchema) => { + // Parse an object value. + + const result = ( + p_options.protoAction === preserve ? Object.create(null) : {} + ) as Record; + + if (p_current_char === `{`) { + pNext(); + pSkipWhite(); + // @ts-expect-error next() change ch + if (p_current_char === `}`) { + pNext(); + return result; // empty object + } + while (p_current_char) { + const key = pString(); + const sub_schema = isNonNullObject(schema) && !Array.isArray(schema) + ? schema[key] || schema[Symbol.for(`any`)] + : undefined; + pSkipWhite(); + pCurrentCharIs(`:`); + pNext(); + if ( + p_options.errorOnDuplicatedKeys === true + && Object.hasOwnProperty.call(result, key) + ) { + pError(`Duplicate key "${key}"`); + } + + if (SUSPECT_PROTO_RX.test(key) === true) { + if (p_options.protoAction === error) { + pError(`Object contains forbidden prototype property`); + } else if (p_options.protoAction === ignore) { + pJsonValue(); + } else { + result[key] = pJsonValue(sub_schema); + } + } else if (SUSPECT_CONSTRUCTOR_RX.test(key) === true) { + if (p_options.constructorAction === error) { + pError(`Object contains forbidden constructor property`); + } else if (p_options.constructorAction === ignore) { + pJsonValue(); + } else { + result[key] = pJsonValue(sub_schema); + } + } else { + result[key] = pJsonValue(sub_schema); + } + + pSkipWhite(); + // @ts-expect-error next() change ch + if (p_current_char === `}`) { + pNext(); + if (p_options.protoAction === preserve) { + Object.setPrototypeOf(result, Object.prototype); + } + return result; + } + pCurrentCharIs(`,`); + pNext(); + pSkipWhite(); + } + } + return pError(`Bad object`); + }; + + const pArray = (schema?: InternalSchema) => { + // Parse an array value. + + const result: JsonValue[] = []; + + if (p_current_char === `[`) { + pNext(); + pSkipWhite(); + // @ts-expect-error next() change ch. + if (p_current_char === `]`) { + pNext(); + return result; // empty array + } + const is_array = Array.isArray(schema); + const is_tuple_like = is_array && schema.length > 1; + while (p_current_char) { + result.push( + pJsonValue( + (is_tuple_like + ? schema[result.length] + : is_array + ? schema[0] + : undefined) as undefined, // It's ok to cast null to undefined + ), + ); + pSkipWhite(); + // @ts-expect-error next() change ch + if (p_current_char === `]`) { + pNext(); + return result; + } + pCurrentCharIs(`,`); + pNext(); + pSkipWhite(); + } + } + return pError(`Bad array`); + }; + + const pString = () => { + // Parse a string value. + + let result = ``; + + // When parsing for string values, we must look for " and \ characters. + + if (p_current_char === `"`) { + let start_at = p_current_char_index + 1; + while (pNext()) { + if (p_current_char === `"`) { + if (p_current_char_index > start_at) { + result += p_text.substring(start_at, p_current_char_index); + } + pNext(); + return result; + } + if (p_current_char === `\\`) { + if (p_current_char_index > start_at) { + result += p_text.substring(start_at, p_current_char_index); + } + pNext(); + if (p_current_char === `u`) { + let uffff = 0; + for (let i = 0; i < 4; i += 1) { + const hex = parseInt(pNext(), 16); + if (!isFinite(hex)) { + break; + } + uffff = uffff * 16 + hex; + } + result += String.fromCharCode(uffff); + } else if (typeof ESCAPEE[p_current_char] === `string`) { + result += ESCAPEE[p_current_char]; + } else { + break; + } + start_at = p_current_char_index + 1; + } + } + } + return pError(`Bad string`); + }; + + const pNumber = (() => { + // TODO: Add test + const cache = new Cache< + string, + Map + >(); + return (schema?: SimpleSchema | null) => { + // Parse a number value. + + let result_string = ``; + let is_positive = true; // for Infinity + + if (p_current_char === `-`) { + result_string = p_current_char; + is_positive = false; + pNext(); + } + if (p_current_char === `0`) { + result_string += p_current_char; + pNext(); + if (p_current_char >= `0` && p_current_char <= `9`) { + pError(`Bad number`); + } + } + while (p_current_char >= `0` && p_current_char <= `9`) { + result_string += p_current_char; + pNext(); + } + if (p_current_char === `.`) { + result_string += p_current_char; + while (pNext() && p_current_char >= `0` && p_current_char <= `9`) { + result_string += p_current_char; + } + } + if (p_current_char === `e` || p_current_char === `E`) { + result_string += p_current_char; + pNext(); + // @ts-expect-error next() change ch + if (p_current_char === `-` || p_current_char === `+`) { + result_string += p_current_char; + pNext(); + } + while (p_current_char >= `0` && p_current_char <= `9`) { + result_string += p_current_char; + pNext(); + } + } + const raw_schema = schema; + const cache_string = cache.get(result_string); + if (!cache_string || !cache_string.has(raw_schema)) { + const cache_schema = cache_string || cache.set(result_string, new Map()); + const result_number = Number(result_string); + if (Number.isNaN(result_number)) { + cache_schema.set(raw_schema, NaN); + } else if (!Number.isFinite(result_number)) { + cache_schema.set(raw_schema, is_positive ? Infinity : -Infinity); + } else { + // Decimal or scientific notation + // cannot be BigInt, aka BigInt("1.79e+308") will throw. + const is_decimal_or_scientific = /[.eE]/.test(result_string); + if (Number.isSafeInteger(result_number) || is_decimal_or_scientific) { + if (typeof schema === `function`) schema = schema(result_number); + cache_schema.set( + raw_schema, + schema === number + || (!p_options.alwaysParseAsBigInt && schema !== bigint) + || (is_decimal_or_scientific + && !p_options.errorOnBigIntDecimalOrScientific) + ? result_number + : is_decimal_or_scientific + ? pError(`Decimal and scientific notation cannot be bigint`) + : BigInt(result_string), + ); + } else { + let result_bigint; + if (typeof schema === `function`) { + result_bigint = BigInt(result_string); + schema = schema(result_bigint); + } + if (schema === number) cache_schema.set(raw_schema, result_number); + else { + cache_schema.set( + raw_schema, + p_options.parseBigIntAsString + ? result_string + : result_bigint || BigInt(result_string), + ); + } + } + } + } + const result = cache.get(result_string)!.get(raw_schema)!; // Cannot be undefined + return Number.isNaN(result) ? pError(`Bad number`) : result; + }; + })(); + + const pBooleanOrNull = () => { + // true, false, or null. + switch (p_current_char) { + case `t`: + pNext(`r`); + pNext(`u`); + pNext(`e`); + pNext(); + return true; + case `f`: + pNext(`a`); + pNext(`l`); + pNext(`s`); + pNext(`e`); + pNext(); + return false; + case `n`: + pNext(`u`); + pNext(`l`); + pNext(`l`); + pNext(); + return null; + } + return pError(`Unexpected '${p_current_char}'`); + }; + + const pJsonValue = (schema?: InternalSchema): JsonValue => { + // Parse a JSON value. It could be an object, an array, a string, a number, + // or boolean or null. + + pSkipWhite(); + switch (p_current_char) { + case `{`: + return pObject(schema); + case `[`: + return pArray(schema); + case `"`: + return pString(); + case `-`: + return pNumber(schema as SimpleSchema); + default: + return p_current_char >= `0` && p_current_char <= `9` + ? pNumber(schema as SimpleSchema) + : pBooleanOrNull(); + } + }; + + // Return the parse function. + return (text, reviver, schema) => { + // Reset state. + p_current_char_index = -1; // next char will begin at 0 + p_current_char = ` `; + p_text = String(text); + + const result = pJsonValue(schema); + pSkipWhite(); + if (p_current_char) { + pError(`Syntax error`); + } + + // If there is a reviver function, we recursively walk the new structure, + // passing each name/value pair to the reviver function for possible + // transformation, starting with a temporary root object that holds the result + // in an empty key. If there is not a reviver function, we simply return the + // result. + + if (typeof reviver === `function`) { + return (function walk( + object_or_array: Record | JsonValue[], + key: string, + ) { + // @ts-expect-error index array with string + const value = object_or_array[key] as JsonValue; + if (isNonNullObject(value)) { + const revived_keys = new Set(); + for (const reviving_key in value) { + const next_object_or_array = !Array.isArray(value) + ? { ...value } + : [...value]; + // @ts-expect-error index array with string + revived_keys.forEach((rk) => delete next_object_or_array[rk]); + const v = walk(next_object_or_array, reviving_key); + revived_keys.add(reviving_key); + if (v !== undefined) { + // @ts-expect-error index array with string + value[reviving_key] = v; + } else { + // @ts-expect-error index array with string + delete value[reviving_key]; + } + } + } + return reviver.call(object_or_array, key, value); + })({ '': result }, ``) as JsonValue; + } + return result; + }; +}; diff --git a/drizzle-kit/src/utils/when-json-met-bigint/stringify.ts b/drizzle-kit/src/utils/when-json-met-bigint/stringify.ts new file mode 100644 index 0000000000..34ce48d7ce --- /dev/null +++ b/drizzle-kit/src/utils/when-json-met-bigint/stringify.ts @@ -0,0 +1,215 @@ +import { Cache, isNonNullObject } from './lib'; + +const isNonNullObjectWithToJSOnImplemented = ( + o: T, +): o is T & { toJSON: (key?: string) => unknown } => isNonNullObject(o) && typeof (o as any).toJSON === `function`; + +// Number -> number & String -> string +const toPrimitive = (o: number | string | T) => + o instanceof Number ? Number(o) : o instanceof String ? String(o) : o; // oxlint-disable-line no-instanceof-builtins drizzle-internal/no-instanceof + +const quote = (() => { + const ESCAPABLE = + // eslint-disable-next-line no-control-regex, no-misleading-character-class + /[\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g; + const META = { + // Table of character substitutions. + '\b': `\\b`, + '\t': `\\t`, + '\n': `\\n`, + '\f': `\\f`, + '\r': `\\r`, + '"': `\\"`, + '\\': `\\\\`, + } as const; + + const cache = new Cache(); + return (s: string) => { + if (!cache.has(s)) { + // If the string contains no control characters, no quote characters, and no + // backslash characters, then we can safely slap some quotes around it. + // Otherwise we must also replace the offending characters with safe escape + // sequences. + ESCAPABLE.lastIndex = 0; + cache.set( + s, + ESCAPABLE.test(s) + ? `"` + + s.replace(ESCAPABLE, function(a) { + const c = META[a as keyof typeof META]; + return typeof c === `string` + ? c + : `\\u` + (`0000` + a.charCodeAt(0).toString(16)).slice(-4); + }) + + `"` + : `"` + s + `"`, + ); + } + return cache.get(s)!; // Cannot be undefined + }; +})(); + +type ReplacerFn = (this: any, key: string, value: any) => any; +type Stringified = V extends symbol | Function ? undefined + : ReturnType; +type Stringify = ( + value: V, + replacer?: (number | number | string | string)[] | ReplacerFn | null, + space?: Parameters[2] | number | string, + n?: boolean, +) => Stringified; +// Closure for internal state variables. +// Serializer's internal state variables are prefixed with s_, methods are prefixed with s. +export const stringify = ((): Stringify => { + // This immediately invoked function returns a function that stringify JS + // data structure. + + // Original spec use stack, but stack is slow and not necessary in this case + // use Set instead + const stack = new Set(); + let indent: string; // current indentation + let gap: string; // JSON indentation string + let sReplacer: ReplacerFn | null | undefined; + const s_replacer = new Set(); + + const sStringify = | unknown[]>( + object_or_array: T, + key_or_index: T extends Record ? keyof T : number, + delim: string, + n?: boolean, + ): string | undefined => { + // Produce a string from object_or_array[key_or_index]. + + // @ts-expect-error index array with string + let value = object_or_array[key_or_index] as unknown; + + // If the value has toJSON method, call it. + if (isNonNullObjectWithToJSOnImplemented(value)) { + value = value.toJSON(); + } + + // If we were called with a replacer function, then call the replacer to + // obtain a replacement value. + if (typeof sReplacer === `function`) { + value = sReplacer.call(object_or_array, key_or_index.toString(), value); + } + + // What happens next depends on the value's type. + switch (typeof value) { + case `string`: + return quote(value); + case `number`: + // JSON numbers must be finite. Encode non-finite numbers as null. + return Number.isFinite(value) ? value.toString() : `null`; + case `boolean`: + case `bigint`: + return n ? `${value.toString()}n` : value.toString(); + case `object`: { + // If the type is 'object', we might be dealing with an object + // or an array or null. + // Due to a specification blunder in ECMAScript, typeof null is 'object', + // so watch out for that case. + + if (!value) { + return `null`; + } + + if (stack.has(value)) throw new TypeError(`cyclic object value`); + stack.add(value); + const last_gap = indent; // stepback + indent += gap; + + if (Array.isArray(value)) { + // Make an array to hold the partial results of stringifying this object value. + // The value is an array. Stringify every element. Use null as a placeholder + // for non-JSON values. + const partial = value.map( + (_v_, i) => sStringify(value as unknown[], i, delim, n) || `null`, + ); + + // Join all of the elements together, separated with commas, and wrap them in + // brackets. + const result = partial.length === 0 + ? `[]` + : indent + ? `[\n` + + indent + + partial.join(`${delim}\n` + indent) + + `\n` + + last_gap + + `]` + : `[` + partial.join(delim) + `]`; + stack.delete(value); + indent = last_gap; + return result; + } + + const partial: string[] = []; + (s_replacer.size > 0 ? s_replacer : Object.keys(value)).forEach( + (key) => { + const v = sStringify(value as Record, key, delim, n); + if (v) { + partial.push(quote(key) + (gap ? `: ` : `:`) + v); + } + }, + ); + + // Join all of the member texts together, separated with commas, + // and wrap them in braces. + const result = partial.length === 0 + ? `{}` + : indent + ? `{\n` + + indent + + partial.join(`${delim}\n` + indent) + + `\n` + + last_gap + + `}` + : `{` + partial.join(delim) + `}`; + stack.delete(value); + indent = last_gap; + return result; + } + } + }; + + // Return the stringify function. + return (value, replacer, space, n) => { + value = toPrimitive(value) as typeof value; + // Reset state. + stack.clear(); + + indent = ``; + // If the space parameter is a number, make an indent string containing that + // many spaces. + // If the space parameter is a string, it will be used as the indent string. + const primitive_space = toPrimitive(space); + gap = typeof primitive_space === `number` && primitive_space > 0 + ? Array.from({ length: primitive_space + 1 }).join(` `) + : typeof primitive_space !== `string` + ? `` + : primitive_space.length > 10 + ? primitive_space.slice(0, 10) + : primitive_space; + + s_replacer.clear(); + if (Array.isArray(replacer)) { + sReplacer = null; + if (isNonNullObject(value)) { + replacer.forEach((e) => { + const key = toPrimitive(e); + if (typeof key === `string` || typeof key === `number`) { + const key_string = key.toString(); + if (!s_replacer.has(key_string)) s_replacer.add(key_string); + } + }); + } + } else sReplacer = replacer; + + // Make a fake root object containing our value under the key of ''. + // Return the result of stringifying the value. + // Cheating here, JSON.stringify can return undefined but overloaded types + // are not seen here so we cast to string to satisfy tsc + return sStringify({ '': value }, ``, ',', n) as Stringified; + }; +})(); diff --git a/drizzle-kit/src/utils/words.ts b/drizzle-kit/src/utils/words.ts index b0c686659d..9fa828da11 100644 --- a/drizzle-kit/src/utils/words.ts +++ b/drizzle-kit/src/utils/words.ts @@ -1,23 +1,9 @@ -import type { Prefix } from '../cli/validations/common'; +import { prepareSnapshotFolderName } from 'src/cli/commands/generate-common'; export const prepareMigrationMetadata = ( - idx: number, - prefixMode: Prefix, name?: string, ) => { - const prefix = prefixMode === 'index' - ? idx.toFixed(0).padStart(4, '0') - : prefixMode === 'timestamp' || prefixMode === 'supabase' - ? new Date() - .toISOString() - .replace('T', '') - .replaceAll('-', '') - .replaceAll(':', '') - .slice(0, 14) - : prefixMode === 'unix' - ? Math.floor(Date.now() / 1000) - : ''; - + const prefix = prepareSnapshotFolderName(); const suffix = name || `${adjectives.random()}_${heroes.random()}`; const tag = `${prefix}_${suffix}`; return { prefix, suffix, tag }; diff --git a/drizzle-kit/tests/bin.test.ts b/drizzle-kit/tests/bin.test.ts deleted file mode 100644 index 8264966767..0000000000 --- a/drizzle-kit/tests/bin.test.ts +++ /dev/null @@ -1,58 +0,0 @@ -import chalk from 'chalk'; -import { assert, test } from 'vitest'; -import { analyzeImports, ChainLink } from '../imports-checker/checker'; - -test('imports-issues', () => { - const issues = analyzeImports({ - basePath: '.', - localPaths: ['src'], - whiteList: [ - '@drizzle-team/brocli', - 'json-diff', - 'path', - 'fs', - 'fs/*', - 'url', - 'zod', - 'node:*', - 'hono', - 'glob', - 'hono/*', - 'hono/**/*', - '@hono/*', - 'crypto', - 'hanji', - 'chalk', - 'dotenv/config', - 'camelcase', - 'semver', - 'env-paths', - ], - entry: 'src/cli/index.ts', - logger: true, - ignoreTypes: true, - }).issues; - - const chainToString = (chains: ChainLink[]) => { - if (chains.length === 0) throw new Error(); - - let out = chains[0]!.file + '\n'; - let indentation = 0; - for (let chain of chains) { - out += ' '.repeat(indentation) - + '└' - + chain.import - + ` ${chalk.gray(chain.file)}\n`; - indentation += 1; - } - return out; - }; - - console.log(); - for (const issue of issues) { - console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); - console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); - } - - assert.equal(issues.length, 0); -}); diff --git a/drizzle-kit/tests/cockroach/array.test.ts b/drizzle-kit/tests/cockroach/array.test.ts new file mode 100644 index 0000000000..0f75469637 --- /dev/null +++ b/drizzle-kit/tests/cockroach/array.test.ts @@ -0,0 +1,264 @@ +import { + bigint, + boolean, + cockroachEnum, + cockroachTable, + date, + int4, + text, + timestamp, + uuid, +} from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('array #1: empty array default', async ({ dbc: db }) => { + const from = { + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + test: cockroachTable('test', { + id: int4('id'), + values: int4('values').array().default([]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['public'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + + const st0 = [`ALTER TABLE "test" ADD COLUMN "values" int4[] DEFAULT '{}'::int4[];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #2: int4 array default', async ({ dbc: db }) => { + const from = { + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + test: cockroachTable('test', { + id: int4('id'), + values: int4('values').array().default([1, 2, 3]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE \"test\" ADD COLUMN \"values\" int4[] DEFAULT '{1,2,3}'::int4[];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #3: bigint array default', async ({ dbc: db }) => { + const from = { + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + test: cockroachTable('test', { + id: int4('id'), + values: bigint('values', { mode: 'bigint' }).array().default([BigInt(1), BigInt(2), BigInt(3)]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE \"test\" ADD COLUMN \"values\" int8[] DEFAULT '{1,2,3}'::int8[];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #4: boolean array default', async ({ dbc: db }) => { + const from = { + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + test: cockroachTable('test', { + id: int4('id'), + values: boolean('values').array().default([true, false, true]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE \"test\" ADD COLUMN \"values\" bool[] DEFAULT '{true,false,true}'::bool[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #6: date array default', async ({ dbc: db }) => { + const from = { + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + test: cockroachTable('test', { + id: int4('id'), + values: date('values').array().default(['2024-08-06', '2024-08-07']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "test" ADD COLUMN "values" date[] DEFAULT \'{2024-08-06,2024-08-07}\'::date[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #7: timestamp array default', async ({ dbc: db }) => { + const from = { + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + test: cockroachTable('test', { + id: int4('id'), + values: timestamp('values').array().default([new Date('2024-08-06'), new Date('2024-08-07')]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "test" ADD COLUMN "values" timestamp[] DEFAULT \'{"2024-08-06 00:00:00.000","2024-08-07 00:00:00.000"}\'::timestamp[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #9: text array default', async ({ dbc: db }) => { + const from = { + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + test: cockroachTable('test', { + id: int4('id'), + values: text('values').array().default(['abc', 'def']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = ['ALTER TABLE "test" ADD COLUMN "values" string[] DEFAULT \'{abc,def}\'::string[];']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #10: uuid array default', async ({ dbc: db }) => { + const from = { + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + test: cockroachTable('test', { + id: int4('id'), + values: uuid('values').array().default([ + 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', + 'b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11', + ]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "test" ADD COLUMN "values" uuid[] DEFAULT \'{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11,b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11}\'::uuid[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #11: enum array default', async ({ dbc: db }) => { + const testEnum = cockroachEnum('test_enum', ['a', 'b', 'c']); + + const from = { + enum: testEnum, + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + enum: testEnum, + test: cockroachTable('test', { + id: int4('id'), + values: testEnum('values').array().default(['a', 'b', 'c']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "test" ADD COLUMN "values" "test_enum"[] DEFAULT \'{a,b,c}\'::"test_enum"[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('array #12: enum empty array default', async ({ dbc: db }) => { + const testEnum = cockroachEnum('test_enum', ['a', 'b', 'c']); + + const from = { + enum: testEnum, + test: cockroachTable('test', { + id: int4('id'), + }), + }; + const to = { + enum: testEnum, + test: cockroachTable('test', { + id: int4('id'), + values: testEnum('values').array().default([]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = ['ALTER TABLE "test" ADD COLUMN "values" "test_enum"[] DEFAULT \'{}\'::"test_enum"[];']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/checks.test.ts b/drizzle-kit/tests/cockroach/checks.test.ts new file mode 100644 index 0000000000..28581bff2e --- /dev/null +++ b/drizzle-kit/tests/cockroach/checks.test.ts @@ -0,0 +1,219 @@ +import { sql } from 'drizzle-orm'; +import { check, cockroachTable, int4, varchar } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('create table with check', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { + age: int4('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"age" int4,\n\tCONSTRAINT "some_check_name" CHECK ("users"."age" > 21)\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add check contraint to existing table', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + age: int4('age'), + }), + }; + + const to = { + users: cockroachTable('users', { + age: int4('age'), + }, (table) => [ + check('some_check_name', sql`${table.age} > 21`), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE "users" ADD CONSTRAINT "some_check_name" CHECK ("users"."age" > 21);`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop check contraint in existing table', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + age: int4('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: cockroachTable('users', { + age: int4('age'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename check constraint', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + age: int4('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: cockroachTable('users', { + age: int4('age'), + }, (table) => [check('new_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, + `ALTER TABLE "users" ADD CONSTRAINT "new_check_name" CHECK ("users"."age" > 21);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter check constraint', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + age: int4('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: cockroachTable('users', { + age: int4('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 10`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "users" DROP CONSTRAINT "some_check_name";', + 'ALTER TABLE "users" ADD CONSTRAINT "some_check_name" CHECK ("users"."age" > 10);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); +}); + +test.concurrent('alter multiple check constraints', async ({ dbc: db }) => { + const from = { + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + age: int4('age'), + name: varchar('name'), + }, + ( + table, + ) => [ + check('some_check_name_1', sql`${table.age} > 21`), + check('some_check_name_2', sql`${table.name} != 'Alex'`), + ], + ), + }; + + const to = { + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + age: int4('age'), + name: varchar('name'), + }, + ( + table, + ) => [ + check('some_check_name_3', sql`${table.age} > 21`), + check('some_check_name_4', sql`${table.name} != 'Alex'`), + ], + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_1";`, + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_2";`, + `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_3" CHECK ("users"."age" > 21);`, + `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_4" CHECK ("users"."name" != \'Alex\');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create checks with same names', async ({ dbc: db }) => { + const to = { + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + age: int4('age'), + name: varchar('name'), + }, + ( + table, + ) => [check('some_check_name', sql`${table.age} > 21`), check('some_check_name', sql`${table.name} != 'Alex'`)], + ), + }; + + // 'constraint_name_duplicate' + await expect(diff({}, to, [])).rejects.toThrow(); + // adding only CONSTRAINT "some_check_name" CHECK ("users"."age" > 21), not throwing error + await expect(push({ db, to })).rejects.toThrow(); +}); + +test.concurrent('db has checks. Push with same names', async ({ dbc: db }) => { + const schema1 = { + test: cockroachTable('test', { + id: int4('id').primaryKey(), + values: int4('values').default(1), + }, (table) => [check('some_check', sql`${table.values} < 100`)]), + }; + const schema2 = { + test: cockroachTable('test', { + id: int4('id').primaryKey(), + values: int4('values').default(1), + }, (table) => [check('some_check', sql`${table.values} > 100`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "test" DROP CONSTRAINT "some_check";', + 'ALTER TABLE "test" ADD CONSTRAINT "some_check" CHECK ("test"."values" > 100);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/cockroach/columns-without-tx.test.ts b/drizzle-kit/tests/cockroach/columns-without-tx.test.ts new file mode 100644 index 0000000000..bc9853feb2 --- /dev/null +++ b/drizzle-kit/tests/cockroach/columns-without-tx.test.ts @@ -0,0 +1,35 @@ +import { cockroachTable, int4, primaryKey } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('with composite pks #2', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id1: int4('id1'), + id2: int4('id2'), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id1: int4('id1').notNull(), + id2: int4('id2').notNull(), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id1" SET NOT NULL;', + 'ALTER TABLE "users" ALTER COLUMN "id2" SET NOT NULL;', + 'ALTER TABLE "users" ADD CONSTRAINT "compositePK" PRIMARY KEY("id1","id2");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/columns.test.ts b/drizzle-kit/tests/cockroach/columns.test.ts new file mode 100644 index 0000000000..971ccb8b93 --- /dev/null +++ b/drizzle-kit/tests/cockroach/columns.test.ts @@ -0,0 +1,1006 @@ +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + bit, + bool, + boolean, + char, + cockroachEnum, + cockroachSchema, + cockroachTable, + date, + decimal, + doublePrecision, + float, + index, + int2, + int4, + int8, + interval, + jsonb, + numeric, + primaryKey, + real, + smallint, + string, + text, + time, + timestamp, + uniqueIndex, + uuid, + varbit, + varchar, +} from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('add columns #1', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = ['ALTER TABLE "users" ADD COLUMN "name" string;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add columns #2', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + email: text('email'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + 'ALTER TABLE "users" ADD COLUMN "name" string;', + 'ALTER TABLE "users" ADD COLUMN "email" string;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column conflict duplicate name #1', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id'), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id'), + name: varchar('name', { length: 100 }).primaryKey(), + email: text('name'), + }), + }; + + await push({ to: schema1, db }); + + await expect(diff(schema1, schema2, [])).rejects.toThrowError(); // duplicate names in columns + await expect(push({ to: schema2, db })).rejects.toThrowError(); // duplicate names in columns +}); + +test.concurrent('alter column change name #1', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'public.users.name->public.users.name1', + ], + }); + + const st0 = ['ALTER TABLE "users" RENAME COLUMN "name" TO "name1";']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter column change name #2', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name1'), + email: text('email'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'public.users.name->public.users.name1', + ], + }); + + const st0 = [ + 'ALTER TABLE "users" RENAME COLUMN "name" TO "name1";', + 'ALTER TABLE "users" ADD COLUMN "email" string;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter table add composite pk', async ({ dbc: db }) => { + const schema1 = { + table: cockroachTable('table', { + id1: int4('id1').notNull(), + id2: int4('id2').notNull(), + }), + }; + + const schema2 = { + table: cockroachTable('table', { + id1: int4('id1').notNull(), + id2: int4('id2').notNull(), + }, (t) => [primaryKey({ columns: [t.id1, t.id2] })]), + }; + + const { sqlStatements: st } = await diff( + schema1, + schema2, + [], + ); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = ['ALTER TABLE "table" ADD PRIMARY KEY ("id1","id2");']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename table rename column #1', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id'), + }), + }; + + const schema2 = { + users: cockroachTable('users1', { + id: int4('id1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'public.users->public.users1', + 'public.users1.id->public.users1.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'public.users->public.users1', + 'public.users1.id->public.users1.id1', + ], + }); + + const st0 = [ + 'ALTER TABLE "users" RENAME TO "users1";', + 'ALTER TABLE "users1" RENAME COLUMN "id" TO "id1";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('with composite pks #1', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id1: int4('id1'), + id2: int4('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id1: int4('id1'), + id2: int4('id2'), + text: text('text'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = ['ALTER TABLE "users" ADD COLUMN "text" string;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('with composite pks #3', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable( + 'users', + { + id1: int4('id1'), + id2: int4('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })], + ), + }; + + const schema2 = { + users: cockroachTable('users', { + id1: int4('id1'), + id3: int4('id3'), + }, (t) => [primaryKey({ columns: [t.id1, t.id3], name: 'compositePK' })]), + }; + + const renames = ['public.users.id2->public.users.id3']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0 = ['ALTER TABLE "users" RENAME COLUMN "id2" TO "id3";']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create composite primary key', async ({ dbc: db }) => { + const schema1 = {}; + + const schema2 = { + table: cockroachTable('table', { + col1: int4('col1').notNull(), + col2: int4('col2').notNull(), + }, (t) => [primaryKey({ + columns: [t.col1, t.col2], + })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst, losses } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'CREATE TABLE "table" (\n\t"col1" int4,\n\t"col2" int4,\n\tCONSTRAINT "table_pkey" PRIMARY KEY("col1","col2")\n);\n', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add multiple constraints #1', async ({ dbc: db }) => { + const t1 = cockroachTable('t1', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const t2 = cockroachTable('t2', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const t3 = cockroachTable('t3', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const schema1 = { + t1, + t2, + t3, + ref1: cockroachTable('ref1', { + id1: uuid('id1').references(() => t1.id), + id2: uuid('id2').references(() => t2.id), + id3: uuid('id3').references(() => t3.id), + }), + }; + + const schema2 = { + t1, + t2, + t3, + ref1: cockroachTable('ref1', { + id1: uuid('id1').references(() => t1.id, { onDelete: 'cascade' }), + id2: uuid('id2').references(() => t2.id, { onDelete: 'set null' }), + id3: uuid('id3').references(() => t3.id, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundand drop/create create constraint + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id1_t1_id_fkey", ADD CONSTRAINT "ref1_id1_t1_id_fkey" FOREIGN KEY ("id1") REFERENCES "t1"("id") ON DELETE CASCADE;', + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id2_t2_id_fkey", ADD CONSTRAINT "ref1_id2_t2_id_fkey" FOREIGN KEY ("id2") REFERENCES "t2"("id") ON DELETE SET NULL;', + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id3_t3_id_fkey", ADD CONSTRAINT "ref1_id3_t3_id_fkey" FOREIGN KEY ("id3") REFERENCES "t3"("id") ON DELETE CASCADE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add multiple constraints #2', async ({ dbc: db }) => { + const t1 = cockroachTable('t1', { + id1: uuid('id1').unique(), + id2: uuid('id2').unique(), + id3: uuid('id3').unique(), + }); + + const schema1 = { + t1, + ref1: cockroachTable('ref1', { + id1: uuid('id1').references(() => t1.id1), + id2: uuid('id2').references(() => t1.id2), + id3: uuid('id3').references(() => t1.id3), + }), + }; + + const schema2 = { + t1, + ref1: cockroachTable('ref1', { + id1: uuid('id1').references(() => t1.id1, { onDelete: 'cascade' }), + id2: uuid('id2').references(() => t1.id2, { onDelete: 'set null' }), + id3: uuid('id3').references(() => t1.id3, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundand drop/create create constraint + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id1_t1_id1_fkey", ADD CONSTRAINT "ref1_id1_t1_id1_fkey" FOREIGN KEY ("id1") REFERENCES "t1"("id1") ON DELETE CASCADE;', + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id2_t1_id2_fkey", ADD CONSTRAINT "ref1_id2_t1_id2_fkey" FOREIGN KEY ("id2") REFERENCES "t1"("id2") ON DELETE SET NULL;', + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id3_t1_id3_fkey", ADD CONSTRAINT "ref1_id3_t1_id3_fkey" FOREIGN KEY ("id3") REFERENCES "t1"("id3") ON DELETE CASCADE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add multiple constraints #3', async ({ dbc: db }) => { + const t1 = cockroachTable('t1', { + id1: uuid('id1').unique(), + id2: uuid('id2').unique(), + id3: uuid('id3').unique(), + }); + + const schema1 = { + t1, + ref1: cockroachTable('ref1', { + id: uuid('id').references(() => t1.id1), + }), + ref2: cockroachTable('ref2', { + id: uuid('id').references(() => t1.id2), + }), + ref3: cockroachTable('ref3', { + id: uuid('id').references(() => t1.id3), + }), + }; + + const schema2 = { + t1, + ref1: cockroachTable('ref1', { + id: uuid('id').references(() => t1.id1, { onDelete: 'cascade' }), + }), + ref2: cockroachTable('ref2', { + id: uuid('id').references(() => t1.id2, { onDelete: 'set null' }), + }), + ref3: cockroachTable('ref3', { + id: uuid('id').references(() => t1.id3, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundand drop/create create constraint + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id_t1_id1_fkey", ADD CONSTRAINT "ref1_id_t1_id1_fkey" FOREIGN KEY ("id") REFERENCES "t1"("id1") ON DELETE CASCADE;', + 'ALTER TABLE "ref2" DROP CONSTRAINT "ref2_id_t1_id2_fkey", ADD CONSTRAINT "ref2_id_t1_id2_fkey" FOREIGN KEY ("id") REFERENCES "t1"("id2") ON DELETE SET NULL;', + 'ALTER TABLE "ref3" DROP CONSTRAINT "ref3_id_t1_id3_fkey", ADD CONSTRAINT "ref3_id_t1_id3_fkey" FOREIGN KEY ("id") REFERENCES "t1"("id3") ON DELETE CASCADE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('varchar and text default values escape single quotes', async ({ dbc: db }) => { + const schema1 = { + table: cockroachTable('table', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + table: cockroachTable('table', { + id: int4('id').primaryKey(), + text: text('text').default("escape's quotes"), + varchar: varchar('varchar').default("escape's quotes"), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `ALTER TABLE "table" ADD COLUMN "text" string DEFAULT e'escape\\'s quotes';`, + `ALTER TABLE "table" ADD COLUMN "varchar" varchar DEFAULT e'escape\\'s quotes';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add columns with defaults', async ({ dbc: db }) => { + const schema1 = { + table: cockroachTable('table', { + id: int4().primaryKey(), + }), + }; + + const schema2 = { + table: cockroachTable('table', { + id: int4().primaryKey(), + text1: text().default(''), + text2: string({ length: 100 }).default('text'), + int1: int4().default(10), + int2: int4().default(0), + int3: int4().default(-10), + bool1: bool().default(true), + bool2: bool().default(false), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "table" ADD COLUMN "text1" string DEFAULT \'\';', + 'ALTER TABLE "table" ADD COLUMN "text2" string(100) DEFAULT \'text\';', + 'ALTER TABLE "table" ADD COLUMN "int1" int4 DEFAULT 10;', + 'ALTER TABLE "table" ADD COLUMN "int2" int4 DEFAULT 0;', + 'ALTER TABLE "table" ADD COLUMN "int3" int4 DEFAULT -10;', + 'ALTER TABLE "table" ADD COLUMN "bool1" bool DEFAULT true;', + 'ALTER TABLE "table" ADD COLUMN "bool2" bool DEFAULT false;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + // TODO: check for created tables, etc +}); + +test.concurrent('add array column - empty array default', async ({ dbc: db }) => { + const schema1 = { + test: cockroachTable('test', { + id: int4('id').primaryKey(), + }), + }; + const schema2 = { + test: cockroachTable('test', { + id: int4('id').primaryKey(), + values: int4('values').array().default([]), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'ALTER TABLE "test" ADD COLUMN "values" int4[] DEFAULT \'{}\'::int4[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add array column - default', async ({ dbc: db }) => { + const schema1 = { + test: cockroachTable('test', { + id: int4('id').primaryKey(), + }), + }; + const schema2 = { + test: cockroachTable('test', { + id: int4('id').primaryKey(), + values: int4('values').array().default([1, 2, 3]), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'ALTER TABLE "test" ADD COLUMN "values" int4[] DEFAULT \'{1,2,3}\'::int4[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add not null to a column', async ({ db }) => { + const schema1 = { + users: cockroachTable( + 'User', + { + id: text('id').primaryKey().notNull(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email'), + emailVerified: timestamp('emailVerified', { + precision: 3, + mode: 'date', + }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) + .defaultNow() + .notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) + .notNull() + .$onUpdate(() => new Date()), + }, + (table) => [uniqueIndex('User_email_key').on(table.email)], + ), + }; + + const schema2 = { + users: cockroachTable( + 'User', + { + id: text('id').primaryKey().notNull(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email').notNull(), + emailVerified: timestamp('emailVerified', { + precision: 3, + mode: 'date', + }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) + .defaultNow() + .notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) + .notNull() + .$onUpdate(() => new Date()), + }, + (table) => [uniqueIndex('User_email_key').on(table.email)], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, losses } = await push({ db, to: schema2 }); + + const st0: string[] = ['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + // TODO: revise should I use suggestion func? + // const { losses, hints } = await suggestions(db, statements); + + expect(losses).toStrictEqual([]); +}); + +test.concurrent('add not null to a column with null data. Should rollback', async ({ db }) => { + const schema1 = { + users: cockroachTable('User', { + id: text('id').primaryKey(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email'), + emailVerified: timestamp('emailVerified', { precision: 3, mode: 'date' }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }).defaultNow().notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }).notNull().$onUpdate(() => new Date()), + }, (table) => [uniqueIndex('User_email_key').on(table.email)]), + }; + + const schema2 = { + users: cockroachTable('User', { + id: text('id').primaryKey(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email').notNull(), + emailVerified: timestamp('emailVerified', { precision: 3, mode: 'date' }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }).defaultNow().notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }).notNull().$onUpdate(() => new Date()), + }, (table) => [uniqueIndex('User_email_key').on(table.email)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + await db.query(`INSERT INTO "User" (id, email, "updatedAt") values ('str', 'email@gmail', '2025-04-29 09:20:39');`); + const { sqlStatements: pst, hints } = await push({ db, to: schema2 }); + + const st0: string[] = ['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(hints).toStrictEqual([]); +}); + +test.concurrent('add generated column', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + }), + }; + const schema2 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name") STORED;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add generated constraint to an existing column', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name'), + }), + }; + const schema2 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name") STORED;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop generated constraint from a column', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), + }), + }; + const schema2 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('no diffs for all database types', async ({ dbc: db }) => { + const customSchema = cockroachSchema('schemass'); + + const transactionStatusEnum = customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']); + + const enumname = cockroachEnum('enumname', ['three', 'two', 'one']); + + const schema1 = { + test: cockroachEnum('test', ['ds']), + testHello: cockroachEnum('test_hello', ['ds']), + enumname: cockroachEnum('enumname', ['three', 'two', 'one']), + + customSchema: customSchema, + transactionStatusEnum: customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']), + + allSmallSerials: cockroachTable('schema_test', { + columnAll: uuid('column_all').defaultRandom(), + column: transactionStatusEnum('column').notNull(), + }), + + allSmallInts: customSchema.table( + 'schema_test2', + { + columnAll: smallint('column_all').default(124).notNull(), + column: smallint('columns').array(), + column2: smallint('column2').array(), + }, + (t: any) => [uniqueIndex('testdfds').on(t.column)], + ), + + allInt2: customSchema.table( + 'all_int2', + { + columnAll: int2('column_all').default(124).notNull(), + column: int2('columns').array(), + column2: int2('column2').array(), + }, + ), + + allEnums: customSchema.table( + 'all_enums', + { + columnAll: enumname('column_all').default('three').notNull(), + column: enumname('columns'), + }, + (t: any) => [index('ds').on(t.column)], + ), + + allTimestamps: customSchema.table('all_timestamps', { + columnDateNow: timestamp('column_date_now', { + precision: 1, + withTimezone: true, + mode: 'string', + }).defaultNow(), + columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), + column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), + column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), + }), + + allUuids: customSchema.table('all_uuids', { + columnAll: uuid('column_all').defaultRandom().notNull(), + column: uuid('column'), + }), + + allDates: customSchema.table('all_dates', { + column_date_now: date('column_date_now').defaultNow(), + column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), + column: date('column'), + }), + + allReals: customSchema.table('all_reals', { + columnAll: real('column_all').default(32).notNull(), + column: real('column'), + columnPrimary: real('column_primary').primaryKey().notNull(), + }), + allBigints: cockroachTable('all_bigints', { + columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), + column: bigint('column', { mode: 'number' }), + column1: int8('column1', { mode: 'number' }), + column2: int8('column2', { mode: 'bigint' }), + }), + + allIntervals: customSchema.table('all_intervals', { + columnAllConstrains: interval('column_all_constrains', { + fields: 'month', + }) + .default('1 mon') + .notNull(), + columnMinToSec: interval('column_min_to_sec', { + fields: 'minute to second', + }), + columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), + column: interval('column'), + column5: interval('column5', { + fields: 'minute to second', + precision: 3, + }), + column6: interval('column6'), + }), + + allTexts: customSchema.table( + 'all_texts', + { + columnAll: text('column_all').default('text').notNull(), + column: text('columns').primaryKey(), + }, + (t: any) => [index('test').on(t.column)], + ), + + allStrings: customSchema.table( + 'all_strings', + { + columnAll: string('column_all').default('text').notNull(), + column: string('columns').primaryKey(), + column2: string('column2', { length: 200 }), + }, + ), + allBools: customSchema.table('all_bools', { + column1: bool('column1').default(true).notNull(), + column2: bool('column2'), + column3: boolean('column3').default(true).notNull(), + column4: boolean('column4'), + column5: bool('column5').default(true).notNull().array(), + column6: bool('column6').array(), + column7: boolean('column7').default(true).notNull().array(), + column8: boolean('column8').array(), + }), + + allVarchars: customSchema.table('all_varchars', { + columnAll: varchar('column_all').default('text').notNull(), + column: varchar('column', { length: 200 }), + }), + + allTimes: customSchema.table('all_times', { + columnAll: time('column_all').default('22:12:12').notNull(), + column: time('column'), + }), + + allChars: customSchema.table('all_chars', { + columnAll: char('column_all', { length: 1 }).default('text').notNull(), + column: char('column', { length: 1 }), + columnArr: char('column_arr', { length: 1 }).array(), + }), + allDoublePrecision: customSchema.table('all_double_precision', { + columnAll: doublePrecision('column_all').default(33.2).notNull(), + column: doublePrecision('column'), + }), + + allFloat: customSchema.table('all_float', { + columnAll: float('column_all').default(33).notNull(), + column: float('column'), + }), + allJsonb: customSchema.table('all_jsonb', { + columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: jsonb('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + }), + column: jsonb('column'), + }), + + allIntegers: customSchema.table('all_integers', { + columnAll: int4('column_all').primaryKey(), + column: int4('column'), + columnPrimary: int4('column_primary'), + }), + + allNumerics: customSchema.table('all_numerics', { + columnAll: numeric('column_all').default('32').notNull(), + column: numeric('column', { precision: 1, scale: 1 }), + columnPrimary: numeric('column_primary').primaryKey().notNull(), + }), + + allDecimals: customSchema.table('all_decimals', { + columnAll: decimal('column_all').default('32').notNull(), + column: decimal('column', { precision: 1, scale: 1 }), + columnPrimary: decimal('column_primary').primaryKey().notNull(), + }), + + allBits: customSchema.table('all_bits', { + column1: bit('column1').default('1').notNull(), + column2: bit('column2', { length: 10 }), + column3: bit('column3').default('1').notNull().array(), + column4: bit('column4', { length: 10 }).array(), + column5: varbit('column5').notNull(), + column6: varbit('column6', { length: 10 }), + column7: varbit('column7').notNull().array(), + column8: varbit('column8', { length: 10 }).array(), + }), + }; + + const schemas = ['public', 'schemass']; + const { sqlStatements: st } = await diff(schema1, schema1, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema1, schemas }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/constraints-without-tx.test.ts b/drizzle-kit/tests/cockroach/constraints-without-tx.test.ts new file mode 100644 index 0000000000..35d7c3b2d1 --- /dev/null +++ b/drizzle-kit/tests/cockroach/constraints-without-tx.test.ts @@ -0,0 +1,55 @@ +import { cockroachTable, int4, primaryKey, text } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('alter table add composite pk', async ({ dbc: db }) => { + const schema1 = { + table: cockroachTable('table', { + id1: int4('id1').notNull(), + id2: int4('id2').notNull(), + }), + }; + + const schema2 = { + table: cockroachTable('table', { + id1: int4('id1').notNull(), + id2: int4('id2').notNull(), + }, (t) => [primaryKey({ columns: [t.id1, t.id2] })]), + }; + + const { sqlStatements: st } = await diff( + schema1, + schema2, + [], + ); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = ['ALTER TABLE "table" ADD PRIMARY KEY ("id1","id2");']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('pk #5', async ({ db }) => { + const from = { + users: cockroachTable('users', { + name: text().notNull(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: cockroachTable('users', { + name: text().notNull(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + + expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" DROP CONSTRAINT "users_pkey";']); + await expect(push({ db, to })).rejects.toThrow(); // can not drop pk without adding new one +}); diff --git a/drizzle-kit/tests/cockroach/constraints.test.ts b/drizzle-kit/tests/cockroach/constraints.test.ts new file mode 100644 index 0000000000..2757a28999 --- /dev/null +++ b/drizzle-kit/tests/cockroach/constraints.test.ts @@ -0,0 +1,1904 @@ +import { sql } from 'drizzle-orm'; +import { + AnyCockroachColumn, + bigint, + cockroachTable, + foreignKey, + index, + int4, + primaryKey, + text, + unique, + varchar, +} from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('unique #1', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + }), + }; + const to = { + users: cockroachTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE UNIQUE INDEX "users_name_key" ON "users" ("name");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #2', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + }), + }; + const to = { + users: cockroachTable('users', { + name: text().unique('unique_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE UNIQUE INDEX "unique_name" ON "users" ("name");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #3', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + }), + }; + const to = { + users: cockroachTable('users', { + name: text().unique('unique_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE UNIQUE INDEX "unique_name" ON "users" ("name");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #6', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + }), + }; + const to = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE UNIQUE INDEX "unique_name" ON "users" ("name");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #7', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + }), + }; + const to = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE UNIQUE INDEX "unique_name" ON "users" ("name");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #8', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `DROP INDEX "unique_name" CASCADE;`, + 'CREATE UNIQUE INDEX "unique_name2" ON "users" ("name");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #9', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'public.users.unique_name->public.users.unique_name2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'public.users.unique_name->public.users.unique_name2', + ], + }); + + const st0 = [ + `ALTER INDEX "unique_name" RENAME TO "unique_name2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #10', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + email: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: cockroachTable('users', { + name: text(), + email2: text(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'public.users.email->public.users.email2', + 'public.users.unique_name->public.users.unique_name2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'public.users.email->public.users.email2', + 'public.users.unique_name->public.users.unique_name2', + ], + }); + + const st0 = [ + `ALTER TABLE "users" RENAME COLUMN "email" TO "email2";`, + `ALTER INDEX "unique_name" RENAME TO "unique_name2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #11', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + email: text(), + }, (t) => [ + unique('unique_name').on(t.name), + unique('unique_email').on(t.email), + ]), + }; + const to = { + users: cockroachTable('users', { + name: text(), + email: text(), + }, (t) => [ + unique('unique_name2').on(t.name), + unique('unique_email2').on(t.email), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'public.users.unique_name->public.users.unique_name2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'public.users.unique_name->public.users.unique_name2', + ], + }); + + const st0 = [ + 'ALTER INDEX "unique_name" RENAME TO "unique_name2";', + `DROP INDEX "unique_email" CASCADE;`, + `CREATE UNIQUE INDEX "unique_email2" ON "users" ("email");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #12', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + email: text().unique(), + }), + }; + const to = { + users: cockroachTable('users2', { + name: text(), + email: text().unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'public.users->public.users2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'public.users->public.users2', + ], + }); + + const st0 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('unique #13', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text(), + email: text().unique(), + }), + }; + const sch2 = { + users: cockroachTable('users2', { + name: text(), + email2: text().unique('users_email_key'), + }), + }; + + const sch3 = { + users: cockroachTable('users2', { + name: text(), + email2: text(), + }), + }; + + // sch1 -> sch2 + const { sqlStatements: st1, next: n1 } = await diff(sch1, sch2, [ + 'public.users->public.users2', + 'public.users2.email->public.users2.email2', + ]); + + await push({ db, to: sch1 }); + const { sqlStatements: pst1 } = await push({ + db, + to: sch2, + renames: [ + 'public.users->public.users2', + 'public.users2.email->public.users2.email2', + ], + }); + + const st10 = [ + `ALTER TABLE "users" RENAME TO "users2";`, + `ALTER TABLE "users2" RENAME COLUMN "email" TO "email2";`, + ]; + expect(st1).toStrictEqual(st10); + expect(pst1).toStrictEqual(st10); + + // sch2 -> sch3 + const { sqlStatements: st2 } = await diff(n1, sch3, []); + + const { sqlStatements: pst2 } = await push({ + db, + to: sch3, + }); + + const st20 = [ + 'DROP INDEX "users_email_key" CASCADE;', + ]; + expect(st2).toStrictEqual(st20); + expect(pst2).toStrictEqual(st20); +}); + +test.concurrent('unique multistep #1', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = ['CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n']; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2').unique(), + }), + }; + + const renames = ['public.users->public.users2', 'public.users2.name->public.users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e3 = ['DROP INDEX "users_name_key" CASCADE;']; + + expect(pst4).toStrictEqual(e3); + expect(st4).toStrictEqual(e3); +}); + +test.concurrent('unique multistep #2', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + expect(st1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n', + ]); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2').unique(), + }), + }; + + const r1 = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, r1); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames: r1 }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(pst2).toStrictEqual(e2); + expect(st2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [unique().on(t.name)]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + expect(st4).toStrictEqual([]); + expect(pst4).toStrictEqual([]); + + const sch4 = { + users: cockroachTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + expect(st5).toStrictEqual(['DROP INDEX "users_name_key" CASCADE;']); + expect(pst5).toStrictEqual(['DROP INDEX "users_name_key" CASCADE;']); +}); + +test.concurrent('unique multistep #3', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n', + ]); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2').unique(), + }), + }; + + const renames = ['public.users->public.users2', 'public.users2.name->public.users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [unique('name_unique').on(t.name)]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e4 = [ + 'DROP INDEX "users_name_key" CASCADE;', + 'CREATE UNIQUE INDEX "name_unique" ON "users2" ("name2");', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: cockroachTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + expect(st5).toStrictEqual(['DROP INDEX "name_unique" CASCADE;']); + expect(pst5).toStrictEqual(['DROP INDEX "name_unique" CASCADE;']); +}); + +test.concurrent('unique multistep #4', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + expect(st1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n', + ]); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2').unique(), + }), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, renames }); + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [unique('name_unique').on(t.name)]), + }; + + const renames2 = ['public.users2.users_name_key->public.users2.name_unique']; + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, renames2); + const { sqlStatements: pst4 } = await push({ db, to: sch3, renames: renames2 }); + + expect(st4).toStrictEqual(['ALTER INDEX "users_name_key" RENAME TO "name_unique";']); + expect(pst4).toStrictEqual(['ALTER INDEX "users_name_key" RENAME TO "name_unique";']); + + const sch4 = { + users: cockroachTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + expect(st5).toStrictEqual(['DROP INDEX "name_unique" CASCADE;']); + expect(pst5).toStrictEqual(['DROP INDEX "name_unique" CASCADE;']); +}); + +test.concurrent('index multistep #1', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text(), + }, (t) => [index().on(t.name)]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"name" string\n);\n', + 'CREATE INDEX "users_name_index" ON "users" ("name");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [index().on(t.name)]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + expect(st4).toStrictEqual(['DROP INDEX "users_name_index";']); + expect(pst4).toStrictEqual(['DROP INDEX "users_name_index";']); +}); + +test.concurrent('index multistep #2', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text(), + }, (t) => [index().on(t.name)]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"name" string\n);\n', + 'CREATE INDEX "users_name_index" ON "users" ("name");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [index().on(t.name)]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [index('name2_idx').on(t.name)]), + }; + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch3, []); + const { sqlStatements: pst3 } = await push({ db, to: sch3 }); + + const e3 = [ + 'DROP INDEX "users_name_index";', + 'CREATE INDEX "name2_idx" ON "users2" ("name2");', + ]; + expect(st3).toStrictEqual(e3); + expect(pst3).toStrictEqual(e3); + + const sch4 = { + users: cockroachTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch4, []); + const { sqlStatements: pst4 } = await push({ db, to: sch4 }); + expect(st4).toStrictEqual(['DROP INDEX "name2_idx";']); + expect(pst4).toStrictEqual(['DROP INDEX "name2_idx";']); +}); + +test.concurrent('index multistep #3', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text(), + }, (t) => [index().on(t.name)]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"name" string\n);\n', + 'CREATE INDEX "users_name_index" ON "users" ("name");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [index().on(t.name)]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [index('name2_idx').on(t.name)]), + }; + + const renames2 = [ + 'public.users2.users_name_index->public.users2.name2_idx', + ]; + const { sqlStatements: st3, next: n3 } = await diff(n2, sch3, renames2); + const { sqlStatements: pst3 } = await push({ db, to: sch3, renames: renames2 }); + + expect(st3).toStrictEqual(['ALTER INDEX "users_name_index" RENAME TO "name2_idx";']); + expect(pst3).toStrictEqual(['ALTER INDEX "users_name_index" RENAME TO "name2_idx";']); + + const sch4 = { + users: cockroachTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch4, []); + const { sqlStatements: pst4 } = await push({ db, to: sch4 }); + expect(st4).toStrictEqual(['DROP INDEX "name2_idx";']); + expect(pst4).toStrictEqual(['DROP INDEX "name2_idx";']); +}); + +test.concurrent('index multistep #3', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text(), + }, (t) => [index().on(t.name)]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"name" string\n);\n', + 'CREATE INDEX "users_name_index" ON "users" ("name");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [index().on(t.name)]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + }, (t) => [index('name2_idx').on(t.name)]), + }; + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch3, []); + const { sqlStatements: pst3 } = await push({ db, to: sch3 }); + + const e3 = [ + 'DROP INDEX "users_name_index";', + 'CREATE INDEX "name2_idx" ON "users2" ("name2");', + ]; + expect(st3).toStrictEqual(e3); + expect(pst3).toStrictEqual(e3); + + const sch4 = { + users: cockroachTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch4, []); + const { sqlStatements: pst4 } = await push({ db, to: sch4 }); + + expect(st4).toStrictEqual(['DROP INDEX "name2_idx";']); + expect(pst4).toStrictEqual(['DROP INDEX "name2_idx";']); +}); + +test.concurrent('pk #1', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text().notNull(), + }), + }; + + const to = { + users: cockroachTable('users', { + name: text().notNull().primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(st).toStrictEqual(['ALTER TABLE "users" ADD PRIMARY KEY ("name");']); + expect(pst).toStrictEqual(['ALTER TABLE "users" ADD PRIMARY KEY ("name");']); +}); + +test.concurrent('pk #2', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text().notNull().primaryKey(), + }), + }; + const to = { + users: cockroachTable('users', { + name: text().notNull().primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test.concurrent('pk #3', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text().notNull().primaryKey(), + }), + }; + const to = { + users: cockroachTable('users', { + name: text().notNull(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test.concurrent('pk #4', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text().notNull(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: cockroachTable('users', { + name: text().notNull().primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test.concurrent('pk multistep #1', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text().primaryKey(), + id: int4().notNull(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" string PRIMARY KEY,\n\t"id" int4 NOT NULL\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" string PRIMARY KEY,\n\t"id" int4 NOT NULL\n);\n']); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2').primaryKey(), + id: int4().notNull(), + }), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + id: int4().notNull().primaryKey(), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const st04 = [ + 'ALTER TABLE "users2" DROP CONSTRAINT "users_pkey", ADD CONSTRAINT "users2_pkey" PRIMARY KEY("id");', + 'ALTER TABLE "users2" ALTER COLUMN "name2" DROP NOT NULL;', + ]; + expect(st4).toStrictEqual(st04); + expect(pst4).toStrictEqual(st04); +}); + +test.concurrent('pk multistep #2', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text().primaryKey().notNull(), + id: int4().notNull(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" string PRIMARY KEY,\n\t"id" int4 NOT NULL\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" string PRIMARY KEY,\n\t"id" int4 NOT NULL\n);\n']); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2').notNull(), + id: int4().notNull(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2').notNull(), + id: int4().notNull(), + }, (t) => [primaryKey({ name: 'users2_pk', columns: [t.name] })]), + }; + + const renames2 = ['public.users2.users_pkey->public.users2.users2_pk']; + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, renames2); + const { sqlStatements: pst4 } = await push({ db, to: sch3, renames: renames2 }); + + expect(st4).toStrictEqual(['ALTER TABLE "users2" RENAME CONSTRAINT "users_pkey" TO "users2_pk";']); + expect(pst4).toStrictEqual(['ALTER TABLE "users2" RENAME CONSTRAINT "users_pkey" TO "users2_pk";']); + + const sch4 = { + users: cockroachTable('users2', { + name: text('name2').notNull(), + id: int4().notNull().primaryKey(), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + + expect(st5).toStrictEqual([ + 'ALTER TABLE "users2" DROP CONSTRAINT "users2_pk", ADD CONSTRAINT "users2_pkey" PRIMARY KEY("id");', + ]); + expect(pst5).toStrictEqual([ + 'ALTER TABLE "users2" DROP CONSTRAINT "users2_pk", ADD CONSTRAINT "users2_pkey" PRIMARY KEY("id");', + ]); +}); + +test.concurrent('pk multistep #3', async ({ db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text().primaryKey(), + id: int4(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" string PRIMARY KEY,\n\t"id" int4\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" string PRIMARY KEY,\n\t"id" int4\n);\n']); + + const sch2 = { + users: cockroachTable('users2', { + name: text('name2'), + id: int4(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: cockroachTable('users2', { + name: text('name2'), + id: int4(), + }, (t) => [primaryKey({ name: 'users2_pk', columns: [t.name] })]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e4 = [ + 'ALTER TABLE "users2" DROP CONSTRAINT "users_pkey", ADD CONSTRAINT "users2_pk" PRIMARY KEY("name2");', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: cockroachTable('users2', { + name: text('name2'), + id: int4().notNull().primaryKey(), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + + const st05 = [ + 'ALTER TABLE "users2" ALTER COLUMN "id" SET NOT NULL;', + 'ALTER TABLE "users2" DROP CONSTRAINT "users2_pk", ADD CONSTRAINT "users2_pkey" PRIMARY KEY("id");', + 'ALTER TABLE "users2" ALTER COLUMN "name2" DROP NOT NULL;', + ]; + expect(st5).toStrictEqual(st05); + expect(pst5).toStrictEqual(st05); +}); + +test.concurrent('pk multistep #4', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text(), + }, (t) => [ + primaryKey({ name: 'users_pk', columns: [t.name] }), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_pk" PRIMARY KEY("name")\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_pk" PRIMARY KEY("name")\n);\n', + ]); + + const sch2 = { + users: cockroachTable('users2', { + name: text(), + }, (t) => [ + primaryKey({ name: 'users_pk', columns: [t.name] }), + ]), + }; + + const renames = ['public.users->public.users2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); +}); + +test.concurrent('pk multistep #5', async ({ dbc: db }) => { + const sch1 = { + users: cockroachTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string PRIMARY KEY\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" string PRIMARY KEY\n);\n', + ]); + + const sch2 = { + users: cockroachTable('users2', { + name: text().primaryKey(), + }), + }; + + const renames = ['public.users->public.users2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); +}); + +test.concurrent('fk #1', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id: int4().primaryKey(), + }); + const posts = cockroachTable('posts', { + id: int4().primaryKey(), + authorId: int4().references(() => users.id), + }); + + const to = { + posts, + users, + }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE \"posts\" (\n\t"id" int4 PRIMARY KEY,\n\t"authorId" int4\n);\n`, + `CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY\n);\n`, + `ALTER TABLE "posts" ADD CONSTRAINT "posts_authorId_users_id_fkey" FOREIGN KEY ("authorId") REFERENCES "users"("id");`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// exactly 63 symbols fkey, fkey name explicit +test.concurrent('fk #2', async ({ dbc: db }) => { + const users = cockroachTable('123456789_123456789_users', { + id: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE "123456789_123456789_users" (\n\t"id" int4 PRIMARY KEY,\n\t"id2" int4\n);\n`, + 'ALTER TABLE "123456789_123456789_users" ADD CONSTRAINT "123456789_123456789_users_id2_123456789_123456789_users_id_fkey" FOREIGN KEY ("id2") REFERENCES "123456789_123456789_users"("id");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// 65 symbols fkey, fkey = table_hash_fkey +test.concurrent('fk #3', async ({ dbc: db }) => { + const users = cockroachTable('1234567890_1234567890_users', { + id: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE "1234567890_1234567890_users" (\n\t"id" int4 PRIMARY KEY,\n\t"id2" int4\n);\n`, + 'ALTER TABLE "1234567890_1234567890_users" ADD CONSTRAINT "1234567890_1234567890_users_2Ge3281eRCJ5_fkey" FOREIGN KEY ("id2") REFERENCES "1234567890_1234567890_users"("id");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// >=45 length table name, fkey = hash_fkey +test.concurrent('fk #4', async ({ dbc: db }) => { + const users = cockroachTable('1234567890_1234567890_1234567890_123456_users', { + id: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE "1234567890_1234567890_1234567890_123456_users" (\n\t"id" int4 PRIMARY KEY,\n\t"id2" int4\n);\n`, + 'ALTER TABLE "1234567890_1234567890_1234567890_123456_users" ADD CONSTRAINT "ydU6odH887YL_fkey" FOREIGN KEY ("id2") REFERENCES "1234567890_1234567890_1234567890_123456_users"("id");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test.concurrent('fk #5', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY,\n\t"id2" int4\n);\n`, + 'ALTER TABLE "users" ADD CONSTRAINT "users_id2_users_id_fkey" FOREIGN KEY ("id2") REFERENCES "users"("id");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test.concurrent('fk #6', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id), + }); + + const users2 = cockroachTable('users2', { + id: int4('id3').primaryKey(), + id2: int4().references((): AnyCockroachColumn => users2.id), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['public.users->public.users2', 'public.users2.id->public.users2.id3']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "id" TO "id3";', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test.concurrent('fk #7', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id1: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id1), + }); + + const users2 = cockroachTable('users', { + id1: int4().primaryKey(), + id2: int4(), + }, (t) => [foreignKey({ name: 'id2_id1_fk', columns: [t.id2], foreignColumns: [t.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['public.users.users_id2_users_id1_fkey->public.users.id2_id1_fk']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE "users" RENAME CONSTRAINT "users_id2_users_id1_fkey" TO "id2_id1_fk";', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test.concurrent('fk #8', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id1: int4().primaryKey(), + id2: int4().unique(), + id3: int4().references((): AnyCockroachColumn => users.id1), + }); + + const users2 = cockroachTable('users', { + id1: int4().primaryKey(), + id2: int4().unique(), + id3: int4().references((): AnyCockroachColumn => users.id2), + }); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + 'ALTER TABLE "users" DROP CONSTRAINT "users_id3_users_id1_fkey";', + 'ALTER TABLE "users" ADD CONSTRAINT "users_id3_users_id2_fkey" FOREIGN KEY ("id3") REFERENCES "users"("id2");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test.concurrent('fk #9', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id1: int4().primaryKey(), + id2: int4().unique(), + id3: int4(), + }, (t) => [foreignKey({ name: 'fk1', columns: [t.id3], foreignColumns: [t.id1] })]); + + const users2 = cockroachTable('users', { + id1: int4().primaryKey(), + id2: int4().unique(), + id3: int4(), + }, (t) => [foreignKey({ name: 'fk1', columns: [t.id3], foreignColumns: [t.id2] })]); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + 'ALTER TABLE "users" DROP CONSTRAINT "fk1", ADD CONSTRAINT "fk1" FOREIGN KEY ("id3") REFERENCES "users"("id2");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test.concurrent('fk #10', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id1: int4().primaryKey(), + }); + + const users2 = cockroachTable('users2', { + id1: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users2.id1), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['public.users->public.users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" ADD COLUMN "id2" int4;', + 'ALTER TABLE "users2" ADD CONSTRAINT "users2_id2_users2_id1_fkey" FOREIGN KEY ("id2") REFERENCES "users2"("id1");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test.concurrent('fk #11', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id1: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id1), + }); + + const users2 = cockroachTable('users2', { + id1: int4().primaryKey(), + id2: int4(), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['public.users->public.users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" DROP CONSTRAINT "users_id2_users_id1_fkey";', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test.concurrent('fk multistep #1', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id), + }); + + const users2 = cockroachTable('users2', { + id: int4('id3').primaryKey(), + id2: int4().references((): AnyCockroachColumn => users2.id), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY,\n\t"id2" int4\n);\n', + 'ALTER TABLE "users" ADD CONSTRAINT "users_id2_users_id_fkey" FOREIGN KEY ("id2") REFERENCES "users"("id");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const renames = ['public.users->public.users2', 'public.users2.id->public.users2.id3']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "id" TO "id3";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const users3 = cockroachTable('users2', { + id: int4('id3').primaryKey(), + id2: int4(), + }); + const sch3 = { users: users3 }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + expect(st4).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "users_id2_users_id_fkey";']); + expect(pst4).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "users_id2_users_id_fkey";']); +}); + +test.concurrent('fk multistep #2', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id: int4().primaryKey(), + id2: int4().references((): AnyCockroachColumn => users.id), + }); + + const users2 = cockroachTable('users2', { + id: int4('id3').primaryKey(), + id2: int4().references((): AnyCockroachColumn => users2.id), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY,\n\t"id2" int4\n);\n', + 'ALTER TABLE "users" ADD CONSTRAINT "users_id2_users_id_fkey" FOREIGN KEY ("id2") REFERENCES "users"("id");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, []); + const { sqlStatements: pst2 } = await push({ db, to: sch2 }); + + const e2 = [ + 'CREATE TABLE "users2" (\n\t"id3" int4 PRIMARY KEY,\n\t"id2" int4\n);\n', + 'DROP TABLE "users";', + 'ALTER TABLE "users2" ADD CONSTRAINT "users2_id2_users2_id3_fkey" FOREIGN KEY ("id2") REFERENCES "users2"("id3");', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456#issuecomment-3076042688 +test('fk multistep #4', async ({ dbc: db }) => { + const foo = cockroachTable('foo', { + id: int4().primaryKey(), + }); + + const bar = cockroachTable('bar', { + id: int4().primaryKey(), + fooId: int4().references(() => foo.id), + }); + + const schema1 = { foo, bar }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE "foo" (\n\t"id" int4 PRIMARY KEY\n);\n', + 'CREATE TABLE "bar" (\n\t"id" int4 PRIMARY KEY,\n\t"fooId" int4\n);\n', + 'ALTER TABLE "bar" ADD CONSTRAINT "bar_fooId_foo_id_fkey" FOREIGN KEY ("fooId") REFERENCES "foo"("id");', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + bar: cockroachTable('bar', { + id: int4().primaryKey(), + fooId: int4(), + }), + }; + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'ALTER TABLE "bar" DROP CONSTRAINT "bar_fooId_foo_id_fkey";', + 'DROP TABLE "foo";', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test.concurrent('unique duplicate name', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: varchar({ length: 255 }), + age: int4(), + }), + users2: cockroachTable('users2', { + name: varchar({ length: 255 }), + age: int4(), + }), + }; + const to = { + users: cockroachTable('users', { + name: varchar({ length: 255 }), + age: int4(), + }, (t) => [unique('test').on(t.name)]), + users2: cockroachTable('users2', { + name: varchar({ length: 255 }), + age: int4(), + }, (t) => [unique('test').on(t.name)]), + }; + + await push({ db, to: from }); + + await expect(diff(from, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test.concurrent('pk duplicate name', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: varchar({ length: 255 }), + age: int4(), + }), + users2: cockroachTable('users2', { + name: varchar({ length: 255 }), + age: int4(), + }), + }; + const to = { + users: cockroachTable('users', { + name: varchar({ length: 255 }), + age: int4(), + }, (t) => [primaryKey({ name: 'test', columns: [t.name] })]), + users2: cockroachTable('users2', { + name: varchar({ length: 255 }), + age: int4(), + }, (t) => [primaryKey({ name: 'test', columns: [t.name] })]), + }; + + await push({ db, to: from }); + + await expect(diff(from, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test.concurrent('fk duplicate name', async ({ dbc: db }) => { + const users = cockroachTable('users', { + name: varchar({ length: 255 }).primaryKey(), + age: int4().unique(), + }); + const from = { + users, + users2: cockroachTable('users2', { + name: varchar({ length: 255 }), + age: int4(), + }), + }; + const to = { + users, + users2: cockroachTable( + 'users2', + { + name: varchar({ length: 255 }), + age: int4(), + }, + ( + t, + ) => [ + foreignKey({ name: 'test', columns: [t.age], foreignColumns: [users.age] }), + foreignKey({ name: 'test', columns: [t.name], foreignColumns: [users.name] }), + ], + ), + }; + + await push({ db, to: from }); + + await expect(diff(from, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test.concurrent('index duplicate name', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { + name: varchar({ length: 255 }).primaryKey(), + age: int4().unique(), + }, (t) => [index('test').on(t.age), index('test').on(t.name)]), + }; + + await expect(diff({}, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test.concurrent('index with no name', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { + name: varchar({ length: 255 }).primaryKey(), + age: int4().unique(), + }, (t) => [index().on(sql`${t.age}`)]), + }; + + await expect(diff({}, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test.concurrent('alter pk test #1', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + id: int4().notNull(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: cockroachTable('users', { + name: text(), + id: int4().primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE \"users\" DROP CONSTRAINT \"users_pkey\", ADD CONSTRAINT \"users_pkey\" PRIMARY KEY(\"id\");`, + 'ALTER TABLE "users" ALTER COLUMN "name" DROP NOT NULL;', + ]; + expect(sqlStatements).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter pk test #2', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + id: int4().notNull(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: cockroachTable('users', { + name: text(), + id: bigint('id3', { mode: 'number' }).primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, ['public.users.id->public.users.id3']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames: ['public.users.id->public.users.id3'] }); + + const st0 = [ + `ALTER TABLE \"users\" RENAME COLUMN \"id\" TO \"id3\";`, + 'ALTER TABLE "users" ALTER COLUMN "id3" SET DATA TYPE int8;', + `ALTER TABLE \"users\" DROP CONSTRAINT \"users_pkey\", ADD CONSTRAINT \"users_pkey\" PRIMARY KEY(\"id3\");`, + 'ALTER TABLE "users" ALTER COLUMN "name" DROP NOT NULL;', + ]; + expect(sqlStatements).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter pk test #3', async ({ dbc: db }) => { + const from = { + users: cockroachTable('users', { + name: text(), + id: int4().notNull(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: cockroachTable('users', { + name: text(), + id: bigint('id3', { mode: 'number' }).primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "users" ADD COLUMN "id3" int8 NOT NULL;', + `ALTER TABLE \"users\" DROP CONSTRAINT \"users_pkey\", ADD CONSTRAINT \"users_pkey\" PRIMARY KEY(\"id3\");`, + 'ALTER TABLE "users" ALTER COLUMN "name" DROP NOT NULL;', + 'ALTER TABLE "users" DROP COLUMN "id";', + ]; + expect(sqlStatements).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456 +test('drop column with pk and add pk to another column #1', async ({ dbc: db }) => { + const schema1 = { + authors: cockroachTable('authors', { + publicationId: varchar('publication_id', { length: 64 }), + authorID: varchar('author_id', { length: 10 }), + }, (table) => [ + primaryKey({ columns: [table.publicationId, table.authorID] }), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE "authors" (\n\t"publication_id" varchar(64),\n\t"author_id" varchar(10),' + + '\n\tCONSTRAINT "authors_pkey" PRIMARY KEY("publication_id","author_id")\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + authors: cockroachTable('authors', { + publicationId: varchar('publication_id', { length: 64 }), + authorID: varchar('author_id', { length: 10 }), + orcidId: varchar('orcid_id', { length: 64 }), + }, (table) => [ + primaryKey({ columns: [table.publicationId, table.authorID, table.orcidId] }), + ]), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE "authors" ADD COLUMN "orcid_id" varchar(64) NOT NULL;', + 'ALTER TABLE "authors" DROP CONSTRAINT "authors_pkey", ADD CONSTRAINT "authors_pkey" PRIMARY KEY("publication_id","author_id","orcid_id");', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); diff --git a/drizzle-kit/tests/cockroach/defaults-without-tx.test.ts b/drizzle-kit/tests/cockroach/defaults-without-tx.test.ts new file mode 100644 index 0000000000..8940da3e1b --- /dev/null +++ b/drizzle-kit/tests/cockroach/defaults-without-tx.test.ts @@ -0,0 +1,26 @@ +import { char, string, varchar } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diffDefault, test } from './mocks'; + +test.concurrent('char + char arrays', async ({ db }) => { + const res1_0 = await diffDefault(db, char().default('text'), `'text'`, { expectError: true }); + // char is less than default + const res10 = await diffDefault(db, char({ length: 2 }).default('text'), `'text'`, { expectError: true }); + + expect(res1_0).toStrictEqual([`Insert default failed`]); + expect(res10).toStrictEqual([`Insert default failed`]); +}); + +test.concurrent('varchar + varchar arrays', async ({ db }) => { + // varchar length is less than default + const res10 = await diffDefault(db, varchar({ length: 2 }).default('text'), `'text'`, { expectError: true }); + + expect(res10).toStrictEqual([`Insert default failed`]); +}); + +test.concurrent('string + string arrays', async ({ db }) => { + // varchar length is less than default + const res10 = await diffDefault(db, string({ length: 2 }).default('text'), `'text'`, { expectError: true }); + + expect(res10).toStrictEqual([`Insert default failed`]); +}); diff --git a/drizzle-kit/tests/cockroach/defaults.test.ts b/drizzle-kit/tests/cockroach/defaults.test.ts new file mode 100644 index 0000000000..856b979f50 --- /dev/null +++ b/drizzle-kit/tests/cockroach/defaults.test.ts @@ -0,0 +1,3521 @@ +import { sql } from 'drizzle-orm'; +import { + bigint, + bit, + bool, + char, + cockroachEnum, + date, + decimal, + doublePrecision, + float, + geometry, + inet, + int4, + int8, + interval, + jsonb, + numeric, + real, + smallint, + string, + text, + time, + timestamp, + uuid, + varbit, + varchar, + vector, +} from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diffDefault, test } from './mocks'; + +test.concurrent('int4', async ({ dbc: db }) => { + const res1 = await diffDefault(db, int4().default(10), '10'); + const res2 = await diffDefault(db, int4().default(0), '0'); + const res3 = await diffDefault(db, int4().default(-10), '-10'); + const res4 = await diffDefault(db, int4().default(1e4), '10000'); + const res5 = await diffDefault(db, int4().default(-1e4), '-10000'); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); +}); + +test.concurrent('int4 arrays', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + int4().array().default([]), + "'{}'::int4[]", + ); + const res2 = await diffDefault( + db, + int4().array().default([10]), + "'{10}'::int4[]", + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); +}); + +test.concurrent('smallint', async ({ dbc: db }) => { + // 2^15 - 1 + const res1 = await diffDefault(db, smallint().default(32767), '32767'); + // -2^15 + const res2 = await diffDefault(db, smallint().default(-32768), '-32768'); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); +}); + +test.concurrent('smallint arrays', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + smallint().array().default([]), + "'{}'::int2[]", + ); + const res2 = await diffDefault( + db, + smallint().array().default([32767]), + "'{32767}'::int2[]", + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); +}); + +test.concurrent('bigint', async ({ dbc: db }) => { + // 2^53 + const res1 = await diffDefault( + db, + int8({ mode: 'number' }).default(9007199254740991), + '9007199254740991', + ); + const res2 = await diffDefault( + db, + int8({ mode: 'number' }).default(-9007199254740991), + '-9007199254740991', + ); + // 2^63 - 1 + const res3 = await diffDefault( + db, + bigint({ mode: 'bigint' }).default(9223372036854775807n), + '9223372036854775807', + ); + // -2^63 + const res4 = await diffDefault( + db, + bigint({ mode: 'bigint' }).default(-9223372036854775808n), + '-9223372036854775808', + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); +}); + +test.concurrent('bigint arrays', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + bigint({ mode: 'number' }).array().default([]), + "'{}'::int8[]", + ); + const res2 = await diffDefault( + db, + bigint({ mode: 'bigint' }).array().default([]), + "'{}'::int8[]", + ); + + const res3 = await diffDefault( + db, + bigint({ mode: 'number' }).array().default([9007199254740991]), + "'{9007199254740991}'::int8[]", + ); + const res4 = await diffDefault( + db, + bigint({ mode: 'bigint' }).array().default([9223372036854775807n]), + "'{9223372036854775807}'::int8[]", + ); + + const res9 = await diffDefault( + db, + bigint({ mode: 'number' }).array().default([1, 2]), + "'{1,2}'::int8[]", + ); + const res10 = await diffDefault( + db, + bigint({ mode: 'bigint' }).array().default([1n, 2n]), + "'{1,2}'::int8[]", + ); + + const res13 = await diffDefault( + db, + bigint({ mode: 'bigint' }) + .array() + .default(sql`'{}'`), + "'{}'::int8[]", + ); + const res14 = await diffDefault( + db, + bigint({ mode: 'bigint' }) + .array() + .default(sql`'{}'::int8[]`), + "'{}'::int8[]", + ); + const res15 = await diffDefault( + db, + bigint({ mode: 'bigint' }) + .array() + .default(sql`'{9223372036854775807}'::int8[]`), + "'{9223372036854775807}'::int8[]", + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res10).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res14).toStrictEqual([]); + expect(res15).toStrictEqual([]); +}); + +test.concurrent('numeric', async ({ dbc: db }) => { + const res1 = await diffDefault(db, numeric().default('10.123'), '10.123'); + + const res4 = await diffDefault( + db, + numeric({ mode: 'string' }).default('10.123'), + '10.123', + ); + const res2 = await diffDefault( + db, + numeric({ mode: 'bigint' }).default(9223372036854775807n), + '9223372036854775807', + ); + const res3 = await diffDefault( + db, + numeric({ mode: 'number' }).default(9007199254740991), + '9007199254740991', + ); + + const res5 = await diffDefault( + db, + numeric({ precision: 6 }).default('10.123'), + '10.123', + ); + const res6 = await diffDefault( + db, + numeric({ precision: 6, scale: 2 }).default('10.123'), + '10.123', + ); + + const res7 = await diffDefault( + db, + numeric({ precision: 6 }).default('10'), + '10', + ); + const res8 = await diffDefault( + db, + numeric({ precision: 6, scale: 2 }).default('10'), + '10', + ); + + const res7_1 = await diffDefault( + db, + numeric({ precision: 6 }).default('10.100'), + '10.100', + ); + const res8_1 = await diffDefault( + db, + numeric({ precision: 6, scale: 2 }).default('10.100'), + '10.100', + ); + const res7_2 = await diffDefault( + db, + numeric({ mode: 'number', precision: 6 }).default(10.1), + '10.1', + ); + const res8_2 = await diffDefault( + db, + numeric({ mode: 'number', precision: 6, scale: 2 }).default(10.1), + '10.1', + ); + + const res9 = await diffDefault( + db, + numeric({ mode: 'string', scale: 2 }).default('10.123'), + '10.123', + ); + const res10 = await diffDefault( + db, + numeric({ mode: 'string', precision: 6 }).default('10.123'), + '10.123', + ); + const res11 = await diffDefault( + db, + numeric({ mode: 'string', precision: 6, scale: 2 }).default('10.123'), + '10.123', + ); + + const res12 = await diffDefault( + db, + numeric({ mode: 'bigint', precision: 19 }).default(9223372036854775807n), + '9223372036854775807', + ); + const res13 = await diffDefault( + db, + numeric({ mode: 'number', precision: 6, scale: 2 }).default(10.123), + '10.123', + ); + const res14 = await diffDefault( + db, + numeric({ mode: 'number', scale: 2 }).default(10.123), + '10.123', + ); + const res15 = await diffDefault( + db, + numeric({ mode: 'number', precision: 6 }).default(10.123), + '10.123', + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res7).toStrictEqual([]); + expect(res8).toStrictEqual([]); + expect(res7_1).toStrictEqual([]); + expect(res8_1).toStrictEqual([]); + expect(res7_2).toStrictEqual([]); + expect(res8_2).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res10).toStrictEqual([]); + expect(res11).toStrictEqual([]); + expect(res12).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res14).toStrictEqual([]); + expect(res15).toStrictEqual([]); +}); + +test.concurrent('numeric arrays', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + numeric({ mode: 'number' }).array().default([]), + "'{}'::decimal[]", + ); + const res2 = await diffDefault( + db, + numeric({ mode: 'number', precision: 4, scale: 2 }).array().default([]), + "'{}'::decimal(4,2)[]", + ); + const res3 = await diffDefault( + db, + numeric({ mode: 'bigint' }).array().default([]), + "'{}'::decimal[]", + ); + const res4 = await diffDefault( + db, + numeric({ mode: 'bigint', precision: 4 }).array().default([]), + "'{}'::decimal(4)[]", + ); + const res5 = await diffDefault( + db, + numeric({ mode: 'string' }).array().default([]), + "'{}'::decimal[]", + ); + const res6 = await diffDefault( + db, + numeric({ mode: 'string', precision: 4, scale: 2 }).array().default([]), + "'{}'::decimal(4,2)[]", + ); + + // no precision and scale + // default will be created same as passed + const res7_1 = await diffDefault( + db, + numeric({ mode: 'number' }).array().default([10.123, 123.1]), + "'{10.123,123.1}'::decimal[]", + ); + // scale exists and less then decimal part + // default will be trimmed by scale + const res7_2 = await diffDefault( + db, + numeric({ mode: 'number', precision: 6, scale: 2 }) + .array() + .default([10.123, 123.153]), + "'{10.123,123.153}'::decimal(6,2)[]", + ); + // scale will be 0 + // default will be trimmed to integer part + const res7_3 = await diffDefault( + db, + numeric({ mode: 'number', precision: 6 }).array().default([10.123, 123.1]), + "'{10.123,123.1}'::decimal(6)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res7_4 = await diffDefault( + db, + numeric({ mode: 'number', precision: 6, scale: 3 }) + .array() + .default([10.123, 123.1]), + "'{10.123,123.1}'::decimal(6,3)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res7_5 = await diffDefault( + db, + numeric({ mode: 'number', precision: 6, scale: 3 }) + .array() + .default([10, 123]), + "'{10,123}'::decimal(6,3)[]", + ); + + // no precision and scale + // default will be created same as passed + const res8_1 = await diffDefault( + db, + numeric({ mode: 'string' }).array().default(['10.123', '123.1']), + "'{10.123,123.1}'::decimal[]", + ); + // scale exists and less then decimal part + // default will be trimmed by scale + const res8_2 = await diffDefault( + db, + numeric({ mode: 'string', precision: 6, scale: 2 }) + .array() + .default(['10.123', '123.153']), + "'{10.123,123.153}'::decimal(6,2)[]", + ); + // scale will be 0 + // default will be trimmed to integer part + const res8_3 = await diffDefault( + db, + numeric({ mode: 'string', precision: 6 }) + .array() + .default(['10.123', '123.1']), + "'{10.123,123.1}'::decimal(6)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res8_4 = await diffDefault( + db, + numeric({ mode: 'string', precision: 6, scale: 3 }) + .array() + .default(['10.123', '123.1']), + "'{10.123,123.1}'::decimal(6,3)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res8_5 = await diffDefault( + db, + numeric({ mode: 'string', precision: 6, scale: 3 }) + .array() + .default(['10', '123']), + "'{10,123}'::decimal(6,3)[]", + ); + + // no precision and scale + // default will be created same as passed + const res9_1 = await diffDefault( + db, + numeric({ mode: 'bigint' }) + .array() + .default([9223372036854775807n, 9223372036854775806n]), + "'{9223372036854775807,9223372036854775806}'::decimal[]", + ); + + // scale will be 0 + // default will be trimmed to integer part + const res9_2 = await diffDefault( + db, + numeric({ mode: 'bigint', precision: 19 }) + .array() + .default([9223372036854775807n, 9223372036854775806n]), + "'{9223372036854775807,9223372036854775806}'::decimal(19)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res9_3 = await diffDefault( + db, + numeric({ mode: 'bigint', precision: 23, scale: 3 }) + .array() + .default([9223372036854775807n, 9223372036854775806n]), + "'{9223372036854775807,9223372036854775806}'::decimal(23,3)[]", + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + + expect(res7_1).toStrictEqual([]); + expect(res7_2).toStrictEqual([]); + expect(res7_3).toStrictEqual([]); + expect(res7_4).toStrictEqual([]); + expect(res7_5).toStrictEqual([]); + + expect(res8_1).toStrictEqual([]); + expect(res8_2).toStrictEqual([]); + expect(res8_3).toStrictEqual([]); + expect(res8_4).toStrictEqual([]); + expect(res8_5).toStrictEqual([]); + + expect(res9_1).toStrictEqual([]); + expect(res9_2).toStrictEqual([]); + expect(res9_3).toStrictEqual([]); +}); + +test.concurrent('decimal', async ({ dbc: db }) => { + const res1 = await diffDefault(db, decimal().default('10.123'), '10.123'); + + const res4 = await diffDefault( + db, + decimal({ mode: 'string' }).default('10.123'), + '10.123', + ); + const res2 = await diffDefault( + db, + decimal({ mode: 'bigint' }).default(9223372036854775807n), + '9223372036854775807', + ); + const res3 = await diffDefault( + db, + decimal({ mode: 'number' }).default(9007199254740991), + '9007199254740991', + ); + + const res5 = await diffDefault( + db, + decimal({ precision: 6 }).default('10.123'), + '10.123', + ); + const res6 = await diffDefault( + db, + decimal({ precision: 6, scale: 2 }).default('10.123'), + '10.123', + ); + + const res7 = await diffDefault( + db, + decimal({ precision: 6 }).default('10'), + '10', + ); + const res8 = await diffDefault( + db, + decimal({ precision: 6, scale: 2 }).default('10'), + '10', + ); + + const res7_1 = await diffDefault( + db, + decimal({ precision: 6 }).default('10.100'), + '10.100', + ); + const res8_1 = await diffDefault( + db, + decimal({ precision: 6, scale: 2 }).default('10.100'), + '10.100', + ); + const res7_2 = await diffDefault( + db, + decimal({ mode: 'number', precision: 6 }).default(10.1), + '10.1', + ); // js trims .100 to 0.1 + const res8_2 = await diffDefault( + db, + decimal({ mode: 'number', precision: 6, scale: 2 }).default(10.1), + '10.1', + ); // js trims .100 to 0.1 + + const res9 = await diffDefault( + db, + decimal({ mode: 'string', scale: 2 }).default('10.123'), + '10.123', + ); + const res10 = await diffDefault( + db, + decimal({ mode: 'string', precision: 6 }).default('10.123'), + '10.123', + ); + const res11 = await diffDefault( + db, + decimal({ mode: 'string', precision: 6, scale: 2 }).default('10.123'), + '10.123', + ); + + const res12 = await diffDefault( + db, + decimal({ mode: 'bigint', precision: 19 }).default(9223372036854775807n), + '9223372036854775807', + ); + const res13 = await diffDefault( + db, + decimal({ mode: 'number', precision: 6, scale: 2 }).default(10.123), + '10.123', + ); + const res14 = await diffDefault( + db, + decimal({ mode: 'number', scale: 2 }).default(10.123), + '10.123', + ); + const res15 = await diffDefault( + db, + decimal({ mode: 'number', precision: 6 }).default(10.123), + '10.123', + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res7).toStrictEqual([]); + expect(res8).toStrictEqual([]); + expect(res7_1).toStrictEqual([]); + expect(res8_1).toStrictEqual([]); + expect(res7_2).toStrictEqual([]); + expect(res8_2).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res10).toStrictEqual([]); + expect(res11).toStrictEqual([]); + expect(res12).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res14).toStrictEqual([]); + expect(res15).toStrictEqual([]); +}); + +test.concurrent('decimals arrays', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + decimal({ mode: 'number' }).array().default([]), + "'{}'::decimal[]", + ); + const res2 = await diffDefault( + db, + decimal({ mode: 'number', precision: 4, scale: 2 }).array().default([]), + "'{}'::decimal(4,2)[]", + ); + const res3 = await diffDefault( + db, + decimal({ mode: 'bigint' }).array().default([]), + "'{}'::decimal[]", + ); + const res4 = await diffDefault( + db, + decimal({ mode: 'bigint', precision: 4 }).array().default([]), + "'{}'::decimal(4)[]", + ); + const res5 = await diffDefault( + db, + decimal({ mode: 'string' }).array().default([]), + "'{}'::decimal[]", + ); + const res6 = await diffDefault( + db, + decimal({ mode: 'string', precision: 4, scale: 2 }).array().default([]), + "'{}'::decimal(4,2)[]", + ); + + // no precision and scale + // default will be created same as passed + const res7_1 = await diffDefault( + db, + decimal({ mode: 'number' }).array().default([10.123, 123.1]), + "'{10.123,123.1}'::decimal[]", + ); + // scale exists and less then decimal part + // default will be trimmed by scale + const res7_2 = await diffDefault( + db, + decimal({ mode: 'number', precision: 6, scale: 2 }) + .array() + .default([10.123, 123.153]), + "'{10.123,123.153}'::decimal(6,2)[]", + ); + // scale will be 0 + // default will be trimmed to integer part + const res7_3 = await diffDefault( + db, + decimal({ mode: 'number', precision: 6 }).array().default([10.123, 123.1]), + "'{10.123,123.1}'::decimal(6)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res7_4 = await diffDefault( + db, + decimal({ mode: 'number', precision: 6, scale: 3 }) + .array() + .default([10.123, 123.1]), + "'{10.123,123.1}'::decimal(6,3)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res7_5 = await diffDefault( + db, + decimal({ mode: 'number', precision: 6, scale: 3 }) + .array() + .default([10, 123]), + "'{10,123}'::decimal(6,3)[]", + ); + + // no precision and scale + // default will be created same as passed + const res8_1 = await diffDefault( + db, + decimal({ mode: 'string' }).array().default(['10.123', '123.1']), + "'{10.123,123.1}'::decimal[]", + ); + // scale exists and less then decimal part + // default will be trimmed by scale + const res8_2 = await diffDefault( + db, + decimal({ mode: 'string', precision: 6, scale: 2 }) + .array() + .default(['10.123', '123.153']), + "'{10.123,123.153}'::decimal(6,2)[]", + ); + // scale will be 0 + // default will be trimmed to integer part + const res8_3 = await diffDefault( + db, + decimal({ mode: 'string', precision: 6 }) + .array() + .default(['10.123', '123.1']), + "'{10.123,123.1}'::decimal(6)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res8_4 = await diffDefault( + db, + decimal({ mode: 'string', precision: 6, scale: 3 }) + .array() + .default(['10.123', '123.1']), + "'{10.123,123.1}'::decimal(6,3)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res8_5 = await diffDefault( + db, + decimal({ mode: 'string', precision: 6, scale: 3 }) + .array() + .default(['10', '123']), + "'{10,123}'::decimal(6,3)[]", + ); + + // no precision and scale + // default will be created same as passed + const res9_1 = await diffDefault( + db, + decimal({ mode: 'bigint' }) + .array() + .default([9223372036854775807n, 9223372036854775806n]), + "'{9223372036854775807,9223372036854775806}'::decimal[]", + ); + + // scale will be 0 + // default will be trimmed to integer part + const res9_2 = await diffDefault( + db, + decimal({ mode: 'bigint', precision: 19 }) + .array() + .default([9223372036854775807n, 9223372036854775806n]), + "'{9223372036854775807,9223372036854775806}'::decimal(19)[]", + ); + // scale exists and is bigger then decimal part + // default will be padded by scale + const res9_3 = await diffDefault( + db, + decimal({ mode: 'bigint', precision: 23, scale: 3 }) + .array() + .default([9223372036854775807n, 9223372036854775806n]), + "'{9223372036854775807,9223372036854775806}'::decimal(23,3)[]", + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res7_1).toStrictEqual([]); + expect(res7_2).toStrictEqual([]); + expect(res7_3).toStrictEqual([]); + expect(res7_4).toStrictEqual([]); + expect(res7_5).toStrictEqual([]); + expect(res8_1).toStrictEqual([]); + expect(res8_2).toStrictEqual([]); + expect(res8_3).toStrictEqual([]); + expect(res8_4).toStrictEqual([]); + expect(res8_5).toStrictEqual([]); + expect(res9_1).toStrictEqual([]); + expect(res9_2).toStrictEqual([]); + expect(res9_3).toStrictEqual([]); +}); + +test.concurrent('real', async ({ dbc: db }) => { + const res1 = await diffDefault(db, real().default(1000.123), '1000.123'); + const res2 = await diffDefault(db, real().default(1000), '1000'); + const res3 = await diffDefault(db, real().default(1000.3), '1000.3'); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); +}); + +test.concurrent('real arrays', async ({ dbc: db }) => { + const res2 = await diffDefault( + db, + real().array().default([]), + `'{}'::real[]`, + ); + const res3 = await diffDefault( + db, + real().array().default([1000.123, 10.2]), + `'{1000.123,10.2}'::real[]`, + ); + const res4 = await diffDefault( + db, + real().array().default([1000.2]), + `'{1000.2}'::real[]`, + ); + const res5 = await diffDefault( + db, + real().array().default([1000.123, 10]), + `'{1000.123,10}'::real[]`, + ); + + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); +}); + +test.concurrent('float', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + float().default(10000.123), + '10000.123', + ); + const res2 = await diffDefault(db, float().default(10000), '10000'); + const res3 = await diffDefault(db, float().default(1000.3), '1000.3'); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); +}); + +test.concurrent('float arrays', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + float().array().default([]), + `'{}'::float[]`, + ); + const res2 = await diffDefault( + db, + float().array().default([10000.123]), + `'{10000.123}'::float[]`, + ); + const res3 = await diffDefault( + db, + float().array().default([10000, 14]), + `'{10000,14}'::float[]`, + ); + const res4 = await diffDefault( + db, + float().array().default([1000.2]), + `'{1000.2}'::float[]`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); +}); + +test.concurrent('doublePrecision', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + doublePrecision().default(10000.123), + '10000.123', + ); + const res2 = await diffDefault( + db, + doublePrecision().default(10000), + '10000', + ); + const res3 = await diffDefault( + db, + doublePrecision().default(1000.3), + '1000.3', + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); +}); + +test.concurrent('doublePrecision arrays', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + doublePrecision().array().default([]), + `'{}'::float[]`, + ); + const res2 = await diffDefault( + db, + doublePrecision().array().default([10000.123]), + `'{10000.123}'::float[]`, + ); + const res3 = await diffDefault( + db, + doublePrecision().array().default([10000, 14]), + `'{10000,14}'::float[]`, + ); + const res4 = await diffDefault( + db, + doublePrecision().array().default([1000.2]), + `'{1000.2}'::float[]`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); +}); + +test.concurrent('bool', async ({ dbc: db }) => { + const res1 = await diffDefault(db, bool().default(true), 'true'); + const res2 = await diffDefault(db, bool().default(false), 'false'); + const res3 = await diffDefault(db, bool().default(sql`true`), 'true'); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); +}); + +test.concurrent('bool arrays', async ({ dbc: db }) => { + const res4 = await diffDefault( + db, + bool().array().default([]), + `'{}'::bool[]`, + ); + const res5 = await diffDefault( + db, + bool().array().default([true]), + `'{true}'::bool[]`, + ); + const res6 = await diffDefault( + db, + bool() + .array() + .default(sql`'{true}'::bool[]`), + `'{true}'::bool[]`, + ); + + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); +}); + +test.concurrent('char', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + char({ length: 15 }).default('text'), + `'text'`, + ); + const res2 = await diffDefault( + db, + char({ length: 15 }).default("text'text"), + `e'text\\'text'`, + ); + const res3 = await diffDefault( + db, + char({ length: 15 }).default('text\'text"'), + `e'text\\'text"'`, + ); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res4 = await diffDefault( + db, + char({ length: 15 }).default(`mo''",\\\`}{od`), + `e'mo\\'\\'",\\\\\`}{od'`, + ); + const res5 = await diffDefault( + db, + char({ length: 15, enum: ['one', 'two', 'three'] }).default('one'), + "'one'", + ); + const res6 = await diffDefault( + db, + char({ length: 15 }).default('hello, world'), + "'hello, world'", + ); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res7 = await diffDefault( + db, + char({ + length: 15, + enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\\\`}{od`, 'mo,\`od'], + }).default(`mo''",\\\`}{od`), + `e'mo\\'\\'",\\\\\`}{od'`, + ); + + const res9 = await diffDefault( + db, + char({ length: 15 }).default('text'), + `'text'`, + ); + const res11 = await diffDefault( + db, + char({ length: 2 }).default('12'), + `'12'`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res7).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res11).toStrictEqual([]); +}); + +test.concurrent('char arrays', async ({ dbc: db }) => { + const res7 = await diffDefault( + db, + char({ length: 15 }).array().default([]), + `'{}'::char(15)[]`, + ); + const res8 = await diffDefault( + db, + char({ length: 15 }).array().default(['text']), + `'{text}'::char(15)[]`, + ); + const res9 = await diffDefault( + db, + char().array().default(['text']), + `'{text}'::char[]`, + ); + const res12 = await diffDefault( + db, + char({ length: 15 }).array().default(['\\']), + `'{"\\\\"}'::char(15)[]`, + ); + const res13 = await diffDefault( + db, + char({ length: 15 }).array().default(["'"]), + `'{''}'::char(15)[]`, + ); + const res14 = await diffDefault( + db, + char({ length: 15, enum: ['one', 'two', 'three'] }) + .array() + .default(['one']), + `'{one}'::char(15)[]`, + ); + const res15 = await diffDefault( + db, + char({ + length: 15, + enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'], + }) + .array() + .default([`mo''",\`}{od`]), + `'{"mo''''\\\",\`}{od"}'::char(15)[]`, + ); + + const res16 = await diffDefault( + db, + char({ length: 15 }).array().default([]), + `'{}'::char(15)[]`, + ); + + // char is bigger than default + const res17 = await diffDefault( + db, + char({ length: 15 }).array().default(['text']), + `'{text}'::char(15)[]`, + ); + // char is less than default + const res18 = await diffDefault( + db, + char({ length: 2 }).array().default(['text']), + `'{text}'::char(2)[]`, + ); + const res18_1 = await diffDefault( + db, + char({ length: 2 }).array().default(["t'"]), + `'{t''}'::char(2)[]`, + ); + + const res18_2 = await diffDefault( + db, + char({ length: 2 }).array().default(['t\\']), + `'{"t\\\\"}'::char(2)[]`, + ); + // char is same as default + const res19 = await diffDefault( + db, + char({ length: 2 }).array().default(['12']), + `'{12}'::char(2)[]`, + ); + + expect(res7).toStrictEqual([]); + expect(res8).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res12).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res14).toStrictEqual([]); + expect(res15).toStrictEqual([]); + expect(res16).toStrictEqual([]); + expect(res17).toStrictEqual([]); + expect(res18).toStrictEqual([]); + expect(res18_1).toStrictEqual([]); + expect(res18_2).toStrictEqual([]); + expect(res19).toStrictEqual([]); +}); + +test.concurrent('varchar', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + varchar({ length: 255 }).default('text'), + `'text'`, + ); + const res1_0 = await diffDefault(db, varchar().default('text'), `'text'`); + const res2 = await diffDefault( + db, + varchar({ length: 255 }).default("text'text"), + `e'text\\'text'`, + ); + const res3 = await diffDefault( + db, + varchar({ length: 255 }).default('text\'text"'), + `e'text\\'text"'`, + ); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res4 = await diffDefault( + db, + varchar({ length: 255 }).default(`mo''",\\\`}{od`), + `e'mo\\'\\'",\\\\\`}{od'`, + ); + const res5 = await diffDefault( + db, + varchar({ length: 255, enum: ['one', 'two', 'three'] }).default('one'), + "'one'", + ); + const res5_1 = await diffDefault( + db, + varchar({ length: 255 }).default('hello, world'), + "'hello, world'", + ); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res6 = await diffDefault( + db, + varchar({ + length: 255, + enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\\\`}{od`, 'mo,\`od'], + }).default(`mo''",\\\`}{od`), + `e'mo\\'\\'",\\\\\`}{od'`, + ); + + // varchar length is bigger than default + const res9 = await diffDefault( + db, + varchar({ length: 15 }).default('text'), + `'text'`, + ); + // varchar length is same as default + const res11 = await diffDefault( + db, + varchar({ length: 2 }).default('12'), + `'12'`, + ); + + expect(res1).toStrictEqual([]); + expect(res1_0).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res5_1).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res11).toStrictEqual([]); +}); + +test.concurrent('varchar arrays', async ({ dbc: db }) => { + const res7 = await diffDefault( + db, + varchar({ length: 255 }).array().default([]), + `'{}'::varchar(255)[]`, + ); + const res8 = await diffDefault( + db, + varchar({ length: 255 }).array().default(['text']), + `'{text}'::varchar(255)[]`, + ); + const res8_0 = await diffDefault( + db, + varchar().array().default(['text']), + `'{text}'::varchar[]`, + ); + const res12 = await diffDefault( + db, + varchar({ length: 15 }).array().default(['\\']), + `'{"\\\\"}'::varchar(15)[]`, + ); + const res13 = await diffDefault( + db, + varchar({ length: 15 }).array().default(["'"]), + `'{''}'::varchar(15)[]`, + ); + const res14 = await diffDefault( + db, + varchar({ length: 15, enum: ['one', 'two', 'three'] }) + .array() + .default(['one']), + `'{one}'::varchar(15)[]`, + ); + const res15 = await diffDefault( + db, + varchar({ + length: 255, + enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'], + }) + .array() + .default([`mo''",\`}{od`]), + `'{"mo''''\\\",\`}{od"}'::varchar(255)[]`, + ); + + const res16 = await diffDefault( + db, + varchar({ length: 255 }).array().default([]), + `'{}'::varchar(255)[]`, + ); + + // char is bigger than default + const res17 = await diffDefault( + db, + varchar({ length: 255 }).array().default(['text']), + `'{text}'::varchar(255)[]`, + ); + // char is less than default + const res18 = await diffDefault( + db, + varchar({ length: 2 }).array().default(['text']), + `'{text}'::varchar(2)[]`, + ); + const res18_1 = await diffDefault( + db, + varchar({ length: 2 }).array().default(["t'"]), + `'{t''}'::varchar(2)[]`, + ); + + const res18_2 = await diffDefault( + db, + varchar({ length: 2 }).array().default(['t\\']), + `'{"t\\\\"}'::varchar(2)[]`, + ); + // char is same as default + const res19 = await diffDefault( + db, + varchar({ length: 2 }).array().default(['12']), + `'{12}'::varchar(2)[]`, + ); + + expect(res7).toStrictEqual([]); + expect(res8).toStrictEqual([]); + expect(res8_0).toStrictEqual([]); + expect(res12).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res14).toStrictEqual([]); + expect(res15).toStrictEqual([]); + expect(res16).toStrictEqual([]); + expect(res17).toStrictEqual([]); + expect(res18).toStrictEqual([]); + expect(res18_1).toStrictEqual([]); + expect(res18_2).toStrictEqual([]); + expect(res19).toStrictEqual([]); +}); + +test.concurrent('text', async ({ dbc: db }) => { + const res1 = await diffDefault(db, text().default('text'), `'text'`); + const res2 = await diffDefault( + db, + text().default("text'text"), + `e'text\\'text'`, + ); + const res3 = await diffDefault( + db, + text().default('text\'text"'), + `e'text\\'text"'`, + ); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res4 = await diffDefault( + db, + text().default(`mo''",\\\`}{od`), + `e'mo\\'\\'",\\\\\`}{od'`, + ); + const res5 = await diffDefault(db, text().default('one'), "'one'"); + const res5_1 = await diffDefault( + db, + text().default('hello, world'), + "'hello, world'", + ); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res6 = await diffDefault( + db, + text({ + enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\\\`}{od`, 'mo,\`od'], + }).default(`mo''",\\\`}{od`), + `e'mo\\'\\'",\\\\\`}{od'`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res5_1).toStrictEqual([]); + expect(res6).toStrictEqual([]); +}); + +test.concurrent('text arrays', async ({ dbc: db }) => { + const res7 = await diffDefault( + db, + text().array().default([]), + `'{}'::string[]`, + ); + const res8 = await diffDefault( + db, + text().array().default(['text']), + `'{text}'::string[]`, + ); + const res12 = await diffDefault( + db, + text().array().default(['\\']), + `'{"\\\\"}'::string[]`, + ); + const res13 = await diffDefault( + db, + text().array().default(["'"]), + `'{''}'::string[]`, + ); + const res14 = await diffDefault( + db, + text({ enum: ['one', 'two', 'three'] }) + .array() + .default(['one']), + `'{one}'::string[]`, + ); + const res15 = await diffDefault( + db, + text({ + enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'], + }) + .array() + .default([`mo''",\`}{od`]), + `'{"mo''''\\\",\`}{od"}'::string[]`, + ); + + const res16 = await diffDefault( + db, + text().array().default([]), + `'{}'::string[]`, + ); + + const res18 = await diffDefault( + db, + text().array().default(['text']), + `'{text}'::string[]`, + ); + const res18_1 = await diffDefault( + db, + text().array().default(["t'"]), + `'{t''}'::string[]`, + ); + + const res18_2 = await diffDefault( + db, + text().array().default(['t\\']), + `'{"t\\\\"}'::string[]`, + ); + + const res20 = await diffDefault( + db, + text().array().default(["1234'4"]), + `'{1234''4}'::string[]`, + ); + const res21 = await diffDefault( + db, + text().array().default(['1234\\1']), + `'{"1234\\\\1"}'::string[]`, + ); + + expect(res7).toStrictEqual([]); + expect(res8).toStrictEqual([]); + expect(res12).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res14).toStrictEqual([]); + expect(res15).toStrictEqual([]); + expect(res16).toStrictEqual([]); + expect(res18).toStrictEqual([]); + expect(res18_1).toStrictEqual([]); + expect(res18_2).toStrictEqual([]); + expect(res20).toStrictEqual([]); + expect(res21).toStrictEqual([]); +}); + +test.concurrent('string', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + string({ length: 255 }).default('text'), + `'text'`, + ); + const res1_0 = await diffDefault(db, string().default('text'), `'text'`); + const res2 = await diffDefault( + db, + string({ length: 255 }).default("text'text"), + `e'text\\'text'`, + ); + const res3 = await diffDefault( + db, + string({ length: 255 }).default('text\'text"'), + `e'text\\'text"'`, + ); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res4 = await diffDefault( + db, + string({ length: 255 }).default(`mo''",\\\`}{od`), + `e'mo\\'\\'",\\\\\`}{od'`, + ); + const res5 = await diffDefault( + db, + string({ length: 255, enum: ['one', 'two', 'three'] }).default('one'), + "'one'", + ); + const res5_1 = await diffDefault( + db, + string({ length: 255 }).default('hello, world'), + "'hello, world'", + ); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res6 = await diffDefault( + db, + string({ + length: 255, + enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\\\`}{od`, 'mo,\`od'], + }).default(`mo''",\\\`}{od`), + `e'mo\\'\\'",\\\\\`}{od'`, + ); + + // varchar length is bigger than default + const res9 = await diffDefault( + db, + string({ length: 15 }).default('text'), + `'text'`, + ); + // varchar length is same as default + const res11 = await diffDefault( + db, + string({ length: 2 }).default('12'), + `'12'`, + ); + + expect(res1).toStrictEqual([]); + expect(res1_0).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res5_1).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res11).toStrictEqual([]); +}); + +test.concurrent('string arrays', async ({ dbc: db }) => { + const res7 = await diffDefault( + db, + string({ length: 255 }).array().default([]), + `'{}'::string(255)[]`, + ); + const res8 = await diffDefault( + db, + string({ length: 255 }).array().default(['text']), + `'{text}'::string(255)[]`, + ); + const res8_0 = await diffDefault( + db, + string().array().default(['text']), + `'{text}'::string[]`, + ); + const res12 = await diffDefault( + db, + string({ length: 15 }).array().default(['\\']), + `'{"\\\\"}'::string(15)[]`, + ); + const res13 = await diffDefault( + db, + string({ length: 15 }).array().default(["'"]), + `'{''}'::string(15)[]`, + ); + const res14 = await diffDefault( + db, + string({ length: 15, enum: ['one', 'two', 'three'] }) + .array() + .default(['one']), + `'{one}'::string(15)[]`, + ); + const res15 = await diffDefault( + db, + string({ + length: 255, + enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'], + }) + .array() + .default([`mo''",\`}{od`]), + `'{"mo''''\\\",\`}{od"}'::string(255)[]`, + ); + + // char is bigger than default + const res17 = await diffDefault( + db, + string({ length: 255 }).array().default(['text']), + `'{text}'::string(255)[]`, + ); + // char is less than default + const res18 = await diffDefault( + db, + string({ length: 2 }).array().default(['text']), + `'{text}'::string(2)[]`, + ); + const res18_1 = await diffDefault( + db, + string({ length: 2 }).array().default(["t'"]), + `'{t''}'::string(2)[]`, + ); + const res18_2 = await diffDefault( + db, + string({ length: 2 }).array().default(['t\\']), + `'{"t\\\\"}'::string(2)[]`, + ); + // char is same as default + const res19 = await diffDefault( + db, + string({ length: 2 }).array().default(['12']), + `'{12}'::string(2)[]`, + ); + const res22 = await diffDefault( + db, + string({ length: 3 }).array().default(['"1234545"']), + `'{"\\"1234545\\""}'::string(3)[]`, + ); + + expect(res7).toStrictEqual([]); + expect(res8).toStrictEqual([]); + expect(res8_0).toStrictEqual([]); + expect(res12).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res14).toStrictEqual([]); + expect(res15).toStrictEqual([]); + expect(res17).toStrictEqual([]); + expect(res18).toStrictEqual([]); + expect(res18_1).toStrictEqual([]); + expect(res18_2).toStrictEqual([]); + expect(res19).toStrictEqual([]); + expect(res22).toStrictEqual([]); +}); + +test.concurrent('jsonb', async ({ dbc: db }) => { + const res1 = await diffDefault(db, jsonb().default({}), `'{}'`); + const res2 = await diffDefault(db, jsonb().default([]), `'[]'`); + const res3 = await diffDefault( + db, + jsonb().default([1, 2, 3]), + `'[1,2,3]'`, + ); + const res4 = await diffDefault( + db, + jsonb().default({ key: 'value' }), + `'{"key":"value"}'`, + ); + const res5 = await diffDefault( + db, + jsonb().default({ key: "val'ue" }), + `e'{"key":"val\\'ue"}'`, + ); + const res6 = await diffDefault( + db, + jsonb().default({ key: `mo''",\`}{od` }), + `e'{"key":"mo\\'\\'\\\\",\`}{od"}'`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + // await expect().rejects + // .toThrowError(); +}); + +// tests were commented since there are too many of them +test.concurrent('timestamp', async ({ dbc: db }) => { + // normal without timezone + const res1 = await diffDefault( + db, + timestamp({ mode: 'date' }).default(new Date('2025-05-23T12:53:53.115Z')), + `'2025-05-23 12:53:53.115'`, + ); + // precision same as in default + const res2 = await diffDefault( + db, + timestamp({ mode: 'date', precision: 3 }).default( + new Date('2025-05-23T12:53:53.115Z'), + ), + `'2025-05-23 12:53:53.115'`, + ); + // precision is less than in default + // cockroach will store this value trimmed + // this should pass since in diff we handle it + const res3 = await diffDefault( + db, + timestamp({ mode: 'date', precision: 1 }).default( + new Date('2025-05-23T12:53:53.115Z'), + ), + `'2025-05-23 12:53:53.115'`, + ); + + // all string variations + // normal: without timezone + const res9 = await diffDefault( + db, + timestamp({ mode: 'string' }).default('2025-05-23T12:53:53.115'), + `'2025-05-23T12:53:53.115'`, + ); + + const res9_2 = await diffDefault( + db, + timestamp({ mode: 'string' }).default('2025-05-23T12:53:53'), + `'2025-05-23T12:53:53'`, + ); + // normal: timezone with "zero UTC offset" in the end + const res10 = await diffDefault( + db, + timestamp({ mode: 'string' }).default('2025-05-23T12:53:53.115Z'), + `'2025-05-23T12:53:53.115Z'`, + ); + + // normal: timezone with custom timezone + const res12 = await diffDefault( + db, + timestamp({ mode: 'string' }).default('2025-05-23T12:53:53.115+03'), + `'2025-05-23T12:53:53.115+03'`, + ); + + // precision is less than in default + // cockroach will store this value trimmed + // this should pass since in diff we handle it + // without UTC + const res13 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1 }).default( + '2025-05-23T12:53:53.115', + ), + `'2025-05-23T12:53:53.115'`, + ); + + // custom timezone + const res16 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1 }).default( + '2025-05-23T12:53:53.116+04:30', + ), + `'2025-05-23T12:53:53.116+04:30'`, + ); + + // precision same + // No timezone + const res17 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3 }).default( + '2025-05-23T12:53:53.115', + ), + `'2025-05-23T12:53:53.115'`, + ); + // precision same + // zero timezone + const res18 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3 }).default( + '2025-05-23T12:53:53.115Z', + ), + `'2025-05-23T12:53:53.115Z'`, + ); + + // custom timezone + const res20 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3 }).default( + '2025-05-23T12:53:53.115+04:30', + ), + `'2025-05-23T12:53:53.115+04:30'`, + ); + + // precision is bigget than in default + // No timezone + const res21 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5 }).default( + '2025-05-23T12:53:53.115', + ), + `'2025-05-23T12:53:53.115'`, + ); + // precision is bigget than in default + // zero timezone + const res22 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5 }).default( + '2025-05-23T12:53:53.115Z', + ), + `'2025-05-23T12:53:53.115Z'`, + ); + + const res24 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5 }).default( + '2025-05-23T12:53:53.115+04:30', + ), + `'2025-05-23T12:53:53.115+04:30'`, + ); + + const res25 = await diffDefault( + db, + timestamp({ + mode: 'string', + precision: 1, + withTimezone: true, + }).defaultNow(), + `now()`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res9_2).toStrictEqual([]); + expect(res10).toStrictEqual([]); + expect(res12).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res16).toStrictEqual([]); + expect(res17).toStrictEqual([]); + expect(res18).toStrictEqual([]); + expect(res20).toStrictEqual([]); + expect(res21).toStrictEqual([]); + expect(res22).toStrictEqual([]); + expect(res24).toStrictEqual([]); + expect(res25).toStrictEqual([]); +}); + +test.concurrent('timestamp arrays', async ({ dbc: db }) => { + const res1_1 = await diffDefault( + db, + timestamp({ mode: 'date' }) + .array() + .default([new Date('2025-05-23T12:53:53.115Z')]), + `'{"2025-05-23 12:53:53.115"}'::timestamp[]`, + ); + + const res2_1 = await diffDefault( + db, + timestamp({ mode: 'date', precision: 3 }) + .array() + .default([new Date('2025-05-23T12:53:53.115Z')]), + `'{"2025-05-23 12:53:53.115"}'::timestamp(3)[]`, + ); + + const res3_1 = await diffDefault( + db, + timestamp({ mode: 'date', precision: 1 }) + .array() + .default([new Date('2025-05-23T12:53:53.115Z')]), + `'{"2025-05-23 12:53:53.115"}'::timestamp(1)[]`, + ); + + const res9_1 = await diffDefault( + db, + timestamp({ mode: 'string' }).array().default(['2025-05-23T12:53:53.115']), + `'{"2025-05-23T12:53:53.115"}'::timestamp[]`, + ); + const res9_3 = await diffDefault( + db, + timestamp({ mode: 'string' }).array().default(['2025-05-23T12:53:53.0']), + `'{"2025-05-23T12:53:53.0"}'::timestamp[]`, + ); + + const res10_1 = await diffDefault( + db, + timestamp({ mode: 'string' }).array().default(['2025-05-23T12:53:53.115Z']), + `'{"2025-05-23T12:53:53.115Z"}'::timestamp[]`, + ); + + const res12_1 = await diffDefault( + db, + timestamp({ mode: 'string' }) + .array() + .default(['2025-05-23T12:53:53.115+03']), + `'{"2025-05-23T12:53:53.115+03"}'::timestamp[]`, + ); + + const res13_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1 }) + .array() + .default(['2025-05-23T12:53:53.115']), + `'{"2025-05-23T12:53:53.115"}'::timestamp(1)[]`, + ); + + const res16_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1 }) + .array() + .default(['2025-05-23T12:53:53.115+04:30']), + `'{"2025-05-23T12:53:53.115+04:30"}'::timestamp(1)[]`, + ); + + const res17_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3 }) + .array() + .default(['2025-05-23T12:53:53.115']), + `'{"2025-05-23T12:53:53.115"}'::timestamp(3)[]`, + ); + + const res18_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3 }) + .array() + .default(['2025-05-23T12:53:53.115Z']), + `'{"2025-05-23T12:53:53.115Z"}'::timestamp(3)[]`, + ); + + const res20_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3 }) + .array() + .default(['2025-05-23T12:53:53.115+04:30']), + `'{"2025-05-23T12:53:53.115+04:30"}'::timestamp(3)[]`, + ); + + const res21_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5 }) + .array() + .default(['2025-05-23T12:53:53.115']), + `'{"2025-05-23T12:53:53.115"}'::timestamp(5)[]`, + ); + + const res22_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5 }) + .array() + .default(['2025-05-23T12:53:53.115Z']), + `'{"2025-05-23T12:53:53.115Z"}'::timestamp(5)[]`, + ); + + const res24_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5 }) + .array() + .default(['2025-05-23T12:53:53.115+04:30']), + `'{"2025-05-23T12:53:53.115+04:30"}'::timestamp(5)[]`, + ); + + expect(res1_1).toStrictEqual([]); + expect(res2_1).toStrictEqual([]); + expect(res3_1).toStrictEqual([]); + expect(res9_1).toStrictEqual([]); + expect(res9_3).toStrictEqual([]); + expect(res10_1).toStrictEqual([]); + expect(res12_1).toStrictEqual([]); + expect(res13_1).toStrictEqual([]); + expect(res16_1).toStrictEqual([]); + expect(res17_1).toStrictEqual([]); + expect(res18_1).toStrictEqual([]); + expect(res20_1).toStrictEqual([]); + expect(res21_1).toStrictEqual([]); + expect(res22_1).toStrictEqual([]); + expect(res24_1).toStrictEqual([]); +}); + +test.concurrent('timestamptz', async ({ dbc: db }) => { + // all dates variations + + // normal with timezone + const res5 = await diffDefault( + db, + timestamp({ mode: 'date', withTimezone: true }).default( + new Date('2025-05-23T12:53:53.115Z'), + ), + `'2025-05-23 12:53:53.115+00'`, + ); + + // precision same as in default + const res6 = await diffDefault( + db, + timestamp({ mode: 'date', precision: 3, withTimezone: true }).default( + new Date('2025-05-23T12:53:53.115Z'), + ), + `'2025-05-23 12:53:53.115+00'`, + ); + // precision is less than in default + // cockroach will store this value trimmed + // this should pass since in diff we handle it + const res7 = await diffDefault( + db, + timestamp({ mode: 'date', precision: 1, withTimezone: true }).default( + new Date('2025-05-23T12:53:53.115Z'), + ), + `'2025-05-23 12:53:53.115+00'`, + ); + + // all string variations + // normal: without timezone + const res9 = await diffDefault( + db, + timestamp({ mode: 'string', withTimezone: true }).default( + '2025-05-23T12:53:53.115', + ), + `'2025-05-23T12:53:53.115'`, + ); + const res9_2 = await diffDefault( + db, + timestamp({ mode: 'string', withTimezone: true }).default( + '2025-05-23T12:53:53', + ), + `'2025-05-23T12:53:53'`, + ); + const res9_3 = await diffDefault( + db, + timestamp({ mode: 'string', withTimezone: true }).default( + '2025-05-23T12:53:53.0', + ), + `'2025-05-23T12:53:53.0'`, + ); + // normal: timezone with custom timezone + const res12 = await diffDefault( + db, + timestamp({ mode: 'string', withTimezone: true }).default( + '2025-05-23T12:53:53.115+03', + ), + `'2025-05-23T12:53:53.115+03'`, + ); + + // precision is bigger than in default + // cockroach will not pad this + // without UTC + const res13 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5, withTimezone: true }).default( + '2025-05-23T12:53:53.115', + ), + `'2025-05-23T12:53:53.115'`, + ); + + // custom timezone + const res16 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5, withTimezone: true }).default( + '2025-05-23T12:53:53.115+04:30', + ), + `'2025-05-23T12:53:53.115+04:30'`, + ); + + // precision is less than in default + // cockroach will not trim this + // without UTC + const res17 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1, withTimezone: true }).default( + '2025-05-23T12:53:53.115', + ), + `'2025-05-23T12:53:53.115'`, + ); + + // precision is less than in default + // cockroach will store this value trimmed + // this should pass since in diff we handle it + // zero UTC + const res18 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1, withTimezone: true }).default( + '2025-05-23T12:53:53.115Z', + ), + `'2025-05-23T12:53:53.115Z'`, + ); + + const res20 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1, withTimezone: true }).default( + '2025-05-23T12:53:53.115+04:30', + ), + `'2025-05-23T12:53:53.115+04:30'`, + ); + + // precision same + // without UTC + const res21 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3, withTimezone: true }).default( + '2025-05-23T12:53:53.115', + ), + `'2025-05-23T12:53:53.115'`, + ); + + // precision same + // zero UTC + const res22 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3, withTimezone: true }).default( + '2025-05-23T12:53:53.115Z', + ), + `'2025-05-23T12:53:53.115Z'`, + ); + + // precision same + // custom timezone + const res24 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1, withTimezone: true }).default( + '2025-05-23T12:53:53.115+04:30', + ), + `'2025-05-23T12:53:53.115+04:30'`, + ); + + const res25 = await diffDefault( + db, + timestamp({ + mode: 'string', + precision: 1, + withTimezone: true, + }).defaultNow(), + `now()`, + ); + + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res7).toStrictEqual([]); + expect(res9).toStrictEqual([]); + expect(res9_2).toStrictEqual([]); + expect(res9_3).toStrictEqual([]); + expect(res12).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res16).toStrictEqual([]); + expect(res17).toStrictEqual([]); + expect(res18).toStrictEqual([]); + expect(res20).toStrictEqual([]); + expect(res21).toStrictEqual([]); + expect(res22).toStrictEqual([]); + expect(res24).toStrictEqual([]); + expect(res25).toStrictEqual([]); +}); + +test.concurrent('timestamptz arrays', async ({ dbc: db }) => { + const res5_1 = await diffDefault( + db, + timestamp({ mode: 'date', withTimezone: true }) + .array() + .default([new Date('2025-05-23T12:53:53.115Z')]), + `'{"2025-05-23 12:53:53.115+00"}'::timestamptz[]`, + ); + + const res6_1 = await diffDefault( + db, + timestamp({ mode: 'date', precision: 3, withTimezone: true }) + .array() + .default([new Date('2025-05-23T12:53:53.115Z')]), + `'{"2025-05-23 12:53:53.115+00"}'::timestamptz(3)[]`, + ); + // precision is less than in default + // cockroach will store this value trimmed + // this should pass since in diff we handle it + + const res7_1 = await diffDefault( + db, + timestamp({ mode: 'date', precision: 1, withTimezone: true }) + .array() + .default([new Date('2025-05-23T12:53:53.115Z')]), + `'{"2025-05-23 12:53:53.115+00"}'::timestamptz(1)[]`, + ); + + // all string variations + // normal: without timezone + const res9_1 = await diffDefault( + db, + timestamp({ mode: 'string', withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115']), + `'{"2025-05-23T12:53:53.115"}'::timestamptz[]`, + ); + + const res10_1 = await diffDefault( + db, + timestamp({ mode: 'string', withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115Z']), + `'{"2025-05-23T12:53:53.115Z"}'::timestamptz[]`, + ); + + const res12_1 = await diffDefault( + db, + timestamp({ mode: 'string', withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115+03']), + `'{"2025-05-23T12:53:53.115+03"}'::timestamptz[]`, + ); + + const res13_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5, withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115']), + `'{"2025-05-23T12:53:53.115"}'::timestamptz(5)[]`, + ); + const res16 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5, withTimezone: true }).default( + '2025-05-23T12:53:53.115+04:30', + ), + `'2025-05-23T12:53:53.115+04:30'`, + ); + const res16_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 5, withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115+04:30']), + `'{"2025-05-23T12:53:53.115+04:30"}'::timestamptz(5)[]`, + ); + + const res17_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1, withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115']), + `'{"2025-05-23T12:53:53.115"}'::timestamptz(1)[]`, + ); + + const res18_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1, withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115Z']), + `'{"2025-05-23T12:53:53.115Z"}'::timestamptz(1)[]`, + ); + + // precision is less than in default, cockroach will store this value trimmed, this should pass since in diff we handle it + const res20_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1, withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115+04:30']), + `'{"2025-05-23T12:53:53.115+04:30"}'::timestamptz(1)[]`, + ); + + // precision same, without UTC + const res21_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3, withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115']), + `'{"2025-05-23T12:53:53.115"}'::timestamptz(3)[]`, + ); + + // precision same, zero UTC + const res22_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 3, withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115Z']), + `'{"2025-05-23T12:53:53.115Z"}'::timestamptz(3)[]`, + ); + + // precision same + // custom timezone + const res24_1 = await diffDefault( + db, + timestamp({ mode: 'string', precision: 1, withTimezone: true }) + .array() + .default(['2025-05-23T12:53:53.115+04:30']), + `'{"2025-05-23T12:53:53.115+04:30"}'::timestamptz(1)[]`, + ); + + expect(res5_1).toStrictEqual([]); + expect(res6_1).toStrictEqual([]); + expect(res7_1).toStrictEqual([]); + expect(res9_1).toStrictEqual([]); + expect(res10_1).toStrictEqual([]); + expect(res12_1).toStrictEqual([]); + expect(res13_1).toStrictEqual([]); + expect(res16).toStrictEqual([]); + expect(res16_1).toStrictEqual([]); + expect(res17_1).toStrictEqual([]); + expect(res18_1).toStrictEqual([]); + expect(res20_1).toStrictEqual([]); + expect(res21_1).toStrictEqual([]); + expect(res22_1).toStrictEqual([]); + expect(res24_1).toStrictEqual([]); +}); + +// tests were commented since there are too many of them +test.concurrent('time', async ({ dbc: db }) => { + // normal time without precision + const res1 = await diffDefault( + db, + time().default('15:50:33'), + `'15:50:33'`, + ); + // const res1_1 = await diffDefault(db, time().default('15:50:33Z'), `'15:50:33Z'`); + // const res1_2 = await diffDefault(db, time().default('15:50:33+00'), `'15:50:33+00'`); + // const res1_3 = await diffDefault(db, time().default('15:50:33+03'), `'15:50:33+03'`); + // const res1_4 = await diffDefault(db, time().default('2025-05-23 15:50:33'), `'2025-05-23 15:50:33'`); + // const res1_5 = await diffDefault(db, time().default('2025-05-23 15:50:33Z'), `'2025-05-23 15:50:33Z'`); + // const res1_6 = await diffDefault(db, time().default('2025-05-23T15:50:33+00'), `'2025-05-23T15:50:33+00'`); + // const res1_7 = await diffDefault(db, time().default('2025-05-23 15:50:33+03'), `'2025-05-23 15:50:33+03'`); + // const res1_16 = await diffDefault(db, time().default('15:50:33.123'), `'15:50:33.123'`); + const res1_17 = await diffDefault( + db, + time().default('15:50:33.123Z'), + `'15:50:33.123Z'`, + ); + + const res1_8 = await diffDefault( + db, + time({ withTimezone: true }).default('15:50:33'), + `'15:50:33'`, + ); + // const res1_9 = await diffDefault(db, time({ withTimezone: true }).default('15:50:33Z'), `'15:50:33Z'`); + // const res1_10 = await diffDefault(db, time({ withTimezone: true }).default('15:50:33+00'), `'15:50:33+00'`); + // const res1_11 = await diffDefault(db, time({ withTimezone: true }).default('15:50:33+03'), `'15:50:33+03'`); + // const res1_12 = await diffDefault( + // db, + // time({ withTimezone: true }).default('2025-05-23 15:50:33'), + // `'2025-05-23 15:50:33'`, + // ); + // const res1_13 = await diffDefault( + // db, + // time({ withTimezone: true }).default('2025-05-23 15:50:33Z'), + // `'2025-05-23 15:50:33Z'`, + // ); + // const res1_14 = await diffDefault( + // db, + // time({ withTimezone: true }).default('2025-05-23T15:50:33+00'), + // `'2025-05-23T15:50:33+00'`, + // ); + const res1_20 = await diffDefault( + db, + time({ withTimezone: true, precision: 1 }).default('15:50:33.123+03'), + `'15:50:33.123+03'`, + ); + + // normal time with precision that is same as in default + const res2 = await diffDefault( + db, + time({ precision: 3 }).default('15:50:33.123'), + `'15:50:33.123'`, + ); + // const res2_1 = await diffDefault(db, time({ precision: 3 }).default('15:50:33.123Z'), `'15:50:33.123Z'`); + // const res2_2 = await diffDefault(db, time({ precision: 3 }).default('15:50:33.123+00'), `'15:50:33.123+00'`); + // const res2_3 = await diffDefault(db, time({ precision: 3 }).default('15:50:33.123+03'), `'15:50:33.123+03'`); + // const res2_4 = await diffDefault( + // db, + // time({ precision: 3 }).default('2025-05-23 15:50:33.123'), + // `'2025-05-23 15:50:33.123'`, + // ); + // const res2_5 = await diffDefault( + // db, + // time({ precision: 3 }).default('2025-05-23 15:50:33.123Z'), + // `'2025-05-23 15:50:33.123Z'`, + // ); + // const res2_6 = await diffDefault( + // db, + // time({ precision: 3 }).default('2025-05-23T15:50:33.123+00'), + // `'2025-05-23T15:50:33.123+00'`, + // ); + const res2_7 = await diffDefault( + db, + time({ precision: 3 }).default('2025-05-23 15:50:33.123+03'), + `'2025-05-23 15:50:33.123+03'`, + ); + + const res2_8 = await diffDefault( + db, + time({ precision: 3, withTimezone: true }).default('15:50:33.123'), + `'15:50:33.123'`, + ); + // const res2_9 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).default('15:50:33.123Z'), + // `'15:50:33.123Z'`, + // ); + // const res2_10 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).default('15:50:33.123+00'), + // `'15:50:33.123+00'`, + // ); + // const res2_11 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).default('15:50:33.123+03'), + // `'15:50:33.123+03'`, + // ); + // const res2_12 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).default('2025-05-23 15:50:33.123'), + // `'2025-05-23 15:50:33.123'`, + // ); + // const res2_13 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).default('2025-05-23 15:50:33.123Z'), + // `'2025-05-23 15:50:33.123Z'`, + // ); + // const res2_14 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).default('2025-05-23T15:50:33.123+00'), + // `'2025-05-23T15:50:33.123+00'`, + // ); + const res2_15 = await diffDefault( + db, + time({ precision: 3, withTimezone: true }).default( + '2025-05-23 15:50:33.123+03', + ), + `'2025-05-23 15:50:33.123+03'`, + ); + + // normal time with precision that is less than in default + const res3 = await diffDefault( + db, + time({ precision: 1 }).default('15:50:33.123'), + `'15:50:33.123'`, + ); + // const res3_1 = await diffDefault(db, time({ precision: 1 }).default('15:50:33.123Z'), `'15:50:33.123Z'`); + // const res3_2 = await diffDefault(db, time({ precision: 1 }).default('15:50:33.123+00'), `'15:50:33.123+00'`); + // const res3_3 = await diffDefault(db, time({ precision: 1 }).default('15:50:33.123+03'), `'15:50:33.123+03'`); + // const res3_4 = await diffDefault( + // db, + // time({ precision: 1 }).default('2025-05-23 15:50:33.123'), + // `'2025-05-23 15:50:33.123'`, + // ); + // const res3_5 = await diffDefault( + // db, + // time({ precision: 1 }).default('2025-05-23 15:50:33.123Z'), + // `'2025-05-23 15:50:33.123Z'`, + // ); + // const res3_6 = await diffDefault( + // db, + // time({ precision: 1 }).default('2025-05-23T15:50:33.123+00'), + // `'2025-05-23T15:50:33.123+00'`, + // ); + const res3_7 = await diffDefault( + db, + time({ precision: 1 }).default('2025-05-23 15:50:33.123+03'), + `'2025-05-23 15:50:33.123+03'`, + ); + + const res3_8 = await diffDefault( + db, + time({ precision: 1, withTimezone: true }).default('15:50:33.123'), + `'15:50:33.123'`, + ); + // const res3_9 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).default('15:50:33.123Z'), + // `'15:50:33.123Z'`, + // ); + // const res3_10 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).default('15:50:33.123+00'), + // `'15:50:33.123+00'`, + // ); + // const res3_11 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).default('15:50:33.123+03'), + // `'15:50:33.123+03'`, + // ); + // const res3_12 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).default('2025-05-23 15:50:33.123'), + // `'2025-05-23 15:50:33.123'`, + // ); + // const res3_13 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).default('2025-05-23 15:50:33.123Z'), + // `'2025-05-23 15:50:33.123Z'`, + // ); + // const res3_14 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).default('2025-05-23T15:50:33.123+00'), + // `'2025-05-23T15:50:33.123+00'`, + // ); + const res3_15 = await diffDefault( + db, + time({ precision: 1, withTimezone: true }).default( + '2025-05-23 15:50:33.123+03', + ), + `'2025-05-23 15:50:33.123+03'`, + ); + + // normal time with precision that is bigger than in default + const res4 = await diffDefault( + db, + time({ precision: 5 }).default('15:50:33.123'), + `'15:50:33.123'`, + ); + // const res4_1 = await diffDefault(db, time({ precision: 5 }).default('15:50:33.123Z'), `'15:50:33.123Z'`); + // const res4_2 = await diffDefault(db, time({ precision: 5 }).default('15:50:33.123+00'), `'15:50:33.123+00'`); + // const res4_3 = await diffDefault(db, time({ precision: 5 }).default('15:50:33.123+03'), `'15:50:33.123+03'`); + // const res4_4 = await diffDefault( + // db, + // time({ precision: 5 }).default('2025-05-23 15:50:33.123'), + // `'2025-05-23 15:50:33.123'`, + // ); + // const res4_5 = await diffDefault( + // db, + // time({ precision: 5 }).default('2025-05-23 15:50:33.123Z'), + // `'2025-05-23 15:50:33.123Z'`, + // ); + // const res4_6 = await diffDefault( + // db, + // time({ precision: 5 }).default('2025-05-23T15:50:33.123+00'), + // `'2025-05-23T15:50:33.123+00'`, + // ); + const res4_7 = await diffDefault( + db, + time({ precision: 5 }).default('2025-05-23 15:50:33.123+03'), + `'2025-05-23 15:50:33.123+03'`, + ); + + const res4_8 = await diffDefault( + db, + time({ precision: 5, withTimezone: true }).default('15:50:33.123'), + `'15:50:33.123'`, + ); + // const res4_9 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).default('15:50:33.123Z'), + // `'15:50:33.123Z'`, + // ); + // const res4_10 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).default('15:50:33.123+00'), + // `'15:50:33.123+00'`, + // ); + // const res4_11 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).default('15:50:33.123+03'), + // `'15:50:33.123+03'`, + // ); + // const res4_12 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).default('2025-05-23 15:50:33.123'), + // `'2025-05-23 15:50:33.123'`, + // ); + // const res4_13 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).default('2025-05-23 15:50:33.123Z'), + // `'2025-05-23 15:50:33.123Z'`, + // ); + // const res4_14 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).default('2025-05-23T15:50:33.123+00'), + // `'2025-05-23T15:50:33.123+00'`, + // ); + const res4_15 = await diffDefault( + db, + time({ precision: 5, withTimezone: true }).default( + '2025-05-23 15:50:33.123+03', + ), + `'2025-05-23 15:50:33.123+03'`, + ); + + expect(res1).toStrictEqual([]); + // expect(res1_1).toStrictEqual([]); + // expect(res1_2).toStrictEqual([]); + // expect(res1_3).toStrictEqual([]); + // expect(res1_4).toStrictEqual([]); + // expect(res1_5).toStrictEqual([]); + // expect(res1_6).toStrictEqual([]); + // expect(res1_7).toStrictEqual([]); + expect(res1_8).toStrictEqual([]); + // expect(res1_9).toStrictEqual([]); + // expect(res1_10).toStrictEqual([]); + // expect(res1_11).toStrictEqual([]); + // expect(res1_12).toStrictEqual([]); + // expect(res1_13).toStrictEqual([]); + // expect(res1_14).toStrictEqual([]); + // expect(res1_16).toStrictEqual([]); + expect(res1_17).toStrictEqual([]); + expect(res1_20).toStrictEqual([]); + + expect(res2).toStrictEqual([]); + // expect(res2_1).toStrictEqual([]); + // expect(res2_2).toStrictEqual([]); + // expect(res2_3).toStrictEqual([]); + // expect(res2_4).toStrictEqual([]); + // expect(res2_5).toStrictEqual([]); + // expect(res2_6).toStrictEqual([]); + expect(res2_7).toStrictEqual([]); + expect(res2_8).toStrictEqual([]); + // expect(res2_9).toStrictEqual([]); + // expect(res2_10).toStrictEqual([]); + // expect(res2_11).toStrictEqual([]); + // expect(res2_12).toStrictEqual([]); + // expect(res2_13).toStrictEqual([]); + // expect(res2_14).toStrictEqual([]); + expect(res2_15).toStrictEqual([]); + + expect(res3).toStrictEqual([]); + // expect(res3_1).toStrictEqual([]); + // expect(res3_2).toStrictEqual([]); + // expect(res3_3).toStrictEqual([]); + // expect(res3_4).toStrictEqual([]); + // expect(res3_5).toStrictEqual([]); + // expect(res3_6).toStrictEqual([]); + expect(res3_7).toStrictEqual([]); + expect(res3_8).toStrictEqual([]); + // expect(res3_9).toStrictEqual([]); + // expect(res3_10).toStrictEqual([]); + // expect(res3_11).toStrictEqual([]); + // expect(res3_12).toStrictEqual([]); + // expect(res3_13).toStrictEqual([]); + // expect(res3_14).toStrictEqual([]); + expect(res3_15).toStrictEqual([]); + + expect(res4).toStrictEqual([]); + // expect(res4_1).toStrictEqual([]); + // expect(res4_2).toStrictEqual([]); + // expect(res4_3).toStrictEqual([]); + // expect(res4_4).toStrictEqual([]); + // expect(res4_5).toStrictEqual([]); + // expect(res4_6).toStrictEqual([]); + expect(res4_7).toStrictEqual([]); + expect(res4_8).toStrictEqual([]); + // expect(res4_9).toStrictEqual([]); + // expect(res4_10).toStrictEqual([]); + // expect(res4_11).toStrictEqual([]); + // expect(res4_12).toStrictEqual([]); + // expect(res4_13).toStrictEqual([]); + // expect(res4_14).toStrictEqual([]); + expect(res4_15).toStrictEqual([]); +}); + +test.concurrent('time arrays', async ({ dbc: db }) => { + // normal array time without precision + const res5 = await diffDefault( + db, + time().array().default(['15:50:33']), + `'{15:50:33}'::time[]`, + ); + // const res5_1 = await diffDefault(db, time().array().default(['15:50:33Z']), `'{15:50:33Z}'::time[]`); + // const res5_2 = await diffDefault(db, time().array().default(['15:50:33+00']), `'{15:50:33+00}'::time[]`); + // const res5_3 = await diffDefault(db, time().array().default(['15:50:33+03']), `'{15:50:33+03}'::time[]`); + // const res5_4 = await diffDefault( + // db, + // time().array().default(['2025-05-23 15:50:33']), + // `'{2025-05-23 15:50:33}'::time[]`, + // ); + // const res5_5 = await diffDefault( + // db, + // time().array().default(['2025-05-23 15:50:33Z']), + // `'{2025-05-23 15:50:33Z}'::time[]`, + // ); + // const res5_6 = await diffDefault( + // db, + // time().array().default(['2025-05-23T15:50:33+00']), + // `'{2025-05-23T15:50:33+00}'::time[]`, + // ); + const res5_7 = await diffDefault( + db, + time().array().default(['2025-05-23 15:50:33+03']), + `'{2025-05-23 15:50:33+03}'::time[]`, + ); + + const res5_8 = await diffDefault( + db, + time({ withTimezone: true }).array().default(['15:50:33']), + `'{15:50:33}'::timetz[]`, + ); + // const res5_9 = await diffDefault( + // db, + // time({ withTimezone: true }).array().default(['15:50:33Z']), + // `'{15:50:33Z}'::timetz[]`, + // ); + // const res5_10 = await diffDefault( + // db, + // time({ withTimezone: true }).array().default(['15:50:33+00']), + // `'{15:50:33+00}'::timetz[]`, + // ); + // const res5_11 = await diffDefault( + // db, + // time({ withTimezone: true }).array().default(['15:50:33+03']), + // `'{15:50:33+03}'::timetz[]`, + // ); + // const res5_12 = await diffDefault( + // db, + // time({ withTimezone: true }).array().default(['2025-05-23 15:50:33']), + // `'{2025-05-23 15:50:33}'::timetz[]`, + // ); + // const res5_13 = await diffDefault( + // db, + // time({ withTimezone: true }).array().default(['2025-05-23 15:50:33Z']), + // `'{2025-05-23 15:50:33Z}'::timetz[]`, + // ); + // const res5_14 = await diffDefault( + // db, + // time({ withTimezone: true }).array().default(['2025-05-23T15:50:33+00']), + // `'{2025-05-23T15:50:33+00}'::timetz[]`, + // ); + const res5_15 = await diffDefault( + db, + time({ withTimezone: true }).array().default(['2025-05-23 15:50:33+03']), + `'{2025-05-23 15:50:33+03}'::timetz[]`, + ); + + // normal array time with precision that is same as in default + const res6 = await diffDefault( + db, + time({ precision: 3 }).array().default(['15:50:33.123']), + `'{15:50:33.123}'::time(3)[]`, + ); + // const res6_1 = await diffDefault( + // db, + // time({ precision: 3 }).array().default(['15:50:33.123Z']), + // `'{15:50:33.123Z}'::time(3)[]`, + // ); + // const res6_2 = await diffDefault( + // db, + // time({ precision: 3 }).array().default(['15:50:33.123+00']), + // `'{15:50:33.123+00}'::time(3)[]`, + // ); + // const res6_3 = await diffDefault( + // db, + // time({ precision: 3 }).array().default(['15:50:33.123+03']), + // `'{15:50:33.123+03}'::time(3)[]`, + // ); + // const res6_4 = await diffDefault( + // db, + // time({ precision: 3 }).array().default(['2025-05-23 15:50:33.123']), + // `'{2025-05-23 15:50:33.123}'::time(3)[]`, + // ); + // const res6_5 = await diffDefault( + // db, + // time({ precision: 3 }).array().default(['2025-05-23 15:50:33.123Z']), + // `'{2025-05-23 15:50:33.123Z}'::time(3)[]`, + // ); + // const res6_6 = await diffDefault( + // db, + // time({ precision: 3 }).array().default(['2025-05-23T15:50:33.123+00']), + // `'{2025-05-23T15:50:33.123+00}'::time(3)[]`, + // ); + const res6_7 = await diffDefault( + db, + time({ precision: 3 }).array().default(['2025-05-23 15:50:33.123+03']), + `'{2025-05-23 15:50:33.123+03}'::time(3)[]`, + ); + + const res6_8 = await diffDefault( + db, + time({ precision: 3, withTimezone: true }) + .array() + .default(['15:50:33.123']), + `'{15:50:33.123}'::timetz(3)[]`, + ); + // const res6_9 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).array().default(['15:50:33.123Z']), + // `'{15:50:33.123Z}'::timetz(3)[]`, + // ); + // const res6_10 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).array().default(['15:50:33.123+00']), + // `'{15:50:33.123+00}'::timetz(3)[]`, + // ); + // const res6_11 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).array().default(['15:50:33.123+03']), + // `'{15:50:33.123+03}'::timetz(3)[]`, + // ); + // const res6_12 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).array().default(['2025-05-23 15:50:33.123']), + // `'{2025-05-23 15:50:33.123}'::timetz(3)[]`, + // ); + // const res6_13 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).array().default(['2025-05-23 15:50:33.123Z']), + // `'{2025-05-23 15:50:33.123Z}'::timetz(3)[]`, + // ); + // const res6_14 = await diffDefault( + // db, + // time({ precision: 3, withTimezone: true }).array().default(['2025-05-23T15:50:33.123+00']), + // `'{2025-05-23T15:50:33.123+00}'::timetz(3)[]`, + // ); + const res6_15 = await diffDefault( + db, + time({ precision: 3, withTimezone: true }) + .array() + .default(['2025-05-23 15:50:33.123+03']), + `'{2025-05-23 15:50:33.123+03}'::timetz(3)[]`, + ); + + // normal array time with precision that is less than in default + const res7 = await diffDefault( + db, + time({ precision: 1 }).array().default(['15:50:33.123']), + `'{15:50:33.123}'::time(1)[]`, + ); + // const res7_1 = await diffDefault( + // db, + // time({ precision: 1 }).array().default(['15:50:33.123Z']), + // `'{15:50:33.123Z}'::time(1)[]`, + // ); + // const res7_2 = await diffDefault( + // db, + // time({ precision: 1 }).array().default(['15:50:33.123+00']), + // `'{15:50:33.123+00}'::time(1)[]`, + // ); + // const res7_3 = await diffDefault( + // db, + // time({ precision: 1 }).array().default(['15:50:33.123+03']), + // `'{15:50:33.123+03}'::time(1)[]`, + // ); + // const res7_4 = await diffDefault( + // db, + // time({ precision: 1 }).array().default(['2025-05-23 15:50:33.123']), + // `'{2025-05-23 15:50:33.123}'::time(1)[]`, + // ); + // const res7_5 = await diffDefault( + // db, + // time({ precision: 1 }).array().default(['2025-05-23 15:50:33.123Z']), + // `'{2025-05-23 15:50:33.123Z}'::time(1)[]`, + // ); + // const res7_6 = await diffDefault( + // db, + // time({ precision: 1 }).array().default(['2025-05-23T15:50:33.123+00']), + // `'{2025-05-23T15:50:33.123+00}'::time(1)[]`, + // ); + const res7_7 = await diffDefault( + db, + time({ precision: 1 }).array().default(['2025-05-23 15:50:33.123+03']), + `'{2025-05-23 15:50:33.123+03}'::time(1)[]`, + ); + + const res7_8 = await diffDefault( + db, + time({ precision: 1, withTimezone: true }) + .array() + .default(['15:50:33.123']), + `'{15:50:33.123}'::timetz(1)[]`, + ); + // const res7_9 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).array().default(['15:50:33.123Z']), + // `'{15:50:33.123Z}'::timetz(1)[]`, + // ); + // const res7_10 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).array().default(['15:50:33.123+00']), + // `'{15:50:33.123+00}'::timetz(1)[]`, + // ); + // const res7_11 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).array().default(['15:50:33.123+03']), + // `'{15:50:33.123+03}'::timetz(1)[]`, + // ); + // const res7_12 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).array().default(['2025-05-23 15:50:33.123']), + // `'{2025-05-23 15:50:33.123}'::timetz(1)[]`, + // ); + // const res7_13 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).array().default(['2025-05-23 15:50:33.123Z']), + // `'{2025-05-23 15:50:33.123Z}'::timetz(1)[]`, + // ); + // const res7_14 = await diffDefault( + // db, + // time({ precision: 1, withTimezone: true }).array().default(['2025-05-23T15:50:33.123+00']), + // `'{2025-05-23T15:50:33.123+00}'::timetz(1)[]`, + // ); + const res7_15 = await diffDefault( + db, + time({ precision: 1, withTimezone: true }) + .array() + .default(['2025-05-23 15:50:33.123+03']), + `'{2025-05-23 15:50:33.123+03}'::timetz(1)[]`, + ); + + // normal array time with precision that is bigger than in default + const res8 = await diffDefault( + db, + time({ precision: 5 }).array().default(['15:50:33.123']), + `'{15:50:33.123}'::time(5)[]`, + ); + // const res8_1 = await diffDefault( + // db, + // time({ precision: 5 }).array().default(['15:50:33.123Z']), + // `'{15:50:33.123Z}'::time(5)[]`, + // ); + // const res8_2 = await diffDefault( + // db, + // time({ precision: 5 }).array().default(['15:50:33.123+00']), + // `'{15:50:33.123+00}'::time(5)[]`, + // ); + // const res8_3 = await diffDefault( + // db, + // time({ precision: 5 }).array().default(['15:50:33.123+03']), + // `'{15:50:33.123+03}'::time(5)[]`, + // ); + // const res8_4 = await diffDefault( + // db, + // time({ precision: 5 }).array().default(['2025-05-23 15:50:33.123']), + // `'{2025-05-23 15:50:33.123}'::time(5)[]`, + // ); + // const res8_5 = await diffDefault( + // db, + // time({ precision: 5 }).array().default(['2025-05-23 15:50:33.123Z']), + // `'{2025-05-23 15:50:33.123Z}'::time(5)[]`, + // ); + // const res8_6 = await diffDefault( + // db, + // time({ precision: 5 }).array().default(['2025-05-23T15:50:33.123+00']), + // `'{2025-05-23T15:50:33.123+00}'::time(5)[]`, + // ); + const res8_7 = await diffDefault( + db, + time({ precision: 5 }).array().default(['2025-05-23 15:50:33.123+03']), + `'{2025-05-23 15:50:33.123+03}'::time(5)[]`, + ); + + const res8_8 = await diffDefault( + db, + time({ precision: 5, withTimezone: true }) + .array() + .default(['15:50:33.123']), + `'{15:50:33.123}'::timetz(5)[]`, + ); + // const res8_9 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).array().default(['15:50:33.123Z']), + // `'{15:50:33.123Z}'::timetz(5)[]`, + // ); + // const res8_10 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).array().default(['15:50:33.123+00']), + // `'{15:50:33.123+00}'::timetz(5)[]`, + // ); + // const res8_11 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).array().default(['15:50:33.123+03']), + // `'{15:50:33.123+03}'::timetz(5)[]`, + // ); + // const res8_12 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).array().default(['2025-05-23 15:50:33.123']), + // `'{2025-05-23 15:50:33.123}'::timetz(5)[]`, + // ); + // const res8_13 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).array().default(['2025-05-23 15:50:33.123Z']), + // `'{2025-05-23 15:50:33.123Z}'::timetz(5)[]`, + // ); + // const res8_14 = await diffDefault( + // db, + // time({ precision: 5, withTimezone: true }).array().default(['2025-05-23T15:50:33.123+00']), + // `'{2025-05-23T15:50:33.123+00}'::timetz(5)[]`, + // ); + const res8_15 = await diffDefault( + db, + time({ precision: 5, withTimezone: true }) + .array() + .default(['2025-05-23 15:50:33.123+03']), + `'{2025-05-23 15:50:33.123+03}'::timetz(5)[]`, + ); + + expect(res5).toStrictEqual([]); + // expect(res5_1).toStrictEqual([]); + // expect(res5_2).toStrictEqual([]); + // expect(res5_3).toStrictEqual([]); + // expect(res5_4).toStrictEqual([]); + // expect(res5_5).toStrictEqual([]); + // expect(res5_6).toStrictEqual([]); + expect(res5_7).toStrictEqual([]); + expect(res5_8).toStrictEqual([]); + // expect(res5_9).toStrictEqual([]); + // expect(res5_10).toStrictEqual([]); + // expect(res5_11).toStrictEqual([]); + // expect(res5_12).toStrictEqual([]); + // expect(res5_13).toStrictEqual([]); + // expect(res5_14).toStrictEqual([]); + expect(res5_15).toStrictEqual([]); + + expect(res6).toStrictEqual([]); + // expect(res6_1).toStrictEqual([]); + // expect(res6_2).toStrictEqual([]); + // expect(res6_3).toStrictEqual([]); + // expect(res6_4).toStrictEqual([]); + // expect(res6_5).toStrictEqual([]); + // expect(res6_6).toStrictEqual([]); + expect(res6_7).toStrictEqual([]); + expect(res6_8).toStrictEqual([]); + // expect(res6_9).toStrictEqual([]); + // expect(res6_10).toStrictEqual([]); + // expect(res6_11).toStrictEqual([]); + // expect(res6_12).toStrictEqual([]); + // expect(res6_13).toStrictEqual([]); + // expect(res6_14).toStrictEqual([]); + expect(res6_15).toStrictEqual([]); + + expect(res7).toStrictEqual([]); + // expect(res7_1).toStrictEqual([]); + // expect(res7_2).toStrictEqual([]); + // expect(res7_3).toStrictEqual([]); + // expect(res7_4).toStrictEqual([]); + // expect(res7_5).toStrictEqual([]); + // expect(res7_6).toStrictEqual([]); + expect(res7_7).toStrictEqual([]); + expect(res7_8).toStrictEqual([]); + // expect(res7_9).toStrictEqual([]); + // expect(res7_10).toStrictEqual([]); + // expect(res7_11).toStrictEqual([]); + // expect(res7_12).toStrictEqual([]); + // expect(res7_13).toStrictEqual([]); + // expect(res7_14).toStrictEqual([]); + expect(res7_15).toStrictEqual([]); + + expect(res8).toStrictEqual([]); + // expect(res8_1).toStrictEqual([]); + // expect(res8_2).toStrictEqual([]); + // expect(res8_3).toStrictEqual([]); + // expect(res8_4).toStrictEqual([]); + // expect(res8_5).toStrictEqual([]); + // expect(res8_6).toStrictEqual([]); + expect(res8_7).toStrictEqual([]); + expect(res8_8).toStrictEqual([]); + // expect(res8_9).toStrictEqual([]); + // expect(res8_10).toStrictEqual([]); + // expect(res8_11).toStrictEqual([]); + // expect(res8_12).toStrictEqual([]); + // expect(res8_13).toStrictEqual([]); + // expect(res8_14).toStrictEqual([]); + expect(res8_15).toStrictEqual([]); +}); + +test.concurrent('date', async ({ dbc: db }) => { + // dates + const res1 = await diffDefault( + db, + date({ mode: 'date' }).default(new Date('2025-05-23')), + `'2025-05-23'`, + ); + const res1_1 = await diffDefault( + db, + date({ mode: 'date' }).default(new Date('2025-05-23T12:12:31.213')), + `'2025-05-23'`, + ); + const res1_2 = await diffDefault( + db, + date({ mode: 'date' }).defaultNow(), + `now()`, + ); + + const res2_1 = await diffDefault( + db, + date({ mode: 'date' }).default(new Date('2025-05-23')), + `'2025-05-23'`, + ); + const res2_2 = await diffDefault( + db, + date({ mode: 'date' }).default(new Date('2025-05-23T12:12:31.213')), + `'2025-05-23'`, + ); + const res2_3 = await diffDefault( + db, + date({ mode: 'date' }).defaultNow(), + `now()`, + ); + + // strings + const res3 = await diffDefault( + db, + date({ mode: 'string' }).default('2025-05-23'), + `'2025-05-23'`, + ); + const res3_1 = await diffDefault( + db, + date({ mode: 'string' }).default('2025-05-23T12:12:31.213'), + `'2025-05-23T12:12:31.213'`, + ); + const res3_2 = await diffDefault( + db, + date({ mode: 'string' }).defaultNow(), + `now()`, + ); + const res3_3 = await diffDefault( + db, + date({ mode: 'string' }).default('2025-05-23 12:12:31.213+01:00'), + `'2025-05-23 12:12:31.213+01:00'`, + ); + + expect(res1).toStrictEqual([]); + expect(res1_1).toStrictEqual([]); + expect(res1_2).toStrictEqual([]); + + expect(res2_1).toStrictEqual([]); + expect(res2_2).toStrictEqual([]); + expect(res2_3).toStrictEqual([]); + + expect(res3).toStrictEqual([]); + expect(res3_1).toStrictEqual([]); + expect(res3_2).toStrictEqual([]); + expect(res3_3).toStrictEqual([]); +}); + +test.concurrent('date arrays', async ({ dbc: db }) => { + const res2 = await diffDefault( + db, + date({ mode: 'date' }).array().default([]), + `'{}'::date[]`, + ); + + const res4 = await diffDefault( + db, + date({ mode: 'string' }).array().default(['2025-05-23']), + `'{2025-05-23}'::date[]`, + ); + const res4_1 = await diffDefault( + db, + date({ mode: 'string' }).array().default(['2025-05-23T12:12:31.213']), + `'{2025-05-23T12:12:31.213}'::date[]`, + ); + const res4_2 = await diffDefault( + db, + date({ mode: 'string' }).array().default(['2025-05-23 12:12:31.213+01:00']), + `'{2025-05-23 12:12:31.213+01:00}'::date[]`, + ); + + expect(res2).toStrictEqual([]); + + expect(res4).toStrictEqual([]); + expect(res4_1).toStrictEqual([]); + expect(res4_2).toStrictEqual([]); +}); + +// This is not handled the way cockroach stores it +// since user can pass `1 2:3:4` and it will be stored as `1 day 02:03:04` +// so we just compare row values +// | This text is a duplicate from cockroach/grammar.ts | +test.concurrent('interval', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + interval().default('1 day'), + `'1 day'`, + ); + const res10 = await diffDefault( + db, + interval({ fields: 'day to second', precision: 3 }).default( + '1 day 3 second', + ), + `'1 day 3 second'`, + ); + + expect(res1).toStrictEqual([]); + // it's ok, that's due to '1 day 3 second' vs '1 day 00:00:03' + expect(res10.length).toBe(1); +}); + +test.concurrent('interval arrays', async ({ dbc: db }) => { + const res2 = await diffDefault( + db, + interval().array().default([]), + `'{}'::interval[]`, + ); + const res20 = await diffDefault( + db, + interval({ fields: 'day to second', precision: 3 }).array().default([]), + `'{}'::interval day to second(3)[]`, + ); + + const res3 = await diffDefault( + db, + interval().array().default(['1 day']), + `'{"1 day"}'::interval[]`, + ); + const res30 = await diffDefault( + db, + interval({ fields: 'day to second', precision: 3 }) + .array() + .default(['1 day 3 second']), + `'{"1 day 3 second"}'::interval day to second(3)[]`, + ); + + expect(res2).toStrictEqual([]); + expect(res20).toStrictEqual([]); + expect(res3).toStrictEqual([]); + // it's ok, that's due to '1 day 3 second' vs '1 day 00:00:03' + expect(res30.length).toBe(1); +}); + +test.concurrent('enum', async ({ dbc: db }) => { + const moodEnum = cockroachEnum('mood_enum', [ + 'sad', + 'ok', + 'happy', + `text'text`, + `text"text`, + `text\\text`, + `text,text`, + `no,''"\`rm`, + `mo''",\\\`}{od`, + `mo''"\\\\\\\`}{od`, + 'mo,\`od', + ]); + const pre = { moodEnum }; + + const res1 = await diffDefault( + db, + moodEnum().default('ok'), + `'ok'::"mood_enum"`, + { pre }, + ); + const res2 = await diffDefault( + db, + moodEnum().default(`text'text`), + `e'text\\'text'::"mood_enum"`, + { pre }, + ); + const res3 = await diffDefault( + db, + moodEnum().default('text"text'), + `'text"text'::"mood_enum"`, + { pre }, + ); + const res4 = await diffDefault( + db, + moodEnum().default('text\\text'), + `e'text\\\\text'::"mood_enum"`, + { pre }, + ); + const res5 = await diffDefault( + db, + moodEnum().default('text,text'), + `'text,text'::"mood_enum"`, + { pre }, + ); + const res6 = await diffDefault( + db, + moodEnum().default(`mo''"\\\\\\\`}{od`), + `e'mo\\'\\'"\\\\\\\\\\\\\`}{od'::"mood_enum"`, + { pre }, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); +}); + +test.concurrent('enum arrays', async ({ dbc: db }) => { + const moodEnum = cockroachEnum('mood_enum', [ + 'sad', + 'ok', + 'happy', + `text'text`, + `text"text`, + `text\\text`, + `text,text`, + `no,''"\`rm`, + `mo''",\\\`}{od`, + `mo''"\\\\\\\`}{od`, + 'mo,\`od', + ]); + const pre = { moodEnum }; + + const res1_1 = await diffDefault( + db, + moodEnum().array().default(['ok']), + `'{ok}'::"mood_enum"[]`, + { pre }, + ); + const res1_2 = await diffDefault( + db, + moodEnum().array().default(['sad']), + `'{sad}'::"mood_enum"[]`, + { pre }, + ); + const res2_1 = await diffDefault( + db, + moodEnum().array().default([`text'text`]), + `'{"text''text"}'::"mood_enum"[]`, + { pre }, + ); + const res3_1 = await diffDefault( + db, + moodEnum().array().default(['text"text']), + `'{"text\\"text"}'::"mood_enum"[]`, + { pre }, + ); + const res4_1 = await diffDefault( + db, + moodEnum().array().default(['text\\text']), + `'{"text\\\\text"}'::"mood_enum"[]`, + { pre }, + ); + const res6_1 = await diffDefault( + db, + moodEnum().array().default([`mo''"\\\\\\\`}{od`]), + `'{"mo''''\\"\\\\\\\\\\\\\`}{od"}'::"mood_enum"[]`, + { pre }, + ); + + expect(res1_1).toStrictEqual([]); + expect(res1_2).toStrictEqual([]); + expect(res2_1).toStrictEqual([]); + expect(res3_1).toStrictEqual([]); + expect(res4_1).toStrictEqual([]); + expect(res6_1).toStrictEqual([]); +}); + +test.concurrent('uuid', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + uuid().default('550e8400-e29b-41d4-a716-446655440000'), + `'550e8400-e29b-41d4-a716-446655440000'`, + ); + + const res5 = await diffDefault( + db, + uuid().defaultRandom(), + `gen_random_uuid()`, + ); + + expect(res1).toStrictEqual([]); + expect(res5).toStrictEqual([]); +}); + +test.concurrent('uuid arrays', async ({ dbc: db }) => { + const res2 = await diffDefault( + db, + uuid().array().default([]), + `'{}'::uuid[]`, + ); + + const res4 = await diffDefault( + db, + uuid().array().default(['550e8400-e29b-41d4-a716-446655440000']), + `'{550e8400-e29b-41d4-a716-446655440000}'::uuid[]`, + ); + + const res6 = await diffDefault( + db, + uuid() + .array() + .default(sql`'{550e8400-e29b-41d4-a716-446655440001}'`), + `'{550e8400-e29b-41d4-a716-446655440001}'::uuid[]`, + ); + + const res7 = await diffDefault( + db, + uuid() + .array() + .default(sql`'{550e8400-e29b-41d4-a716-446655440002}'::uuid[]`), + `'{550e8400-e29b-41d4-a716-446655440002}'::uuid[]`, + ); + + expect(res2).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res7).toStrictEqual([]); +}); + +test.concurrent('bit', async ({ dbc: db }) => { + const res1 = await diffDefault(db, bit().default(`101`), `'101'`); + const res2 = await diffDefault( + db, + bit().default(`1010010010`), + `'1010010010'`, + ); + + const res3 = await diffDefault( + db, + bit({ length: 4 }).default(`101`), + `'101'`, + ); + const res4 = await diffDefault( + db, + bit({ length: 4 }).default(`1010010010`), + `'1010010010'`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); +}); + +test.concurrent('bit arrays', async ({ dbc: db }) => { + const res5 = await diffDefault( + db, + bit().array().default([]), + `'{}'::bit[]`, + ); + const res6 = await diffDefault( + db, + bit().array().default([`101`]), + `'{101}'::bit[]`, + ); + + const res7 = await diffDefault( + db, + bit({ length: 3 }).array().default([]), + `'{}'::bit(3)[]`, + ); + const res8 = await diffDefault( + db, + bit({ length: 3 }).array().default([`10110`]), + `'{10110}'::bit(3)[]`, + ); + + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res7).toStrictEqual([]); + expect(res8).toStrictEqual([]); +}); + +test.concurrent('varbit', async ({ dbc: db }) => { + const res1 = await diffDefault(db, varbit().default(`101`), `'101'`); + const res2 = await diffDefault( + db, + varbit().default(`1010010010`), + `'1010010010'`, + ); + + const res3 = await diffDefault( + db, + varbit({ length: 4 }).default(`101`), + `'101'`, + ); + const res4 = await diffDefault( + db, + varbit({ length: 4 }).default(`1010010010`), + `'1010010010'`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); +}); + +test.concurrent('varbit arrays', async ({ dbc: db }) => { + const res5 = await diffDefault( + db, + varbit().array().default([]), + `'{}'::varbit[]`, + ); + const res6 = await diffDefault( + db, + varbit().array().default([`101`]), + `'{101}'::varbit[]`, + ); + + const res7 = await diffDefault( + db, + varbit({ length: 3 }).array().default([]), + `'{}'::varbit(3)[]`, + ); + const res8 = await diffDefault( + db, + varbit({ length: 3 }).array().default([`10110`]), + `'{10110}'::varbit(3)[]`, + ); + + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res7).toStrictEqual([]); + expect(res8).toStrictEqual([]); +}); + +test.concurrent('vector', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + vector({ dimensions: 3 }).default([0, -2, 3]), + `'[0,-2,3]'`, + ); + const res2 = await diffDefault( + db, + vector({ dimensions: 1 }).default([0.0]), + `'[0]'`, + ); + const res3 = await diffDefault( + db, + vector({ dimensions: 5 }).default([0.0, 1.321, 5.21, 521.4, 4.0]), + `'[0,1.321,5.21,521.4,4]'`, + ); + const res4 = await diffDefault( + db, + vector({ dimensions: 3 }).default([0, -2.12345, 3.123456]), + `'[0,-2.12345,3.123456]'`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); +}); + +test.concurrent('inet', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + inet().default('127.0.0.1'), + `'127.0.0.1'`, + ); + const res2 = await diffDefault( + db, + inet().default('::ffff:192.168.0.1/96'), + `'::ffff:192.168.0.1/96'`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); +}); + +test.concurrent('inet arrays', async ({ dbc: db }) => { + const res1_1 = await diffDefault( + db, + inet().array().default(['127.0.0.1']), + `'{127.0.0.1}'::inet[]`, + ); + const res2_1 = await diffDefault( + db, + inet().array().default(['::ffff:192.168.0.1/96']), + `'{::ffff:192.168.0.1/96}'::inet[]`, + ); + + expect(res1_1).toStrictEqual([]); + expect(res2_1).toStrictEqual([]); +}); + +// postgis extension +// SRID=4326 -> these coordinates are longitude/latitude values +test.concurrent('geometry', async ({ dbc: db }) => { + const res1 = await diffDefault( + db, + geometry({ srid: 4326, mode: 'tuple', type: 'point' }).default([ + 30.5234, + 50.4501, + ]), + `'SRID=4326;POINT(30.5234 50.4501)'`, + ); + + const res2 = await diffDefault( + db, + geometry({ srid: 4326, mode: 'xy', type: 'point' }).default({ + x: 30.5234, + y: 50.4501, + }), + `'SRID=4326;POINT(30.5234 50.4501)'`, + ); + + const res11 = await diffDefault( + db, + geometry({ mode: 'xy', type: 'point' }).default({ x: 30.5234, y: 50.4501 }), + `'POINT(30.5234 50.4501)'`, + ); + + const res12 = await diffDefault( + db, + geometry({ mode: 'xy', type: 'point' }).default( + sql`'SRID=4326;POINT(10 10)'`, + ), + `'SRID=4326;POINT(10 10)'`, + ); + + expect(res1).toStrictEqual([]); + expect(res2).toStrictEqual([]); + expect(res11).toStrictEqual([]); + expect(res12).toStrictEqual([]); +}); + +test.concurrent('geometry arrays', async ({ dbc: db }) => { + const res3 = await diffDefault( + db, + geometry({ srid: 4326, mode: 'tuple', type: 'point' }).array().default([]), + `'{}'::geometry(point,4326)[]`, + ); + const res4 = await diffDefault( + db, + geometry({ srid: 4326, mode: 'tuple', type: 'point' }) + .array() + .default([[30.5234, 50.4501]]), + `'{SRID=4326;POINT(30.5234 50.4501)}'::geometry(point,4326)[]`, + ); + + const res5 = await diffDefault( + db, + geometry({ srid: 4326, mode: 'xy', type: 'point' }).array().default([]), + `'{}'::geometry(point,4326)[]`, + ); + const res6 = await diffDefault( + db, + geometry({ srid: 4326, mode: 'xy', type: 'point' }) + .array() + .default([{ x: 30.5234, y: 50.4501 }]), + `'{SRID=4326;POINT(30.5234 50.4501)}'::geometry(point,4326)[]`, + ); + + const res13 = await diffDefault( + db, + geometry({ mode: 'xy', type: 'point' }) + .array() + .default([{ x: 13, y: 13 }]), + `'{POINT(13 13)}'::geometry(point)[]`, + ); + + const res15 = await diffDefault( + db, + geometry({ mode: 'xy', type: 'point' }) + .array() + .default(sql`'{SRID=4326;POINT(15 15)}'::geometry(point)[]`), + `'{SRID=4326;POINT(15 15)}'::geometry(point)[]`, + ); + + const res16 = await diffDefault( + db, + geometry({ mode: 'xy', type: 'point' }) + .array() + .default(sql`'{POINT(15 15)}'::geometry(point)[]`), + `'{POINT(15 15)}'::geometry(point)[]`, + ); + + expect(res3).toStrictEqual([]); + expect(res4).toStrictEqual([]); + expect(res5).toStrictEqual([]); + expect(res6).toStrictEqual([]); + expect(res13).toStrictEqual([]); + expect(res15).toStrictEqual([]); + expect(res16).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/cockroach/entity-filter.test.ts b/drizzle-kit/tests/cockroach/entity-filter.test.ts new file mode 100644 index 0000000000..afc959559f --- /dev/null +++ b/drizzle-kit/tests/cockroach/entity-filter.test.ts @@ -0,0 +1,342 @@ +import { sql } from 'drizzle-orm'; +import { cockroachSchema, cockroachView, int4 as int } from 'drizzle-orm/cockroach-core'; +import { afterAll, beforeAll, beforeEach, expect } from 'vitest'; +import { push, test } from './mocks'; +3; + +test('push schema #1', async ({ db }) => { + const to = { dev: cockroachSchema('dev') }; + const st0 = ['CREATE SCHEMA "dev";\n']; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual([]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } +}); + +test('push schema #2', async ({ db }) => { + const to = { dev: cockroachSchema('dev'), dev2: cockroachSchema('dev2') }; + const st0 = ['CREATE SCHEMA "dev";\n', 'CREATE SCHEMA "dev2";\n']; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual(['CREATE SCHEMA "dev";\n']); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual(['CREATE SCHEMA "dev2";\n']); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } +}); + +test('push schema #3', async ({ db }) => { + const to = { dev: cockroachSchema('dev').existing(), dev2: cockroachSchema('dev2') }; + const st0 = ['CREATE SCHEMA "dev2";\n']; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual([]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } +}); + +test('push schema #4', async ({ db }) => { + const dev = cockroachSchema('dev'); + const table1 = dev.table('table1', { id: int() }); + const table2 = dev.table('table2', { id: int() }); + const to = { dev, table1, table2, dev2: cockroachSchema('dev2') }; + + const st0 = [ + 'CREATE SCHEMA "dev";\n', + 'CREATE SCHEMA "dev2";\n', + 'CREATE TABLE "dev"."table1" (\n\t"id" int4\n);\n', + 'CREATE TABLE "dev"."table2" (\n\t"id" int4\n);\n', + ]; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual([ + 'CREATE SCHEMA "dev";\n', + 'CREATE TABLE "dev"."table1" (\n\t"id" int4\n);\n', + 'CREATE TABLE "dev"."table2" (\n\t"id" int4\n);\n', + ]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual(['CREATE SCHEMA "dev2";\n']); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } +}); + +test('push schema #5', async ({ db }) => { + const dev = cockroachSchema('dev').existing(); + const table1 = dev.table('table1', { id: int() }); + const table2 = dev.table('table2', { id: int() }); + const to = { dev, table1, table2, dev2: cockroachSchema('dev2') }; + const st0 = ['CREATE SCHEMA "dev2";\n']; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual([]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await db.clear(); + } +}); + +test('push schema #6', async ({ db }) => { + await db.query('create schema dev'); + + const to = { dev: cockroachSchema('dev').existing() }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([]); +}); + +test('push schema #6', async ({ db }) => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + + const to = { dev: cockroachSchema('dev').existing() }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([]); +}); + +test('push schema #7', async ({ db }) => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + + const to = { dev: cockroachSchema('dev') }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(['DROP TABLE "dev"."users";']); +}); + +test('push schema #8', async ({ db }) => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + await db.query('create view v as (select * from dev.users);'); + + const to = { dev: cockroachSchema('dev') }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([ + 'DROP VIEW "v";', + 'DROP TABLE "dev"."users";', + ]); +}); + +test('push schema #9', async ({ db }) => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + await db.query('create view dev.v as (select * from dev.users);'); + + const to = { dev: cockroachSchema('dev') }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([ + 'DROP VIEW "dev"."v";', + 'DROP TABLE "dev"."users";', + ]); +}); + +test('push schema #10', async ({ db }) => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + await db.query('create view v as (select * from dev.users);'); + + const to = { dev: cockroachSchema('dev').existing(), v: cockroachView('v', {}).existing() }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([]); +}); + +test('push schema #11', async ({ db }) => { + const schema = await import('./schemas/schema0'); + + await push({ db, to: schema }); + + const res1 = await push({ db, to: { ...schema, core: cockroachSchema('core').existing() } }); + expect(res1.sqlStatements).toStrictEqual([]); + + const res2 = await push({ db, to: schema }); + expect(res2.sqlStatements).toStrictEqual([]); +}); + +test('huge schema #1', async ({ db }) => { + const schema = await import('./schemas/schema1'); + + await push({ db, to: schema }); + + const res1 = await push({ db, to: { ...schema, core: cockroachSchema('core').existing() } }); + expect(res1.sqlStatements).toStrictEqual([]); + + const res2 = await push({ db, to: schema }); + expect(res2.sqlStatements).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/cockroach/enums.test.ts b/drizzle-kit/tests/cockroach/enums.test.ts new file mode 100644 index 0000000000..d2446526b5 --- /dev/null +++ b/drizzle-kit/tests/cockroach/enums.test.ts @@ -0,0 +1,2080 @@ +import { cockroachEnum, cockroachSchema, cockroachTable, int4, text, varchar } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('enums #1', async ({ db }) => { + const to = { + enum: cockroachEnum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TYPE "enum" AS ENUM('value');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #2', async ({ db }) => { + const folder = cockroachSchema('folder'); + const to = { + folder, + enum: folder.enum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff({ folder }, to, []); + await push({ db, to: { folder } }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TYPE "folder"."enum" AS ENUM('value');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #3', async ({ db }) => { + const from = { + enum: cockroachEnum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff(from, {}, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to: {}, + }); + + const st0 = [ + `DROP TYPE "enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #4', async ({ db }) => { + const folder = cockroachSchema('folder'); + + const from = { + folder, + enum: folder.enum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff(from, { folder }, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: { folder } }); + + const st0 = [ + `DROP TYPE "folder"."enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #5', async ({ db }) => { + const folder1 = cockroachSchema('folder1'); + const folder2 = cockroachSchema('folder2'); + + const from = { + folder1, + enum: folder1.enum('enum', ['value']), + }; + + const to = { + folder2, + enum: folder2.enum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff(from, to, ['folder1->folder2']); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: ['folder1->folder2'], + }); + + const st0 = [ + `ALTER SCHEMA "folder1" RENAME TO "folder2";\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #6', async ({ db }) => { + const folder1 = cockroachSchema('folder1'); + const folder2 = cockroachSchema('folder2'); + + const from = { + folder1, + folder2, + enum: folder1.enum('enum', ['value']), + }; + + const to = { + folder1, + folder2, + enum: folder2.enum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'folder1.enum->folder2.enum', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: ['folder1.enum->folder2.enum'], + }); + + const st0 = [ + `ALTER TYPE "folder1"."enum" SET SCHEMA "folder2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #7', async ({ db }) => { + const from = { + enum: cockroachEnum('enum', ['value1']), + }; + + const to = { + enum: cockroachEnum('enum', ['value1', 'value2']), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TYPE "enum" ADD VALUE 'value2';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #8', async ({ db }) => { + const from = { + enum: cockroachEnum('enum', ['value1']), + }; + + const to = { + enum: cockroachEnum('enum', ['value1', 'value2', 'value3']), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TYPE "enum" ADD VALUE 'value2';`, + `ALTER TYPE "enum" ADD VALUE 'value3';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #9', async ({ db }) => { + const from = { + enum: cockroachEnum('enum', ['value1', 'value3']), + }; + + const to = { + enum: cockroachEnum('enum', ['value1', 'value2', 'value3']), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [`ALTER TYPE "enum" ADD VALUE 'value2' BEFORE 'value3';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #10', async ({ db }) => { + const schema = cockroachSchema('folder'); + const from = { + schema, + enum: schema.enum('enum', ['value1']), + }; + + const to = { + schema, + enum: schema.enum('enum', ['value1', 'value2']), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [`ALTER TYPE "folder"."enum" ADD VALUE 'value2';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #11', async ({ db }) => { + const schema1 = cockroachSchema('folder1'); + const from = { + schema1, + enum: schema1.enum('enum', ['value1']), + }; + + const to = { + schema1, + enum: cockroachEnum('enum', ['value1']), + }; + + const renames = [ + 'folder1.enum->public.enum', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "folder1"."enum" SET SCHEMA "public";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #12', async ({ db }) => { + const schema1 = cockroachSchema('folder1'); + const from = { + schema1, + enum: cockroachEnum('enum', ['value1']), + }; + + const to = { + schema1, + enum: schema1.enum('enum', ['value1']), + }; + + const renames = [ + 'public.enum->folder1.enum', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "enum" SET SCHEMA "folder1";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #13', async ({ db }) => { + const from = { + enum: cockroachEnum('enum1', ['value1']), + }; + + const to = { + enum: cockroachEnum('enum2', ['value1']), + }; + + const renames = [ + 'public.enum1->public.enum2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "enum1" RENAME TO "enum2";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #14', async ({ db }) => { + const folder1 = cockroachSchema('folder1'); + const folder2 = cockroachSchema('folder2'); + const from = { + folder1, + folder2, + enum: folder1.enum('enum1', ['value1']), + }; + + const to = { + folder1, + folder2, + enum: folder2.enum('enum2', ['value1']), + }; + + const renames = [ + 'folder1.enum1->folder2.enum2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`, + `ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #15', async ({ db }) => { + const folder1 = cockroachSchema('folder1'); + const folder2 = cockroachSchema('folder2'); + const from = { + folder1, + folder2, + enum: folder1.enum('enum1', ['value1', 'value4']), + }; + + const to = { + folder1, + folder2, + enum: folder2.enum('enum2', ['value1', 'value2', 'value3', 'value4']), + }; + + const renames = ['folder1.enum1->folder2.enum2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`, + `ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`, + `ALTER TYPE "folder2"."enum2" ADD VALUE 'value2' BEFORE 'value4';`, + `ALTER TYPE "folder2"."enum2" ADD VALUE 'value3' BEFORE 'value4';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #16', async ({ db }) => { + const enum1 = cockroachEnum('enum1', ['value1']); + const enum2 = cockroachEnum('enum2', ['value1']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + }; + + const to = { + enum2, + table: cockroachTable('table', { + column: enum2('test_column'), + }), + }; + + const renames = [ + 'public.enum1->public.enum2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "enum1" RENAME TO "enum2";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #17', async ({ db }) => { + const schema = cockroachSchema('schema'); + const enum1 = cockroachEnum('enum1', ['value1']); + const enum2 = schema.enum('enum1', ['value1']); + + const from = { + schema, + enum1, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + }; + + const to = { + schema, + enum2, + table: cockroachTable('table', { + column: enum2('test_column'), + }), + }; + + const renames = [ + 'public.enum1->schema.enum1', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "enum1" SET SCHEMA "schema";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #18', async ({ db }) => { + const schema1 = cockroachSchema('schema1'); + const schema2 = cockroachSchema('schema2'); + + const enum1 = schema1.enum('enum1', ['value1']); + const enum2 = schema2.enum('enum2', ['value1']); + + const from = { + schema1, + schema2, + enum1, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + }; + + const to = { + schema1, + schema2, + enum2, + table: cockroachTable('table', { + column: enum2('test_column'), + }), + }; + + const renames = [ + 'schema1.enum1->schema2.enum2', + ]; + // change name and schema of the enum, no table changes + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER TYPE "schema1"."enum1" SET SCHEMA "schema2";`, + `ALTER TYPE "schema2"."enum1" RENAME TO "enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #19', async ({ db }) => { + const myEnum = cockroachEnum('my_enum', ["escape's quotes"]); + + const from = {}; + + const to = { myEnum }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = ["CREATE TYPE \"my_enum\" AS ENUM('escape''s quotes');"]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #20', async ({ db }) => { + const myEnum = cockroachEnum('my_enum', ['one', 'two', 'three']); + + const from = { + myEnum, + table: cockroachTable('table', { + id: int4('id').primaryKey(), + }), + }; + + const to = { + myEnum, + table: cockroachTable('table', { + id: int4('id').primaryKey(), + col1: myEnum('col1'), + col2: int4('col2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ADD COLUMN "col1" "my_enum";', + 'ALTER TABLE "table" ADD COLUMN "col2" int4;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #21', async ({ db }) => { + const myEnum = cockroachEnum('my_enum', ['one', 'two', 'three']); + + const from = { + myEnum, + table: cockroachTable('table', { + id: int4('id').primaryKey(), + }), + }; + + const to = { + myEnum, + table: cockroachTable('table', { + id: int4('id').primaryKey(), + col1: myEnum('col1').array(), + col2: int4('col2').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ADD COLUMN "col1" "my_enum"[];', + 'ALTER TABLE "table" ADD COLUMN "col2" int4[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #22', async ({ db }) => { + const schema = cockroachSchema('schema'); + const en = schema.enum('e', ['a', 'b']); + + const from = { + schema, + en, + }; + + const to = { + schema, + en, + table: cockroachTable('table', { + en: en(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = ['CREATE TABLE "table" (\n\t"en" "schema"."e"\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums #23', async ({ db }) => { + const schema = cockroachSchema('schema'); + const en = schema.enum('e', ['a', 'b']); + + const from = { + schema, + en, + }; + + const to = { + schema, + en, + table: cockroachTable('table', { + en1: en().array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'CREATE TABLE "table" (\n\t"en1" "schema"."e"[]\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop enum value', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3']); + const to = { + enum2, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop enum values', async ({ db }) => { + const newSchema = cockroachSchema('mySchema'); + const enum3 = cockroachEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'addedToMiddle', + 'custMgf', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema1 = { + enum3, + table: cockroachTable('enum_table', { + id: enum3(), + }), + newSchema, + table1: newSchema.table('enum_table', { + id: enum3(), + }), + }; + + const enum4 = cockroachEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema2 = { + enum4, + table: cockroachTable('enum_table', { + id: enum4(), + }), + newSchema, + table1: newSchema.table('enum_table', { + id: enum4(), + }), + }; + + const schemas = ['public', 'mySchema']; + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas }); + + const st0 = [ + `ALTER TABLE "enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, + `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, + `DROP TYPE "enum_users_customer_and_ship_to_settings_roles";`, + `CREATE TYPE "enum_users_customer_and_ship_to_settings_roles" AS ENUM('addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'custApprover', 'custOrderWriter', 'custBuyer');`, + `ALTER TABLE "enum_table" ALTER COLUMN "id" SET DATA TYPE "enum_users_customer_and_ship_to_settings_roles" USING "id"::"enum_users_customer_and_ship_to_settings_roles";`, + `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE "enum_users_customer_and_ship_to_settings_roles" USING "id"::"enum_users_customer_and_ship_to_settings_roles";`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop enum', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + users: cockroachTable('users', { + col: enum1().default('value1'), + }), + }; + + const to = { + users: cockroachTable('users', { + col: text().default('value1'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "col" DROP DEFAULT;', + 'ALTER TABLE "users" ALTER COLUMN "col" SET DATA TYPE string;', + 'ALTER TABLE "users" ALTER COLUMN "col" SET DEFAULT \'value1\';', + `DROP TYPE "enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop enum value. enum is columns data type', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const schema = cockroachSchema('new_schema'); + + const from = { + schema, + enum1, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + table2: schema.table('table', { + column: enum1('test_column'), + }), + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3']); + const to = { + schema, + enum2, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + table2: schema.table('table', { + column: enum1('test_column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3');`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum" USING "test_column"::"enum";`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE "enum" USING "test_column"::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('shuffle enum values', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const schema = cockroachSchema('new_schema'); + + const from = { + schema, + enum1, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + table2: schema.table('table', { + column: enum1('test_column'), + }), + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + table2: schema.table('table', { + column: enum1('test_column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum" USING "test_column"::"enum";`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE "enum" USING "test_column"::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is enum type with default value. shuffle enum', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').default('value2'), + }), + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: cockroachTable('table', { + column: enum2('test_column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + 'ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum" USING "test_column"::"enum";', + 'ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT \'value2\'::"enum";', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums as ts enum', async ({ db }) => { + enum Test { + value = 'value', + } + + const to = { + enum: cockroachEnum('enum', Test), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TYPE "enum" AS ENUM('value');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is enum type with default value. shuffle enum', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').default('value2'), + }), + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: cockroachTable('table', { + column: enum2('test_column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum" USING "test_column"::"enum";`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value2'::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is array enum type with default value. shuffle enum', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array().default(['value2']), + }), + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: cockroachTable('table', { + column: enum2('test_column').array().default(['value3']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum"[] USING "test_column"::"enum"[];`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT '{value3}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is array enum with custom size type with default value. shuffle enum', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(3).default(['value2']), + }), + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: cockroachTable('table', { + column: enum2('test_column').array(3).default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum"[] USING "test_column"::"enum"[];`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT '{value2}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is array enum with custom size type. shuffle enum', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(3), + }), + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: cockroachTable('table', { + column: enum2('test_column').array(3), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum"[] USING "test_column"::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is enum type with default value. custom schema. shuffle enum', async ({ db }) => { + const schema = cockroachSchema('new_schema'); + + const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); + const from = { + schema, + enum1, + table: cockroachTable('table', { + column: enum1('test_column').default('value2'), + }), + }; + + const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: cockroachTable('table', { + column: enum2('test_column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;`, + `DROP TYPE "new_schema"."enum";`, + `CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "new_schema"."enum" USING "test_column"::"new_schema"."enum";`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value2'::"new_schema"."enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is array enum type with default value. custom schema. shuffle enum', async ({ db }) => { + const schema = cockroachSchema('new_schema'); + + const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); + + const from = { + schema, + enum1, + table: schema.table('table', { + column: enum1('test_column').array().default(['value2']), + }), + }; + + const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: schema.table('table', { + column: enum2('test_column').array().default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" DROP DEFAULT;`, + `DROP TYPE "new_schema"."enum";`, + `CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE "new_schema"."enum"[] USING "test_column"::"new_schema"."enum"[];`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DEFAULT '{value2}'::"new_schema"."enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is array enum type with custom size with default value. custom schema. shuffle enum', async ({ db }) => { + const schema = cockroachSchema('new_schema'); + + const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); + + const from = { + schema, + enum1, + table: schema.table('table', { + column: enum1('test_column').array(3).default(['value2']), + }), + }; + + const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: schema.table('table', { + column: enum2('test_column').array(3).default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + 'ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" DROP DEFAULT;', + `DROP TYPE "new_schema"."enum";`, + `CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE "new_schema"."enum"[] USING "test_column"::"new_schema"."enum"[];`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DEFAULT '{value2}'::"new_schema"."enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is array enum type with custom size. custom schema. shuffle enum', async ({ db }) => { + const schema = cockroachSchema('new_schema'); + + const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); + + const from = { + schema, + enum1, + table: schema.table('table', { + column: enum1('test_column').array(3), + }), + }; + + const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: schema.table('table', { + column: enum2('test_column').array(3), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE text;`, + `DROP TYPE "new_schema"."enum";`, + `CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "test_column" SET DATA TYPE "new_schema"."enum"[] USING "test_column"::"new_schema"."enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('column is enum type without default value. add default to column', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + }; + + const enum2 = cockroachEnum('enum', ['value1', 'value3']); + const to = { + enum2, + table: cockroachTable('table', { + column: enum2('test_column').default('value3'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value3'::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from standart type to enum', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column'), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum" USING "test_column"::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from standart type to enum. column has default', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').default('value2'), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').default('value3'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum" USING "test_column"::"enum";`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value3'::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from array standart type to array enum. column has default', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array().default(['value2']), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array().default(['value3']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum"[] USING "test_column"::"enum"[];`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT '{value3}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from array standart type to array enum. column without default', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array(), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum"[] USING "test_column"::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from array standart type with custom size to array enum with custom size. column has default', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array(3).default(['value2']), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(3).default(['value3']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum"[] USING "test_column"::"enum"[];`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT '{value3}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from array standart type with custom size to array enum with custom size. column without default', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array(2), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(2), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum"[] USING "test_column"::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from enum type to standart type', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE varchar;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from array enum type to standart type', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from enum type to standart type. column has default', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').default('value3'), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE varchar;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value2';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from array enum type to array standart type', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from array enum with custom size type to array standart type with custom size', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(2), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array(2), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// +test.concurrent('change data type from array enum type to array standart type. column has default', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array().default(['value2']), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array().default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE varchar[];`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT '{value2}'::varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from array enum type with custom size to array standart type with custom size. column has default', async ({ db }) => { + const enum1 = cockroachEnum('enum', ['value1', 'value2']); + + const from = { + enum1, + table: cockroachTable('table', { + column: enum1('test_column').array(3).default(['value2']), + }), + }; + + const to = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').array(3).default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE varchar[];`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT '{value2}'::varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from standart type to standart type', async ({ db }) => { + const from = { + table: cockroachTable('table', { + column: varchar('test_column'), + }), + }; + + const to = { + table: cockroachTable('table', { + column: text('test_column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE string;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from standart type to standart type. column has default', async ({ db }) => { + const from = { + table: cockroachTable('table', { + column: varchar('test_column').default('value3'), + }), + }; + + const to = { + table: cockroachTable('table', { + column: text('test_column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE string;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value2';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// TODO if leave "column" as name - strange error occurres. Could be bug in cockroachdb +test.concurrent('change data type from standart type to standart type. columns are arrays', async ({ db }) => { + const from = { + table: cockroachTable('table', { + test_column: varchar('test_column').array(), + }), + }; + + const to = { + table: cockroachTable('table', { + test_column: text('test_column').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE string[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from standart type to standart type. columns are arrays with custom sizes', async ({ db }) => { + const from = { + table: cockroachTable('table', { + test_column: varchar('test_column').array(2), + }), + }; + + const to = { + table: cockroachTable('table', { + test_column: text('test_column').array(2), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE string[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from standart type to standart type. columns are arrays. column has default', async ({ db }) => { + const from = { + table: cockroachTable('table', { + test_column: varchar('test_column').array().default(['hello']), + }), + }; + + const to = { + table: cockroachTable('table', { + test_column: text('test_column').array().default(['hello']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE string[];`, + // TODO: discuss with @AndriiSherman, redundand statement + // `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT '{"hello"}';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from standart type to standart type. columns are arrays with custom sizes.column has default', async ({ db }) => { + const from = { + table: cockroachTable('table', { + column: varchar('test_column').array(2).default(['hello']), + }), + }; + + const to = { + table: cockroachTable('table', { + column: text('test_column').array(2).default(['hello']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE string[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from one enum to other', async ({ db }) => { + const enum1 = cockroachEnum('enum1', ['value1', 'value3']); + const enum2 = cockroachEnum('enum2', ['value1', 'value3']); + + const from = { + enum1, + enum2, + table: cockroachTable('table', { + column: enum1('test_column'), + }), + }; + + const to = { + enum1, + enum2, + table: cockroachTable('table', { + column: enum2('test_column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum2" USING "test_column"::text::"enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from one enum to other. column has default', async ({ db }) => { + const enum1 = cockroachEnum('enum1', ['value1', 'value3']); + const enum2 = cockroachEnum('enum2', ['value1', 'value3']); + + const from = { + enum1, + enum2, + table: cockroachTable('table', { + column: enum1('test_column').default('value3'), + }), + }; + + const to = { + enum1, + enum2, + table: cockroachTable('table', { + column: enum2('test_column').default('value3'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum2" USING "test_column"::text::"enum2";`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value3'::"enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change data type from one enum to other. changed defaults', async ({ db }) => { + const enum1 = cockroachEnum('enum1', ['value1', 'value3']); + const enum2 = cockroachEnum('enum2', ['value1', 'value3']); + + const from = { + enum1, + enum2, + table: cockroachTable('table', { + column: enum1('test_column').default('value3'), + }), + }; + + const to = { + enum1, + enum2, + table: cockroachTable('table', { + column: enum2('test_column').default('value1'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum2" USING "test_column"::text::"enum2";`, + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value1'::"enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('check filtering json statements. here we have recreate enum + set new type + alter default', async ({ db }) => { + const enum1 = cockroachEnum('enum1', ['value1', 'value3']); + const from = { + enum1, + table: cockroachTable('table', { + column: varchar('test_column').default('value3'), + }), + }; + + const enum2 = cockroachEnum('enum1', ['value3', 'value1', 'value2']); + const to = { + enum2, + table: cockroachTable('table', { + column: enum2('test_column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'DROP TYPE "enum1";', + `CREATE TYPE "enum1" AS ENUM('value3', 'value1', 'value2');`, + 'ALTER TABLE "table" ALTER COLUMN "test_column" DROP DEFAULT;', + 'ALTER TABLE "table" ALTER COLUMN "test_column" SET DATA TYPE "enum1" USING "test_column"::"enum1";', + `ALTER TABLE "table" ALTER COLUMN "test_column" SET DEFAULT 'value2'::"enum1";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add column with same name as enum', async ({ db }) => { + const statusEnum = cockroachEnum('status', ['inactive', 'active', 'banned']); + + const schema1 = { + statusEnum, + table1: cockroachTable('table1', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + statusEnum, + table1: cockroachTable('table1', { + id: int4('id').primaryKey(), + status: statusEnum('status').default('inactive'), + }), + table2: cockroachTable('table2', { + id: int4('id').primaryKey(), + status: statusEnum('status').default('inactive'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'CREATE TABLE "table2" (\n\t"id" int4 PRIMARY KEY,\n\t"status" "status" DEFAULT \'inactive\'::"status"\n);\n', + 'ALTER TABLE "table1" ADD COLUMN "status" "status" DEFAULT \'inactive\'::"status";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('enums ordering', async ({ db }) => { + const schema1 = { + enum: cockroachEnum('settings', ['all', 'admin']), + }; + + const { next: n1 } = await diff({}, schema1, []); + await push({ db, to: schema1 }); + + const schema3 = { + enum: cockroachEnum('settings', ['new', 'all', 'admin']), + }; + + const { sqlStatements: st2, next: n2 } = await diff(n1, schema3, []); + const { sqlStatements: pst2 } = await push({ db, to: schema3 }); + + expect(st2).toStrictEqual(["ALTER TYPE \"settings\" ADD VALUE 'new' BEFORE 'all';"]); + expect(pst2).toStrictEqual(["ALTER TYPE \"settings\" ADD VALUE 'new' BEFORE 'all';"]); + + const schema4 = { + enum3: cockroachEnum('settings', ['new', 'all', 'new2', 'admin']), + }; + + const { sqlStatements: st3, next: n3 } = await diff(n2, schema4, []); + const { sqlStatements: pst3 } = await push({ db, to: schema4 }); + + const st0 = [ + `ALTER TYPE "settings" ADD VALUE 'new2' BEFORE 'admin';`, + ]; + + expect(st3).toStrictEqual(st0); + expect(pst3).toStrictEqual(st0); + + const { sqlStatements: st4 } = await diff(n3, schema4, []); + const { sqlStatements: pst4 } = await push({ db, to: schema4 }); + expect(st4).toStrictEqual([]); + expect(pst4).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/cockroach/generated.test.ts b/drizzle-kit/tests/cockroach/generated.test.ts new file mode 100644 index 0000000000..040c944b22 --- /dev/null +++ b/drizzle-kit/tests/cockroach/generated.test.ts @@ -0,0 +1,465 @@ +import { SQL, sql } from 'drizzle-orm'; +import { cockroachTable, int4, text } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('generated as callback: add column with generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as callback: add generated constraint to an exisiting column', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as callback: drop generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} || 'to delete'`, + ), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE \"users\" DROP COLUMN \"gen_name\";`, + `ALTER TABLE \"users\" ADD COLUMN \"gen_name\" string;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as callback: change generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we don't trigger generated column recreate if definition change within push +}); + +test.concurrent('generated as sql: add column with generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\"users\".\"name\" || 'hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as sql: add generated constraint to an exisiting column', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`\"users\".\"name\" || 'to add'`), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as sql: drop generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\"users\".\"name\" || 'to delete'`, + ), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE \"users\" DROP COLUMN \"gen_name\";`, + `ALTER TABLE \"users\" ADD COLUMN \"gen_name\" string;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as sql: change generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\"users\".\"name\"`, + ), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + sql`\"users\".\"name\" || 'hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we don't trigger generated column recreate if definition change within push +}); + +test.concurrent('generated as string: add column with generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\"users\".\"name\" || 'hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as string: add generated constraint to an exisiting column', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`\"users\".\"name\" || 'to add'`), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as string: drop generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\"users\".\"name\" || 'to delete'`, + ), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE \"users\" DROP COLUMN \"gen_name\";`, + `ALTER TABLE \"users\" ADD COLUMN \"gen_name\" string;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('generated as string: change generated constraint', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `\"users\".\"name\" || 'hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we don't trigger generated column recreate if definition change within push +}); + +test.concurrent('alter generated constraint', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), + }), + }; + const schema2 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" string GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // push ignores definition changes +}); diff --git a/drizzle-kit/tests/cockroach/grammar.test.ts b/drizzle-kit/tests/cockroach/grammar.test.ts new file mode 100644 index 0000000000..1ac6d974e3 --- /dev/null +++ b/drizzle-kit/tests/cockroach/grammar.test.ts @@ -0,0 +1,76 @@ +import { splitSqlType, trimDefaultValueSuffix } from 'src/dialects/cockroach/grammar'; +import { expect, test } from 'vitest'; + +test.each([ + ["'a'::my_enum", "'a'"], + ["'abc'::text", "'abc'"], + ["'abc'::character varying", "'abc'"], + ["'abc'::bpchar", "'abc'"], + [`'{"attr":"value"}'::json`, `'{"attr":"value"}'`], + [`'{"attr": "value"}'::jsonb`, `'{"attr": "value"}'`], + [`'00:00:00'::time without time zone`, `'00:00:00'`], + [`'2025-04-24 08:30:45.08+00'::timestamp with time zone`, `'2025-04-24 08:30:45.08+00'`], + [`'2024-01-01'::date`, `'2024-01-01'`], + [`'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'::uuid`, `'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'`], + [`now()`, `now()`], + [`CURRENT_TIMESTAMP`, `CURRENT_TIMESTAMP`], + [`timezone('utc'::text, now())`, `timezone('utc'::text, now())`], + [`'{a,b}'::my_enum[]`, `'{a,b}'`], + [`'{10,20}'::smallint[]`, `'{10,20}'`], + [`'{10,20}'::integer[]`, `'{10,20}'`], + [`'{99.9,88.8}'::some::string[]`, `'{99.9,88.8}'`], + [`'{99.9,88.8}'::some::string(3)[]`, `'{99.9,88.8}'`], + [`'{99.9,88.8}'::numeric[]`, `'{99.9,88.8}'`], + [`'{100,200}'::bigint[]`, `'{100,200}'`], + [`'{t,f}'::boolean[]`, `'{t,f}'`], + [`'{abc,def}'::text[]`, `'{abc,def}'`], + [`'{abc,def}'::character varying[]`, `'{abc,def}'`], + [`'{abc,def}'::bpchar[]`, `'{abc,def}'`], + [`'{100,200}'::double precision[]`, `'{100,200}'`], + [`'{100,200}'::real[]`, `'{100,200}'`], + [ + `'{"{\"attr\":\"value1\"}","{\"attr\":\"value2\"}"}'::json[]`, + `'{"{\"attr\":\"value1\"}","{\"attr\":\"value2\"}"}'`, + ], + [ + `'{"{\"attr\": \"value1\"}","{\"attr\": \"value2\"}"}'::jsonb[]`, + `'{"{\"attr\": \"value1\"}","{\"attr\": \"value2\"}"}'`, + ], + [`'{00:00:00,01:00:00}'::time without time zone[]`, `'{00:00:00,01:00:00}'`], + [ + `'{"2025-04-24 10:41:36.623+00","2025-04-24 10:41:36.623+00"}'::timestamp with time zone[]`, + `'{"2025-04-24 10:41:36.623+00","2025-04-24 10:41:36.623+00"}'`, + ], + [`'{2024-01-01,2024-01-02}'::date[]`, `'{2024-01-01,2024-01-02}'`], + [ + `'{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11,a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12}'::uuid[]`, + `'{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11,a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12}'`, + ], + [`'{127.0.0.1,127.0.0.2}'::inet[]`, `'{127.0.0.1,127.0.0.2}'`], + [`'{127.0.0.1/32,127.0.0.2/32}'::cidr[]`, `'{127.0.0.1/32,127.0.0.2/32}'`], + [`'{00:00:00:00:00:00,00:00:00:00:00:01}'::macaddr[]`, `'{00:00:00:00:00:00,00:00:00:00:00:01}'`], + [ + `'{00:00:00:ff:fe:00:00:00,00:00:00:ff:fe:00:00:01}'::macaddr8[]`, + `'{00:00:00:ff:fe:00:00:00,00:00:00:ff:fe:00:00:01}'`, + ], + [`'{"1 day 01:00:00","1 day 02:00:00"}'::interval[]`, `'{"1 day 01:00:00","1 day 02:00:00"}'`], +])('trim default suffix %#: %s', (it, expected) => { + expect(trimDefaultValueSuffix(it)).toBe(expected); +}); + +test('split sql type', () => { + expect.soft(splitSqlType('numeric')).toStrictEqual({ type: 'numeric', options: null }); + expect.soft(splitSqlType('numeric(10)')).toStrictEqual({ type: 'numeric', options: '10' }); + expect.soft(splitSqlType('numeric(10,0)')).toStrictEqual({ type: 'numeric', options: '10,0' }); + expect.soft(splitSqlType('numeric(10,2)')).toStrictEqual({ type: 'numeric', options: '10,2' }); + + expect.soft(splitSqlType('numeric[]')).toStrictEqual({ type: 'numeric', options: null }); + expect.soft(splitSqlType('numeric(10)[]')).toStrictEqual({ type: 'numeric', options: '10' }); + expect.soft(splitSqlType('numeric(10,0)[]')).toStrictEqual({ type: 'numeric', options: '10,0' }); + expect.soft(splitSqlType('numeric(10,2)[]')).toStrictEqual({ type: 'numeric', options: '10,2' }); + + expect.soft(splitSqlType('numeric[][]')).toStrictEqual({ type: 'numeric', options: null }); + expect.soft(splitSqlType('numeric(10)[][]')).toStrictEqual({ type: 'numeric', options: '10' }); + expect.soft(splitSqlType('numeric(10,0)[][]')).toStrictEqual({ type: 'numeric', options: '10,0' }); + expect.soft(splitSqlType('numeric(10,2)[][]')).toStrictEqual({ type: 'numeric', options: '10,2' }); +}); diff --git a/drizzle-kit/tests/cockroach/identity.test.ts b/drizzle-kit/tests/cockroach/identity.test.ts new file mode 100644 index 0000000000..f8a46e9d14 --- /dev/null +++ b/drizzle-kit/tests/cockroach/identity.test.ts @@ -0,0 +1,528 @@ +import { bigint, cockroachTable, int2, int4, int8, text } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('create table: identity always/by default - no params', async ({ db }) => { + const from = {}; + + const to = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity(), + id1: int8('id1', { mode: 'number' }).generatedByDefaultAsIdentity(), + id2: int2('id2').generatedByDefaultAsIdentity(), + }), + }; + + const { sqlStatements: st, next } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" int4 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" int8 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),\n\t"id2" int2 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table: identity always/by default - few params', async ({ db }) => { + const from = {}; + + const to = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + increment: 4, + }), + id1: int8('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ + startWith: 120, + maxValue: 17000, + }), + id2: int2('id2').generatedByDefaultAsIdentity({ cache: 1 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" int4 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" int8 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 1),\n\t"id2" int2 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table: identity always/by default - all params', async ({ db }) => { + // TODO revise: added id1, id2 columns to users table, like in same test from push.test.ts + const from = {}; + + const to = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + }), + id1: int8('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ + startWith: 120, + maxValue: 17000, + increment: 3, + cache: 100, + }), + id2: int2('id2').generatedByDefaultAsIdentity({ minValue: 1 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" int4 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 4 MINVALUE 3 MAXVALUE 1000 START WITH 3 CACHE 200),\n\t"id1" int8 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 3 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 100),\n\t"id2" int2 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('no diff: identity always/by default - no params', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity(), + id2: int4('id2').generatedAlwaysAsIdentity(), + }), + }; + + const to = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity(), + id2: int4('id2').generatedAlwaysAsIdentity(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('no diff: identity always/by default - few params', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + increment: 4, + }), + id2: int4('id2').generatedAlwaysAsIdentity({ + increment: 1, + startWith: 3, + }), + }), + }; + + const to = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + increment: 4, + }), + id2: int4('id2').generatedAlwaysAsIdentity({ + increment: 1, + startWith: 3, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('no diff: identity always/by default - all params', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + }), + id2: int4('id2').generatedAlwaysAsIdentity({ + startWith: 10, + minValue: 10, + maxValue: 1000, + cache: 10, + increment: 2, + }), + }), + }; + + const to = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + }), + id2: int4('id2').generatedAlwaysAsIdentity({ + startWith: 10, + minValue: 10, + maxValue: 1000, + cache: 10, + increment: 2, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop identity from a column - no params', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: cockroachTable('users', { + id: int4('id'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + log: 'statements', + }); + + const st0 = [ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id" DROP NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop identity from a column - few params', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + startWith: 100, + increment: 3, + }), + id1: int4('id1').generatedByDefaultAsIdentity({ + increment: 4, + }), + id2: int4('id2').generatedAlwaysAsIdentity({ + increment: 4, + }), + }), + }; + + const to = { + users: cockroachTable('users', { + id: int4('id'), + id1: int4('id1'), + id2: int4('id2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id1" DROP IDENTITY;', + 'ALTER TABLE "users" ALTER COLUMN "id2" DROP IDENTITY;', + 'ALTER TABLE "users" ALTER COLUMN "id" DROP NOT NULL;', + 'ALTER TABLE "users" ALTER COLUMN "id1" DROP NOT NULL;', + 'ALTER TABLE "users" ALTER COLUMN "id2" DROP NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop identity from a column - all params', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + startWith: 100, + increment: 3, + cache: 100, + }), + id1: int4('id1').generatedByDefaultAsIdentity({ + startWith: 10, + minValue: 10, + maxValue: 1000, + cache: 10, + increment: 2, + }), + id2: int4('id2').generatedAlwaysAsIdentity({ + startWith: 10, + minValue: 10, + maxValue: 1000, + cache: 10, + increment: 2, + }), + }), + }; + + const to = { + users: cockroachTable('users', { + id: int4('id'), + id1: int4('id1'), + id2: int4('id2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + `ALTER TABLE \"users\" ALTER COLUMN \"id1\" DROP IDENTITY;`, + `ALTER TABLE \"users\" ALTER COLUMN \"id2\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id" DROP NOT NULL;', + 'ALTER TABLE "users" ALTER COLUMN "id1" DROP NOT NULL;', + 'ALTER TABLE "users" ALTER COLUMN "id2" DROP NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter identity from a column - no params', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: bigint('id', { mode: 'number' }).generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: cockroachTable('users', { + id: bigint('id', { mode: 'number' }).generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter identity from a column - few params', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: bigint('id', { mode: 'number' }).generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + const to = { + users: cockroachTable('users', { + id: bigint('id', { mode: 'number' }).generatedByDefaultAsIdentity({ + startWith: 100, + cache: 10, + increment: 4, + maxValue: 10000, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter identity from a column - by default to always', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: bigint('id', { mode: 'number' }).generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: cockroachTable('users', { + id: bigint('id', { mode: 'number' }).generatedAlwaysAsIdentity({ + startWith: 100, + cache: 10, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED ALWAYS;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter identity from a column - always to by default', async ({ db }) => { + const from = { + users: cockroachTable('users', { + id: int4('id').generatedAlwaysAsIdentity({ maxValue: 10000 }), + }), + }; + + const to = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + maxValue: 10000, + startWith: 100, + cache: 10, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add column with identity - few params', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + email: text('email'), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + email: text('email'), + id: int4('id').generatedByDefaultAsIdentity({}), + id1: int4('id1').generatedAlwaysAsIdentity({ + increment: 4, + }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" ADD COLUMN "id" int4 GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + 'ALTER TABLE "users" ADD COLUMN "id1" int4 GENERATED ALWAYS AS IDENTITY (INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add identity to column - few params', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').notNull(), + id1: int4('id1').notNull(), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({}), + id1: int4('id1').generatedAlwaysAsIdentity({ + increment: 4, + }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" ALTER COLUMN "id" ADD GENERATED BY DEFAULT AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + 'ALTER TABLE "users" ALTER COLUMN "id1" ADD GENERATED ALWAYS AS IDENTITY (INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/indexes-without-tx.test.ts b/drizzle-kit/tests/cockroach/indexes-without-tx.test.ts new file mode 100644 index 0000000000..ae6c822b06 --- /dev/null +++ b/drizzle-kit/tests/cockroach/indexes-without-tx.test.ts @@ -0,0 +1,33 @@ +import { cockroachTable, index, int4, vector } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test('vector index', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: vector('name', { dimensions: 3 }), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + embedding: vector('name', { dimensions: 3 }), + }, (t) => [ + index('vector_embedding_idx') + .using('cspann', t.embedding), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + `CREATE INDEX "vector_embedding_idx" ON "users" USING cspann ("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/indexes.test.ts b/drizzle-kit/tests/cockroach/indexes.test.ts new file mode 100644 index 0000000000..945d9e6bbd --- /dev/null +++ b/drizzle-kit/tests/cockroach/indexes.test.ts @@ -0,0 +1,428 @@ +import { sql } from 'drizzle-orm'; +import { boolean, cockroachTable, index, int4, text, uuid } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('adding basic indexes', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + name: text('name'), + }, + (t) => [ + index() + .on(t.name, t.id.desc()) + .where(sql`name != 'alef'`), + index('indx1').using('hash', t.name), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + `CREATE INDEX "users_name_id_index" ON "users" ("name","id" DESC) WHERE name != 'alef';`, + `CREATE INDEX "indx1" ON "users" ("name") USING hash;`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('dropping basic index', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + name: text('name'), + }, + (t) => [index().on(t.name.desc(), t.id.asc())], + ), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [`DROP INDEX "users_name_id_index";`]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('altering indexes', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('removeColumn').on(t.name, t.id), + index('addColumn').on(t.name.desc()), + index('removeExpression').on(t.name.desc(), sql`id`), + index('addExpression').on(t.id.desc()), + index('changeExpression').on(t.id.desc(), sql`name`), + index('changeName').on(t.name.desc(), t.id.asc()), + index('changeUsing').on(t.name), + ]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('removeColumn').on(t.name), + index('addColumn').on(t.name.desc(), t.id.asc()), + index('removeExpression').on(t.name.desc()), + index('addExpression').on(t.id.desc()), + index('changeExpression').on(t.id.desc(), sql`name desc`), + index('newName').on(t.name.desc(), sql`id`), + index('changeUsing').using('hash', t.name), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP INDEX "changeName";', + 'DROP INDEX "removeColumn";', + 'CREATE INDEX "removeColumn" ON "users" ("name");', + 'DROP INDEX "addColumn";', + 'CREATE INDEX "addColumn" ON "users" ("name" DESC,"id");', + 'DROP INDEX "removeExpression";', + 'CREATE INDEX "removeExpression" ON "users" ("name" DESC);', + 'DROP INDEX "changeExpression";', + 'CREATE INDEX "changeExpression" ON "users" ("id" DESC,name desc);', + 'DROP INDEX "changeUsing";', + 'CREATE INDEX "changeUsing" ON "users" ("name") USING hash;', + 'CREATE INDEX "newName" ON "users" ("name" DESC,id);', + ]); + + // for push we ignore change of index expressions + expect(pst).toStrictEqual([ + 'DROP INDEX "changeName";', + 'DROP INDEX "addColumn";', + 'CREATE INDEX "addColumn" ON "users" ("name" DESC,"id");', + 'DROP INDEX "changeUsing";', + 'CREATE INDEX "changeUsing" ON "users" ("name") USING hash;', + 'DROP INDEX "removeColumn";', + 'CREATE INDEX "removeColumn" ON "users" ("name");', + 'DROP INDEX "removeExpression";', + 'CREATE INDEX "removeExpression" ON "users" ("name" DESC);', + // 'DROP INDEX "changeExpression";', + 'CREATE INDEX "newName" ON "users" ("name" DESC,id);', + // 'CREATE INDEX "changeExpression" ON "users" ("id" DESC,name desc);', + ]); +}); + +test.concurrent('indexes test case #1', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable( + 'users', + { + id: uuid('id').defaultRandom().primaryKey(), + name: text('name').notNull(), + description: text('description'), + imageUrl: text('image_url'), + inStock: boolean('in_stock').default(true), + }, + (t) => [ + index().on(t.id.desc()), + index('indx1').on(t.id, t.imageUrl), + index('indx4').on(t.id), + ], + ), + }; + + const schema2 = { + users: cockroachTable( + 'users', + { + id: uuid('id').defaultRandom().primaryKey(), + name: text('name').notNull(), + description: text('description'), + imageUrl: text('image_url'), + inStock: boolean('in_stock').default(true), + }, + (t) => [ + index().on(t.id.desc()), + index('indx1').on(t.id, t.imageUrl), + index('indx4').on(t.id), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('Indexes properties that should not trigger push changes', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('changeExpression').on(t.id.desc(), sql`name`), + index('indx1').on(t.name.desc()), + index('indx2').on(t.name.desc()).where(sql`true`), + index('indx4').on(sql`lower(name)`).where(sql`true`), + ]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('changeExpression').on(t.id.desc(), sql`name desc`), + index('indx1').on(t.name.desc()), + index('indx2').on(t.name.desc()).where(sql`false`), + index('indx4').on(sql`lower(id)`).where(sql`true`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP INDEX "changeExpression";', + 'CREATE INDEX "changeExpression" ON "users" ("id" DESC,name desc);', + 'DROP INDEX "indx2";', + 'CREATE INDEX "indx2" ON "users" ("name" DESC) WHERE false;', + 'DROP INDEX "indx4";', + 'CREATE INDEX "indx4" ON "users" (lower(id));', + ]); + expect(pst).toStrictEqual([ + 'DROP INDEX "indx2";', + 'CREATE INDEX "indx2" ON "users" ("name" DESC) WHERE false;', + ]); +}); + +test.concurrent('indexes #0', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + name: text('name'), + }, + ( + t, + ) => [ + index('removeColumn').on(t.name, t.id), + index('addColumn').on(t.name.desc()), + index('removeExpression').on(t.name.desc(), sql`id`), + index('addExpression').on(t.id.desc()), + index('changeExpression').on(t.id.desc(), sql`name`), + index('changeName').on(t.name.desc(), t.id.asc()), + index('changeUsing').on(t.name), + ], + ), + }; + + const schema2 = { + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + name: text('name'), + }, + (t) => [ + index('removeColumn').on(t.name), + index('addColumn').on(t.name.desc(), t.id), + index('removeExpression').on(t.name.desc()), + index('addExpression').on(t.id.desc()), + index('changeExpression').on(t.id.desc(), sql`name desc`), + index('newName').on(t.name.desc(), sql`id`), + index('changeUsing').using('hash', t.name), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + expect(st).toStrictEqual([ + 'DROP INDEX "changeName";', + 'DROP INDEX "removeColumn";', + 'CREATE INDEX "removeColumn" ON "users" ("name");', + 'DROP INDEX "addColumn";', + 'CREATE INDEX "addColumn" ON "users" ("name" DESC,"id");', + 'DROP INDEX "removeExpression";', + 'CREATE INDEX "removeExpression" ON "users" ("name" DESC);', + 'DROP INDEX "changeExpression";', + 'CREATE INDEX "changeExpression" ON "users" ("id" DESC,name desc);', + 'DROP INDEX "changeUsing";', + 'CREATE INDEX "changeUsing" ON "users" ("name") USING hash;', + 'CREATE INDEX "newName" ON "users" ("name" DESC,id);', + ]); + + // for push we ignore change of index expressions + expect(pst).toStrictEqual([ + 'DROP INDEX "changeName";', + 'DROP INDEX "addColumn";', + 'CREATE INDEX "addColumn" ON "users" ("name" DESC,"id");', + 'DROP INDEX "changeUsing";', + 'CREATE INDEX "changeUsing" ON "users" ("name") USING hash;', + 'DROP INDEX "removeColumn";', + 'CREATE INDEX "removeColumn" ON "users" ("name");', + 'DROP INDEX "removeExpression";', + 'CREATE INDEX "removeExpression" ON "users" ("name" DESC);', + // 'DROP INDEX "changeExpression";', + 'CREATE INDEX "newName" ON "users" ("name" DESC,id);', + // 'CREATE INDEX "changeExpression" ON "users" ("id" DESC,name desc);', + ]); +}); + +test.concurrent('index #2', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('indx').on(t.name.desc()), + index('indx1').on(t.name.desc()), + index('indx3').on(sql`lower(name)`), + ]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('indx').on(t.name.desc()), + index('indx1').on(t.name.desc()).where(sql`false`), + index('indx3').on(sql`lower(${t.name})`), + index('indx4').on(sql`lower(name)`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP INDEX "indx1";', + 'CREATE INDEX "indx1" ON "users" ("name" DESC) WHERE false;', + 'DROP INDEX "indx3";', + 'CREATE INDEX "indx3" ON "users" (lower("name"));', + 'CREATE INDEX "indx4" ON "users" (lower(name));', + ]); + expect(pst).toStrictEqual([ + 'DROP INDEX "indx1";', + // TODO: we ignore columns changes during 'push', we should probably tell user about it in CLI? + // 'DROP INDEX "indx3";', + 'CREATE INDEX "indx1" ON "users" ("name" DESC) WHERE false;', + 'CREATE INDEX "indx4" ON "users" (lower(name));', + // 'CREATE INDEX "indx3" ON "users" (lower("name"));', + ]); +}); + +/** +There are two similar tests shown here +When creating an index with the sql`name != 'alex'`, Cockroach automatically adds 'alex'::STRING +Since this behavior comes directly from the sql`` we can't handle it + +The second test passes because it explicitly add ::STRING +We should provide some kind of hint or suggestion to inform the user about this + */ +test.concurrent('index #3', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }, (t) => [ + index().on(t.name.desc(), t.id.asc()).where(sql`name != 'alex'`), + index('indx1').using('hash', sql`${t.name}`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, ignoreSubsequent: true }); + + const st0 = [ + `CREATE INDEX "users_name_id_index" ON "users" ("name" DESC,"id") WHERE name != 'alex';`, + `CREATE INDEX "indx1" ON "users" ("name") USING hash;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test.concurrent('index #3_1', async ({ dbc: db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name'), + }, (t) => [ + index().on(t.name.desc(), t.id.asc()).where(sql`name != 'alex'::STRING`), + index('indx1').using('hash', sql`${t.name}`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + `CREATE INDEX "users_name_id_index" ON "users" ("name" DESC,"id") WHERE name != 'alex'::STRING;`, + `CREATE INDEX "indx1" ON "users" ("name") USING hash;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/mocks.ts b/drizzle-kit/tests/cockroach/mocks.ts new file mode 100644 index 0000000000..e42e30f6f5 --- /dev/null +++ b/drizzle-kit/tests/cockroach/mocks.ts @@ -0,0 +1,733 @@ +import { is } from 'drizzle-orm'; +import { + AnyCockroachColumn, + CockroachColumnBuilder, + CockroachDialect, + CockroachEnum, + CockroachEnumObject, + CockroachMaterializedView, + CockroachPolicy, + CockroachRole, + CockroachSchema, + CockroachSequence, + CockroachTable, + cockroachTable, + CockroachView, + int4, + isCockroachEnum, + isCockroachMaterializedView, + isCockroachSequence, + isCockroachView, +} from 'drizzle-orm/cockroach-core'; +import { CasingType } from 'src/cli/validations/common'; +import { CockroachDDL, Column, createDDL, interimToDDL, SchemaError } from 'src/dialects/cockroach/ddl'; +import { ddlDiff, ddlDiffDry } from 'src/dialects/cockroach/diff'; +import { + defaultFromColumn, + fromDrizzleSchema, + prepareFromSchemaFiles, + unwrapColumn, +} from 'src/dialects/cockroach/drizzle'; +import { mockResolver } from 'src/utils/mocks'; +import '../../src/@types/utils'; +import Docker from 'dockerode'; +import { existsSync, mkdirSync, rmSync, writeFileSync } from 'fs'; +import getPort from 'get-port'; +import { Pool, PoolClient } from 'pg'; +import { introspect } from 'src/cli/commands/pull-cockroach'; +import { suggestions } from 'src/cli/commands/push-cockroach'; +import { EmptyProgressView, psqlExplain } from 'src/cli/views'; +import { defaultToSQL, isSystemRole } from 'src/dialects/cockroach/grammar'; +import { fromDatabaseForDrizzle } from 'src/dialects/cockroach/introspect'; +import { ddlToTypeScript } from 'src/dialects/cockroach/typescript'; +import { DB } from 'src/utils'; +import { v4 as uuidV4 } from 'uuid'; +import 'zx/globals'; +import { randomUUID } from 'crypto'; +import { EntitiesFilter, EntitiesFilterConfig } from 'src/cli/validations/cli'; +import { hash } from 'src/dialects/common'; +import { extractCrdbExisting } from 'src/dialects/drizzle'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { measure, tsc } from 'tests/utils'; +import { expect, test as base } from 'vitest'; + +mkdirSync('tests/cockroach/tmp', { recursive: true }); + +export type CockroachDBSchema = Record< + string, + | CockroachTable + | CockroachEnum + | CockroachEnumObject + | CockroachSchema + | CockroachSequence + | CockroachView + | CockroachMaterializedView + | CockroachRole + | CockroachPolicy +>; + +class MockError extends Error { + constructor(readonly errors: SchemaError[]) { + super(); + } +} + +export const drizzleToDDL = ( + schema: CockroachDBSchema, + casing: CasingType | undefined, + filterConfig: EntitiesFilterConfig = { + schemas: undefined, + tables: undefined, + entities: undefined, + extensions: undefined, + }, +) => { + const tables = Object.values(schema).filter((it) => is(it, CockroachTable)) as CockroachTable[]; + const schemas = Object.values(schema).filter((it) => is(it, CockroachSchema)) as CockroachSchema[]; + const enums = Object.values(schema).filter((it) => isCockroachEnum(it)) as CockroachEnum[]; + const sequences = Object.values(schema).filter((it) => isCockroachSequence(it)) as CockroachSequence[]; + const roles = Object.values(schema).filter((it) => is(it, CockroachRole)) as CockroachRole[]; + const policies = Object.values(schema).filter((it) => is(it, CockroachPolicy)) as CockroachPolicy[]; + const views = Object.values(schema).filter((it) => isCockroachView(it)) as CockroachView[]; + const materializedViews = Object.values(schema).filter((it) => + isCockroachMaterializedView(it) + ) as CockroachMaterializedView[]; + + const existing = extractCrdbExisting(schemas, views, materializedViews); + const filter = prepareEntityFilter('cockroach', filterConfig, existing); + const { schema: res, errors, warnings } = fromDrizzleSchema( + { + schemas, + tables, + enums, + sequences, + roles, + policies, + views, + matViews: materializedViews, + }, + casing, + filter, + ); + + if (errors.length > 0) { + throw new Error(); + } + + return { ...interimToDDL(res), existing }; +}; + +// 2 schemas -> 2 ddls -> diff +export const diff = async ( + left: CockroachDBSchema | CockroachDDL, + right: CockroachDBSchema | CockroachDDL, + renamesArr: string[], + casing?: CasingType | undefined, +) => { + const { ddl: ddl1, errors: err1 } = 'entities' in left && '_' in left + ? { ddl: left as CockroachDDL, errors: [] } + : drizzleToDDL(left, casing); + const { ddl: ddl2, errors: err2 } = 'entities' in right && '_' in right + ? { ddl: right as CockroachDDL, errors: [] } + : drizzleToDDL(right, casing); + + if (err1.length > 0 || err2.length > 0) { + throw new MockError([...err1, ...err2]); + } + + const renames = new Set(renamesArr); + + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'default', + ); + return { sqlStatements, statements, groupedStatements, next: ddl2 }; +}; + +export const pushM = async (config: { + db: DB; + to: CockroachDBSchema | CockroachDDL; + renames?: string[]; + schemas?: string[]; + casing?: CasingType; + log?: 'statements' | 'none'; + entities?: EntitiesFilter; +}) => { + return measure(push(config), 'push'); +}; +// init schema flush to db -> introspect db to ddl -> compare ddl with destination schema +export const push = async ( + config: { + db: DB; + to: CockroachDBSchema | CockroachDDL; + renames?: string[]; + schemas?: string[]; + casing?: CasingType; + log?: 'statements' | 'none'; + entities?: EntitiesFilter; + ignoreSubsequent?: boolean; + explain?: true; + }, +) => { + const { db, to } = config; + const log = config.log ?? 'none'; + const casing = config.casing ?? 'camelCase'; + + const filterConfig: EntitiesFilterConfig = { + schemas: config.schemas, + tables: undefined, + entities: config.entities, + extensions: [], + }; + + const { ddl: ddl2, errors: err3, existing } = 'entities' in to && '_' in to + ? { ddl: to as CockroachDDL, errors: [], existing: [] } + : drizzleToDDL(to, casing, filterConfig); + + const filter = prepareEntityFilter('cockroach', filterConfig, existing); + + const { schema } = await introspect(db, filter, new EmptyProgressView()); + + const { ddl: ddl1, errors: err2 } = interimToDDL(schema); + + if (err2.length > 0) { + throw new MockError(err2); + } + + if (err3.length > 0) { + throw new MockError(err3); + } + + // TODO: handle errors + + const renames = new Set(config.renames ?? []); + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + + const { hints, losses } = await suggestions(db, statements); + + if (config.explain) { + // const text = groupedStatements.map((x) => psqlExplain(x.jsonStatement, x.sqlStatements)).filter(Boolean).join('\n'); + // console.log(text); + return { sqlStatements, statements, hints, losses }; + } + + for (const sql of sqlStatements) { + if (log === 'statements') console.log(sql); + await db.query(sql); + } + + // subsequent push + if (!config.ignoreSubsequent) { + { + const { schema } = await introspect(db, filter, new EmptyProgressView()); + const { ddl: ddl1, errors: err3 } = interimToDDL(schema); + + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + if (sqlStatements.length > 0) { + // const msg = groupedStatements.map((x) => psqlExplain(x.jsonStatement, x.sqlStatements)).join('\n'); + console.error('---- subsequent push is not empty ----'); + // console.error(msg); + expect(sqlStatements.join('\n')).toBe(''); + } + } + } + + return { sqlStatements, statements, hints, losses }; +}; + +export const diffPush = async (config: { + db: DB; + from: CockroachDBSchema; + to: CockroachDBSchema; + renames?: string[]; + schemas?: string[]; + casing?: CasingType; + entities?: EntitiesFilter; + before?: string[]; + after?: string[]; + apply?: boolean; +}) => { + const { db, from: initSchema, to: destination, casing, before, after, renames: rens, entities } = config; + + const apply = typeof config.apply === 'undefined' ? true : config.apply; + const filterConfig: EntitiesFilterConfig = { + schemas: config.schemas, + tables: [], + entities: config.entities, + extensions: [], + }; + const { ddl: initDDL, existing } = drizzleToDDL(initSchema, casing, filterConfig); + const { sqlStatements: inits } = await ddlDiffDry(createDDL(), initDDL, 'default'); + + const init = [] as string[]; + if (before) init.push(...before); + if (apply) init.push(...inits); + if (after) init.push(...after); + const mViewsRefreshes = initDDL.views.list({ materialized: true }).map((it) => + `REFRESH MATERIALIZED VIEW "${it.schema}"."${it.name}"${it.withNoData ? ' WITH NO DATA;' : ';'};` + ); + init.push(...mViewsRefreshes); + + for (const st of init) { + await db.query(st); + } + + const filter = prepareEntityFilter('cockroach', filterConfig, existing); + + // do introspect into CockroachSchemaInternal + const introspectedSchema = await fromDatabaseForDrizzle(db, filter); + + const { ddl: ddl1, errors: err3 } = interimToDDL(introspectedSchema); + const { ddl: ddl2, errors: err2 } = drizzleToDDL(destination, casing, filterConfig); + + // TODO: handle errors + + const renames = new Set(rens); + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + + const { hints, losses } = await suggestions(db, statements); + return { sqlStatements, statements, hints, losses }; +}; + +// init schema to db -> pull from db to file -> ddl from files -> compare ddl from db with ddl from file +export const diffIntrospect = async ( + db: DB, + initSchema: CockroachDBSchema, + testName: string, + schemas: string[] = [], + entities?: EntitiesFilter, + casing?: CasingType | undefined, +) => { + const filterConfig: EntitiesFilterConfig = { + schemas, + entities, + tables: [], + extensions: [], + }; + const { ddl: initDDL, existing } = drizzleToDDL(initSchema, casing, filterConfig); + const { sqlStatements: init } = await ddlDiffDry(createDDL(), initDDL, 'default'); + + for (const st of init) await db.query(st); + const filter = prepareEntityFilter('cockroach', filterConfig, existing); + // introspect to schema + const schema = await fromDatabaseForDrizzle(db, filter); + + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const filePath = `tests/cockroach/tmp/${testName}.ts`; + + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel'); + writeFileSync(filePath, file.file); + + await tsc(file.file); + + // generate snapshot from ts file + const response = await prepareFromSchemaFiles([filePath]); + + const { schema: schema2, errors: e2, warnings } = fromDrizzleSchema(response, casing, filter); + const { ddl: ddl2, errors: e3 } = interimToDDL(schema2); + + const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements } = await ddlDiffDry( + ddl1, + ddl2, + 'push', + ); + + rmSync(`tests/cockroach/tmp/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; + +export const diffDefault = async ( + db: TestDatabase, + builder: T, + expectedDefault: string, + override?: { + expectError?: boolean; + ignoreSubsequent?: boolean; + pre?: CockroachDBSchema; + }, +) => { + await db.clear(); + + const config = (builder as any).config; + + const expectError = override?.expectError ?? false; + const ignoreSubsequent = typeof override?.ignoreSubsequent === 'undefined' ? true : override.ignoreSubsequent; + const pre: CockroachDBSchema | null = override?.pre ?? null; + const def = config['default']; + + const column = cockroachTable('table', { column: builder }).column; + const { dimensions, typeSchema, sqlType: sqlt } = unwrapColumn(column); + const type = sqlt.replaceAll('[]', ''); + + const columnDefault = defaultFromColumn(column, column.default, dimensions, new CockroachDialect()); + + const defaultSql = defaultToSQL({ + default: columnDefault, + type, + dimensions, + typeSchema: typeSchema, + }); + + const res = [] as string[]; + if (defaultSql !== expectedDefault) { + res.push(`Unexpected sql: \n${defaultSql}\n${expectedDefault}`); + } + + const init = { + ...pre, + table: cockroachTable('table', { column: builder }), + }; + + if (pre) await push({ db, to: pre, ignoreSubsequent }); + const { sqlStatements: st1 } = await push({ db, to: init, ignoreSubsequent }); + const { sqlStatements: st2 } = await push({ db, to: init, ignoreSubsequent }); + + const typeSchemaPrefix = typeSchema && typeSchema !== 'public' ? `"${typeSchema}".` : ''; + const typeValue = typeSchema ? `"${type}"` : type; + const sqlType = `${typeSchemaPrefix}${typeValue}${'[]'.repeat(dimensions)}`; + const expectedInit = `CREATE TABLE "table" (\n\t"column" ${sqlType} DEFAULT ${expectedDefault}\n);\n`; + + if (st1.length !== 1 || st1[0] !== expectedInit) res.push(`Unexpected init:\n${st1}\n\n${expectedInit}`); + if (st2.length > 0) res.push(`Unexpected subsequent init:\n${st2}`); + + try { + await db.query('INSERT INTO "table" ("column") VALUES (default);'); + } catch (error) { + if (!expectError) throw error; + res.push(`Insert default failed`); + } + + const filter = () => true; + // introspect to schema + const schema = await fromDatabaseForDrizzle(db, filter); + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel'); + const path = `tests/cockroach/tmp/temp-${randomUUID()}.ts`; + + if (existsSync(path)) rmSync(path); + writeFileSync(path, file.file); + await tsc(file.file); + + const response = await prepareFromSchemaFiles([path]); + + const { schema: sch } = fromDrizzleSchema(response, 'camelCase', () => true); + const { ddl: ddl2, errors: e3 } = interimToDDL(sch); + + const { sqlStatements: afterFileSqlStatements } = await ddlDiffDry(ddl1, ddl2, 'push'); + + if (afterFileSqlStatements.length === 0) { + rmSync(path); + } else { + console.log(afterFileSqlStatements); + console.log(`./${path}`); + res.push(`Default type mismatch after diff:\n${`./${path}`}`); + } + + await db.clear(); + + config.hasDefault = false; + config.default = undefined; + const schema1 = { + ...pre, + table: cockroachTable('table', { column: builder }), + }; + + config.hasDefault = true; + config.default = def; + const schema2 = { + ...pre, + table: cockroachTable('table', { column: builder }), + }; + + if (pre) await push({ db, to: pre, ignoreSubsequent }); + await push({ db, to: schema1, ignoreSubsequent }); + const { sqlStatements: st3 } = await push({ db, to: schema2, ignoreSubsequent }); + const expectedAlter = `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT ${expectedDefault};`; + if (st3.length !== 1 || st3[0] !== expectedAlter) res.push(`Unexpected default alter:\n${st3}\n\n${expectedAlter}`); + + await db.clear(); + + const schema3 = { + ...pre, + table: cockroachTable('table', { id: int4() }), + }; + + const schema4 = { + ...pre, + table: cockroachTable('table', { id: int4(), column: builder }), + }; + + if (pre) await push({ db, to: pre, ignoreSubsequent }); + await push({ db, to: schema3, ignoreSubsequent }); + const { sqlStatements: st4 } = await push({ db, to: schema4, ignoreSubsequent }); + + const expectedAddColumn = `ALTER TABLE "table" ADD COLUMN "column" ${sqlType} DEFAULT ${expectedDefault};`; + if (st4.length !== 1 || st4[0] !== expectedAddColumn) { + res.push(`Unexpected add column:\n${st4[0]}\n\n${expectedAddColumn}`); + } + + return res; +}; + +export type TestDatabase = DB & { + batch: (sql: string[]) => Promise; + close: () => void; + clear: () => Promise; +}; + +export type TestDatabaseKit = { + acquire: () => Promise<{ db: TestDatabase; release: () => void }>; + acquireTx: () => Promise<{ db: TestDatabase; release: () => void }>; + close: () => Promise; +}; + +export async function createDockerDB() { + const docker = new Docker(); + const port = await getPort({ port: 26257 }); + const image = 'cockroachdb/cockroach:v25.2.0'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + const container = await docker.createContainer({ + Image: image, + Cmd: ['start-single-node', '--insecure'], + name: `drizzle-integration-tests-${uuidV4()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '26257/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await container.start(); + + return { + url: `postgresql://root@127.0.0.1:${port}/defaultdb?sslmode=disable`, + container, + }; +} + +const prepareClient = async (url: string, n: string, tx: boolean) => { + const name = `${n}${hash(String(Math.random()), 10)}`; + + const client = await new Pool({ connectionString: url, max: 1 }).connect(); + + await client.query(`DROP DATABASE IF EXISTS ${name};`); + await client.query(`CREATE DATABASE IF NOT EXISTS ${name};`); + await client.query(`USE ${name}`); + + await client.query('SET autocommit_before_ddl = OFF;'); // for transactions to work + await client.query(`SET CLUSTER SETTING feature.vector_index.enabled = true;`); + + // await client.query(`SET TIME ZONE '+01';`); + + if (tx) { + await client.query('BEGIN'); + } + + const clear = async () => { + if (tx) { + await client.query('ROLLBACK'); + await client.query('BEGIN'); + } else { + await client.query(`DROP DATABASE IF EXISTS ${name};`); + await client.query(`CREATE DATABASE ${name};`); + await client.query(`USE ${name};`); + const roles = await client.query<{ rolname: string }>( + `SELECT rolname, rolinherit, rolcreatedb, rolcreaterole FROM pg_roles;`, + ).then((it) => it.rows.filter((it) => !isSystemRole(it.rolname))); + + for (const role of roles) { + await client.query(`DROP ROLE "${role.rolname}"`); + } + } + }; + + const db: TestDatabase = { + query: async (sql, params) => { + return client + .query(sql, params) + .then((it) => it.rows as any[]) + .catch((e: Error) => { + const error = new Error(`query error: ${sql}\n\n${e.message}`); + throw error; + }); + }, + batch: async (sqls) => { + for (const sql of sqls) { + await client.query(sql); + } + }, + clear: clear, + close: async () => { + client.release(); + }, + }; + return db; +}; + +export const prepareTestDatabase = async (): Promise => { + const envUrl = process.env.COCKROACH_CONNECTION_STRING; + const { url, container } = envUrl ? { url: envUrl, container: null } : await createDockerDB(); + + const clients = [ + await prepareClient(url, 'db0', false), + await prepareClient(url, 'db1', false), + await prepareClient(url, 'db2', false), + await prepareClient(url, 'db3', false), + await prepareClient(url, 'db4', false), + ]; + + const clientsTxs = [ + await prepareClient(url, 'dbc0', true), + await prepareClient(url, 'dbc1', true), + await prepareClient(url, 'dbc2', true), + await prepareClient(url, 'dbc3', true), + await prepareClient(url, 'dbc4', true), + ]; + + const closureTxs = async () => { + while (true) { + const c = clientsTxs.shift(); + if (!c) { + await sleep(50); + continue; + } + return { + db: c, + release: () => { + clientsTxs.push(c); + }, + }; + } + }; + + const closure = async () => { + while (true) { + const c = clients.shift(); + if (!c) { + await sleep(50); + continue; + } + return { + db: c, + release: () => { + clients.push(c); + }, + }; + } + }; + + return { + acquire: closure, + acquireTx: closureTxs, + close: async () => { + for (const c of clients) { + c.close(); + } + await container?.stop(); + }, + }; +}; + +export const test = base.extend<{ kit: TestDatabaseKit; db: TestDatabase; dbc: TestDatabase }>({ + kit: [ + async ({}, use) => { // oxlint-disable-line no-empty-pattern + const kit = await prepareTestDatabase(); + try { + await use(kit); + } finally { + await kit.close(); + } + }, + { scope: 'worker' }, + ], + // concurrent no transactions + db: [ + async ({ kit }, use) => { + const { db, release } = await kit.acquire(); + await use(db); + await db.clear(); + release(); + }, + { scope: 'test' }, + ], + + // concurrent with transactions + dbc: [ + async ({ kit }, use) => { + const { db, release } = await kit.acquireTx(); + await use(db); + await db.clear(); + release(); + }, + { scope: 'test' }, + ], +}); diff --git a/drizzle-kit/tests/cockroach/policy.test.ts b/drizzle-kit/tests/cockroach/policy.test.ts new file mode 100644 index 0000000000..762ec3f4ac --- /dev/null +++ b/drizzle-kit/tests/cockroach/policy.test.ts @@ -0,0 +1,1526 @@ +import { sql } from 'drizzle-orm'; +import { cockroachPolicy, cockroachRole, cockroachSchema, cockroachTable, int4 } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +const systemRoles = ['admin', 'root']; +test('full policy: no changes', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy + enable rls', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop policy + disable rls', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + 'DROP POLICY "test" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy without enable rls', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' }), cockroachPolicy('newRls')]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'CREATE POLICY "newRls" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop policy without disable rls', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' }), cockroachPolicy('oldRls')]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "oldRls" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/** + * Subsequent push is disabled for the first test (currest_user, session_user treated as corner cases) + * Subsequent push is enabled for the first test + */ +test('alter policy without recreation: changing roles', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive', to: 'session_user' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO session_user;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy without recreation: changing roles #2', async ({ db }) => { + const role = cockroachRole('owner'); + const schema1 = { + role, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + role, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive', to: role })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { exclude: systemRoles } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { exclude: systemRoles } }, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO "owner";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy without recreation: changing using', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive', using: sql`true` })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public USING (true);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy without recreation: changing with check', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive', withCheck: sql`true` })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public WITH CHECK (true);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +/// + +test('alter policy with recreation: changing as', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'restrictive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy with recreation: changing for', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive', for: 'delete' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy with recreation: changing both "as" and "for"', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'restrictive', for: 'insert' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR INSERT TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy with recreation: changing all fields', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive', for: 'select', using: sql`true` })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'restrictive', to: 'current_user', withCheck: sql`true` })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO current_user WITH CHECK (true);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy with recreation: changing all fields #2', async ({ db }) => { + const root = cockroachRole('root'); + const admin = cockroachRole('admin'); + const owner = cockroachRole('owner'); + const schema1 = { + root, + admin, + owner, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive', for: 'select', using: sql`true` })]), + }; + + const schema2 = { + root, + admin, + owner, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'restrictive', to: owner, withCheck: sql`true` })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: true } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: true }, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO "owner" WITH CHECK (true);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename policy', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('newName', { as: 'permissive' })]), + }; + + const renames = [ + 'public.users.test->public.users.newName', + ]; + + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" RENAME TO "newName";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename policy in renamed table', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [ + cockroachPolicy('test', { as: 'permissive' }), + ]), + }; + + const schema2 = { + users: cockroachTable('users2', { + id: int4('id').primaryKey(), + }, (t) => [cockroachPolicy('newName', { as: 'permissive' })]), + }; + + const renames = ['public.users->public.users2', 'public.users2.test->public.users2.newName']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER POLICY "test" ON "users2" RENAME TO "newName";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table with a policy', async ({ db }) => { + const schema1 = {}; + + const schema2 = { + users: cockroachTable('users2', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'CREATE TABLE "users2" (\n\t"id" int4 PRIMARY KEY\n);\n', + 'ALTER TABLE "users2" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users2" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop table with a policy', async ({ db }) => { + const schema1 = { + users: cockroachTable('users2', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = {}; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users2";', + 'DROP TABLE "users2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy with multiple "to" roles', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const role = cockroachRole('manager'); + + const schema2 = { + role, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { to: ['current_user', role] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'CREATE ROLE "manager";', + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_user, "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy with multiple "to" roles #2', async ({ db }) => { + const role2 = cockroachRole('owner'); + const schema1 = { + role2, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const role = cockroachRole('manager'); + + const schema2 = { + role2, + role, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { to: [role2, role] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { exclude: systemRoles } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { exclude: systemRoles } }, + }); + + const st0 = [ + 'CREATE ROLE "manager";', + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO "manager", "owner";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table with rls enabled', async ({ db }) => { + const schema1 = {}; + + const schema2 = { + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY\n);\n`, + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enable rls force', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('disable rls force', async ({ db }) => { + const schema1 = { + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop policy with enabled rls', async ({ db }) => { + const role = cockroachRole('manager'); + + const schema1 = { + role, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { to: ['current_user', role] })]).enableRLS(), + }; + + const schema2 = { + role, + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, ignoreSubsequent: true }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('drop policy with enabled rls #2', async ({ db }) => { + const role = cockroachRole('manager'); + + const schema1 = { + role, + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { to: [role] })]), + }; + + const schema2 = { + role, + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy with enabled rls', async ({ db }) => { + const schema1 = { + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }), + }; + + const role = cockroachRole('manager'); + + const schema2 = { + role, + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { to: ['current_user', role] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + ignoreSubsequent: true, + }); + + const st0 = [ + 'CREATE ROLE "manager";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_user, "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy with enabled rls #2', async ({ db }) => { + const role2 = cockroachRole('owner'); + const schema1 = { + role2, + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }), + }; + + const role = cockroachRole('manager'); + + const schema2 = { + role2, + role, + users: cockroachTable.withRLS('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { to: [role2, role] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['owner'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { exclude: systemRoles } }, + }); + + const st0 = [ + 'CREATE ROLE "manager";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO "manager", "owner";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy + link table', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('link table', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + rls: cockroachPolicy('test', { as: 'permissive' }), + }; + + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unlink table', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + 'DROP POLICY "test" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop policy with link', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + 'DROP POLICY "test" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy in table and with link table', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [ + cockroachPolicy('test1', { to: 'current_user' }), + ]); + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test1" ON "users" AS PERMISSIVE FOR ALL TO current_user;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy in table and with link table #2', async ({ db }) => { + const role = cockroachRole('owner'); + const schema1 = { + role, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [ + cockroachPolicy('test1', { to: role }), + ]); + + const schema2 = { + role, + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['owner'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['owner'] } }, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test1" ON "users" AS PERMISSIVE FOR ALL TO "owner";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('link non-schema table', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { users }; + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unlink non-schema table', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + 'DROP POLICY "test" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy + link non-schema table', async ({ db }) => { + const cities = cockroachTable.withRLS('cities', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + cities, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + cities, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test2'), + ]), + rls: cockroachPolicy('test', { as: 'permissive' }).link(cities), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "cities" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy + link non-schema table from auth schema', async ({ db }) => { + const authSchema = cockroachSchema('auth'); + const cities = authSchema.table('cities', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + authSchema, + cities, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const schema2 = { + authSchema, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test2'), + ]), + cities, + rls: cockroachPolicy('test', { as: 'permissive' }).link(cities), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'ALTER TABLE "auth"."cities" ENABLE ROW LEVEL SECURITY;', + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "auth"."cities" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]); + expect(pst).toStrictEqual([ + 'ALTER TABLE "auth"."cities" ENABLE ROW LEVEL SECURITY;', + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "auth"."cities" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]); +}); + +test('rename policy that is linked', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + rls: cockroachPolicy('newName', { as: 'permissive' }).link(users), + }; + + const renames = [ + 'public.users.test->public.users.newName', + ]; + + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" RENAME TO "newName";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy that is linked', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive', to: 'current_user' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO current_user;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy that is linked #2', async ({ db }) => { + const role = cockroachRole('owner'); + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + role, + users, + rls: cockroachPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + role, + users, + rls: cockroachPolicy('test', { as: 'permissive', to: role }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['owner'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['owner'] } }, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO "owner";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy that is linked: withCheck', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users, + rls: cockroachPolicy('test', { as: 'permissive', withCheck: sql`true` }).link(users), + }; + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive', withCheck: sql`false` }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public WITH CHECK (false);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy that is linked: using', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users, + rls: cockroachPolicy('test', { as: 'permissive', using: sql`true` }).link(users), + }; + + const schema2 = { + users, + rls: cockroachPolicy('test', { as: 'permissive', using: sql`false` }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public USING (false);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy that is linked: using', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users, + rls: cockroachPolicy('test', { for: 'insert' }).link(users), + }; + + const schema2 = { + users, + rls: cockroachPolicy('test', { for: 'delete' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +//// + +test('alter policy in the table', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { as: 'permissive' }), + ]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { as: 'permissive', to: 'current_user' }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO current_user;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy in the table #2', async ({ db }) => { + const role = cockroachRole('owner'); + const schema1 = { + role, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { as: 'permissive' }), + ]), + }; + + const schema2 = { + role, + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { as: 'permissive', to: role }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { exclude: systemRoles } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { exclude: systemRoles } }, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO "owner";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy in the table: withCheck', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { as: 'permissive', withCheck: sql`true` }), + ]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { as: 'permissive', withCheck: sql`false` }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public WITH CHECK (false);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy in the table: using', async ({ db }) => { + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { as: 'permissive', using: sql`true` }), + ]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { as: 'permissive', using: sql`false` }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public USING (false);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy in the table: using', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { for: 'insert' }), + ]), + }; + + const schema2 = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, (t) => [ + cockroachPolicy('test', { for: 'delete' }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/pull-without-tx.test.ts b/drizzle-kit/tests/cockroach/pull-without-tx.test.ts new file mode 100644 index 0000000000..6973a48c72 --- /dev/null +++ b/drizzle-kit/tests/cockroach/pull-without-tx.test.ts @@ -0,0 +1,161 @@ +import { sql } from 'drizzle-orm'; +import { cockroachPolicy, cockroachRole, cockroachTable, int4 } from 'drizzle-orm/cockroach-core'; +import { diffIntrospect, test } from 'tests/cockroach/mocks'; +import { expect } from 'vitest'; + +test('basic policy', async ({ db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test')]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-policy', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic policy with "as"', async ({ db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive' })]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-policy-as', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic policy with CURRENT_USER role', async ({ db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { to: 'current_user' })]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-policy-with-current-user-role', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic policy with all fields except "using" and "with"', async ({ db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { as: 'permissive', for: 'all', to: ['root'] })]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-policy-all-fields', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic policy with "using" and "with"', async ({ db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { using: sql`true`, withCheck: sql`true` })]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-policy-using-withcheck', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('multiple policies', async ({ db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }, () => [cockroachPolicy('test', { using: sql`true`, withCheck: sql`true` }), cockroachPolicy('newRls')]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'multiple-policies', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('multiple policies with roles', async ({ db }) => { + await db.query(`CREATE ROLE new_manager;`); + + const schema = { + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + }, + () => [ + cockroachPolicy('test', { using: sql`true`, withCheck: sql`true` }), + cockroachPolicy('newRls', { to: ['root', 'new_manager'] }), + ], + ), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'multiple-policies-with-roles', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('multiple policies with roles from schema', async ({ db }) => { + const usersRole = cockroachRole('user_role', { createRole: true }); + + const schema = { + usersRole, + users: cockroachTable( + 'users', + { + id: int4('id').primaryKey(), + }, + () => [ + cockroachPolicy('test', { using: sql`true`, withCheck: sql`true` }), + cockroachPolicy('newRls', { to: ['root', usersRole] }), + ], + ), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'multiple-policies-with-roles-from-schema', + ['public'], + { roles: { include: ['user_role'] } }, + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/cockroach/pull.test.ts b/drizzle-kit/tests/cockroach/pull.test.ts new file mode 100644 index 0000000000..1a6fe44828 --- /dev/null +++ b/drizzle-kit/tests/cockroach/pull.test.ts @@ -0,0 +1,695 @@ +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + bit, + bool, + char, + check, + cockroachEnum, + cockroachMaterializedView, + cockroachRole, + cockroachSchema, + cockroachTable, + cockroachView, + date, + decimal, + doublePrecision, + float, + geometry, + index, + inet, + int4, + interval, + jsonb, + numeric, + real, + smallint, + string, + text, + time, + timestamp, + uuid, + varbit, + varchar, +} from 'drizzle-orm/cockroach-core'; +import { diffIntrospect, test } from 'tests/cockroach/mocks'; +import { expect } from 'vitest'; + +test.concurrent('basic introspect test', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').notNull(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'basic-introspect'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('basic identity always test', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').generatedAlwaysAsIdentity(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'basic-identity-always-introspect'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('basic identity by default test', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-identity-default-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('basic index test', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + firstName: text('first_name'), + lastName: text('last_name'), + data: jsonb('data'), + }, (table) => [ + index('single_column').on(table.firstName), + index('multi_column').on(table.firstName, table.lastName), + index('single_expression').on(sql`lower(${table.firstName})`), + index('multi_expression').on(sql`lower(${table.firstName})`, sql`lower(${table.lastName})`), + index('expression_with_comma').on( + sql`(lower(${table.firstName}) || ', '::text || lower(${table.lastName}))`, + ), + index('expression_with_double_quote').on(sql`('"'::text || ${table.firstName})`), + index('expression_with_jsonb_operator').on( + sql`(${table.data} #>> '{a,b,1}'::text[])`, + ), + ]), + }; + + const { sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-index-introspect', + ); + + expect(sqlStatements).toStrictEqual([]); +}); + +test.concurrent('identity always test: few params', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').generatedAlwaysAsIdentity({ + startWith: 100, + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'identity-always-few-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('identity by default test: few params', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + maxValue: 10000, + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'identity-default-few-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('identity always test: all params', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').generatedAlwaysAsIdentity({ + startWith: 10, + increment: 4, + minValue: 10, + maxValue: 10000, + cache: 100, + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'identity-always-all-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('identity by default test: all params', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').generatedByDefaultAsIdentity({ + startWith: 10, + increment: 4, + minValue: 10, + maxValue: 10000, + cache: 100, + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'identity-default-all-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('generated column: link to another column', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id').generatedAlwaysAsIdentity(), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`email`, + ), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'generated-link-column', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect all column types', async ({ dbc: db }) => { + const myEnum = cockroachEnum('my_enum', ['a', 'b', 'c']); + const schema = { + enum_: myEnum, + columns: cockroachTable('columns', { + bigint: bigint('bigint', { mode: 'number' }).default(100), + bool: bool('bool').default(true), + geometry: geometry({ srid: 213, mode: 'tuple' }), + char: char('char', { length: 3 }).default('abc'), + date1: date('date1').default('2024-01-01'), + date2: date('date2').defaultNow(), + date3: date('date3').default(sql`current_timestamp`), + numeric: numeric('numeric', { precision: 3, scale: 1 }).default('99.9'), + numeric2: numeric('numeric2', { precision: 1, scale: 1 }).default('0.9'), + numeric3: numeric('numeric3').default('99.9'), + decimal: decimal('decimal', { precision: 3, scale: 1 }).default('99.9'), + decimal2: decimal('decimal2', { precision: 1, scale: 1 }).default('0.9'), + decimal3: decimal('decimal3').default('99.9'), + enum: myEnum('my_enum').default('a'), + bit: bit('bit').default('1'), + varit: varbit('varbit').default('1'), + float: float('float').default(100), + doublePrecision: doublePrecision('doublePrecision').default(100), + inet: inet('inet').default('127.0.0.1'), + int4: int4('int4').default(10), + interval: interval('interval').default('1 day 01:00:00'), + jsonb: jsonb('jsonb').$type<{ attr: string }>().default({ attr: 'value' }), + real: real('real').default(100), + smallint: smallint('smallint').default(10), + string: string('string').default('value'), + text: text('test').default('abc'), + time1: time('time1').default('00:00:00'), + timestamp1: timestamp('timestamp1', { withTimezone: true, precision: 6 }).default(new Date()), + timestamp2: timestamp('timestamp2', { withTimezone: true, precision: 6 }).defaultNow(), + timestamp3: timestamp('timestamp3', { withTimezone: true, precision: 6 }).default( + sql`timezone('utc'::text, now())`, + ), + uuid1: uuid('uuid1').default('a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'), + uuid2: uuid('uuid2').defaultRandom(), + varchar: varchar('varchar', { length: 25 }).default('abc'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-all-columns-types', + ); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test.concurrent('introspect all column array types', async ({ dbc: db }) => { + const myEnum = cockroachEnum('my_enum', ['a', 'b', 'c']); + const schema = { + enum_: myEnum, + columns: cockroachTable('columns', { + bigint: bigint('bigint', { mode: 'number' }).default(100).array(), + bit: bit().array(), + varbit: varbit().array(), + geometry: geometry().array(), + bool: bool('bool').default(true).array(), + char: char('char', { length: 3 }).default('abc').array(), + date1: date('date1').default('2024-01-01').array(), + date2: date('date2').defaultNow().array(), + date3: date('date3').default(sql`current_timestamp`).array(), + numeric: numeric('numeric', { precision: 3, scale: 1 }).default('99.9').array(), + numeric2: numeric('numeric2', { precision: 1, scale: 1 }).default('0.9').array(), + numeric3: numeric('numeric3').default('99.9').array(), + decimal: decimal('decimal', { precision: 3, scale: 1 }).default('99.9').array(), + decimal2: decimal('decimal2', { precision: 1, scale: 1 }).default('0.9').array(), + decimal3: decimal('decimal3').default('99.9').array(), + enum: myEnum('my_enum').default('a').array(), + float: float('float').default(100).array(), + doublePrecision: doublePrecision('doublePrecision').default(100).array(), + inet: inet('inet').default('127.0.0.1').array(), + int4: int4('int4').default(10).array(), + interval: interval('interval').default('1 day 01:00:00').array(), + real: real('real').default(100).array(), + smallint: smallint('smallint').default(10).array(), + string: string('string').default('value').array(), + text: text('test').default('abc').array(), + time1: time('time1').default('00:00:00').array(), + timestamp1: timestamp('timestamp1', { withTimezone: true, precision: 6 }).default(new Date()).array(), + timestamp2: timestamp('timestamp2', { withTimezone: true, precision: 6 }).defaultNow().array(), + timestamp3: timestamp('timestamp3', { withTimezone: true, precision: 6 }).default( + sql`timezone('utc'::text, now())`, + ).array(), + uuid1: uuid('uuid1').default('a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11').array(), + uuid2: uuid('uuid2').defaultRandom().array(), + varchar: varchar('varchar', { length: 25 }).default('abc').array(), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-all-columns-array-types', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect columns with name with non-alphanumeric characters', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + 'not:allowed': int4('not:allowed'), + 'nuh--uh': int4('nuh-uh'), + '1_nope': int4('1_nope'), + valid: int4('valid'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-column-with-name-with-non-alphanumeric-characters', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect enum from different schema', async ({ dbc: db }) => { + const schema2 = cockroachSchema('schema2'); + const myEnumInSchema2 = schema2.enum('my_enum', ['a', 'b', 'c']); + const schema = { + schema2, + myEnumInSchema2, + users: cockroachTable('users', { + col: myEnumInSchema2('col'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-enum-from-different-schema', + ['public', 'schema2'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect enum with same names across different schema', async ({ dbc: db }) => { + const schema2 = cockroachSchema('schema2'); + const myEnumInSchema2 = schema2.enum('my_enum', ['a', 'b', 'c']); + const myEnum = cockroachEnum('my_enum', ['a', 'b', 'c']); + const schema = { + schema2, + myEnumInSchema2, + myEnum, + users: cockroachTable('users', { + col1: myEnumInSchema2('col1'), + col2: myEnum('col2'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-enum-with-same-names-across-different-schema', + ['public', 'schema2'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect enum with similar name to native type', async ({ dbc: db }) => { + const timeLeft = cockroachEnum('time_left', ['short', 'medium', 'long']); + const schema = { + timeLeft, + auction: cockroachTable('auction', { + col: timeLeft('col1'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-enum-with-similar-name-to-native-type', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect strings with single quotes', async ({ dbc: db }) => { + const myEnum = cockroachEnum('my_enum', ['escape\'s quotes " ']); + const schema = { + enum_: myEnum, + columns: cockroachTable('columns', { + enum: myEnum('my_enum').default('escape\'s quotes " '), + text: text('text').default('escape\'s quotes " '), + varchar: varchar('varchar').default('escape\'s quotes " '), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-strings-with-single-quotes', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect checks', async ({ dbc: db }) => { + const schema = { + users: cockroachTable('users', { + id: int4('id'), + name: varchar('name'), + age: int4('age'), + }, (table) => [check('some_check', sql`${table.age} > 21`)]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-checks', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect checks from different schemas with same names', async ({ dbc: db }) => { + const mySchema = cockroachSchema('schema2'); + const schema = { + mySchema, + users: cockroachTable('users', { + id: int4('id'), + age: int4('age'), + }, (table) => [check('some_check', sql`${table.age} > 21`)]), + usersInMySchema: mySchema.table('users', { + id: int4('id'), + age: int4('age'), + }, (table) => [check('some_check', sql`${table.age} < 1`)]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-checks-diff-schema-same-names', + ['public', 'schema2'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect view #1', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = cockroachView('some_view').as((qb) => qb.select().from(users)); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-view', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect view #2', async ({ dbc: db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = cockroachView('some_view', { id: int4('asd') }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-view-2', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect view in other schema', async ({ dbc: db }) => { + const newSchema = cockroachSchema('new_schema'); + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = newSchema.view('some_view', { id: int4('asd') }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + newSchema, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-view-in-other-schema', + ['new_schema', 'public'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect materialized view in other schema', async ({ db }) => { + const newSchema = cockroachSchema('new_schema'); + const users = cockroachTable('users', { + id: int4().primaryKey(), + name: varchar(), + }); + + const view = newSchema.materializedView('some_view', { id: int4() }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + newSchema, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-mat-view-in-other-schema', + ['new_schema', 'public'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect materialized view #1', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = cockroachMaterializedView('some_view').withNoData().as((qb) => qb.select().from(users)); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-materialized-view', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('introspect materialized view #2', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = cockroachMaterializedView('some_view', { id: int4('asd') }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-materialized-view-2', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('basic roles', async ({ dbc: db }) => { + const schema = { + usersRole: cockroachRole('user'), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-roles', + ['public'], + { roles: { include: ['user'] } }, + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('role with properties', async ({ dbc: db }) => { + const schema = { + usersRole: cockroachRole('user', { createDb: true, createRole: true }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'roles-with-properties', + ['public'], + { roles: { include: ['user'] } }, + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('role with a few properties', async ({ dbc: db }) => { + const schema = { + usersRole: cockroachRole('user', { createRole: true }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'roles-with-few-properties', + ['public'], + { roles: { include: ['user'] } }, + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test.concurrent('case sensitive schema name + identity column', async ({ dbc: db }) => { + const mySchema = cockroachSchema('CaseSensitiveSchema'); + const schema = { + mySchema, + users: mySchema.table('users', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + name: text('name'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'case-sensitive-schema-name', + ['CaseSensitiveSchema'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/cockroach/role.test.ts b/drizzle-kit/tests/cockroach/role.test.ts new file mode 100644 index 0000000000..288d9dc0c6 --- /dev/null +++ b/drizzle-kit/tests/cockroach/role.test.ts @@ -0,0 +1,197 @@ +import { cockroachRole } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test('create role', async ({ db }) => { + const schema1 = {}; + + const schema2 = { + manager: cockroachRole('manager'), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'CREATE ROLE "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create role with properties', async ({ db }) => { + const schema1 = {}; + + const schema2 = { + manager: cockroachRole('manager', { createDb: true, createRole: true }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'CREATE ROLE "manager" WITH CREATEDB CREATEROLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create role with some properties', async ({ db }) => { + const schema1 = {}; + + const schema2 = { + manager: cockroachRole('manager', { createDb: true }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + }); + + const st0 = [ + 'CREATE ROLE "manager" WITH CREATEDB;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop role', async ({ db }) => { + const schema1 = { manager: cockroachRole('manager') }; + + const schema2 = {}; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'DROP ROLE "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create and drop role', async ({ db }) => { + const schema1 = { + manager: cockroachRole('manager'), + }; + + const schema2 = { + superuser: cockroachRole('superuser'), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager', 'superuser'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager', 'superuser'] } }, + }); + + const st0 = [ + 'DROP ROLE "manager";', + 'CREATE ROLE "superuser";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename role - recreate', async ({ db }) => { + const schema1 = { + manager: cockroachRole('manager'), + }; + + const schema2 = { + superuser: cockroachRole('superuser'), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager', 'superuser'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager', 'superuser'] } }, + }); + + const st0 = [ + `DROP ROLE "manager";`, + `CREATE ROLE "superuser";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter all role field', async ({ db }) => { + const schema1 = { + manager: cockroachRole('manager'), + }; + + const schema2 = { + manager: cockroachRole('manager', { createDb: true, createRole: true }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + }); + + const st0 = [ + 'ALTER ROLE "manager" WITH CREATEDB CREATEROLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter createdb in role', async ({ db }) => { + const schema1 = { + manager: cockroachRole('manager'), + }; + + const schema2 = { + manager: cockroachRole('manager', { createDb: true }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'ALTER ROLE "manager" WITH CREATEDB NOCREATEROLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter createrole in role', async ({ db }) => { + const schema1 = { + manager: cockroachRole('manager'), + }; + + const schema2 = { + manager: cockroachRole('manager', { createRole: true }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'ALTER ROLE "manager" WITH NOCREATEDB CREATEROLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/schemas.test.ts b/drizzle-kit/tests/cockroach/schemas.test.ts new file mode 100644 index 0000000000..83eb9857ae --- /dev/null +++ b/drizzle-kit/tests/cockroach/schemas.test.ts @@ -0,0 +1,143 @@ +import { cockroachSchema } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('add schema #1', async ({ db }) => { + const to = { + devSchema: cockroachSchema('dev'), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SCHEMA "dev";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add schema #2', async ({ db }) => { + const from = { + devSchema: cockroachSchema('dev'), + }; + const to = { + devSchema: cockroachSchema('dev'), + devSchema2: cockroachSchema('dev2'), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SCHEMA "dev2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('delete schema #1', async ({ db }) => { + const from = { + devSchema: cockroachSchema('dev'), + }; + + const { sqlStatements: st } = await diff(from, {}, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to: {}, + }); + + const st0 = [ + 'DROP SCHEMA "dev";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('delete schema #2', async ({ db }) => { + const from = { + devSchema: cockroachSchema('dev'), + devSchema2: cockroachSchema('dev2'), + }; + const to = { + devSchema: cockroachSchema('dev'), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'DROP SCHEMA "dev2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename schema #1', async ({ db }) => { + const from = { + devSchema: cockroachSchema('dev'), + }; + + const to = { + devSchema2: cockroachSchema('dev2'), + }; + + const renames = ['dev->dev2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SCHEMA "dev" RENAME TO "dev2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename schema #2', async ({ db }) => { + const from = { + devSchema: cockroachSchema('dev'), + devSchema1: cockroachSchema('dev1'), + }; + const to = { + devSchema: cockroachSchema('dev'), + devSchema2: cockroachSchema('dev2'), + }; + + const renames = ['dev1->dev2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SCHEMA "dev1" RENAME TO "dev2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/schemas/schema0.ts b/drizzle-kit/tests/cockroach/schemas/schema0.ts new file mode 100644 index 0000000000..5a3355a628 --- /dev/null +++ b/drizzle-kit/tests/cockroach/schemas/schema0.ts @@ -0,0 +1,61 @@ +import { sql } from 'drizzle-orm'; +import { + bigint, + boolean, + check, + cockroachEnum, + cockroachSchema, + cockroachSequence, + index, + jsonb, + smallint, + text, + timestamp, + unique, + uniqueIndex, + uuid, +} from 'drizzle-orm/cockroach-core'; + +// generated with AI and updated manually in some places + +export const core = cockroachSchema('core'); +export const currencyCode = cockroachEnum('currency_code', ['USD', 'EUR', 'GBP', 'UAH', 'JPY']); + +export const seqOrgCode = cockroachSequence('seq_org_code', { + startWith: '1000', + increment: '1', + minValue: '1', + maxValue: '9223372036854775807', + cache: '1', +}); + +export const organizationsInCore = core.table('organizations', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: bigint({ mode: 'number' }).default(sql`nextval('public.seq_org_code'::REGCLASS)`).notNull(), + name: text().notNull(), + domain: text(), + currency: currencyCode().default('EUR').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_org_name_idx').using('btree', table.name.asc()), + index('organizations_code_idx').using('btree', table.code.asc()), + unique('organizations_domain_key').on(table.domain), + check('organizations_name_check', sql`char_length(name) > 1`), +]); + +export const taskQueueInAnalytics = core.table('task_queue', { + id: uuid().defaultRandom().primaryKey().notNull(), + queueName: text('queue_name').default('default').notNull(), + payload: jsonb().notNull(), + priority: smallint().default(100).notNull(), + reserved: boolean().default(false).notNull(), + reservedUntil: timestamp('reserved_until', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (t) => [ + uniqueIndex('analytics_task_queue_unique_unreserved').using( + 'btree', + sql`queue_name`, + sql`((${t.payload} ->> 'task_type'::text))`, + ).where(sql`(reserved = false)`), +]); diff --git a/drizzle-kit/tests/cockroach/schemas/schema1.ts b/drizzle-kit/tests/cockroach/schemas/schema1.ts new file mode 100644 index 0000000000..a9e008e104 --- /dev/null +++ b/drizzle-kit/tests/cockroach/schemas/schema1.ts @@ -0,0 +1,1134 @@ +import { eq, sql } from 'drizzle-orm'; +import { + AnyCockroachColumn, + bigint, + boolean, + char, + check, + cockroachEnum, + cockroachPolicy, + cockroachSchema, + cockroachSequence, + decimal, + doublePrecision, + foreignKey, + index, + inet, + int4 as integer, + interval, + jsonb, + numeric, + primaryKey, + smallint, + text, + timestamp, + unique, + uniqueIndex, + uuid, +} from 'drizzle-orm/cockroach-core'; + +// generated with AI and updated manually in some places + +export const core = cockroachSchema('core'); +export const analytics = cockroachSchema('analytics'); +export const billing = cockroachSchema('billing'); +export const monitoring = cockroachSchema('monitoring'); +export const alertAction = cockroachEnum('alert_action', ['email', 'pagerd/ut"\'y', 'slack', 'webhook']); +export const currencyCode = cockroachEnum('currency_code', ['USD', 'EUR', 'GBP', 'UAH', 'JPY']); +export const datasetVisibility = cockroachEnum('dataset_visibility', ['priv"ate', 'team', 'public']); +export const env = cockroachEnum('env', ['dev', 'staging', 'prod']); +export const featureState = cockroachEnum('feature_state', ['enabled', 'disabled', 'gradual']); +export const invoiceStatus = cockroachEnum('invoice_status', ['draft', "iss'ued", 'paid', 'voided', 'failed']); +export const jobState = cockroachEnum('job_state', ['queued', 'running', 'success', 'failed', 'cancelled']); +export const notificationChannel = cockroachEnum('notification_channel', ['email', 'sms', 'in_app', 'webhook']); +export const paymentMethod = cockroachEnum('payment_method', ['card', 'bank_transfer', 'paypal', 'crypto']); +export const pipelineStatus = cockroachEnum('pipeline_status', [ + 'created', + 'running', + 'paused', + 'completed', + 'errored', +]); +export const roleKind = cockroachEnum('role_kind', ['system', 'custom']); +export const ruleConditionOperator = cockroachEnum('rule_condition_operator', [ + 'eq', + 'neq', + 'gt', + 'lt', + 'gte', + 'lte', + 'in', + 'nin', +]); +export const severityLevel = cockroachEnum('severity_level', ['low', 'medium', 'high', 'critical']); +export const userStatus = cockroachEnum('user_status', ['active', 'inactive', 'suspended', 'pending']); + +export const seqOrgCode = cockroachSequence('seq_org_code', { + startWith: '1000', + increment: '1', + minValue: '1', + maxValue: '9223372036854775807', + cache: '1', +}); + +export const organizationsInCore = core.table('organizations', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: bigint({ mode: 'number' }).default(sql`nextval('public.seq_org_code'::REGCLASS)`).notNull(), + name: text().notNull(), + domain: text(), + currency: currencyCode().default('EUR').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_org_name_idx').using('btree', table.name.asc()), + index('organizations_code_idx').using('btree', table.code.asc()), + unique('organizations_domain_key').on(table.domain), + check('organizations_name_check', sql`char_length(name) > 1`), +]); + +export const usersInCore = core.table('users', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + username: text().notNull(), + status: userStatus().default('pending').notNull(), + locale: text().default('en-US').notNull(), + lastLogin: timestamp('last_login', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + bio: text().$onUpdate(() => sql`bio || 'some test'`), + profile: jsonb(), +}, (table) => [ + index('core_users_username_idx').using( + 'btree', + table.organizationId.asc(), + table.username.asc(), + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'users_organization_id_fkey', + }).onDelete('cascade'), + unique('users_org_username_unique').on(table.organizationId, table.username), +]); + +export const rolesInCore = core.table('roles', { + id: bigint({ mode: 'number' }).generatedAlwaysAsIdentity().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull().references(() => organizationsInCore.id, { onDelete: 'cascade' }), + name: text().notNull(), + kind: roleKind().default('custom').notNull(), + builtin: boolean().default(false).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + unique('roles_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const permissionsInCore = core.table('permissions', { + id: bigint({ mode: 'number' }).generatedAlwaysAsIdentity().primaryKey().notNull(), + code: text().notNull().unique(), + description: text(), +}); + +export const membershipsInCore = core.table('memberships', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + roleId: integer('role_id').notNull(), + organizationId: uuid('organization_id').notNull(), + joinedAt: timestamp('joined_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + active: boolean().default(true).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'memberships_user_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.roleId], + foreignColumns: [rolesInCore.id], + name: 'memberships_role_id_fkey', + }), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'memberships_organization_id_fkey', + }).onDelete('cascade'), + unique('unique_membership').on(table.userId, table.organizationId), +]); + +export const apiKeysInCore = core.table('api_keys', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id'), + userId: uuid('user_id'), + name: text().notNull(), + keyHash: text('key_hash').notNull(), + revoked: boolean().default(false).notNull(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb().generatedAlwaysAs(sql`'{"some":"test"}'`), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_apikey_org_idx').using('btree', table.organizationId.asc()).where( + sql`(revoked = false)`, + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'api_keys_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'api_keys_user_id_fkey', + }).onDelete('set null'), + unique('api_keys_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const sessionsInCore = core.table('sessions', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + ip: inet(), + userAgent: text('user_agent'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string' }).notNull(), + active: boolean().default(true).notNull(), +}, (table) => [ + index('core_sessions_user_expires').using( + 'btree', + table.userId.asc(), + table.expiresAt.asc(), + ), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'sessions_user_id_fkey', + }).onDelete('cascade'), +]); + +export const oauthProvidersInCore = core.table('oauth_providers', { + id: bigint({ mode: 'number' }).generatedAlwaysAsIdentity().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + provider: text().notNull(), + clientId: text('client_id').notNull(), + clientSecret: text('client_secret').notNull(), + config: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'oauth_providers_organization_id_fkey', + }).onDelete('cascade'), + unique('oauth_providers_organization_id_provider_key').on(table.organizationId, table.provider), +]); + +export const featureFlagsInCore = core.table('feature_flags', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + description: text(), + state: featureState().default('disabled').notNull(), + rolloutPercent: smallint('rollout_percent').default(0), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'feature_flags_organization_id_fkey', + }).onDelete('cascade'), + unique('feature_flags_organization_id_key_key').on(table.organizationId, table.key), + check('feature_flags_rollout_percent_check', sql`(rollout_percent >= 0) AND (rollout_percent <= 100)`), +]); + +export const projectsInCore = core.table('projects', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + slug: text().notNull(), + description: text(), + visibility: datasetVisibility().default('priv"ate').notNull(), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_projects_org_name_idx').using( + 'btree', + table.organizationId.asc(), + table.name.asc(), + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'projects_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'projects_created_by_fkey', + }), + unique('projects_org_slug_unique').on(table.organizationId, table.slug), +]); + +export const repositoriesInCore = core.table('repositories', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + provider: text().notNull(), + repoOwner: text('repo_owner').notNull(), + repoName: text('repo_name').notNull(), + defaultBranch: text('default_branch').default('main').notNull(), + cloneUrl: text('clone_url'), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'repositories_project_id_fkey', + }).onDelete('cascade'), + unique('repositories_project_id_provider_repo_owner_repo_name_key').on( + table.projectId, + table.provider, + table.repoOwner, + table.repoName, + ), +]); + +export const buildsInCore = core.table('builds', { + id: bigint({ mode: 'number' }).generatedAlwaysAsIdentity().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + triggeredBy: uuid('triggered_by'), + commitSha: char('commit_sha', { length: 40 }).notNull(), + status: pipelineStatus().default('created').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb(), +}, (table) => [ + index('core_builds_project_status_idx').using( + 'btree', + table.projectId.asc(), + table.status.asc(), + ), + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'builds_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.triggeredBy], + foreignColumns: [usersInCore.id], + name: 'builds_triggered_by_fkey', + }), + unique('builds_project_id_commit_sha_key').on(table.projectId, table.commitSha), +]); + +export const pipelinesInCore = core.table('pipelines', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + name: text().notNull(), + spec: jsonb().notNull(), + status: pipelineStatus().default('created').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'pipelines_project_id_fkey', + }).onDelete('cascade'), + unique('pipelines_project_id_name_key').on(table.projectId, table.name), +]); + +export const pipelineRunsInAnalytics = analytics.table('pipeline_runs', { + id: uuid().defaultRandom().primaryKey().notNull(), + pipelineId: uuid('pipeline_id').notNull(), + + runNumber: bigint('run_number', { mode: 'number' }).notNull(), + state: jobState().default('queued').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + logs: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('analytics_pipeline_runs_state_idx').using('btree', table.state.asc()), + foreignKey({ + columns: [table.pipelineId], + foreignColumns: [pipelinesInCore.id], + name: 'pipeline_runs_pipeline_id_fkey', + }).onDelete('cascade'), + unique('pipeline_runs_unique_run').on(table.pipelineId, table.runNumber), +]); + +export const jobsInAnalytics = analytics.table('jobs', { + id: uuid().defaultRandom().primaryKey().notNull(), + pipelineRunId: uuid('pipeline_run_id'), + name: text().notNull(), + state: jobState().default('queued').notNull(), + attempts: integer().default(0).notNull(), + lastError: text('last_error'), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('analytics_jobs_state_attempts_idx').using( + 'btree', + table.state.asc(), + table.attempts.asc(), + ), + foreignKey({ + columns: [table.pipelineRunId], + foreignColumns: [pipelineRunsInAnalytics.id], + name: 'jobs_pipeline_run_id_fkey', + }).onDelete('cascade'), +]); + +export const storageBucketsInCore = core.table('storage_buckets', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + region: text().notNull(), + config: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'storage_buckets_organization_id_fkey', + }).onDelete('cascade'), + unique('storage_buckets_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const objectsInCore = core.table('objects', { + id: uuid().defaultRandom().primaryKey().notNull(), + bucketId: uuid('bucket_id').notNull(), + path: text().notNull(), + + size: bigint({ mode: 'number' }).default(0).notNull(), + contentType: text('content_type'), + metadata: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_objects_bucket_path_gin').using('gin', table.metadata.asc()), + foreignKey({ + columns: [table.bucketId], + foreignColumns: [storageBucketsInCore.id], + name: 'objects_bucket_id_fkey', + }).onDelete('cascade'), + unique('objects_bucket_id_path_key').on(table.bucketId, table.path), +]); + +export const filesInCore = core.table('files', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id'), + name: text().notNull(), + latestObjectId: uuid('latest_object_id'), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'files_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.latestObjectId], + foreignColumns: [objectsInCore.id], + name: 'files_latest_object_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'files_created_by_fkey', + }), + unique('files_project_id_name_key').on(table.projectId, table.name), +]); + +export const fileVersionsInCore = core.table('file_versions', { + id: uuid().defaultRandom().primaryKey().notNull(), + fileId: uuid('file_id').notNull(), + objectId: uuid('object_id').notNull(), + versionNumber: integer('version_number').notNull(), + checksum: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.fileId], + foreignColumns: [filesInCore.id], + name: 'file_versions_file_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.objectId], + foreignColumns: [objectsInCore.id], + name: 'file_versions_object_id_fkey', + }).onDelete('cascade'), + unique('file_versions_file_id_version_number_key').on(table.fileId, table.versionNumber), +]); + +export const tagsInCore = core.table('tags', { + id: bigint({ mode: 'number' }).generatedAlwaysAsIdentity().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + value: text(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'tags_organization_id_fkey', + }).onDelete('cascade'), + unique('tags_organization_id_key_value_key').on(table.organizationId, table.key, table.value), +]); + +export const conversationsInCore = core.table('conversations', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id'), + title: text(), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'conversations_project_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'conversations_created_by_fkey', + }), +]); + +export const chatMessagesInCore = core.table('chat_messages', { + id: uuid().defaultRandom().primaryKey().notNull(), + conversationId: uuid('conversation_id').notNull(), + senderId: uuid('sender_id'), + body: text().notNull(), + attachments: jsonb(), + sentAt: timestamp('sent_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + editedAt: timestamp('edited_at', { withTimezone: true, mode: 'string' }), +}, (table) => [ + index('core_chat_conv_sent_at_idx').using( + 'btree', + table.conversationId.asc(), + table.sentAt.desc(), + ), + foreignKey({ + columns: [table.conversationId], + foreignColumns: [conversationsInCore.id], + name: 'chat_messages_conversation_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.senderId], + foreignColumns: [usersInCore.id], + name: 'chat_messages_sender_id_fkey', + }).onDelete('set null'), +]); + +export const notificationsInCore = core.table('notifications', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + channel: notificationChannel().default('in_app').notNull(), + payload: jsonb().notNull(), + seen: boolean().default(false).notNull(), + deliveredAt: timestamp('delivered_at', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_notifications_unseen_idx').using('btree', table.userId.asc()).where( + sql`(seen = false)`, + ), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'notifications_user_id_fkey', + }).onDelete('cascade'), +]); + +export const customersInBilling = billing.table('customers', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id'), + name: text().notNull(), + address: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'customers_organization_id_fkey', + }).onDelete('cascade'), + unique('customers_organization_id_key').on(table.organizationId), + unique('idnameunique').on(table.id, table.name), +]); + +export const subscriptionsInBilling = billing.table('subscriptions', { + id: uuid().defaultRandom().primaryKey().notNull(), + customerId: uuid('customer_id').notNull(), + plan: text().notNull(), + status: text().default('active').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + endedAt: timestamp('ended_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.customerId], + foreignColumns: [customersInBilling.id], + name: 'subscriptions_customer_id_fkey', + }).onDelete('cascade'), +]); + +export const paymentsInBilling = billing.table('payments', { + id: uuid().defaultRandom().primaryKey().notNull(), + invoiceId: uuid('invoice_id').notNull(), + paidAt: timestamp('paid_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + amount: numeric({ precision: 12, scale: 2 }).notNull(), + amount2: decimal({ precision: 12, scale: 2 }).notNull(), + method: paymentMethod().notNull(), + transactionRef: text('transaction_ref'), + metadata: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.invoiceId], + foreignColumns: [invoicesInBilling.id], + name: 'payments_invoice_id_fkey', + }).onDelete('cascade'), +]); + +export const couponsInBilling = billing.table('coupons', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: text().notNull(), + description: text(), + discountPercent: smallint('discount_percent'), + redeemableFrom: timestamp('redeemable_from', { withTimezone: true, mode: 'string' }), + redeemableTo: timestamp('redeemable_to', { withTimezone: true, mode: 'string' }), + maxRedemptions: integer('max_redemptions').generatedAlwaysAsIdentity(), + metadata: jsonb(), +}, (table) => [ + unique('coupons_code_key').on(table.code), + check('coupons_discount_percent_check', sql`(discount_percent >= 0) AND (discount_percent <= 100)`), +]); + +export const webhooksInCore = core.table('webhooks', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + url: text().notNull(), + secret: text(), + events: text().array().notNull(), + active: boolean().default(true).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_webhooks_org_active_idx').using('btree', table.organizationId.asc()).where( + sql`(active = true)`, + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'webhooks_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const metricSourcesInAnalytics = analytics.table('metric_sources', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + config: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'metric_sources_organization_id_fkey', + }).onDelete('cascade'), + unique('metric_sources_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const metricsInAnalytics = analytics.table('metrics', { + id: bigint({ mode: 'number' }).generatedAlwaysAsIdentity().primaryKey().notNull(), + sourceId: uuid('source_id').notNull(), + metricKey: text('metric_key').notNull(), + ts: timestamp({ withTimezone: true, mode: 'string' }).notNull(), + value: doublePrecision().notNull(), + tags: jsonb(), +}, (table) => [ + index('analytics_metrics_key_ts_idx').using( + 'btree', + table.metricKey.asc(), + table.ts.desc(), + ), + foreignKey({ + columns: [table.sourceId], + foreignColumns: [metricSourcesInAnalytics.id], + name: 'metrics_source_id_fkey', + }).onDelete('cascade'), + unique('metrics_source_id_metric_key_ts_key').on(table.sourceId, table.metricKey, table.ts), +]); + +export const alertRulesInMonitoring = monitoring.table('alert_rules', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + description: text(), + severity: severityLevel().default('medium').notNull(), + enabled: boolean().default(true).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'alert_rules_organization_id_fkey', + }).onDelete('cascade'), + unique('alert_rules_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const ruleConditionsInMonitoring = monitoring.table('rule_conditions', { + id: uuid().defaultRandom().primaryKey().notNull(), + ruleId: uuid('rule_id').notNull(), + metricKey: text('metric_key').notNull(), + operator: ruleConditionOperator().notNull().unique('some_name'), + threshold: doublePrecision().notNull(), + window: interval().default('00:05:00').notNull(), +}, (table) => [ + foreignKey({ + columns: [table.ruleId], + foreignColumns: [alertRulesInMonitoring.id], + name: 'rule_conditions_rule_id_fkey', + }).onDelete('cascade'), +]); + +export const alertsInMonitoring = monitoring.table('alerts', { + id: uuid().defaultRandom().primaryKey().notNull(), + ruleId: uuid('rule_id').notNull(), + triggeredAt: timestamp('triggered_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + resolvedAt: timestamp('resolved_at', { withTimezone: true, mode: 'string' }), + payload: jsonb(), + state: text().default('firing').notNull(), +}, (table) => [ + foreignKey({ + columns: [table.ruleId], + foreignColumns: [alertRulesInMonitoring.id], + name: 'alerts_rule_id_fkey', + }).onDelete('cascade'), +]); + +export const escalationsInMonitoring = monitoring.table('escalations', { + id: uuid().defaultRandom().primaryKey().notNull(), + alertId: uuid('alert_id').notNull(), + action: alertAction().notNull(), + target: text().notNull(), + executedAt: timestamp('executed_at', { withTimezone: true, mode: 'string' }), +}, (table) => [ + foreignKey({ + columns: [table.alertId], + foreignColumns: [alertsInMonitoring.id], + name: 'escalations_alert_id_fkey', + }).onDelete('cascade'), +]); + +export const ssoProvidersInCore = core.table('sso_providers', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + type: text().notNull(), + config: jsonb().notNull(), + enabled: boolean().default(false).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'sso_providers_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const auditLogsInCore = core.table('audit_logs', { + id: bigint({ mode: 'number' }).generatedAlwaysAsIdentity().primaryKey().notNull(), + organizationId: uuid('organization_id'), + actorId: uuid('actor_id'), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').array(), + action: text().notNull(), + beforeState: jsonb('before_state'), + afterState: jsonb('after_state'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_audit_org_idx').using( + 'btree', + table.organizationId.asc(), + table.createdAt.desc(), + ), +]); + +export const rateLimitsInCore = core.table('rate_limits', { + id: uuid().defaultRandom().primaryKey().notNull(), + apiKeyId: uuid('api_key_id').notNull(), + windowStart: timestamp('window_start', { withTimezone: true, mode: 'string' }).notNull(), + requests: integer().generatedByDefaultAsIdentity().notNull().array(), + limit: integer().generatedAlwaysAs(() => sql`1`).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.apiKeyId], + foreignColumns: [apiKeysInCore.id], + name: 'rate_limits_api_key_id_fkey', + }).onDelete('cascade'), + unique('rate_limits_api_key_id_window_start_key').on(table.apiKeyId, table.windowStart), +]); + +export const experimentsInCore = core.table('experiments', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + description: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'experiments_organization_id_fkey', + }).onDelete('cascade'), + unique('experiments_organization_id_key_key').on(table.organizationId, table.key), +]); + +export const experimentVariantsInCore = core.table('experiment_variants', { + id: uuid().defaultRandom().primaryKey().notNull(), + experimentId: uuid('experiment_id').notNull(), + name: text().notNull(), + allocationPercent: smallint('allocation_percent').default(0).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.experimentId], + foreignColumns: [experimentsInCore.id], + name: 'experiment_variants_experiment_id_fkey', + }).onDelete('cascade'), + unique('experiment_variants_experiment_id_name_key').on(table.experimentId, table.name), + check('experiment_variants_allocation_percent_check', sql`(allocation_percent >= 0) AND (allocation_percent <= 100)`), +]); + +export const experimentAssignmentsInCore = core.table('experiment_assignments', { + id: uuid().defaultRandom().primaryKey().notNull(), + experimentId: uuid('experiment_id').notNull(), + variantId: uuid('variant_id').notNull(), + userId: uuid('user_id').notNull(), + assignedAt: timestamp('assigned_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.experimentId], + foreignColumns: [experimentsInCore.id], + name: 'experiment_assignments_experiment_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.variantId], + foreignColumns: [experimentVariantsInCore.id], + name: 'experiment_assignments_variant_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'experiment_assignments_user_id_fkey', + }).onDelete('cascade'), + unique('experiment_assignments_experiment_id_user_id_key').on(table.experimentId, table.userId), +]); + +export const deploymentsInCore = core.table('deployments', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + environment: env().default('dev').notNull(), + version: text().notNull(), + deployedBy: uuid('deployed_by'), + deployedAt: timestamp('deployed_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + notes: text(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'deployments_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.deployedBy], + foreignColumns: [usersInCore.id], + name: 'deployments_deployed_by_fkey', + }), + unique('deployments_project_id_environment_version_key').on(table.projectId, table.environment, table.version), +]); + +export const servicesInCore = core.table('services', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + kind: text(), + ownerId: uuid('owner_id'), + metadata: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string', precision: 6 }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'services_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.ownerId], + foreignColumns: [usersInCore.id], + name: 'services_owner_id_fkey', + }), + unique('services_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const locksInCore = core.table('locks', { + name: text().primaryKey().notNull(), + owner: text(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string', precision: 2 }), +}); + +export const entitiesInCore = core.table('entities', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + type: text().notNull(), + data: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'entities_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const taskQueueInAnalytics = analytics.table('task_queue', { + id: uuid().defaultRandom().primaryKey().notNull(), + queueName: text('queue_name').default('default').notNull(), + payload: jsonb().notNull(), + priority: smallint().default(100).notNull(), + reserved: boolean().default(false).notNull(), + reservedUntil: timestamp('reserved_until', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex('analytics_task_queue_unique_unreserved').using( + 'btree', + sql`queue_name`, + sql`((payload ->> 'task_type'::text))`, + ).where(sql`(reserved = false)`), +]); + +export const invoicesInBilling = billing.table('invoices', { + id: uuid().defaultRandom().primaryKey().notNull(), + customerId: uuid('customer_id').notNull(), + number: text().notNull(), + issuedAt: timestamp('issued_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + dueAt: timestamp('due_at', { withTimezone: true, mode: 'string' }), + totalAmount: numeric('total_amount', { precision: 12, scale: 2 }).default('0.0').notNull(), + currency: currencyCode().default('USD').notNull(), + status: invoiceStatus().default('draft').notNull(), + notes: text(), +}, (table) => [ + index('billing_invoices_status_idx').using('btree', table.status.asc()), + foreignKey({ + columns: [table.customerId, table.number], + foreignColumns: [customersInBilling.id, customersInBilling.name], + name: 'invoices_customer_id_fkey', + }).onDelete('cascade'), + unique('invoices_customer_id_number_key').on(table.customerId, table.number), + check('invoices_total_nonnegative', sql`total_amount >= (0)::numeric`), +]); + +export const aliasesInCore = core.table('aliases', { + id: uuid().defaultRandom().primaryKey().notNull(), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').notNull(), + alias: text().notNull().unique('unique_with_name'), + organizationId: uuid('organization_id'), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'aliases_organization_id_fkey', + }).onUpdate('cascade'), + unique('aliases_object_type_object_id_alias_key').on(table.objectType, table.objectId, table.alias), +]); + +export const selfRef = core.table('self_ref', { + id: uuid().defaultRandom().primaryKey().notNull(), + objectType: text('object_type').notNull().unique().references((): AnyCockroachColumn => selfRef.organizationId), + organizationId: text('organization_id').notNull().unique(), +}); + +export const couponRedemptionsInBilling = billing.table('coupon_redemptions', { + couponId: uuid('coupon_id').notNull(), + customerId: uuid('customer_id').notNull(), + redeemedAt: timestamp('redeemed_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.couponId], + foreignColumns: [couponsInBilling.id], + name: 'coupon_redemptions_coupon_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.customerId], + foreignColumns: [customersInBilling.id], + name: 'coupon_redemptions_customer_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.couponId, table.customerId], name: 'coupon_redemptions_pkey' }), +]); + +export const entityLinksInCore = core.table('entity_links', { + parentEntityId: uuid('parent_entity_id').notNull(), + childEntityId: uuid('child_entity_id').notNull(), + relationship: text().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.parentEntityId], + foreignColumns: [entitiesInCore.id], + name: 'entity_links_parent_entity_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.childEntityId], + foreignColumns: [entitiesInCore.id], + name: 'entity_links_child_entity_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.parentEntityId, table.childEntityId, table.relationship], name: 'entity_links_pkey' }), +]); + +export const rolePermissionsInCore = core.table('role_permissions', { + roleId: integer('role_id').notNull(), + permissionId: integer('permission_id').notNull(), + assignedBy: uuid('assigned_by'), + assignedAt: timestamp('assigned_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.roleId], + foreignColumns: [rolesInCore.id], + name: 'role_permissions_role_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.permissionId], + foreignColumns: [permissionsInCore.id], + name: 'role_permissions_permission_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.assignedBy], + foreignColumns: [usersInCore.id], + name: 'role_permissions_assigned_by_fkey', + }), + primaryKey({ columns: [table.roleId, table.permissionId], name: 'role_permissions_pkey' }), +]); + +export const taggingsInCore = core.table('taggings', { + tagId: integer('tag_id').notNull(), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.tagId], + foreignColumns: [tagsInCore.id], + name: 'taggings_tag_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.tagId, table.objectType, table.objectId], name: 'taggings_pkey' }), +]); + +export const reactionsInCore = core.table('reactions', { + messageId: uuid('message_id').notNull(), + userId: uuid('user_id').notNull(), + reaction: text().notNull().array(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.messageId], + foreignColumns: [chatMessagesInCore.id], + name: 'reactions_message_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'reactions_user_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.messageId, table.userId, table.reaction], name: 'reactions_pkey' }), +]); + +// views +export const projectSearchInAnalytics = analytics.materializedView('project_search', { + id: uuid(), + name: text(), + slug: text(), + description: text(), +}).withNoData().as( + sql`SELECT id, name, slug, description FROM core.projects p`, +); + +export const projectSearchInAnalytics2 = analytics.materializedView('project_search2', { + id: uuid(), + name: text(), + slug: text(), + description: text(), +}).withNoData().existing(); + +export const vActiveUsersInCore = core.view('v_active_users').as((qb) => + qb.select({ + id: usersInCore.id, + username: usersInCore.username, + organization_id: usersInCore.organizationId, + }).from(usersInCore).where(eq(usersInCore.status, 'active')) +); +export const vActiveUsersInCore2 = core.view('v_active_users2', {}).existing(); + +// polices +export const rls = cockroachSchema('rls'); +export const documentsInRls = rls.table('documents', { + docId: uuid('doc_id').defaultRandom().primaryKey().notNull(), + ownerId: uuid('owner_id').notNull(), + title: text().notNull(), + content: text().notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + cockroachPolicy('documents_delete_own', { + as: 'permissive', + for: 'delete', + to: ['public'], + using: sql`(owner_id = (CURRENT_USER)::uuid)`, + }), + cockroachPolicy('documents_update_own', { as: 'permissive', for: 'update', to: ['public'] }), + cockroachPolicy('documents_select_own', { as: 'permissive', for: 'select', to: ['public'] }), +]); + +export const messagesInRls = rls.table.withRLS('messages', { + msgId: uuid('msg_id').defaultRandom().primaryKey().notNull(), + senderId: uuid('sender_id').notNull(), + recipientId: uuid('recipient_id').notNull(), + message: text().notNull(), + sentAt: timestamp('sent_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + cockroachPolicy('messages_delete_own', { + as: 'permissive', + for: 'delete', + to: ['public'], + using: sql`(sender_id = (CURRENT_USER)::uuid)`, + }), + cockroachPolicy('messages_visibility', { as: 'permissive', for: 'select', to: ['public'] }), +]); + +export const projectsInRls = rls.table('projects', { + projectId: uuid('project_id').defaultRandom().primaryKey().notNull(), + name: text().notNull(), + description: text(), + ownerId: uuid('owner_id').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + cockroachPolicy('projects_visibility', { + as: 'permissive', + for: 'select', + to: ['public'], + using: sql`(owner_id = (CURRENT_USER)::uuid)`, + }), +]); + +export const projectMembersInRls = rls.table.withRLS('project_members', { + projectId: uuid('project_id').notNull(), + userId: uuid('user_id').notNull(), + role: text().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInRls.projectId], + name: 'project_members_project_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.projectId, table.userId], name: 'project_members_pkey' }), + cockroachPolicy('project_members_manage', { + as: 'permissive', + for: 'all', + to: ['public'], + using: sql`(user_id = CURRENT_USER::uuid)`, + }), + cockroachPolicy('project_members_visibility', { as: 'permissive', for: 'select', to: ['public'] }), + check('project_members_role_check', sql`role = ANY (ARRAY['member'::text, 'admin'::text])`), +]); + +export const policy = cockroachPolicy('new_policy', { + as: 'restrictive', + to: 'root', + withCheck: sql`1 = 1`, + for: 'all', +}).link(organizationsInCore); diff --git a/drizzle-kit/tests/cockroach/sequences.test.ts b/drizzle-kit/tests/cockroach/sequences.test.ts new file mode 100644 index 0000000000..8bc4673253 --- /dev/null +++ b/drizzle-kit/tests/cockroach/sequences.test.ts @@ -0,0 +1,410 @@ +import { cockroachSchema, cockroachSequence } from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('create sequence', async ({ db }) => { + const to = { + seq: cockroachSequence('name', { startWith: 100 }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SEQUENCE "public"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create sequence: all fields', async ({ db }) => { + const from = {}; + const to = { + seq: cockroachSequence('name', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cache: 10, + increment: 2, + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SEQUENCE "public"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create sequence: custom schema', async ({ db }) => { + const customSchema = cockroachSchema('custom'); + const from = { customSchema }; + const to = { + customSchema, + seq: customSchema.sequence('name', { startWith: 100 }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'CREATE SEQUENCE "custom"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create sequence: custom schema + all fields', async ({ db }) => { + const customSchema = cockroachSchema('custom'); + const from = { customSchema }; + const to = { + customSchema, + seq: customSchema.sequence('name', { + startWith: 100, + maxValue: 10000, + minValue: 100, + + cache: 10, + increment: 2, + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'CREATE SEQUENCE "custom"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop sequence', async ({ db }) => { + const from = { seq: cockroachSequence('name', { startWith: 100 }) }; + const to = {}; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'DROP SEQUENCE "public"."name";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop sequence: custom schema', async ({ db }) => { + const customSchema = cockroachSchema('custom'); + const from = { customSchema, seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = { customSchema }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'DROP SEQUENCE "custom"."name";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename sequence', async ({ db }) => { + const from = { seq: cockroachSequence('name', { startWith: 100 }) }; + const to = { seq: cockroachSequence('name_new', { startWith: 100 }) }; + + const renames = [ + 'public.name->public.name_new', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SEQUENCE "name" RENAME TO "name_new";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename sequence in custom schema', async ({ db }) => { + const customSchema = cockroachSchema('custom'); + + const from = { customSchema, seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = { customSchema, seq: customSchema.sequence('name_new', { startWith: 100 }) }; + + const renames = [ + 'custom.name->custom.name_new', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SEQUENCE "custom"."name" RENAME TO "name_new";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('move sequence between schemas #1', async ({ db }) => { + const customSchema = cockroachSchema('custom'); + const from = { customSchema, seq: cockroachSequence('name', { startWith: 100 }) }; + const to = { customSchema, seq: customSchema.sequence('name', { startWith: 100 }) }; + + const renames = [ + 'public.name->custom.name', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SEQUENCE "name" SET SCHEMA "custom";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('move sequence between schemas #2', async ({ db }) => { + const customSchema = cockroachSchema('custom'); + const from = { customSchema, seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = { customSchema, seq: cockroachSequence('name', { startWith: 100 }) }; + + const renames = [ + 'custom.name->public.name', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SEQUENCE "custom"."name" SET SCHEMA "public";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter sequence', async ({ db }) => { + const from = { seq: cockroachSequence('name', { startWith: 100 }) }; + const to = { seq: cockroachSequence('name', { startWith: 105 }) }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER SEQUENCE "name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 105 CACHE 1;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('full sequence: no changes', async ({ db }) => { + const schema1 = { + seq: cockroachSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: cockroachSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + + cache: 10, + increment: 2, + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('basic sequence: change fields', async ({ db }) => { + const schema1 = { + seq: cockroachSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: cockroachSequence('my_seq', { + startWith: 100, + maxValue: 100000, + minValue: 100, + + cache: 10, + increment: 4, + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER SEQUENCE "my_seq" INCREMENT BY 4 MINVALUE 100 MAXVALUE 100000 START WITH 100 CACHE 10;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('basic sequence: change name', async ({ db }) => { + const schema1 = { + seq: cockroachSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: cockroachSequence('my_seq2', { + startWith: 100, + maxValue: 10000, + minValue: 100, + + cache: 10, + increment: 2, + }), + }; + + const renames = ['public.my_seq->public.my_seq2']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER SEQUENCE "my_seq" RENAME TO "my_seq2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('basic sequence: change name and fields', async ({ db }) => { + const schema1 = { + seq: cockroachSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: cockroachSequence('my_seq2', { + startWith: 100, + maxValue: 10000, + minValue: 100, + + cache: 10, + increment: 4, + }), + }; + + const renames = ['public.my_seq->public.my_seq2']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER SEQUENCE "my_seq" RENAME TO "my_seq2";', + 'ALTER SEQUENCE "my_seq2" INCREMENT BY 4 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('Add basic sequences', async ({ db }) => { + const schema1 = { + seq: cockroachSequence('my_seq', { startWith: 100 }), + }; + + const schema2 = { + seq: cockroachSequence('my_seq', { startWith: 100 }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/tables.test.ts b/drizzle-kit/tests/cockroach/tables.test.ts new file mode 100644 index 0000000000..e588af284f --- /dev/null +++ b/drizzle-kit/tests/cockroach/tables.test.ts @@ -0,0 +1,1140 @@ +import { SQL, sql } from 'drizzle-orm'; +import { + cockroachSchema, + cockroachTable, + cockroachTableCreator, + foreignKey, + geometry, + index, + int4, + primaryKey, + text, + unique, + uniqueIndex, + vector, +} from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('add table #1', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', {}), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #2', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #3', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { + id: int4('id'), + }, (t) => [primaryKey({ name: 'users_pk', columns: [t.id] })]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n' + + '\t"id" int4,\n' + + '\tCONSTRAINT "users_pk" PRIMARY KEY("id")\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #4', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { id: int4() }), + posts: cockroachTable('posts', { id: int4() }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" int4\n);\n', + 'CREATE TABLE "posts" (\n\t"id" int4\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #5', async ({ dbc: db }) => { + const schema = cockroachSchema('folder'); + const from = { + schema, + }; + + const to = { + schema, + users: schema.table('users', { + id: int4(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "folder"."users" (\n\t"id" int4\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #6', async ({ dbc: db }) => { + const from = { + users1: cockroachTable('users1', { id: int4() }), + }; + + const to = { + users2: cockroachTable('users2', { id: int4() }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users2" (\n\t"id" int4\n);\n', + 'DROP TABLE "users1";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #7', async ({ dbc: db }) => { + const from = { + users1: cockroachTable('users1', { id: int4() }), + }; + + const to = { + users: cockroachTable('users', { id: int4() }), + users2: cockroachTable('users2', { id: int4() }), + }; + + const renames = ['public.users1->public.users2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" int4\n);\n', + 'ALTER TABLE "users1" RENAME TO "users2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #8: geometry types', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { + geom: geometry('geom', { type: 'point' }).notNull(), + geom1: geometry('geom1').notNull(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"geom" geometry(point) NOT NULL,\n\t"geom1" geometry(point) NOT NULL\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique inline */ +test.concurrent('add table #9', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n' + + '\t"name" string,\n' + + '\tCONSTRAINT "users_name_key" UNIQUE("name")\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique inline named */ +test.concurrent('add table #10', async ({ dbc: db }) => { + const from = {}; + const to = { + users: cockroachTable('users', { + name: text().unique('name_unique'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "name_unique" UNIQUE("name")\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #11', async ({ dbc: db }) => { + const from = {}; + const to = { + users: cockroachTable('users', { + name: text().unique('name_unique'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "name_unique" UNIQUE("name")\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #12', async ({ dbc: db }) => { + const from = {}; + const to = { + users: cockroachTable('users', { + name: text().unique('users_name_key'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique default-named */ +test.concurrent('add table #13', async ({ dbc: db }) => { + const to = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('users_name_key').on(t.name)]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #14', async ({ dbc: db }) => { + const from = {}; + const to = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('users_name_key').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "users_name_key" UNIQUE("name")\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique */ +test.concurrent('add table #15', async ({ dbc: db }) => { + const from = {}; + const to = { + users: cockroachTable('users', { + name: text(), + }, (t) => [unique('name_unique').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" string,\n\tCONSTRAINT "name_unique" UNIQUE("name")\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('multiproject schema add table #1', async ({ dbc: db }) => { + const table = cockroachTableCreator((name) => `prefix_${name}`); + + const to = { + users: table('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "prefix_users" (\n\t"id" int4 PRIMARY KEY\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('multiproject schema drop table #1', async ({ dbc: db }) => { + const table = cockroachTableCreator((name) => `prefix_${name}`); + + const from = { + users: table('users', { + id: int4('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(from, {}, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to: {}, + }); + + const st0 = [ + 'DROP TABLE "prefix_users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('multiproject schema alter table name #1', async ({ dbc: db }) => { + const table = cockroachTableCreator((name) => `prefix_${name}`); + + const from = { + users: table('users', { + id: int4('id').primaryKey(), + }), + }; + const to = { + users1: table('users1', { + id: int4('id').primaryKey(), + }), + }; + + const renames = [ + 'public.prefix_users->public.prefix_users1', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER TABLE "prefix_users" RENAME TO "prefix_users1";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add table #8: column with vector', async ({ dbc: db }) => { + const to = { + users2: cockroachTable('users2', { + id: int4('id').primaryKey(), + name: vector('name', { dimensions: 3 }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users2" (\n\t"id" int4 PRIMARY KEY,\n\t"name" vector(3)\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add schema + table #1', async ({ dbc: db }) => { + const schema = cockroachSchema('folder'); + + const to = { + schema, + users: schema.table('users', { + id: int4(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SCHEMA "folder";\n', + 'CREATE TABLE "folder"."users" (\n\t"id" int4\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change schema with tables #1', async ({ dbc: db }) => { + const schema = cockroachSchema('folder'); + const schema2 = cockroachSchema('folder2'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema2, + users: schema2.table('users', {}), + }; + + const renames = ['folder->folder2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SCHEMA "folder" RENAME TO "folder2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change table schema #1', async ({ dbc: db }) => { + const schema = cockroachSchema('folder'); + const from = { + schema, + users: cockroachTable('users', {}), + }; + const to = { + schema, + users: schema.table('users', {}), + }; + + const renames = [ + 'public.users->folder.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER TABLE "users" SET SCHEMA "folder";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change table schema #2', async ({ dbc: db }) => { + const schema = cockroachSchema('folder'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema, + users: cockroachTable('users', {}), + }; + + const renames = [ + 'folder.users->public.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER TABLE "folder"."users" SET SCHEMA "public";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change table schema #3', async ({ dbc: db }) => { + const schema1 = cockroachSchema('folder1'); + const schema2 = cockroachSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users', {}), + }; + + const renames = [ + 'folder1.users->folder2.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER TABLE "folder1"."users" SET SCHEMA "folder2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change table schema #4', async ({ dbc: db }) => { + const schema1 = cockroachSchema('folder1'); + const schema2 = cockroachSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const renames = [ + 'folder1.users->folder2.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'CREATE SCHEMA "folder2";\n', + 'ALTER TABLE "folder1"."users" SET SCHEMA "folder2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change table schema #5', async ({ dbc: db }) => { + const schema1 = cockroachSchema('folder1'); + const schema2 = cockroachSchema('folder2'); + const from = { + schema1, // remove schema + users: schema1.table('users', {}), + }; + const to = { + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const renames = [ + 'folder1.users->folder2.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'CREATE SCHEMA "folder2";\n', + 'ALTER TABLE "folder1"."users" SET SCHEMA "folder2";\n', + 'DROP SCHEMA "folder1";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change table schema #5', async ({ dbc: db }) => { + const schema1 = cockroachSchema('folder1'); + const schema2 = cockroachSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users2', {}), // rename and move table + }; + + const renames = [ + 'folder1.users->folder2.users2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + 'ALTER TABLE "folder1"."users" RENAME TO "users2";', + 'ALTER TABLE "folder1"."users2" SET SCHEMA "folder2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('change table schema #6', async ({ dbc: db }) => { + const schema1 = cockroachSchema('folder1'); + const schema2 = cockroachSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema2, // rename schema + users: schema2.table('users2', {}), // rename table + }; + + const renames = [ + 'folder1->folder2', + 'folder2.users->folder2.users2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SCHEMA "folder1" RENAME TO "folder2";\n', + 'ALTER TABLE "folder2"."users" RENAME TO "users2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop table + rename schema #1', async ({ dbc: db }) => { + const schema1 = cockroachSchema('folder1'); + const schema2 = cockroachSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema2, // rename schema + // drop table + }; + + const renames = ['folder1->folder2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + 'ALTER SCHEMA "folder1" RENAME TO "folder2";\n', + 'DROP TABLE "folder2"."users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop tables with fk constraint', async ({ dbc: db }) => { + const table1 = cockroachTable('table1', { + column1: int4().primaryKey(), + }); + const table2 = cockroachTable('table2', { + column1: int4().primaryKey(), + column2: int4().references(() => table1.column1), + }); + const schema1 = { table1, table2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE "table1" (\n\t"column1" int4 PRIMARY KEY\n);\n', + 'CREATE TABLE "table2" (\n\t"column1" int4 PRIMARY KEY,\n\t"column2" int4\n);\n', + 'ALTER TABLE "table2" ADD CONSTRAINT "table2_column2_table1_column1_fkey" FOREIGN KEY ("column2") REFERENCES "table1"("column1");', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, {}, []); + const { sqlStatements: pst2 } = await push({ db, to: {} }); + + const expectedSt2 = [ + 'ALTER TABLE "table2" DROP CONSTRAINT "table2_column2_table1_column1_fkey";', + 'DROP TABLE "table1";', + 'DROP TABLE "table2";', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test.concurrent('create table with tsvector', async ({ dbc: db }) => { + const from = {}; + const to = { + users: cockroachTable('posts', { + id: int4('id').primaryKey(), + title: text('title').notNull(), + description: text('description').notNull(), + }, (table) => [ + index('title_search_index').using('gin', sql`to_tsvector('english', ${table.title})`), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "posts" (\n\t"id" int4 PRIMARY KEY,\n\t"title" string NOT NULL,\n\t"description" string NOT NULL\n);\n', + `CREATE INDEX "title_search_index" ON "posts" USING gin (to_tsvector('english', "title"));`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('composite primary key', async ({ dbc: db }) => { + const from = {}; + const to = { + table: cockroachTable('works_to_creators', { + workId: int4('work_id').notNull(), + creatorId: int4('creator_id').notNull(), + classification: text('classification').notNull(), + }, (t) => [ + primaryKey({ columns: [t.workId, t.creatorId, t.classification] }), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "works_to_creators" (\n\t"work_id" int4,\n\t"creator_id" int4,\n\t"classification" string,\n\tCONSTRAINT "works_to_creators_pkey" PRIMARY KEY("work_id","creator_id","classification")\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('add column before creating unique constraint', async ({ dbc: db }) => { + const from = { + table: cockroachTable('table', { + id: int4('id').primaryKey(), + }), + }; + const to = { + table: cockroachTable('table', { + id: int4('id').primaryKey(), + name: text('name').notNull(), + }, (t) => [unique('uq').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ADD COLUMN "name" string NOT NULL;', + 'CREATE UNIQUE INDEX "uq" ON "table" ("name");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter composite primary key', async ({ dbc: db }) => { + const from = { + table: cockroachTable('table', { + col1: int4('col1').notNull(), + col2: int4('col2').notNull(), + col3: text('col3').notNull(), + }, (t) => [ + primaryKey({ + name: 'table_pk', + columns: [t.col1, t.col2], + }), + ]), + }; + const to = { + table: cockroachTable('table', { + col1: int4('col1').notNull(), + col2: int4('col2').notNull(), + col3: text('col3').notNull(), + }, (t) => [ + primaryKey({ + name: 'table_pk', + columns: [t.col2, t.col3], + }), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" DROP CONSTRAINT "table_pk", ADD CONSTRAINT "table_pk" PRIMARY KEY("col2","col3");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('optional db aliases (snake case)', async ({ dbc: db }) => { + const from = {}; + + const t1 = cockroachTable( + 't1', + { + t1Id1: int4().notNull().primaryKey(), + t1Col2: int4().notNull(), + t1Col3: int4().notNull(), + t2Ref: int4().notNull().references(() => t2.t2Id), + t1Uni: int4().notNull(), + t1UniIdx: int4().notNull(), + t1Idx: int4().notNull(), + }, + (table) => [ + unique('t1_uni').on(table.t1Uni), + uniqueIndex('t1_uni_idx').on(table.t1UniIdx), + index('t1_idx').on(table.t1Idx).where(sql`${table.t1Idx} > 0`), + foreignKey({ + columns: [table.t1Col2, table.t1Col3], + foreignColumns: [t3.t3Id1, t3.t3Id2], + }), + ], + ); + + const t2 = cockroachTable( + 't2', + { + t2Id: int4().primaryKey(), + }, + ); + + const t3 = cockroachTable( + 't3', + { + t3Id1: int4(), + t3Id2: int4(), + }, + (table) => [primaryKey({ columns: [table.t3Id1, table.t3Id2] })], + ); + + const to = { + t1, + t2, + t3, + }; + + const casing = 'snake_case'; + const { sqlStatements: st } = await diff(from, to, [], casing); + + const { sqlStatements: pst } = await push({ + db, + to, + casing, + }); + + const st1 = `CREATE TABLE "t1" ( + "t1_id1" int4 PRIMARY KEY, + "t1_col2" int4 NOT NULL, + "t1_col3" int4 NOT NULL, + "t2_ref" int4 NOT NULL, + "t1_uni" int4 NOT NULL, + "t1_uni_idx" int4 NOT NULL, + "t1_idx" int4 NOT NULL, + CONSTRAINT "t1_uni" UNIQUE("t1_uni"), + CONSTRAINT "t1_uni_idx" UNIQUE("t1_uni_idx") +); +`; + + const st2 = `CREATE TABLE "t2" ( + "t2_id" int4 PRIMARY KEY +); +`; + + const st3 = `CREATE TABLE "t3" ( + "t3_id1" int4, + "t3_id2" int4, + CONSTRAINT "t3_pkey" PRIMARY KEY("t3_id1","t3_id2") +); +`; + + const st4 = + `ALTER TABLE "t1" ADD CONSTRAINT "t1_t2_ref_t2_t2_id_fkey" FOREIGN KEY ("t2_ref") REFERENCES "t2"("t2_id");`; + const st5 = + `ALTER TABLE "t1" ADD CONSTRAINT "t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fkey" FOREIGN KEY ("t1_col2","t1_col3") REFERENCES "t3"("t3_id1","t3_id2");`; + + const st6 = `CREATE INDEX "t1_idx" ON "t1" ("t1_idx") WHERE "t1"."t1_idx" > 0;`; + + const st0 = [st1, st2, st3, st4, st5, st6]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('optional db aliases (camel case)', async ({ dbc: db }) => { + const from = {}; + + const t1 = cockroachTable('t1', { + t1_id1: int4().notNull().primaryKey(), + t1_col2: int4().notNull(), + t1_col3: int4().notNull(), + t2_ref: int4().notNull().references(() => t2.t2_id), + t1_uni: int4().notNull(), + t1_uni_idx: int4().notNull(), + t1_idx: int4().notNull(), + }, (table) => [ + unique('t1Uni').on(table.t1_uni), + uniqueIndex('t1UniIdx').on(table.t1_uni_idx), + index('t1Idx').on(table.t1_idx).where(sql`${table.t1_idx} > 0`), + foreignKey({ + columns: [table.t1_col2, table.t1_col3], + foreignColumns: [t3.t3_id1, t3.t3_id2], + }), + ]); + + const t2 = cockroachTable('t2', { + t2_id: int4().primaryKey(), + }); + + const t3 = cockroachTable('t3', { + t3_id1: int4(), + t3_id2: int4(), + }, (table) => [primaryKey({ columns: [table.t3_id1, table.t3_id2] })]); + + const to = { + t1, + t2, + t3, + }; + + const casing = 'camelCase'; + const { sqlStatements: st } = await diff(from, to, [], casing); + + const { sqlStatements: pst } = await push({ + db, + to, + casing, + }); + + const st1 = `CREATE TABLE "t1" ( + "t1Id1" int4 PRIMARY KEY, + "t1Col2" int4 NOT NULL, + "t1Col3" int4 NOT NULL, + "t2Ref" int4 NOT NULL, + "t1Uni" int4 NOT NULL, + "t1UniIdx" int4 NOT NULL, + "t1Idx" int4 NOT NULL, + CONSTRAINT "t1Uni" UNIQUE("t1Uni"), + CONSTRAINT "t1UniIdx" UNIQUE("t1UniIdx") +); +`; + + const st2 = `CREATE TABLE "t2" ( + "t2Id" int4 PRIMARY KEY +); +`; + + const st3 = `CREATE TABLE "t3" ( + "t3Id1" int4, + "t3Id2" int4, + CONSTRAINT "t3_pkey" PRIMARY KEY("t3Id1","t3Id2") +); +`; + + const st4 = `ALTER TABLE "t1" ADD CONSTRAINT "t1_t2Ref_t2_t2Id_fkey" FOREIGN KEY ("t2Ref") REFERENCES "t2"("t2Id");`; + const st5 = + `ALTER TABLE "t1" ADD CONSTRAINT "t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fkey" FOREIGN KEY ("t1Col2","t1Col3") REFERENCES "t3"("t3Id1","t3Id2");`; + + const st6 = `CREATE INDEX "t1Idx" ON "t1" ("t1Idx") WHERE "t1"."t1Idx" > 0;`; + + const st0 = [st1, st2, st3, st4, st5, st6]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table with generated column', async ({ dbc: db }) => { + const schema1 = {}; + const schema2 = { + users: cockroachTable('users', { + id: int4('id'), + id2: int4('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'CREATE TABLE "users" (\n\t"id" int4,\n\t"id2" int4,\n\t"name" string,\n\t"gen_name" string GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename table with composite primary key', async ({ dbc: db }) => { + const schema1 = { + table: cockroachTable('table1', { + productId: text('product_id').notNull(), + categoryId: text('category_id').notNull(), + }, (t) => [primaryKey({ columns: [t.productId, t.categoryId] })]), + }; + const schema2 = { + test: cockroachTable('table2', { + productId: text('product_id').notNull(), + categoryId: text('category_id').notNull(), + }, (t) => [primaryKey({ columns: [t.productId, t.categoryId] })]), + }; + + const renames = ['public.table1->public.table2']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, losses } = await push({ db, to: schema2, renames }); + + const st0: string[] = ['ALTER TABLE "table1" RENAME TO "table2";']; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/cockroach/views.test.ts b/drizzle-kit/tests/cockroach/views.test.ts new file mode 100644 index 0000000000..3ac2b70ce6 --- /dev/null +++ b/drizzle-kit/tests/cockroach/views.test.ts @@ -0,0 +1,1067 @@ +import { eq, sql } from 'drizzle-orm'; +import { + cockroachMaterializedView, + cockroachSchema, + cockroachTable, + cockroachView, + int4, +} from 'drizzle-orm/cockroach-core'; +import { expect } from 'vitest'; +import { diff, push, test } from './mocks'; + +test.concurrent('create view', async ({ db }) => { + const table = cockroachTable('test', { + id: int4('id').primaryKey(), + }); + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: cockroachView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'CREATE VIEW "view" AS (select distinct "id" from "test");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table and view #1', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: cockroachView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY\n);\n`, + `CREATE VIEW "some_view" AS (select "id" from "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table and view #2', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: cockroachView('some_view', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY\n);\n`, + `CREATE VIEW "some_view" AS (SELECT * FROM "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table and view #5', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: cockroachView('some_view', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + view2: cockroachView('some_view', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + }; + + // view_name_duplicate + await expect(diff({}, to, [])).rejects.toThrow(); + await expect(push({ db, to })).rejects.toThrow(); +}); + +test.concurrent('create view with existing flag', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + }; + + const to = { + users: users, + view1: cockroachView('some_view', { id: int4('id') }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create materialized view', async ({ db }) => { + const table = cockroachTable('test', { + id: int4('id').primaryKey(), + }); + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: cockroachMaterializedView('view') + .withNoData() + .as((qb) => qb.selectDistinct().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + log: 'statements', + }); + + const st0: string[] = [ + 'CREATE MATERIALIZED VIEW "view" AS (select distinct "id" from "test") WITH NO DATA;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table and materialized view #1', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: cockroachMaterializedView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY\n);\n`, + `CREATE MATERIALIZED VIEW "some_view" AS (select "id" from "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table and materialized view #2', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY\n);\n`, + `CREATE MATERIALIZED VIEW "some_view" AS (SELECT * FROM "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table and materialized view #3', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: cockroachMaterializedView('some_view1', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + view2: cockroachMaterializedView('some_view2') + .withNoData().as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" int4 PRIMARY KEY\n);\n`, + `CREATE MATERIALIZED VIEW "some_view1" AS (SELECT * FROM "users");`, + `CREATE MATERIALIZED VIEW "some_view2" AS (select "id" from "users") WITH NO DATA;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('create table and materialized view #4', async ({ db }) => { + // same names + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + view2: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + }; + + // view_name_duplicate + await expect(diff({}, to, [])).rejects.toThrow(); + await expect(push({ db, to })).rejects.toThrow(); +}); + +test.concurrent('create materialized view with existing flag', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + }; + + const to = { + users: users, + view1: cockroachMaterializedView('some_view', { id: int4('id') }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop view #1', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachView('some_view', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `DROP VIEW "some_view";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop view #2', async ({ db }) => { + const table = cockroachTable('test', { + id: int4('id').primaryKey(), + }); + const schema1 = { + test: table, + view: cockroachView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'DROP VIEW "view";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop view with existing flag', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop view with data', async ({ db }) => { + const table = cockroachTable('table', { + id: int4('id').primaryKey(), + }); + const schema1 = { + test: table, + view: cockroachView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const seedStatements = [`INSERT INTO "table" ("id") VALUES (1), (2), (3)`]; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ + db, + to: schema2, + }); + + // seeding + for (const seedSt of seedStatements) { + await db.query(seedSt); + } + + const st0: string[] = [ + `DROP VIEW "view";`, + ]; + const hints0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(phints).toStrictEqual(hints0); +}); + +test.concurrent('drop materialized view #1', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `DROP MATERIALIZED VIEW "some_view";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop materialized view #2', async ({ db }) => { + const table = cockroachTable('test', { + id: int4('id').primaryKey(), + }); + const schema1 = { + test: table, + view: cockroachMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'DROP MATERIALIZED VIEW "view";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop materialized view with existing flag', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop materialized view with data', async ({ db }) => { + const table = cockroachTable('table', { + id: int4('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: cockroachMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + await db.query(`INSERT INTO "table" ("id") VALUES (1), (2), (3)`); + + const { sqlStatements: pst, hints, losses } = await push({ db, to: schema2 }); + + const st0: string[] = [ + `DROP MATERIALIZED VIEW "view";`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(hints).toStrictEqual([]); + expect(losses).toStrictEqual([]); +}); + +test.concurrent('drop materialized view without data', async ({ db }) => { + const table = cockroachTable('table', { + id: int4('id').primaryKey(), + }); + const schema1 = { + test: table, + view: cockroachMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + `DROP MATERIALIZED VIEW "view";`, + ]; + const hints0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(phints).toStrictEqual(hints0); +}); + +test.concurrent('rename view #1', async ({ db }) => { + const from = { + users: cockroachTable('users', { id: int4() }), + view: cockroachView('some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + users: cockroachTable('users', { id: int4() }), + view: cockroachView('new_some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER VIEW "some_view" RENAME TO "new_some_view";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename view with existing flag', async ({ db }) => { + const from = { + view: cockroachView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + view: cockroachView('new_some_view', { id: int4('id') }).existing(), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename materialized view #1', async ({ db }) => { + const from = { + users: cockroachTable('users', { id: int4() }), + view: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + users: cockroachTable('users', { id: int4() }), + view: cockroachMaterializedView('new_some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER MATERIALIZED VIEW "some_view" RENAME TO "new_some_view";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('rename materialized view with existing flag', async ({ db }) => { + const from = { + view: cockroachMaterializedView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + view: cockroachMaterializedView('new_some_view', { id: int4('id') }).existing(), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('view alter schema', async ({ db }) => { + const schema = cockroachSchema('new_schema'); + + const from = { + users: cockroachTable('users', { id: int4() }), + view: cockroachView('some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + users: cockroachTable('users', { id: int4() }), + view: schema.view('some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const renames = ['public.some_view->new_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + `ALTER VIEW "some_view" SET SCHEMA "new_schema";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('view alter schema with existing flag', async ({ db }) => { + const schema = cockroachSchema('new_schema'); + + const from = { + view: cockroachView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + schema, + view: schema.view('some_view', { id: int4('id') }).existing(), + }; + + const renames = ['public.some_view->new_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('view alter schema for materialized', async ({ db }) => { + const schema = cockroachSchema('new_schema'); + + const from = { + users: cockroachTable('users', { id: int4() }), + view: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + users: cockroachTable('users', { id: int4() }), + view: schema.materializedView('some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const renames = ['public.some_view->new_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + `ALTER MATERIALIZED VIEW "some_view" SET SCHEMA "new_schema";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('view alter schema for materialized with existing flag', async ({ db }) => { + const schema = cockroachSchema('new_schema'); + + const from = { + view: cockroachMaterializedView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + schema, + view: schema.materializedView('some_view', { id: int4('id') }).existing(), + }; + + const renames = ['public.some_view->new_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter view ".as" value', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachView('some_view', { id: int4('id') }).as(sql`select * from users where id > 100`), + }; + + const to = { + users, + view: cockroachView('some_view', { id: int4('id') }).as(sql`select * from users where id > 101`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + 'DROP VIEW "some_view";', + `CREATE VIEW "some_view" AS (select * from users where id > 101);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // push ignored definition change +}); + +test.concurrent('alter view ".as" value with existing flag', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + users, + view: cockroachView('some_view', { id: int4('id') }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('alter materialized view ".as" value', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT '123'`), + }; + + const to = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT '1234'`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + 'DROP MATERIALIZED VIEW "some_view";', + `CREATE MATERIALIZED VIEW "some_view" AS (SELECT '1234');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore definition changes for push +}); + +test.concurrent('alter materialized view ".as" value with existing flag', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop existing flag', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).existing(), + }; + + const to = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT 'asd'`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + `CREATE MATERIALIZED VIEW "some_view" AS (SELECT 'asd');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('set existing - materialized', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachMaterializedView('some_view', { id: int4('id') }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: cockroachMaterializedView('new_some_view', { id: int4('id') }).withNoData().existing(), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0: string[] = ['DROP MATERIALIZED VIEW "some_view";']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('drop existing - materialized', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachMaterializedView('view', { id: int4('id') }).existing(), + }; + + const to = { + users, + view: cockroachMaterializedView('view', { id: int4('id') }).withNoData().as( + sql`SELECT * FROM users WHERE id > 100`, + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE MATERIALIZED VIEW "view" AS (SELECT * FROM users WHERE id > 100) WITH NO DATA;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('set existing', async ({ db }) => { + const users = cockroachTable('users', { + id: int4('id').primaryKey().notNull(), + }); + + const from = { + users, + view: cockroachView('some_view', { id: int4('id') }).as(sql`SELECT * from users where id > 100`), + }; + + const to = { + users, + view: cockroachView('new_some_view', { id: int4('id') }).existing(), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['DROP VIEW "some_view";']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('moved schema', async ({ db }) => { + const schema = cockroachSchema('my_schema'); + const from = { + schema, + users: cockroachTable('users', { id: int4() }), + view: cockroachView('some_view', { id: int4('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + users: cockroachTable('users', { id: int4() }), + view: schema.view('some_view', { id: int4('id') }).as( + sql`SELECT * FROM "users"`, + ), + }; + + const renames = ['public.some_view->my_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = [ + `ALTER VIEW "some_view" SET SCHEMA "my_schema";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test.concurrent('push view with same name', async ({ db }) => { + const table = cockroachTable('test', { + id: int4('id').primaryKey(), + }); + const schema1 = { + test: table, + view: cockroachView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: cockroachView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP VIEW "view";', + 'CREATE VIEW "view" AS (select distinct "id" from "test" where "test"."id" = 1);', + ]); + expect(pst).toStrictEqual([]); +}); + +test.concurrent('push materialized view with same name', async ({ db }) => { + const table = cockroachTable('test', { + id: int4('id').primaryKey(), + }); + const schema1 = { + test: table, + view: cockroachMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: cockroachMaterializedView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP MATERIALIZED VIEW "view";', + 'CREATE MATERIALIZED VIEW "view" AS (select distinct "id" from "test" where "test"."id" = 1);', + ]); + expect(pst).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/common.ts b/drizzle-kit/tests/common.ts deleted file mode 100644 index 631614218b..0000000000 --- a/drizzle-kit/tests/common.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { test } from 'vitest'; - -export interface DialectSuite { - /** - * 1 statement | create column: - * - * id int primary key autoincrement - */ - columns1(): Promise; -} - -export const run = (suite: DialectSuite) => { - test('add columns #1', suite.columns1); -}; -// test("add columns #1", suite.columns1) diff --git a/drizzle-kit/tests/gel/gel.ext.test.ts b/drizzle-kit/tests/gel/gel.ext.test.ts new file mode 100644 index 0000000000..9fe70cd232 --- /dev/null +++ b/drizzle-kit/tests/gel/gel.ext.test.ts @@ -0,0 +1,55 @@ +import fs from 'fs'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import 'zx/globals'; +import { DB } from 'src/utils'; +import { prepareTestDatabase, pull, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} + +fs.mkdirSync('tests/gel/tmp', { recursive: true }); + +$.quiet = true; + +const ENABLE_LOGGING = false; +const tlsSecurity = 'insecure'; + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(ENABLE_LOGGING, tlsSecurity); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('basic introspect test', async () => { + await $`pnpm gel query 'CREATE EXTENSION pgcrypto VERSION "1.3"; + CREATE EXTENSION auth VERSION "1.0"; + CREATE TYPE default::User { + CREATE REQUIRED LINK identity: ext::auth::Identity; + CREATE REQUIRED PROPERTY email: std::str; + CREATE REQUIRED PROPERTY username: std::str; + }; + CREATE GLOBAL default::current_user := (std::assert_single((SELECT + default::User { + id, + username, + email + } + FILTER + (.identity = GLOBAL ext::auth::ClientTokenIdentity) + )));' --tls-security=${tlsSecurity} --dsn=${_.url}`; + + const path = await pull(db, 'basic-ext-introspect', ['ext::auth', 'public']); + + const result = await $`pnpm exec tsc --noEmit --skipLibCheck ${path}`.nothrow(true); + expect(result.exitCode).toBe(0); + fs.rmSync(path); +}); diff --git a/drizzle-kit/tests/introspect/gel.test.ts b/drizzle-kit/tests/gel/gel.test.ts similarity index 71% rename from drizzle-kit/tests/introspect/gel.test.ts rename to drizzle-kit/tests/gel/gel.test.ts index 9c9d95fc56..85c92c9dda 100644 --- a/drizzle-kit/tests/introspect/gel.test.ts +++ b/drizzle-kit/tests/gel/gel.test.ts @@ -1,86 +1,33 @@ -import Docker from 'dockerode'; -import { drizzle, GelJsDatabase } from 'drizzle-orm/gel'; import fs from 'fs'; -import createClient, { type Client } from 'gel'; -import getPort from 'get-port'; -import { introspectGelToFile } from 'tests/schemaDiffer'; -import { v4 as uuidV4 } from 'uuid'; -import { afterAll, beforeAll, expect, test } from 'vitest'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import 'zx/globals'; +import { DB } from 'src/utils'; +import { prepareTestDatabase, pull, TestDatabase } from './mocks'; -if (!fs.existsSync('tests/introspect/gel')) { - fs.mkdirSync('tests/introspect/gel'); -} - -$.quiet = true; +// @vitest-environment-options {"max-concurrency":1} const ENABLE_LOGGING = false; +const tlsSecurity = 'insecure'; -let client: Client; -let db: GelJsDatabase; -const tlsSecurity: string = 'insecure'; -let dsn: string; -let container: Docker.Container | undefined; - -async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { - const docker = new Docker(); - const port = await getPort({ port: 5656 }); - const image = 'geldata/gel:6.0'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - const gelContainer = await docker.createContainer({ - Image: image, - Env: [ - 'GEL_CLIENT_SECURITY=insecure_dev_mode', - 'GEL_SERVER_SECURITY=insecure_dev_mode', - 'GEL_CLIENT_TLS_SECURITY=no_host_verification', - 'GEL_SERVER_PASSWORD=password', - ], - name: `drizzle-integration-tests-${uuidV4()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5656/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await gelContainer.start(); - - return { connectionString: `gel://admin:password@localhost:${port}/main`, container: gelContainer }; -} - -function sleep(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} +let _: TestDatabase; +let db: DB; beforeAll(async () => { - let connectionString; - if (process.env['GEL_CONNECTION_STRING']) { - connectionString = process.env['GEL_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } - - await sleep(15 * 1000); - client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); - - db = drizzle(client, { logger: ENABLE_LOGGING }); - - dsn = connectionString; + _ = await prepareTestDatabase(ENABLE_LOGGING, tlsSecurity); + db = _.db; }); afterAll(async () => { - await client?.close().catch(console.error); - await container?.stop().catch(console.error); + await _.close(); }); +beforeEach(async () => { + await _.clear(); +}); + +fs.mkdirSync('tests/gel/tmp', { recursive: true }); +$.quiet = true; + test('basic introspect test', async () => { await $`pnpm gel query 'CREATE TYPE default::all_columns { @@ -216,12 +163,9 @@ test('basic introspect test', async () => { create property defaultbytesColumn: bytes { SET DEFAULT := b"Hello, world"; }; - }' --tls-security=${tlsSecurity} --dsn=${dsn}`; + }' --tls-security=${tlsSecurity} --dsn=${_.url}`; - const path = await introspectGelToFile( - client, - 'basic-introspect', - ); + const path = await pull(db, 'basic-introspect'); const result = await $`pnpm exec tsc --noEmit --skipLibCheck ${path}`.nothrow(true); expect(result.exitCode).toBe(0); diff --git a/drizzle-kit/tests/gel/mocks.ts b/drizzle-kit/tests/gel/mocks.ts new file mode 100644 index 0000000000..fb4553a97d --- /dev/null +++ b/drizzle-kit/tests/gel/mocks.ts @@ -0,0 +1,140 @@ +import Docker from 'dockerode'; +import { drizzle, GelJsDatabase } from 'drizzle-orm/gel'; +import createClient from 'gel'; +import getPort from 'get-port'; +import { EntitiesFilter, EntitiesFilterConfig } from 'src/cli/validations/cli'; +import { CasingType } from 'src/cli/validations/common'; +import { interimToDDL } from 'src/dialects/postgres/ddl'; +import { isSystemNamespace, isSystemRole } from 'src/dialects/postgres/grammar'; +import { fromDatabase } from 'src/dialects/postgres/introspect'; +import { ddlToTypeScript } from 'src/dialects/postgres/typescript'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { DB } from 'src/utils'; +import { tsc } from 'tests/utils'; +import { v4 as uuid } from 'uuid'; + +export type TestDatabase = { + url: string; + db: DB; + drizzle: GelJsDatabase; + close: () => Promise; + clear: () => Promise; +}; + +export const prepareTestDatabase = async ( + logging: boolean, + tlsSecurity: 'insecure' | 'no_host_verification' | 'strict' | 'default', +): Promise => { + const envUrl = process.env.GEL_CONNECTION_STRING; + const { url, container } = envUrl ? { url: envUrl, container: null } : await createDockerDB(); + + const sleep = 1000; + let timeLeft = 20000; + do { + try { + await new Promise((resolve) => setTimeout(resolve, 15 * 1000)); + const client = createClient({ dsn: url, tlsSecurity }); + + const drizzleDB = drizzle({ client, logger: logging }); + + const db = { + query: async (sql: string, params?: any[]) => { + const [res] = await client.query(sql); + return res as T[]; + }, + }; + const close = async () => { + await client?.close().catch(console.error); + await container?.stop().catch(console.error); + }; + const clear = async () => { + const namespaces = await db.query<{ name: string }>('select oid, nspname as name from pg_namespace').then(( + res, + ) => res.filter((r) => !isSystemNamespace(r.name))); + + const roles = await client.query<{ rolname: string }>( + `SELECT rolname, rolinherit, rolcreatedb, rolcreaterole FROM pg_roles;`, + ).then((it) => it.filter((it) => !isSystemRole(it.rolname))); + + for (const namespace of namespaces) { + await client.query(`DROP SCHEMA "${namespace.name}" cascade`); + } + + await client.query('CREATE SCHEMA public;'); + + for (const role of roles) { + await client.query(`DROP ROLE "${role.rolname}"`); + } + }; + return { url, db, drizzle: drizzleDB, close, clear }; + } catch (e) { + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + + throw new Error(); +}; + +export const pull = async ( + db: DB, + testName: string, + schemas: string[] = [], + entities?: EntitiesFilter, + casing?: CasingType | undefined, +) => { + const filterConfig: EntitiesFilterConfig = { + entities, + schemas, + tables: [], + extensions: [], + }; + // introspect to schema + const filter = prepareEntityFilter('gel', filterConfig, []); + const interim = await fromDatabase(db, filter); + const { ddl } = interimToDDL(interim); + // write to ts file + const file = ddlToTypeScript(ddl, interim.viewColumns, 'camel', 'gel'); + + const path = `tests/gel/tmp/${testName}.ts`; + fs.writeFileSync(path, file.file); + await tsc(file.file); + + const typeCheckResult = await $`pnpm exec tsc --noEmit --skipLibCheck ${path}`.nothrow(); + if (typeCheckResult.exitCode !== 0) { + throw new Error(typeCheckResult.stderr || typeCheckResult.stdout); + } + + return path; +}; + +async function createDockerDB(): Promise<{ url: string; container: Docker.Container }> { + const docker = new Docker(); + const port = await getPort({ port: 5656 }); + const image = 'geldata/gel:6.0'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + const gelContainer = await docker.createContainer({ + Image: image, + Env: [ + 'GEL_CLIENT_SECURITY=insecure_dev_mode', + 'GEL_SERVER_SECURITY=insecure_dev_mode', + 'GEL_CLIENT_TLS_SECURITY=no_host_verification', + 'GEL_SERVER_PASSWORD=password', + ], + name: `drizzle-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '5656/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await gelContainer.start(); + return { url: `gel://admin:password@localhost:${port}/main`, container: gelContainer }; +} diff --git a/drizzle-kit/tests/indexes/common.ts b/drizzle-kit/tests/indexes/common.ts deleted file mode 100644 index 5bdc244465..0000000000 --- a/drizzle-kit/tests/indexes/common.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { afterAll, beforeAll, test } from 'vitest'; - -export interface DialectSuite { - simpleIndex(context?: any): Promise; - vectorIndex(context?: any): Promise; - indexesToBeTriggered(context?: any): Promise; -} - -export const run = ( - suite: DialectSuite, - beforeAllFn?: (context: any) => Promise, - afterAllFn?: (context: any) => Promise, -) => { - let context: any = {}; - beforeAll(beforeAllFn ? () => beforeAllFn(context) : () => {}); - test('index #1: simple index', () => suite.simpleIndex(context)); - test('index #2: vector index', () => suite.vectorIndex(context)); - test('index #3: fields that should be triggered on generate and not triggered on push', () => - suite.indexesToBeTriggered(context)); - afterAll(afterAllFn ? () => afterAllFn(context) : () => {}); -}; diff --git a/drizzle-kit/tests/indexes/pg.test.ts b/drizzle-kit/tests/indexes/pg.test.ts deleted file mode 100644 index 57f77c103a..0000000000 --- a/drizzle-kit/tests/indexes/pg.test.ts +++ /dev/null @@ -1,245 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { index, pgTable, serial, text, vector } from 'drizzle-orm/pg-core'; -import { JsonCreateIndexStatement } from 'src/jsonStatements'; -import { PgSquasher } from 'src/serializer/pgSchema'; -import { diffTestSchemas } from 'tests/schemaDiffer'; -import { expect } from 'vitest'; -import { DialectSuite, run } from './common'; - -const pgSuite: DialectSuite = { - async vectorIndex() { - const schema1 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: vector('name', { dimensions: 3 }), - }), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - embedding: vector('name', { dimensions: 3 }), - }, - (t) => ({ - indx2: index('vector_embedding_idx') - .using('hnsw', t.embedding.op('vector_ip_ops')) - .with({ m: 16, ef_construction: 64 }), - }), - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - schema: '', - tableName: 'users', - type: 'create_index_pg', - data: { - columns: [ - { - asc: true, - expression: 'name', - isExpression: false, - nulls: 'last', - opclass: 'vector_ip_ops', - }, - ], - concurrently: false, - isUnique: false, - method: 'hnsw', - name: 'vector_embedding_idx', - where: undefined, - with: { - ef_construction: 64, - m: 16, - }, - }, - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `CREATE INDEX "vector_embedding_idx" ON "users" USING hnsw ("name" vector_ip_ops) WITH (m=16,ef_construction=64);`, - ); - }, - - async indexesToBeTriggered() { - const schema1 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - indx: index('indx').on(t.name.desc()).concurrently(), - indx1: index('indx1') - .on(t.name.desc()) - .where(sql`true`), - indx2: index('indx2') - .on(t.name.op('text_ops')) - .where(sql`true`), - indx3: index('indx3') - .on(sql`lower(name)`) - .where(sql`true`), - }), - ), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - indx: index('indx').on(t.name.desc()), - indx1: index('indx1') - .on(t.name.desc()) - .where(sql`false`), - indx2: index('indx2') - .on(t.name.op('test')) - .where(sql`true`), - indx3: index('indx3') - .on(sql`lower(${t.id})`) - .where(sql`true`), - indx4: index('indx4') - .on(sql`lower(id)`) - .where(sql`true`), - }), - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas( - schema1, - schema2, - [], - ); - - expect(sqlStatements).toStrictEqual([ - 'DROP INDEX "indx";', - 'DROP INDEX "indx1";', - 'DROP INDEX "indx2";', - 'DROP INDEX "indx3";', - 'CREATE INDEX "indx4" ON "users" USING btree (lower(id)) WHERE true;', - 'CREATE INDEX "indx" ON "users" USING btree ("name" DESC NULLS LAST);', - 'CREATE INDEX "indx1" ON "users" USING btree ("name" DESC NULLS LAST) WHERE false;', - 'CREATE INDEX "indx2" ON "users" USING btree ("name" test) WHERE true;', - 'CREATE INDEX "indx3" ON "users" USING btree (lower("id")) WHERE true;', - ]); - }, - - async simpleIndex() { - const schema1 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - indx: index() - .on(t.name.desc(), t.id.asc().nullsLast()) - .with({ fillfactor: 70 }) - .where(sql`select 1`), - indx1: index('indx1') - .using('hash', t.name.desc(), sql`${t.name}`) - .with({ fillfactor: 70 }), - }), - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - schema: '', - tableName: 'users', - type: 'create_index_pg', - data: { - columns: [ - { - asc: false, - expression: 'name', - isExpression: false, - nulls: 'last', - opclass: '', - }, - { - asc: true, - expression: 'id', - isExpression: false, - nulls: 'last', - opclass: '', - }, - ], - concurrently: false, - isUnique: false, - method: 'btree', - name: 'users_name_id_index', - where: 'select 1', - with: { - fillfactor: 70, - }, - }, - // data: 'users_name_id_index;name,false,last,undefined,,id,true,last,undefined;false;false;btree;select 1;{"fillfactor":70}', - }); - expect(statements[1]).toStrictEqual({ - schema: '', - tableName: 'users', - type: 'create_index_pg', - data: { - columns: [ - { - asc: false, - expression: 'name', - isExpression: false, - nulls: 'last', - opclass: '', - }, - { - asc: true, - expression: '"name"', - isExpression: true, - nulls: 'last', - opclass: '', - }, - ], - concurrently: false, - isUnique: false, - method: 'hash', - name: 'indx1', - where: undefined, - with: { - fillfactor: 70, - }, - }, - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `CREATE INDEX "users_name_id_index" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70) WHERE select 1;`, - ); - expect(sqlStatements[1]).toBe( - `CREATE INDEX "indx1" ON "users" USING hash ("name" DESC NULLS LAST,"name") WITH (fillfactor=70);`, - ); - }, -}; - -run(pgSuite); diff --git a/drizzle-kit/tests/introspect/gel.ext.test.ts b/drizzle-kit/tests/introspect/gel.ext.test.ts deleted file mode 100644 index e17055d65c..0000000000 --- a/drizzle-kit/tests/introspect/gel.ext.test.ts +++ /dev/null @@ -1,111 +0,0 @@ -import Docker from 'dockerode'; -import { drizzle, GelJsDatabase } from 'drizzle-orm/gel'; -import fs from 'fs'; -import createClient, { type Client } from 'gel'; -import getPort from 'get-port'; -import { introspectGelToFile } from 'tests/schemaDiffer'; -import { v4 as uuidV4 } from 'uuid'; -import { afterAll, beforeAll, expect, test } from 'vitest'; -import 'zx/globals'; - -if (!fs.existsSync('tests/introspect/gel')) { - fs.mkdirSync('tests/introspect/gel'); -} - -$.quiet = true; - -const ENABLE_LOGGING = false; - -let client: Client; -let db: GelJsDatabase; -const tlsSecurity: string = 'insecure'; -let dsn: string; -let container: Docker.Container | undefined; - -async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { - const docker = new Docker(); - const port = await getPort({ port: 5656 }); - const image = 'geldata/gel:6'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - const gelContainer = await docker.createContainer({ - Image: image, - Env: [ - 'GEL_CLIENT_SECURITY=insecure_dev_mode', - 'GEL_SERVER_SECURITY=insecure_dev_mode', - 'GEL_CLIENT_TLS_SECURITY=no_host_verification', - 'GEL_SERVER_PASSWORD=password', - ], - name: `drizzle-integration-tests-${uuidV4()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5656/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await gelContainer.start(); - - return { connectionString: `gel://admin:password@localhost:${port}/main`, container: gelContainer }; -} - -function sleep(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} - -beforeAll(async () => { - let connectionString; - if (process.env['GEL_CONNECTION_STRING']) { - connectionString = process.env['GEL_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } - - await sleep(15 * 1000); - client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); - - db = drizzle(client, { logger: ENABLE_LOGGING }); - - dsn = connectionString; -}); - -afterAll(async () => { - await client?.close().catch(console.error); - await container?.stop().catch(console.error); -}); - -test('basic introspect test', async () => { - await $`pnpm gel query 'CREATE EXTENSION pgcrypto VERSION "1.3"; - CREATE EXTENSION auth VERSION "1.0"; - CREATE TYPE default::User { - CREATE REQUIRED LINK identity: ext::auth::Identity; - CREATE REQUIRED PROPERTY email: std::str; - CREATE REQUIRED PROPERTY username: std::str; - }; - CREATE GLOBAL default::current_user := (std::assert_single((SELECT - default::User { - id, - username, - email - } - FILTER - (.identity = GLOBAL ext::auth::ClientTokenIdentity) - )));' --tls-security=${tlsSecurity} --dsn=${dsn}`; - - const path = await introspectGelToFile( - client, - 'basic-ext-introspect', - ['ext::auth', 'public'], - ); - - const result = await $`pnpm exec tsc --noEmit --skipLibCheck ${path}`.nothrow(true); - expect(result.exitCode).toBe(0); - fs.rmSync(path); -}); diff --git a/drizzle-kit/tests/introspect/libsql.test.ts b/drizzle-kit/tests/introspect/libsql.test.ts deleted file mode 100644 index 9211989cae..0000000000 --- a/drizzle-kit/tests/introspect/libsql.test.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { createClient } from '@libsql/client'; -import { sql } from 'drizzle-orm'; -import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; -import fs from 'fs'; -import { introspectLibSQLToFile, introspectMySQLToFile, introspectSQLiteToFile } from 'tests/schemaDiffer'; -import { expect, test } from 'vitest'; - -if (!fs.existsSync('tests/introspect/libsql')) { - fs.mkdirSync('tests/introspect/libsql'); -} - -test('view #1', async () => { - const turso = createClient({ - url: ':memory:', - }); - - const users = sqliteTable('users', { id: int('id') }); - const testView = sqliteView('some_view', { id: int('id') }).as( - sql`SELECT * FROM ${users}`, - ); - - const schema = { - users: users, - testView, - }; - - const { statements, sqlStatements } = await introspectLibSQLToFile( - turso, - schema, - 'view-1', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); diff --git a/drizzle-kit/tests/introspect/mysql.test.ts b/drizzle-kit/tests/introspect/mysql.test.ts deleted file mode 100644 index 2db33416ba..0000000000 --- a/drizzle-kit/tests/introspect/mysql.test.ts +++ /dev/null @@ -1,319 +0,0 @@ -import 'dotenv/config'; -import Docker from 'dockerode'; -import { SQL, sql } from 'drizzle-orm'; -import { - bigint, - char, - check, - decimal, - double, - float, - int, - mediumint, - mysqlEnum, - mysqlTable, - mysqlView, - serial, - smallint, - text, - tinyint, - varchar, -} from 'drizzle-orm/mysql-core'; -import * as fs from 'fs'; -import getPort from 'get-port'; -import { Connection, createConnection } from 'mysql2/promise'; -import { introspectMySQLToFile } from 'tests/schemaDiffer'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; - -let client: Connection; -let mysqlContainer: Docker.Container; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -beforeAll(async () => { - const connectionString = process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await createConnection(connectionString); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); - throw lastError; - } -}); - -afterAll(async () => { - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); -}); - -beforeEach(async () => { - await client.query(`drop database if exists \`drizzle\`;`); - await client.query(`create database \`drizzle\`;`); - await client.query(`use \`drizzle\`;`); -}); - -if (!fs.existsSync('tests/introspect/mysql')) { - fs.mkdirSync('tests/introspect/mysql'); -} - -test('generated always column: link to another column', async () => { - const schema = { - users: mysqlTable('users', { - id: int('id'), - email: text('email'), - generatedEmail: text('generatedEmail').generatedAlwaysAs( - (): SQL => sql`\`email\``, - ), - }), - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'generated-link-column', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('generated always column virtual: link to another column', async () => { - const schema = { - users: mysqlTable('users', { - id: int('id'), - email: text('email'), - generatedEmail: text('generatedEmail').generatedAlwaysAs( - (): SQL => sql`\`email\``, - { mode: 'virtual' }, - ), - }), - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'generated-link-column-virtual', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('Default value of character type column: char', async () => { - const schema = { - users: mysqlTable('users', { - id: int('id'), - sortKey: char('sortKey', { length: 255 }).default('0'), - }), - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'default-value-char-column', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('Default value of character type column: varchar', async () => { - const schema = { - users: mysqlTable('users', { - id: int('id'), - sortKey: varchar('sortKey', { length: 255 }).default('0'), - }), - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'default-value-varchar-column', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('introspect checks', async () => { - const schema = { - users: mysqlTable('users', { - id: serial('id'), - name: varchar('name', { length: 255 }), - age: int('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'introspect-checks', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('view #1', async () => { - const users = mysqlTable('users', { id: int('id') }); - const testView = mysqlView('some_view', { id: int('id') }).as( - sql`select \`drizzle\`.\`users\`.\`id\` AS \`id\` from \`drizzle\`.\`users\``, - ); - - const schema = { - users: users, - testView, - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'view-1', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('view #2', async () => { - const users = mysqlTable('some_users', { id: int('id') }); - const testView = mysqlView('some_view', { id: int('id') }).algorithm('temptable').sqlSecurity('definer').as( - sql`SELECT * FROM ${users}`, - ); - - const schema = { - users: users, - testView, - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'view-2', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('handle float type', async () => { - const schema = { - table: mysqlTable('table', { - col1: float(), - col2: float({ precision: 2 }), - col3: float({ precision: 2, scale: 1 }), - }), - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'handle-float-type', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('handle unsigned numerical types', async () => { - const schema = { - table: mysqlTable('table', { - col1: int({ unsigned: true }), - col2: tinyint({ unsigned: true }), - col3: smallint({ unsigned: true }), - col4: mediumint({ unsigned: true }), - col5: bigint({ mode: 'number', unsigned: true }), - col6: float({ unsigned: true }), - col7: float({ precision: 2, scale: 1, unsigned: true }), - col8: double({ unsigned: true }), - col9: double({ precision: 2, scale: 1, unsigned: true }), - col10: decimal({ unsigned: true }), - col11: decimal({ precision: 2, scale: 1, unsigned: true }), - }), - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'handle-unsigned-numerical-types', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('instrospect strings with single quotes', async () => { - const schema = { - columns: mysqlTable('columns', { - enum: mysqlEnum('my_enum', ['escape\'s quotes "', 'escape\'s quotes 2 "']).default('escape\'s quotes "'), - text: text('text').default('escape\'s quotes " '), - varchar: varchar('varchar', { length: 255 }).default('escape\'s quotes " '), - }), - }; - - const { statements, sqlStatements } = await introspectMySQLToFile( - client, - schema, - 'introspect-strings-with-single-quotes', - 'drizzle', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); - - await client.query(`drop table columns;`); -}); diff --git a/drizzle-kit/tests/introspect/sqlite.test.ts b/drizzle-kit/tests/introspect/sqlite.test.ts deleted file mode 100644 index de13d4e81b..0000000000 --- a/drizzle-kit/tests/introspect/sqlite.test.ts +++ /dev/null @@ -1,122 +0,0 @@ -import Database from 'better-sqlite3'; -import { SQL, sql } from 'drizzle-orm'; -import { check, int, sqliteTable, sqliteView, text } from 'drizzle-orm/sqlite-core'; -import * as fs from 'fs'; -import { introspectSQLiteToFile } from 'tests/schemaDiffer'; -import { expect, test } from 'vitest'; - -if (!fs.existsSync('tests/introspect/sqlite')) { - fs.mkdirSync('tests/introspect/sqlite'); -} - -test('generated always column: link to another column', async () => { - const sqlite = new Database(':memory:'); - - const schema = { - users: sqliteTable('users', { - id: int('id'), - email: text('email'), - generatedEmail: text('generatedEmail').generatedAlwaysAs( - (): SQL => sql`\`email\``, - ), - }), - }; - - const { statements, sqlStatements } = await introspectSQLiteToFile( - sqlite, - schema, - 'generated-link-column', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('generated always column virtual: link to another column', async () => { - const sqlite = new Database(':memory:'); - - const schema = { - users: sqliteTable('users', { - id: int('id'), - email: text('email'), - generatedEmail: text('generatedEmail').generatedAlwaysAs( - (): SQL => sql`\`email\``, - { mode: 'virtual' }, - ), - }), - }; - - const { statements, sqlStatements } = await introspectSQLiteToFile( - sqlite, - schema, - 'generated-link-column-virtual', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('instrospect strings with single quotes', async () => { - const sqlite = new Database(':memory:'); - - const schema = { - columns: sqliteTable('columns', { - text: text('text').default('escape\'s quotes " '), - }), - }; - - const { statements, sqlStatements } = await introspectSQLiteToFile( - sqlite, - schema, - 'introspect-strings-with-single-quotes', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('introspect checks', async () => { - const sqlite = new Database(':memory:'); - - const schema = { - users: sqliteTable('users', { - id: int('id'), - name: text('name'), - age: int('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), - }; - - const { statements, sqlStatements } = await introspectSQLiteToFile( - sqlite, - schema, - 'introspect-checks', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('view #1', async () => { - const sqlite = new Database(':memory:'); - - const users = sqliteTable('users', { id: int('id') }); - const testView = sqliteView('some_view', { id: int('id') }).as( - sql`SELECT * FROM ${users}`, - ); - - const schema = { - users: users, - testView, - }; - - const { statements, sqlStatements } = await introspectSQLiteToFile( - sqlite, - schema, - 'view-1', - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); diff --git a/drizzle-kit/tests/libsql-checks.test.ts b/drizzle-kit/tests/libsql-checks.test.ts deleted file mode 100644 index 2a3abf2dc4..0000000000 --- a/drizzle-kit/tests/libsql-checks.test.ts +++ /dev/null @@ -1,308 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { check, int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasLibSQL } from './schemaDiffer'; - -test('create table with check', async (t) => { - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasLibSQL({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'integer', - notNull: true, - primaryKey: true, - autoincrement: false, - }, - { - name: 'age', - type: 'integer', - notNull: false, - primaryKey: false, - autoincrement: false, - }, - ], - compositePKs: [], - checkConstraints: ['some_check_name;"users"."age" > 21'], - referenceData: [], - uniqueConstraints: [], - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer, -\tCONSTRAINT "some_check_name" CHECK("users"."age" > 21) -);\n`); -}); - -test('add check contraint to existing table', async (t) => { - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const from = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }), - }; - - const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: ['some_check_name;"users"."age" > 21'], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer, -\tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 21) -);\n`); - expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('drop check contraint to existing table', async (t) => { - const from = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }), - }; - - const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer -);\n`); - expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('rename check constraint', async (t) => { - const from = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('new_some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [`new_some_check_name;"users"."age" > 21`], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer, -\tCONSTRAINT "new_some_check_name" CHECK("__new_users"."age" > 21) -);\n`); - expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('rename check constraint', async (t) => { - const from = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 10`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [`some_check_name;"users"."age" > 10`], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer, -\tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 10) -);\n`); - expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('create checks with same names', async (t) => { - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - name: text('name'), - }, (table) => ({ - checkConstraint1: check('some_check_name', sql`${table.age} > 21`), - checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), - })), - }; - - await expect(diffTestSchemasLibSQL({}, to, [])).rejects.toThrowError(); -}); diff --git a/drizzle-kit/tests/libsql-statements.test.ts b/drizzle-kit/tests/libsql-statements.test.ts deleted file mode 100644 index 636496c458..0000000000 --- a/drizzle-kit/tests/libsql-statements.test.ts +++ /dev/null @@ -1,989 +0,0 @@ -import { foreignKey, index, int, integer, sqliteTable, text, uniqueIndex } from 'drizzle-orm/sqlite-core'; -import { JsonRecreateTableStatement } from 'src/jsonStatements'; -import { expect, test } from 'vitest'; -import { diffTestSchemasLibSQL } from './schemaDiffer'; - -test('drop autoincrement', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - }), - }; - - const { statements } = await diffTestSchemasLibSQL(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [{ - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('set autoincrement', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const { statements } = await diffTestSchemasLibSQL(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [{ - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('set not null', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_set_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: false, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`, - ); -}); - -test('drop not null', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_drop_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`, - ); -}); - -test('set default. set not null. add column', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull().default('name'), - age: int('age').notNull(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(3); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_set_default', - tableName: 'users', - columnName: 'name', - newDefaultValue: "'name'", - schema: '', - newDataType: 'text', - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: false, - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_table_alter_column_set_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: "'name'", - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: false, - }); - expect(statements[2]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'age', - type: 'integer', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL DEFAULT 'name';`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`users\` ADD \`age\` integer NOT NULL;`, - ); -}); - -test('drop default. drop not null', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull().default('name'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_drop_default', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_table_alter_column_drop_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`, - ); -}); - -test('set data type. set default', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: int('name').default(123), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_set_type', - tableName: 'users', - columnName: 'name', - newDataType: 'integer', - oldDataType: 'text', - schema: '', - columnDefault: 123, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_table_alter_column_set_default', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'integer', - newDefaultValue: 123, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" integer DEFAULT 123;`, - ); -}); - -test('add foriegn key', async (t) => { - const schema = { - table: sqliteTable('table', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - tableId: int('table_id'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - tableId: int('table_id').references(() => schema.table.id), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_reference', - tableName: 'users', - data: 'users_table_id_table_id_fk;users;table_id;table;id;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'integer', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "table_id" TO "table_id" integer REFERENCES table(id) ON DELETE no action ON UPDATE no action;`, - ); -}); - -test('drop foriegn key', async (t) => { - const schema = { - table: sqliteTable('table', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - tableId: int('table_id').references(() => schema.table.id, { - onDelete: 'cascade', - }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - tableId: int('table_id'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'table_id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, -\t\`table_id\` integer -);\n`); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_users\`("id", "table_id") SELECT "id", "table_id" FROM \`users\`;`, - ); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('alter foriegn key', async (t) => { - const tableRef = sqliteTable('table', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }); - const tableRef2 = sqliteTable('table2', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - tableId: int('table_id').references(() => tableRef.id, { - onDelete: 'cascade', - }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - tableId: int('table_id').references(() => tableRef2.id), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'table_id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [ - { - columnsFrom: ['table_id'], - columnsTo: ['id'], - name: 'users_table_id_table2_id_fk', - onDelete: 'no action', - onUpdate: 'no action', - tableFrom: 'users', - tableTo: 'table2', - }, - ], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, -\t\`table_id\` integer, -\tFOREIGN KEY (\`table_id\`) REFERENCES \`table2\`(\`id\`) ON UPDATE no action ON DELETE no action -);\n`); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_users\`("id", "table_id") SELECT "id", "table_id" FROM \`users\`;`, - ); - expect(sqlStatements[3]).toBe( - 'DROP TABLE `users`;', - ); - expect(sqlStatements[4]).toBe( - 'ALTER TABLE `__new_users` RENAME TO `users`;', - ); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('add foriegn key for multiple columns', async (t) => { - const tableRef = sqliteTable('table', { - id: int('id').primaryKey({ autoIncrement: true }), - age: int('age'), - age1: int('age_1'), - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - column: int('column'), - column1: int('column_1'), - }), - tableRef, - }; - - const schema2 = { - tableRef, - users: sqliteTable( - 'users', - { - id: int('id').primaryKey({ autoIncrement: true }), - column: int('column'), - column1: int('column_1'), - }, - (table) => ({ - foreignKey: foreignKey({ - columns: [table.column, table.column1], - foreignColumns: [tableRef.age, tableRef.age1], - }), - }), - ), - }; - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'column', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'column_1', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [ - { - columnsFrom: ['column', 'column_1'], - columnsTo: ['age', 'age_1'], - name: 'users_column_column_1_table_age_age_1_fk', - onDelete: 'no action', - onUpdate: 'no action', - tableFrom: 'users', - tableTo: 'table', - }, - ], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - } as JsonRecreateTableStatement); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe( - `CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, -\t\`column\` integer, -\t\`column_1\` integer, -\tFOREIGN KEY (\`column\`,\`column_1\`) REFERENCES \`table\`(\`age\`,\`age_1\`) ON UPDATE no action ON DELETE no action -);\n`, - ); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_users\`("id", "column", "column_1") SELECT "id", "column", "column_1" FROM \`users\`;`, - ); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('drop foriegn key for multiple columns', async (t) => { - const tableRef = sqliteTable('table', { - id: int('id').primaryKey({ autoIncrement: true }), - age: int('age'), - age1: int('age_1'), - }); - - const schema1 = { - users: sqliteTable( - 'users', - { - id: int('id').primaryKey({ autoIncrement: true }), - column: int('column'), - column1: int('column_1'), - }, - (table) => ({ - foreignKey: foreignKey({ - columns: [table.column, table.column1], - foreignColumns: [tableRef.age, tableRef.age1], - }), - }), - ), - tableRef, - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - column: int('column'), - column1: int('column_1'), - }), - tableRef, - }; - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'column', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'column_1', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe( - `CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, -\t\`column\` integer, -\t\`column_1\` integer -);\n`, - ); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_users\`("id", "column", "column_1") SELECT "id", "column", "column_1" FROM \`users\`;`, - ); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('alter column drop generated', async (t) => { - const from = { - users: sqliteTable('table', { - id: int('id').primaryKey().notNull(), - name: text('name').generatedAlwaysAs('drizzle is the best').notNull(), - }), - }; - - const to = { - users: sqliteTable('table', { - id: int('id').primaryKey().notNull(), - name: text('name').notNull(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - from, - to, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'table', - type: 'alter_table_alter_column_drop_generated', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`ALTER TABLE \`table\` DROP COLUMN \`name\`;`); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`table\` ADD \`name\` text NOT NULL;`, - ); -}); - -test('recreate table with nested references', async (t) => { - let users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - age: integer('age'), - }); - let subscriptions = sqliteTable('subscriptions', { - id: int('id').primaryKey({ autoIncrement: true }), - userId: integer('user_id').references(() => users.id), - customerId: text('customer_id'), - }); - const schema1 = { - users: users, - subscriptions: subscriptions, - subscriptionMetadata: sqliteTable('subscriptions_metadata', { - id: int('id').primaryKey({ autoIncrement: true }), - subscriptionId: text('subscription_id').references( - () => subscriptions.id, - ), - }), - }; - - users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }); - const schema2 = { - users: users, - subscriptions: subscriptions, - subscriptionMetadata: sqliteTable('subscriptions_metadata', { - id: int('id').primaryKey({ autoIncrement: true }), - subscriptionId: text('subscription_id').references( - () => subscriptions.id, - ), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer -);\n`); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`, - ); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('set not null with index', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }, (table) => ({ - someIndex: index('users_name_index').on(table.name), - })), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }, (table) => ({ - someIndex: index('users_name_index').on(table.name), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_set_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: false, - }); - - expect(sqlStatements.length).toBe(3); - expect(sqlStatements[0]).toBe( - `DROP INDEX "users_name_index";`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`, - ); - expect(sqlStatements[2]).toBe( - `CREATE INDEX \`users_name_index\` ON \`users\` (\`name\`);`, - ); -}); - -test('drop not null with two indexes', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - age: int('age').notNull(), - }, (table) => ({ - someUniqeIndex: uniqueIndex('users_name_unique').on(table.name), - someIndex: index('users_age_index').on(table.age), - })), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - age: int('age').notNull(), - }, (table) => ({ - someUniqeIndex: uniqueIndex('users_name_unique').on(table.name), - someIndex: index('users_age_index').on(table.age), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_drop_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }); - - expect(sqlStatements.length).toBe(5); - expect(sqlStatements[0]).toBe( - `DROP INDEX "users_name_unique";`, - ); - expect(sqlStatements[1]).toBe( - `DROP INDEX "users_age_index";`, - ); - expect(sqlStatements[2]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`, - ); - expect(sqlStatements[3]).toBe( - `CREATE UNIQUE INDEX \`users_name_unique\` ON \`users\` (\`name\`);`, - ); - expect(sqlStatements[4]).toBe( - `CREATE INDEX \`users_age_index\` ON \`users\` (\`age\`);`, - ); -}); diff --git a/drizzle-kit/tests/libsql-views.test.ts b/drizzle-kit/tests/libsql-views.test.ts deleted file mode 100644 index bf5cdb04ec..0000000000 --- a/drizzle-kit/tests/libsql-views.test.ts +++ /dev/null @@ -1,218 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasLibSQL } from './schemaDiffer'; - -test('create view', async () => { - const users = sqliteTable('users', { id: int('id').default(1) }); - const view = sqliteView('view').as((qb) => qb.select().from(users)); - const to = { - users: users, - testView: view, - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [{ - autoincrement: false, - default: 1, - name: 'id', - type: 'integer', - primaryKey: false, - notNull: false, - }], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_create_view', - name: 'view', - definition: 'select "id" from "users"', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( -\t\`id\` integer DEFAULT 1 -);\n`); - expect(sqlStatements[1]).toBe(`CREATE VIEW \`view\` AS select "id" from "users";`); -}); - -test('drop view', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), - }; - const to = { - users, - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'view', - type: 'drop_view', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `DROP VIEW \`view\`;`, - ); -}); - -test('alter view', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), - }; - const to = { - users, - testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users WHERE users.id = 1`), - }; - const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - name: 'view', - type: 'drop_view', - }); - expect(statements[1]).toStrictEqual({ - name: 'view', - type: 'sqlite_create_view', - definition: 'SELECT * FROM users WHERE users.id = 1', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `DROP VIEW \`view\`;`, - ); - expect(sqlStatements[1]).toBe( - `CREATE VIEW \`view\` AS SELECT * FROM users WHERE users.id = 1;`, - ); -}); - -test('create view with existing flag', async () => { - const view = sqliteView('view', {}).existing(); - const to = { - testView: view, - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL({}, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop view with existing flag', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).existing(), - }; - const to = { - users, - }; - - const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('rename view with existing flag', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).existing(), - }; - const to = { - users, - testView: sqliteView('new_view', { id: int('id') }).existing(), - }; - const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('rename view and drop existing flag', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).existing(), - }; - const to = { - users, - testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users`), - }; - const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - name: 'view', - type: 'drop_view', - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_create_view', - name: 'new_view', - definition: 'SELECT * FROM users', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); - expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users;`); -}); - -test('rename view and alter ".as"', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), - }; - const to = { - users, - testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users WHERE 1=1`), - }; - const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - name: 'view', - type: 'drop_view', - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_create_view', - name: 'new_view', - definition: 'SELECT * FROM users WHERE 1=1', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); - expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users WHERE 1=1;`); -}); diff --git a/drizzle-kit/tests/migrate/libsq-schema.ts b/drizzle-kit/tests/migrate/libsq-schema.ts deleted file mode 100644 index 5cb344d518..0000000000 --- a/drizzle-kit/tests/migrate/libsq-schema.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; - -export const users = sqliteTable('users', { - id: integer('id').primaryKey().notNull(), - name: text('name').notNull(), -}); diff --git a/drizzle-kit/tests/migrate/libsql-migrate.test.ts b/drizzle-kit/tests/migrate/libsql-migrate.test.ts deleted file mode 100644 index b937b644fd..0000000000 --- a/drizzle-kit/tests/migrate/libsql-migrate.test.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { createClient } from '@libsql/client'; -import { connectToLibSQL } from 'src/cli/connections'; -import { expect, test } from 'vitest'; - -test('validate migrate function', async () => { - const credentials = { - url: ':memory:', - }; - const { migrate, query } = await connectToLibSQL(credentials); - - await migrate({ migrationsFolder: 'tests/migrate/migrations' }); - - const res = await query(`PRAGMA table_info("users");`); - - expect(res).toStrictEqual([{ - cid: 0, - name: 'id', - type: 'INTEGER', - notnull: 0, - dflt_value: null, - pk: 0, - }, { - cid: 1, - name: 'name', - type: 'INTEGER', - notnull: 1, - dflt_value: null, - pk: 0, - }]); -}); - -// test('validate migrate function', async () => { -// const credentials = { -// url: '', -// authToken: '', -// }; -// const { migrate, query } = await connectToLibSQL(credentials); - -// await migrate({ migrationsFolder: 'tests/migrate/migrations' }); - -// const res = await query(`PRAGMA table_info("users");`); - -// expect(res).toStrictEqual([{ -// cid: 0, -// name: 'id', -// type: 'INTEGER', -// notnull: 0, -// dflt_value: null, -// pk: 0, -// }, { -// cid: 1, -// name: 'name', -// type: 'INTEGER', -// notnull: 1, -// dflt_value: null, -// pk: 0, -// }]); -// }); diff --git a/drizzle-kit/tests/migrate/migrations/0000_little_blizzard.sql b/drizzle-kit/tests/migrate/migrations/0000_little_blizzard.sql deleted file mode 100644 index 9de0a139df..0000000000 --- a/drizzle-kit/tests/migrate/migrations/0000_little_blizzard.sql +++ /dev/null @@ -1,4 +0,0 @@ -CREATE TABLE `users` ( - `id` integer PRIMARY KEY NOT NULL, - `name` text NOT NULL -); diff --git a/drizzle-kit/tests/migrate/migrations/0001_nebulous_storm.sql b/drizzle-kit/tests/migrate/migrations/0001_nebulous_storm.sql deleted file mode 100644 index 4309a05c2f..0000000000 --- a/drizzle-kit/tests/migrate/migrations/0001_nebulous_storm.sql +++ /dev/null @@ -1,10 +0,0 @@ -PRAGMA foreign_keys=OFF;--> statement-breakpoint -CREATE TABLE `__new_users` ( - `id` integer, - `name` integer NOT NULL -); ---> statement-breakpoint -INSERT INTO `__new_users`("id", "name") SELECT "id", "name" FROM `users`;--> statement-breakpoint -DROP TABLE `users`;--> statement-breakpoint -ALTER TABLE `__new_users` RENAME TO `users`;--> statement-breakpoint -PRAGMA foreign_keys=ON; \ No newline at end of file diff --git a/drizzle-kit/tests/migrate/migrations/meta/0000_snapshot.json b/drizzle-kit/tests/migrate/migrations/meta/0000_snapshot.json deleted file mode 100644 index 599d02b915..0000000000 --- a/drizzle-kit/tests/migrate/migrations/meta/0000_snapshot.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "version": "6", - "dialect": "sqlite", - "id": "2bd46776-9e41-4a6c-b617-5c600bb176f2", - "prevId": "00000000-0000-0000-0000-000000000000", - "tables": { - "users": { - "name": "users", - "columns": { - "id": { - "name": "id", - "type": "integer", - "primaryKey": true, - "notNull": true, - "autoincrement": false - }, - "name": { - "name": "name", - "type": "text", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {} - } - }, - "enums": {}, - "_meta": { - "schemas": {}, - "tables": {}, - "columns": {} - }, - "internal": { - "indexes": {} - } -} \ No newline at end of file diff --git a/drizzle-kit/tests/migrate/migrations/meta/0001_snapshot.json b/drizzle-kit/tests/migrate/migrations/meta/0001_snapshot.json deleted file mode 100644 index e3b26ba140..0000000000 --- a/drizzle-kit/tests/migrate/migrations/meta/0001_snapshot.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "version": "6", - "dialect": "sqlite", - "id": "6c0ec455-42fd-47fd-a22c-4bb4551e1358", - "prevId": "2bd46776-9e41-4a6c-b617-5c600bb176f2", - "tables": { - "users": { - "name": "users", - "columns": { - "id": { - "name": "id", - "type": "integer", - "primaryKey": false, - "notNull": false, - "autoincrement": false - }, - "name": { - "name": "name", - "type": "integer", - "primaryKey": false, - "notNull": true, - "autoincrement": false - } - }, - "indexes": {}, - "foreignKeys": {}, - "compositePrimaryKeys": {}, - "uniqueConstraints": {} - } - }, - "enums": {}, - "_meta": { - "schemas": {}, - "tables": {}, - "columns": {} - }, - "internal": { - "indexes": {} - } -} \ No newline at end of file diff --git a/drizzle-kit/tests/migrate/migrations/meta/_journal.json b/drizzle-kit/tests/migrate/migrations/meta/_journal.json deleted file mode 100644 index c836eb194b..0000000000 --- a/drizzle-kit/tests/migrate/migrations/meta/_journal.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "version": "7", - "dialect": "sqlite", - "entries": [ - { - "idx": 0, - "version": "6", - "when": 1725358702427, - "tag": "0000_little_blizzard", - "breakpoints": true - }, - { - "idx": 1, - "version": "6", - "when": 1725358713033, - "tag": "0001_nebulous_storm", - "breakpoints": true - } - ] -} \ No newline at end of file diff --git a/drizzle-kit/tests/mssql/columns.test.ts b/drizzle-kit/tests/mssql/columns.test.ts new file mode 100644 index 0000000000..b28c44f18f --- /dev/null +++ b/drizzle-kit/tests/mssql/columns.test.ts @@ -0,0 +1,2403 @@ +import { sql } from 'drizzle-orm'; +import { bit, check, int, mssqlSchema, mssqlTable, primaryKey, text, unique, varchar } from 'drizzle-orm/mssql-core'; +import { defaultNameForPK } from 'src/dialects/mssql/grammar'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('add columns #1', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull().default('hey'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] ADD [name] text NOT NULL CONSTRAINT [users_name_default] DEFAULT ('hey');`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #2', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name'), + email: text('email'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [ + 'ALTER TABLE [users] ADD [name] text;', + 'ALTER TABLE [users] ADD [email] text;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #3', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id'), + name: varchar('name', { length: 100 }).primaryKey(), + email: text('email'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [ + 'ALTER TABLE [users] ADD [name] varchar(100) NOT NULL;', + 'ALTER TABLE [users] ADD [email] text;', + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #4. With default', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id'), + name: varchar('name', { length: 100 }).primaryKey(), + email: text('email').default('hey'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [ + 'ALTER TABLE [users] ADD [name] varchar(100) NOT NULL;', + `ALTER TABLE [users] ADD [email] text CONSTRAINT [users_email_default] DEFAULT ('hey');`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #5. With not null and with default', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id'), + name: varchar('name', { length: 100 }).primaryKey(), + email: text('email').notNull().default('hey'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [ + 'ALTER TABLE [users] ADD [name] varchar(100) NOT NULL;', + `ALTER TABLE [users] ADD [email] text NOT NULL CONSTRAINT [users_email_default] DEFAULT ('hey');`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column: change data type, add not null with default', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id]) VALUES (1), (2);`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }).notNull().default('1'), + }), + }; + const { sqlStatements: pst1, hints, losses, error } = await push({ + db, + to: to, + expectError: true, + ignoreSubsequent: true, + }); + + const st_01 = [ + `ALTER TABLE [users] ALTER COLUMN [name] varchar(200) NOT NULL;`, + `ALTER TABLE [users] ADD CONSTRAINT [users_name_default] DEFAULT ('1') FOR [name];`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(hints).toStrictEqual([]); + expect(error).not.toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('column conflict duplicate name #1', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id'), + name: varchar('name', { length: 100 }).primaryKey(), + email: text('name'), + }), + }; + + await push({ to: schema1, db, schemas: ['dbo'] }); + + await expect(diff(schema1, schema2, [])).rejects.toThrowError(); // duplicate names in columns + await expect(push({ to: schema2, db, schemas: ['dbo'] })).rejects.toThrowError(); // duplicate names in columns +}); + +test('alter column change name #1', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'dbo.users.name->dbo.users.name1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames: ['dbo.users.name->dbo.users.name1'] }); + + const st0 = [`EXEC sp_rename 'users.name', [name1], 'COLUMN';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column change name #2', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name1'), + email: text('email'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'dbo.users.name->dbo.users.name1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames: ['dbo.users.name->dbo.users.name1'] }); + + const st0 = [ + `EXEC sp_rename 'users.name', [name1], 'COLUMN';`, + 'ALTER TABLE [users] ADD [email] text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table rename column #1', async (t) => { + const newSchema = mssqlSchema('new_schema'); + const schema1 = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + }), + }; + + const schema2 = { + newSchema, + users: newSchema.table('users1', { + id: int('id1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'new_schema.users->new_schema.users1', + 'new_schema.users1.id->new_schema.users1.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'new_schema.users->new_schema.users1', + 'new_schema.users1.id->new_schema.users1.id1', + ], + }); + + const st0 = [ + `EXEC sp_rename 'new_schema.users', [users1];`, + `EXEC sp_rename 'new_schema.users1.id', [id1], 'COLUMN';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column #1', async (t) => { + const newSchema = mssqlSchema('new_schema'); + const schema1 = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + }), + }; + + const schema2 = { + newSchema, + users: newSchema.table('users', { + id: int('id1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'new_schema.users.id->new_schema.users.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'new_schema.users.id->new_schema.users.id1', + ], + }); + + const st0 = [ + `EXEC sp_rename 'new_schema.users.id', [id1], 'COLUMN';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column #2. Part of unique constraint', async (t) => { + const newSchema = mssqlSchema('new_schema'); + const schema1 = { + newSchema, + users: newSchema.table('users', { + id: int('id').unique(), + }), + }; + + const schema2 = { + newSchema, + users: newSchema.table('users', { + id: int('id1').unique(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'new_schema.users.id->new_schema.users.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'new_schema.users.id->new_schema.users.id1', + ], + }); + + const st0 = [ + `EXEC sp_rename 'new_schema.users.id', [id1], 'COLUMN';`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column #3. Part of check constraint', async (t) => { + const newSchema = mssqlSchema('new_schema'); + const schema1 = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + }, (t) => [check('hey', sql`${t.id} != 2`)]), + }; + + const schema2 = { + newSchema, + users: newSchema.table('users', { + id: int('id1'), + }, (t) => [check('hey', sql`${t.id} != 2`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'new_schema.users.id->new_schema.users.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints, error } = await push({ + db, + to: schema2, + renames: [ + 'new_schema.users.id->new_schema.users.id1', + ], + expectError: true, + ignoreSubsequent: true, + }); + + expect(st).toStrictEqual([ + `ALTER TABLE [new_schema].[users] DROP CONSTRAINT [hey];`, + `EXEC sp_rename 'new_schema.users.id', [id1], 'COLUMN';`, + `ALTER TABLE [new_schema].[users] ADD CONSTRAINT [hey] CHECK ([users].[id1] != 2);`, + ]); + // error expected + // since there will be changes in defintion + // push will skip alter definition and tries to rename column + expect(pst).toStrictEqual([ + `EXEC sp_rename 'new_schema.users.id', [id1], 'COLUMN';`, + ]); + expect(error).not.toBeNull(); + expect(phints).toStrictEqual([ + '· You are trying to rename column from id to id1, but it is not possible to rename a column if it is used in a check constraint on the table.' + + '\n' + + 'To rename the column, first drop the check constraint, then rename the column, and finally recreate the check constraint', + ]); +}); + +test('drop column #1. Part of check constraint', async (t) => { + const newSchema = mssqlSchema('new_schema'); + const schema1 = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + name: varchar('name'), + }, (t) => [check('hey', sql`${t.id} != 2`)]), + }; + + const schema2 = { + newSchema, + users: newSchema.table('users', { + name: varchar('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `ALTER TABLE [new_schema].[users] DROP CONSTRAINT [hey];`, + `ALTER TABLE [new_schema].[users] DROP COLUMN [id];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop column #2. Part of unique constraint', async (t) => { + const newSchema = mssqlSchema('new_schema'); + const schema1 = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + name: varchar('name'), + }, (t) => [unique('hey').on(t.id)]), + }; + + const schema2 = { + newSchema, + users: newSchema.table('users', { + name: varchar('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `ALTER TABLE [new_schema].[users] DROP CONSTRAINT [hey];`, + `ALTER TABLE [new_schema].[users] DROP COLUMN [id];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop column #3. Part of pk', async (t) => { + const newSchema = mssqlSchema('new_schema'); + const schema1 = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + name: varchar('name'), + }, (t) => [primaryKey({ name: 'hey', columns: [t.id] })]), + }; + + const schema2 = { + newSchema, + users: newSchema.table('users', { + name: varchar('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `ALTER TABLE [new_schema].[users] DROP CONSTRAINT [hey];`, + `ALTER TABLE [new_schema].[users] DROP COLUMN [id];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop column #4. Has default', async (t) => { + const newSchema = mssqlSchema('new_schema'); + const schema1 = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + name: varchar('name'), + }, (t) => [primaryKey({ name: 'hey', columns: [t.id] })]), + }; + + const schema2 = { + newSchema, + users: newSchema.table('users', { + name: varchar('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `ALTER TABLE [new_schema].[users] DROP CONSTRAINT [hey];`, + `ALTER TABLE [new_schema].[users] DROP COLUMN [id];`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('with composite pks #1', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id1: int('id1'), + id2: int('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const schema2 = { + users: mssqlTable('users', { + id1: int('id1'), + id2: int('id2'), + text: text('text'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = ['ALTER TABLE [users] ADD [text] text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column that is part of the pk. Name explicit', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id1: int('id1'), + id3: int('id3'), + }, (t) => [primaryKey({ columns: [t.id1, t.id3], name: 'compositePK' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'dbo.users.id2->dbo.users.id3', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: ['dbo.users.id2->dbo.users.id3'], + }); + + const st0 = [`EXEC sp_rename 'users.id2', [id3], 'COLUMN';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column and pk #2', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id1: int('id1'), + id3: int('id3'), + }, (t) => [primaryKey({ columns: [t.id1, t.id3] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users.id2->dbo.users.id3`, + `dbo.users.compositePK->dbo.users.${defaultNameForPK('users')}`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + `dbo.users.id2->dbo.users.id3`, + `dbo.users.compositePK->dbo.users.${defaultNameForPK('users')}`, + ], + }); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + `EXEC sp_rename 'users.id2', [id3], 'COLUMN';`, + `EXEC sp_rename 'compositePK', [users_pkey], 'OBJECT';`, + ]); + expect(pst).toStrictEqual([`EXEC sp_rename 'users.id2', [id3], 'COLUMN';`]); // pk name is preserved + expect(pst2).toStrictEqual([]); +}); + +test('rename column and pk #3', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id1: int('id1'), + id3: int('id3'), + }, (t) => [primaryKey({ columns: [t.id1, t.id3], name: 'compositePK1' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users.id2->dbo.users.id3`, + `dbo.users.compositePK->dbo.users.compositePK1`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users.id2->dbo.users.id3`, `dbo.users.compositePK->dbo.users.compositePK1`], + }); + + const { sqlStatements: pst1 } = await push({ db, to: schema2 }); + + const st0 = [ + `EXEC sp_rename 'users.id2', [id3], 'COLUMN';`, + `EXEC sp_rename 'compositePK', [compositePK1], 'OBJECT';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(pst1).toStrictEqual([]); +}); + +test('rename column that is part of pk', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id1: int('id1'), + id3: int('id3'), + }, (t) => [primaryKey({ columns: [t.id1, t.id3], name: 'compositePK' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users.id2->dbo.users.id3`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users.id2->dbo.users.id3`], + }); + + const { sqlStatements: pst1 } = await push({ db, to: schema2 }); + + const st0 = [ + `EXEC sp_rename 'users.id2', [id3], 'COLUMN';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(pst1).toStrictEqual([]); +}); + +test('rename table should not cause rename pk. Name is not explicit', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2] })], + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id1: int('id1'), + id2: int('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`], + }); + + const st0 = [`EXEC sp_rename 'users', [users2];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table should not cause rename pk. Name explicit', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePk' })], + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id1: int('id1'), + id2: int('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePk' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + ]); + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('move table to other schema + rename table. Should not cause rename pk. Name is not explicit', async (t) => { + const mySchema = mssqlSchema('my_schema'); + const schema1 = { + mySchema, + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2] })], + ), + }; + + const schema2 = { + mySchema, + users: mySchema.table('users2', { + id1: int('id1'), + id2: int('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->my_schema.users2`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->my_schema.users2`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `ALTER SCHEMA [my_schema] TRANSFER [dbo].[users2];\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table should not cause rename fk. Name is not explicit. #1', async (t) => { + const company = mssqlTable( + 'company', + { + id: int('id').primaryKey(), + }, + ); + const schema1 = { + company, + users: mssqlTable( + 'users', + { + id: int('id'), + companyId: int('company_id').references(() => company.id), + }, + ), + }; + + const renamedCompany = mssqlTable( + 'company2', + { + id: int('id').primaryKey(), + }, + ); + const schema2 = { + company: renamedCompany, + users: mssqlTable( + 'users', + { + id: int('id'), + companyId: int('company_id').references(() => renamedCompany.id), + }, + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.company->dbo.company2`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.company->dbo.company2`], + }); + + const st0 = [ + `EXEC sp_rename 'company', [company2];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table should not cause rename fk. Name is not explicit. #2', async (t) => { + const company = mssqlTable( + 'company', + { + id: int('id').references(() => users.id), + }, + ); + const users = mssqlTable( + 'users', + { + id: int('id').primaryKey(), + }, + ); + const schema1 = { + company, + users, + }; + + const renamedCompany = mssqlTable( + 'company2', + { + id: int('id').references(() => users.id), + }, + ); + const schema2 = { + company: renamedCompany, + users, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.company->dbo.company2`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.company->dbo.company2`], + }); + + const st0 = [ + `EXEC sp_rename 'company', [company2];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('move table to other schema + rename table. Should not cause rename fk', async (t) => { + const mySchema = mssqlSchema('my_schema'); + + const company = mssqlTable( + 'company', + { + id: int('id').references(() => users.id), + }, + ); + const users = mssqlTable( + 'users', + { + id: int('id').primaryKey(), + }, + ); + const schema1 = { + mySchema, + company, + users, + }; + + const renamedCompany = mySchema.table( + 'company2', + { + id: int('id').references(() => users.id), + }, + ); + const schema2 = { + mySchema, + company: renamedCompany, + users, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.company->my_schema.company2`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.company->my_schema.company2`], + }); + + const st0 = [ + `EXEC sp_rename 'company', [company2];`, + `ALTER SCHEMA [my_schema] TRANSFER [dbo].[company2];\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('varchar and text default values escape single quotes', async () => { + const schema1 = { + table: mssqlTable('table', { + id: int('id').primaryKey(), + }), + }; + + const schema2 = { + table: mssqlTable('table', { + id: int('id').primaryKey(), + text: text('text').default("escape's quotes"), + varchar: varchar('varchar', { length: 100 }).default("escape's quotes"), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `ALTER TABLE [table] ADD [text] text CONSTRAINT [table_text_default] DEFAULT ('escape''s quotes');`, + `ALTER TABLE [table] ADD [varchar] varchar(100) CONSTRAINT [table_varchar_default] DEFAULT ('escape''s quotes');`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns with defaults', async () => { + const schema1 = { + table: mssqlTable('table', { + id: int().primaryKey(), + }), + }; + + const schema2 = { + table: mssqlTable('table', { + id: int().primaryKey(), + text1: text().default(''), + text2: text().default('text'), + int1: int().default(10), + int2: int().default(0), + int3: int().default(-10), + bool1: bit().default(true), + bool2: bit().default(false), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `ALTER TABLE [table] ADD [text1] text CONSTRAINT [table_text1_default] DEFAULT ('');`, + `ALTER TABLE [table] ADD [text2] text CONSTRAINT [table_text2_default] DEFAULT ('text');`, + `ALTER TABLE [table] ADD [int1] int CONSTRAINT [table_int1_default] DEFAULT ((10));`, + `ALTER TABLE [table] ADD [int2] int CONSTRAINT [table_int2_default] DEFAULT ((0));`, + `ALTER TABLE [table] ADD [int3] int CONSTRAINT [table_int3_default] DEFAULT ((-10));`, + `ALTER TABLE [table] ADD [bool1] bit CONSTRAINT [table_bool1_default] DEFAULT ((1));`, + `ALTER TABLE [table] ADD [bool2] bit CONSTRAINT [table_bool2_default] DEFAULT ((0));`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column should not cause rename unique. Name is not explicit', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [unique().on(t.id1)], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id3: int('id3'), // renamed + id2: int('id2'), + }, (t) => [unique().on(t.id3)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users.id1->dbo.users.id3`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users.id1->dbo.users.id3`], + }); + + const st0 = [`EXEC sp_rename 'users.id1', [id3], 'COLUMN';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column should not cause rename default. Name is not explicit', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1').default(1), + id2: int('id2'), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id3: int('id3').default(1), // renamed + id2: int('id2'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users.id1->dbo.users.id3`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users.id1->dbo.users.id3`], + }); + + const st0 = [`EXEC sp_rename 'users.id1', [id3], 'COLUMN';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column should not cause rename fk. Name is not explicit #1', async (t) => { + const table = mssqlTable('table', { + id: int().primaryKey(), + }); + const schema1 = { + table, + users: mssqlTable( + 'users', + { + id1: int('id1').references(() => table.id), + id2: int('id2'), + }, + ), + }; + + const schema2 = { + table, + users: mssqlTable('users', { + id3: int('id3').references(() => table.id), // renamed + id2: int('id2'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users.id1->dbo.users.id3`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users.id1->dbo.users.id3`], + }); + + const st0 = [ + `EXEC sp_rename 'users.id1', [id3], 'COLUMN';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column should not cause rename unique. Name is explicit #1', async (t) => { + const table = mssqlTable('table', { + id: int(), + }); + const schema1 = { + table, + users: mssqlTable( + 'users', + { + id1: int('id1').unique('unique_name'), + id2: int('id2'), + }, + ), + }; + + const schema2 = { + table, + users: mssqlTable('users', { + id3: int('id3').unique('unique_name'), // renamed + id2: int('id2'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users.id1->dbo.users.id3`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users.id1->dbo.users.id3`], + }); + + const st0 = [`EXEC sp_rename 'users.id1', [id3], 'COLUMN';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #1. Rename table + rename column. Add default', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1').default(1), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'dbo.users->dbo.users2', + 'dbo.users2.id->dbo.users2.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: ['dbo.users->dbo.users2', 'dbo.users2.id->dbo.users2.id1'], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int CONSTRAINT [users2_id1_default] DEFAULT ((1));`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #6. Part of unique constraint', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').unique().identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').unique(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_id_key];', + `EXEC sp_rename 'users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users] ADD [id] int;`, + `INSERT INTO [users] ([id]) SELECT [__old_id] FROM [users];`, + `ALTER TABLE [users] DROP COLUMN [__old_id];`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_id_key] UNIQUE([id]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #7. Rename table. Part of unique constraint', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').unique().identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id').unique(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [`dbo.users->dbo.users2`]); + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + 'ALTER TABLE [users2] DROP CONSTRAINT [users_id_key];', + `EXEC sp_rename 'users2.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id] int;`, + `INSERT INTO [users2] ([id]) SELECT [__old_id] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id];`, + 'ALTER TABLE [users2] ADD CONSTRAINT [users_id_key] UNIQUE([id]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #8. Rename table + rename column. Part of unique constraint', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').unique().identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1').unique(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + 'ALTER TABLE [users2] DROP CONSTRAINT [users_id_key];', + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + 'ALTER TABLE [users2] ADD CONSTRAINT [users_id_key] UNIQUE([id1]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #9. Rename table + rename column. Add unique', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1').unique(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + 'ALTER TABLE [users2] ADD CONSTRAINT [users2_id1_key] UNIQUE([id1]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #9. Rename table + rename column. Drop unique', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').unique().identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + 'ALTER TABLE [users2] DROP CONSTRAINT [users_id_key];', + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #10. Table has checks', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + }, + (t) => [check('hello_world', sql`${t.id} != 1`)], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id'), + }, (t) => [check('hello_world', sql`${t.id} != 1`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [hello_world];', + `EXEC sp_rename 'users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users] ADD [id] int;`, + `INSERT INTO [users] ([id]) SELECT [__old_id] FROM [users];`, + `ALTER TABLE [users] DROP COLUMN [__old_id];`, + 'ALTER TABLE [users] ADD CONSTRAINT [hello_world] CHECK ([users].[id] != 1);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// Still expect recreate here. We could not know if the column is in check definition +test('drop identity from existing column #11. Table has checks. Column is not in check', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + name: varchar(), + }, + (t) => [check('hello_world', sql`${t.name} != 'Alex'`)], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id'), + name: varchar(), + }, (t) => [check('hello_world', sql`${t.name} != 'Alex'`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [hello_world];', + `EXEC sp_rename 'users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users] ADD [id] int;`, + `INSERT INTO [users] ([id]) SELECT [__old_id] FROM [users];`, + `ALTER TABLE [users] DROP COLUMN [__old_id];`, + "ALTER TABLE [users] ADD CONSTRAINT [hello_world] CHECK ([users].[name] != 'Alex');", + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #12. Rename table. Table has checks', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + name: varchar(), + }, + (t) => [check('hello_world', sql`${t.name} != 'Alex'`)], + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id'), + name: varchar(), + }, (t) => [check('hello_world', sql`${t.name} != 'Alex'`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [`dbo.users->dbo.users2`]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + 'ALTER TABLE [users2] DROP CONSTRAINT [hello_world];', + `EXEC sp_rename 'users2.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id] int;`, + `INSERT INTO [users2] ([id]) SELECT [__old_id] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id];`, + "ALTER TABLE [users2] ADD CONSTRAINT [hello_world] CHECK ([users2].[name] != 'Alex');", + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #13. Rename table + Rename column. Add check', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + name: varchar(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1'), + name: varchar(), + }, (t) => [check('hello_world', sql`${t.name} != 'Alex'`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + "ALTER TABLE [users2] ADD CONSTRAINT [hello_world] CHECK ([users2].[name] != 'Alex');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #14. Rename table + Rename column. Drop check', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + name: varchar(), + }, + (t) => [check('hello_world', sql`${t.name} != 'Alex'`)], + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1'), + name: varchar(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + `ALTER TABLE [users2] DROP CONSTRAINT [hello_world];`, + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #15. Rename table + Rename column. Table has checks', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + name: varchar(), + }, + (t) => [check('hello_world', sql`${t.name} != 'Alex'`)], + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1'), + name: varchar(), + }, (t) => [check('hello_world', sql`${t.name} != 'Alex'`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + `ALTER TABLE [users2] DROP CONSTRAINT [hello_world];`, + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + "ALTER TABLE [users2] ADD CONSTRAINT [hello_world] CHECK ([users2].[name] != 'Alex');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #16. Part of fk', async (t) => { + const users = mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ); + const schema1 = { + ref: mssqlTable('ref', { + age: int().unique().references(() => users.id), + }), + users, + }; + + const droppedIdentity = mssqlTable( + 'users', + { + id: int('id').primaryKey(), + }, + ); + const schema2 = { + ref: mssqlTable('ref', { + age: int().unique().references(() => droppedIdentity.id), + }), + users: droppedIdentity, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [ref] DROP CONSTRAINT [ref_age_users_id_fk];\n', + `ALTER TABLE [users] DROP CONSTRAINT [users_pkey];`, + `EXEC sp_rename 'users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users] ADD [id] int NOT NULL;`, + `INSERT INTO [users] ([id]) SELECT [__old_id] FROM [users];`, + `ALTER TABLE [users] DROP COLUMN [__old_id];`, + `ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([id]);`, + `ALTER TABLE [ref] ADD CONSTRAINT [ref_age_users_id_fk] FOREIGN KEY ([age]) REFERENCES [users]([id]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// This is really strange case. Do not think this is a real business case +// But this could be created in mssql so i checked that +// (column with identity references to other column) +test('drop identity from existing column #17. Part of fk', async (t) => { + const users = mssqlTable( + 'users', + { + id: int('id').primaryKey(), + name: varchar(), + }, + ); + const schema1 = { + users2: mssqlTable('users2', { + id: int('id').identity().references(() => users.id), + name: varchar(), + }), + users, + }; + + const schema2 = { + users2: mssqlTable('users2', { + id: int('id').references(() => users.id), // dropped identity + name: varchar(), + }), + users, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [users2] DROP CONSTRAINT [users2_id_users_id_fk];\n', + `EXEC sp_rename 'users2.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id] int;`, + `INSERT INTO [users2] ([id]) SELECT [__old_id] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id];`, + `ALTER TABLE [users2] ADD CONSTRAINT [users2_id_users_id_fk] FOREIGN KEY ([id]) REFERENCES [users]([id]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #18. Rename Table. Part of fk', async (t) => { + const users = mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ); + const schema1 = { + ref: mssqlTable('ref', { + age: int().unique().references(() => users.id), + }), + users, + }; + + const droppedIdentity = mssqlTable( + 'new_users', + { + id: int('id').primaryKey(), + }, + ); + const schema2 = { + ref: mssqlTable('ref', { + age: int().unique().references(() => droppedIdentity.id), + }), + users: droppedIdentity, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, ['dbo.users->dbo.new_users']); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: ['dbo.users->dbo.new_users'], + }); + + const st0 = [ + `EXEC sp_rename 'users', [new_users];`, + 'ALTER TABLE [ref] DROP CONSTRAINT [ref_age_users_id_fk];\n', + `ALTER TABLE [new_users] DROP CONSTRAINT [users_pkey];`, + `EXEC sp_rename 'new_users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [new_users] ADD [id] int NOT NULL;`, + `INSERT INTO [new_users] ([id]) SELECT [__old_id] FROM [new_users];`, + `ALTER TABLE [new_users] DROP COLUMN [__old_id];`, + `ALTER TABLE [new_users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([id]);`, + `ALTER TABLE [ref] ADD CONSTRAINT [ref_age_users_id_fk] FOREIGN KEY ([age]) REFERENCES [new_users]([id]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #19. Rename Table + Rename column. Part of fk', async (t) => { + const users = mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ); + const schema1 = { + ref: mssqlTable('ref', { + age: int().unique().references(() => users.id), + }), + users, + }; + + const droppedIdentity = mssqlTable( + 'new_users', + { + id: int('id1').primaryKey(), + }, + ); + const schema2 = { + ref: mssqlTable('ref', { + age: int().unique().references(() => droppedIdentity.id), + }), + users: droppedIdentity, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.new_users`, + `dbo.new_users.id->dbo.new_users.id1`, + ]); + + await push({ + db, + to: schema1, + }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.new_users`, `dbo.new_users.id->dbo.new_users.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [new_users];`, + `EXEC sp_rename 'new_users.id', [id1], 'COLUMN';`, + 'ALTER TABLE [ref] DROP CONSTRAINT [ref_age_users_id_fk];\n', + `ALTER TABLE [new_users] DROP CONSTRAINT [users_pkey];`, + `EXEC sp_rename 'new_users.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [new_users] ADD [id1] int NOT NULL;`, + `INSERT INTO [new_users] ([id1]) SELECT [__old_id1] FROM [new_users];`, + `ALTER TABLE [new_users] DROP COLUMN [__old_id1];`, + `ALTER TABLE [new_users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([id1]);`, + `ALTER TABLE [ref] ADD CONSTRAINT [ref_age_users_id_fk] FOREIGN KEY ([age]) REFERENCES [new_users]([id1]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #20. Rename Table + Rename column. Add fk', async (t) => { + const users = mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ); + const schema1 = { + ref: mssqlTable('ref', { + age: int().unique(), + }), + users, + }; + + const droppedIdentity = mssqlTable( + 'new_users', + { + id: int('id1').primaryKey(), + }, + ); + const schema2 = { + ref: mssqlTable('ref', { + age: int().unique().references(() => droppedIdentity.id), + }), + users: droppedIdentity, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'dbo.users->dbo.new_users', + 'dbo.new_users.id->dbo.new_users.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: ['dbo.users->dbo.new_users', 'dbo.new_users.id->dbo.new_users.id1'], + }); + + const st0 = [ + `EXEC sp_rename 'users', [new_users];`, + `EXEC sp_rename 'new_users.id', [id1], 'COLUMN';`, + `ALTER TABLE [new_users] DROP CONSTRAINT [users_pkey];`, + `EXEC sp_rename 'new_users.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [new_users] ADD [id1] int NOT NULL;`, + `INSERT INTO [new_users] ([id1]) SELECT [__old_id1] FROM [new_users];`, + `ALTER TABLE [new_users] DROP COLUMN [__old_id1];`, + `ALTER TABLE [new_users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([id1]);`, + `ALTER TABLE [ref] ADD CONSTRAINT [ref_age_new_users_id1_fk] FOREIGN KEY ([age]) REFERENCES [new_users]([id1]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #21. Rename Table + Rename column. Drop fk', async (t) => { + const users = mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ); + const schema1 = { + ref: mssqlTable('ref', { + age: int().unique().references(() => users.id), + }), + users, + }; + + const droppedIdentity = mssqlTable( + 'new_users', + { + id: int('id1').primaryKey(), + }, + ); + const schema2 = { + ref: mssqlTable('ref', { + age: int().unique(), + }), + users: droppedIdentity, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'dbo.users->dbo.new_users', + 'dbo.new_users.id->dbo.new_users.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: ['dbo.users->dbo.new_users', 'dbo.new_users.id->dbo.new_users.id1'], + }); + + const st0 = [ + `EXEC sp_rename 'users', [new_users];`, + `EXEC sp_rename 'new_users.id', [id1], 'COLUMN';`, + `ALTER TABLE [ref] DROP CONSTRAINT [ref_age_users_id_fk];\n`, + `ALTER TABLE [new_users] DROP CONSTRAINT [users_pkey];`, + `EXEC sp_rename 'new_users.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [new_users] ADD [id1] int NOT NULL;`, + `INSERT INTO [new_users] ([id1]) SELECT [__old_id1] FROM [new_users];`, + `ALTER TABLE [new_users] DROP COLUMN [__old_id1];`, + `ALTER TABLE [new_users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([id1]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #22. Part of pk constraint', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_pkey];', + `EXEC sp_rename 'users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users] ADD [id] int NOT NULL;`, + `INSERT INTO [users] ([id]) SELECT [__old_id] FROM [users];`, + `ALTER TABLE [users] DROP COLUMN [__old_id];`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([id]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #23. Rename table. Part of pk constraint', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [`dbo.users->dbo.users2`]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + 'ALTER TABLE [users2] DROP CONSTRAINT [users_pkey];', + `EXEC sp_rename 'users2.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id] int NOT NULL;`, + `INSERT INTO [users2] ([id]) SELECT [__old_id] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id];`, + 'ALTER TABLE [users2] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([id]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #24. Rename table + rename column. Part of pk constraint', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + 'ALTER TABLE [users2] DROP CONSTRAINT [users_pkey];', + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int NOT NULL;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + 'ALTER TABLE [users2] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([id1]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #25. Rename table + rename column. Add pk', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int NOT NULL;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + 'ALTER TABLE [users2] ADD CONSTRAINT [users2_pkey] PRIMARY KEY ([id1]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #26. Rename table + rename column. Drop pk', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey().identity(), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users->dbo.users2`, + `dbo.users2.id->dbo.users2.id1`, + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users->dbo.users2`, `dbo.users2.id->dbo.users2.id1`], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id1], 'COLUMN';`, + 'ALTER TABLE [users2] DROP CONSTRAINT [users_pkey];', + `EXEC sp_rename 'users2.id1', [__old_id1], 'COLUMN';`, + `ALTER TABLE [users2] ADD [id1] int;`, + `INSERT INTO [users2] ([id1]) SELECT [__old_id1] FROM [users2];`, + `ALTER TABLE [users2] DROP COLUMN [__old_id1];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from existing column #27. Add not null and add default', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').identity(), + name: varchar({ length: 100 }), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').default(1).notNull(), + name: varchar({ length: 100 }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + + await db.query(`INSERT INTO [users] ([name]) VALUES ('Alex');`); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `EXEC sp_rename 'users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users] ADD [id] int NOT NULL CONSTRAINT [users_id_default] DEFAULT ((1));`, + `INSERT INTO [users] ([id]) SELECT [__old_id] FROM [users];`, + `ALTER TABLE [users] DROP COLUMN [__old_id];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// TODO add more 'create identity' tests +test('add identity to existing column', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id'), + }, + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').identity(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `EXEC sp_rename 'users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users] ADD [id] int IDENTITY(1, 1);`, + `ALTER TABLE [users] DROP COLUMN [__old_id];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column change data type', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [`ALTER TABLE [users] ALTER COLUMN [name] varchar;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column change data type + add not null', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name').notNull(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [`ALTER TABLE [users] ALTER COLUMN [name] varchar NOT NULL;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column change data type + drop not null', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [`ALTER TABLE [users] ALTER COLUMN [name] varchar;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/mssql/constraints.test.ts b/drizzle-kit/tests/mssql/constraints.test.ts new file mode 100644 index 0000000000..56d5841f4b --- /dev/null +++ b/drizzle-kit/tests/mssql/constraints.test.ts @@ -0,0 +1,2469 @@ +import { sql } from 'drizzle-orm'; +import { + AnyMsSqlColumn, + check, + foreignKey, + index, + int, + mssqlSchema, + mssqlTable, + primaryKey, + unique, + varchar, +} from 'drizzle-orm/mssql-core'; +import { defaultNameForPK } from 'src/dialects/mssql/grammar'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('drop primary key', async () => { + const schema1 = { + table: mssqlTable('table', { + id: int().primaryKey(), + }), + }; + + const schema2 = { + table: mssqlTable('table', { + id: int(), + }), + }; + + const { sqlStatements: st1 } = await diff(schema1, schema2, []); + + await push({ + db, + to: schema1, + }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [table] DROP CONSTRAINT [table_pkey];', + 'ALTER TABLE [table] ALTER COLUMN [id] int;', + ]; + + expect(st1).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop unique', async () => { + const schema1 = { + table: mssqlTable('table', { + id: int().unique(), + }), + }; + + const schema2 = { + table: mssqlTable('table', { + id: int(), + }), + }; + + const { sqlStatements: st1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + + const expSt0 = [ + `CREATE TABLE [table] ( +\t[id] int, +\tCONSTRAINT [table_id_key] UNIQUE([id]) +);\n`, + ]; + expect(st1).toStrictEqual(expSt0); + expect(pst1).toStrictEqual(expSt0); + + const { sqlStatements: st2 } = await diff(schema1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expSt1 = [ + 'ALTER TABLE [table] DROP CONSTRAINT [table_id_key];', + ]; + expect(st2).toStrictEqual(expSt1); + expect(pst2).toStrictEqual(expSt1); +}); + +test('add fk', async () => { + const table = mssqlTable('table', { + id: int().primaryKey(), + }); + const table1 = mssqlTable('table1', { + id: int(), + }); + const schema1 = { + table, + table1, + }; + + const table1WithReference = mssqlTable('table1', { + id: int().references(() => table.id), + }); + const schema2 = { + table, + table1: table1WithReference, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + await push({ + db, + to: schema1, + }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [table1] ADD CONSTRAINT [table1_id_table_id_fk] FOREIGN KEY ([id]) REFERENCES [table]([id]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop fk', async () => { + const table = mssqlTable('table', { + id: int().primaryKey(), + }); + const table1WithReference = mssqlTable('table1', { + id: int().references(() => table.id), + }); + + const schema1 = { + table, + table1: table1WithReference, + }; + + const table1 = mssqlTable('table1', { + id: int(), + }); + const schema2 = { + table, + table1, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + await push({ + db, + to: schema1, + }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [table1] DROP CONSTRAINT [table1_id_table_id_fk];\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename pk #1', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id1: int('id1'), + id2: int('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id1: int('id1'), + id2: int('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.users.compositePK->dbo.users.${defaultNameForPK('users')}`, + ]); + + await push({ + db, + to: schema1, + }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.users.compositePK->dbo.users.${defaultNameForPK('users')}`], + }); + + const st0 = [`EXEC sp_rename 'compositePK', [users_pkey], 'OBJECT';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // push will not change name if changed to !explicit +}); + +test('add unique', async () => { + const schema1 = { + table: mssqlTable('table', { + id: int(), + }), + }; + + const schema2 = { + table: mssqlTable('table', { + id: int().unique(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + await push({ + db, + to: schema1, + }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE [table] ADD CONSTRAINT [table_id_key] UNIQUE([id]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename unique', async (t) => { + const schema1 = { + table: mssqlTable('table', { + id: int().unique('old_name'), + }), + }; + + const schema2 = { + table: mssqlTable('table', { + id: int().unique('new_name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + `dbo.table.old_name->dbo.table.new_name`, + ]); + await push({ + db, + to: schema1, + }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [`dbo.table.old_name->dbo.table.new_name`], + }); + + const st0 = [`EXEC sp_rename 'old_name', [new_name], 'OBJECT';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #1', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + `ALTER TABLE [users] ADD CONSTRAINT [users_name_key] UNIQUE([name]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #2', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).unique('unique_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + `ALTER TABLE [users] ADD CONSTRAINT [unique_name] UNIQUE([name]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #3', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [unique('unique_name').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + `ALTER TABLE [users] ADD CONSTRAINT [unique_name] UNIQUE([name]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #4', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + `ALTER TABLE [users] DROP CONSTRAINT [unique_name];`, + `ALTER TABLE [users] ADD CONSTRAINT [unique_name2] UNIQUE([name]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #5', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'dbo.users.unique_name->dbo.users.unique_name2', + ]); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + renames: [ + 'dbo.users.unique_name->dbo.users.unique_name2', + ], + }); + + const st0 = [ + "EXEC sp_rename 'unique_name', [unique_name2], 'OBJECT';", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #6', async () => { + const mySchema = mssqlSchema('my_schema'); + const from = { + mySchema, + users: mySchema.table('users', { + name: varchar({ length: 255 }), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + mySchema, + users: mySchema.table('users', { + name: varchar({ length: 255 }), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'my_schema.users.unique_name->my_schema.users.unique_name2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'my_schema.users.unique_name->my_schema.users.unique_name2', + ], + }); + + const st0 = [ + "EXEC sp_rename 'my_schema.unique_name', [unique_name2], 'OBJECT';", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #7', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + email: varchar({ length: 255 }).unique(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + email2: varchar({ length: 255 }).unique(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'dbo.users.email->dbo.users.email2', + 'dbo.users.unique_name->dbo.users.unique_name2', + ]); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + renames: [ + 'dbo.users.email->dbo.users.email2', + 'dbo.users.unique_name->dbo.users.unique_name2', + ], + }); + + const st0 = [ + `EXEC sp_rename 'users.email', [email2], 'COLUMN';`, + `EXEC sp_rename 'unique_name', [unique_name2], 'OBJECT';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* rename table */ +test('unique #8', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + email: varchar({ length: 255 }).unique(), + }), + }; + const to = { + users: mssqlTable('users2', { + name: varchar({ length: 255 }), + email: varchar({ length: 255 }).unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'dbo.users->dbo.users2', + ]); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + renames: [ + 'dbo.users->dbo.users2', + ], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #9', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + email: varchar({ length: 255 }).unique(), + }), + }; + const sch2 = { + users: mssqlTable('users2', { + name: varchar({ length: 255 }), + email2: varchar({ length: 255 }).unique('users_email_key'), + }), + }; + + const sch3 = { + users: mssqlTable('users2', { + name: varchar({ length: 255 }), + email2: varchar({ length: 255 }), + }), + }; + + // sch1 -> sch2 + const { sqlStatements: st1, next: n1 } = await diff(sch1, sch2, [ + 'dbo.users->dbo.users2', + 'dbo.users2.email->dbo.users2.email2', + ]); + + await push({ db, to: sch1, schemas: ['dbo'] }); + const { sqlStatements: pst1 } = await push({ + db, + to: sch2, + schemas: ['dbo'], + renames: [ + 'dbo.users->dbo.users2', + 'dbo.users2.email->dbo.users2.email2', + ], + }); + + const st10 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.email', [email2], 'COLUMN';`, + ]; + expect(st1).toStrictEqual(st10); + expect(pst1).toStrictEqual(st10); + + // sch2 -> sch3 + const { sqlStatements: st2 } = await diff(n1, sch3, []); + + const { sqlStatements: pst2 } = await push({ + db, + to: sch3, + schemas: ['dbo'], + }); + + const st20 = [ + 'ALTER TABLE [users2] DROP CONSTRAINT [users_email_key];', + ]; + expect(st2).toStrictEqual(st20); + expect(pst2).toStrictEqual(st20); +}); + +test('unique multistep #1', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const e1 = ['CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n']; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }).unique(), + }), + }; + + const renames = ['dbo.users->dbo.users2', 'dbo.users2.name->dbo.users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + + const e3 = ['ALTER TABLE [users2] DROP CONSTRAINT [users_name_key];']; + + expect(pst4).toStrictEqual(e3); + expect(st4).toStrictEqual(e3); +}); + +test('unique multistep #2', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + expect(st1).toStrictEqual([ + 'CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n', + ]); + + const sch2 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }).unique(), + }), + }; + + const r1 = [ + 'dbo.users->dbo.users2', + 'dbo.users2.name->dbo.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, r1); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames: r1, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + expect(pst2).toStrictEqual(e2); + expect(st2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }, (t) => [unique().on(t.name)]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + expect(st4).toStrictEqual([]); + expect(pst4).toStrictEqual([]); + + const sch4 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4, schemas: ['dbo'] }); + expect(st5).toStrictEqual(['ALTER TABLE [users2] DROP CONSTRAINT [users_name_key];']); + expect(pst5).toStrictEqual(['ALTER TABLE [users2] DROP CONSTRAINT [users_name_key];']); +}); + +test('unique multistep #3', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + expect(st1).toStrictEqual([ + 'CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n', + ]); + + const sch2 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }).unique(), + }), + }; + + const renames = ['dbo.users->dbo.users2', 'dbo.users2.name->dbo.users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }, (t) => [unique('name_unique').on(t.name)]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + + const e4 = [ + 'ALTER TABLE [users2] DROP CONSTRAINT [users_name_key];', + 'ALTER TABLE [users2] ADD CONSTRAINT [name_unique] UNIQUE([name2]);', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4, schemas: ['dbo'] }); + expect(st5).toStrictEqual(['ALTER TABLE [users2] DROP CONSTRAINT [name_unique];']); + expect(pst5).toStrictEqual(['ALTER TABLE [users2] DROP CONSTRAINT [name_unique];']); +}); + +test('pk #1', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).notNull(), + }), + }; + + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey().notNull(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + expect(st).toStrictEqual(['ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);']); + expect(pst).toStrictEqual(['ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);']); +}); + +test('pk #2', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #3', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #4', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #5', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const st0 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_pkey];', + 'ALTER TABLE [users] ALTER COLUMN [name] varchar(255);', + ]; + expect(sqlStatements).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #6', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] ALTER COLUMN [name] varchar(255) NOT NULL;`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);', + ]; + expect(sqlStatements).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk extra #1', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] ALTER COLUMN [name] varchar(255) NOT NULL;`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);', + ]; + expect(st1).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + // drop pk + // expect to drop not null because current state is without not null + // expect to drop pk + const to2 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + + const { sqlStatements: st2 } = await diff(n1, to2, []); + const { sqlStatements: pst2 } = await push({ db, to: to2 }); + + const st01 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_pkey];', + `ALTER TABLE [users] ALTER COLUMN [name] varchar(255);`, + ]; + expect(st2).toStrictEqual(st01); + expect(pst2).toStrictEqual(st01); +}); + +test('pk extra #2', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] ALTER COLUMN [name] varchar(255) NOT NULL;`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);', + ]; + expect(st1).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + // drop pk but left not nutt + // expect to drop pk only + const to2 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).notNull(), + }), + }; + + const { sqlStatements: st2, next: n2 } = await diff(n1, to2, []); + const { sqlStatements: pst2 } = await push({ db, to: to2 }); + + const st01 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_pkey];', + ]; + expect(st2).toStrictEqual(st01); + expect(pst2).toStrictEqual(st01); +}); + +test('pk extra #3', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements: st1, next: n1 } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] ALTER COLUMN [name] varchar(255) NOT NULL;`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);', + ]; + expect(st1).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + // drop pk + // expect to drop not null because current state is without not null + // expect to drop pk + const to2 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + + const { sqlStatements: st2, next: n2 } = await diff(n1, to2, []); + const { sqlStatements: pst2 } = await push({ db, to: to2 }); + + const st01 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_pkey];', + `ALTER TABLE [users] ALTER COLUMN [name] varchar(255);`, + ]; + expect(st2).toStrictEqual(st01); + expect(pst2).toStrictEqual(st01); +}); + +test('pk extra #4', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements: st1, next: n1 } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] ALTER COLUMN [name] varchar(255) NOT NULL;`, + 'ALTER TABLE [users] ADD CONSTRAINT [users_pkey] PRIMARY KEY ([name]);', + ]; + expect(st1).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + // drop pk but left not nutt + // expect to drop pk only + const to2 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).notNull(), + }), + }; + + const { sqlStatements: st2, next: n2 } = await diff(n1, to2, []); + const { sqlStatements: pst2 } = await push({ db, to: to2 }); + + const st01 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_pkey];', + ]; + expect(st2).toStrictEqual(st01); + expect(pst2).toStrictEqual(st01); +}); + +test('pk multistep #1', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const res1 = ['CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_pkey] PRIMARY KEY([name])\n);\n']; + expect(st1).toStrictEqual(res1); + expect(pst1).toStrictEqual(res1); + + const sch2 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }).primaryKey(), + }), + }; + + const renames = [ + 'dbo.users->dbo.users2', + 'dbo.users2.name->dbo.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + + const st04 = [ + 'ALTER TABLE [users2] DROP CONSTRAINT [users_pkey];', + `ALTER TABLE [users2] ALTER COLUMN [name2] varchar(255);`, + ]; + expect(st4).toStrictEqual(st04); + expect(pst4).toStrictEqual(st04); +}); + +test('pk multistep #2', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const res1 = ['CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_pkey] PRIMARY KEY([name])\n);\n']; + expect(st1).toStrictEqual(res1); + expect(pst1).toStrictEqual(res1); + + const sch2 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const renames = [ + 'dbo.users->dbo.users2', + 'dbo.users2.name->dbo.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }, (t) => [primaryKey({ name: 'users2_pk', columns: [t.name] })]), + }; + + const renames2 = ['dbo.users2.users_pkey->dbo.users2.users2_pk']; + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, renames2); + const { sqlStatements: pst4 } = await push({ db, to: sch3, renames: renames2, schemas: ['dbo'] }); + + expect(st4).toStrictEqual([`EXEC sp_rename 'users_pkey', [users2_pk], 'OBJECT';`]); + expect(pst4).toStrictEqual([`EXEC sp_rename 'users_pkey', [users2_pk], 'OBJECT';`]); + + const sch4 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4, schemas: ['dbo'] }); + + const st05 = [ + 'ALTER TABLE [users2] DROP CONSTRAINT [users2_pk];', + `ALTER TABLE [users2] ALTER COLUMN [name2] varchar(255);`, + ]; + expect(st5).toStrictEqual(st05); + expect(pst5).toStrictEqual(st05); +}); + +test('pk multistep #3', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + expect(st1).toStrictEqual([ + 'CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_pkey] PRIMARY KEY([name])\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE [users] (\n\t[name] varchar(255),\n\tCONSTRAINT [users_pkey] PRIMARY KEY([name])\n);\n', + ]); + + const sch2 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const renames = [ + 'dbo.users->dbo.users2', + 'dbo.users2.name->dbo.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }, (t) => [primaryKey({ name: 'users2_pk', columns: [t.name] })]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + + const e4 = [ + 'ALTER TABLE [users2] DROP CONSTRAINT [users_pkey];', + 'ALTER TABLE [users2] ADD CONSTRAINT [users2_pk] PRIMARY KEY ([name2]);', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4, schemas: ['dbo'] }); + + const st05 = [ + 'ALTER TABLE [users2] DROP CONSTRAINT [users2_pk];', + `ALTER TABLE [users2] ALTER COLUMN [name2] varchar(255);`, + ]; + expect(st5).toStrictEqual(st05); + expect(pst5).toStrictEqual(st05); +}); + +test('pk multistep #4', async () => { + const users = mssqlTable('users', { + id: int().primaryKey(), + id2: int(), + }); + + const users2 = mssqlTable('users2', { + id: int('id3').primaryKey(), + id2: int(), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const e1 = [ + 'CREATE TABLE [users] (\n\t[id] int,\n\t[id2] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, []); + const { sqlStatements: pst2 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + const e2 = [ + 'CREATE TABLE [users2] (\n\t[id3] int,\n\t[id2] int,\n\tCONSTRAINT [users2_pkey] PRIMARY KEY([id3])\n);\n', + 'DROP TABLE [users];', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); +}); + +test('fk #1', async () => { + const users = mssqlTable('users', { + id: int().primaryKey(), + }); + const posts = mssqlTable('posts', { + id: int().primaryKey(), + authorId: int().references(() => users.id), + }); + + const to = { + posts, + users, + }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const e = [ + `CREATE TABLE [posts] (\n\t[id] int,\n\t[authorId] int,\n\tCONSTRAINT [posts_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE TABLE [users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `ALTER TABLE [posts] ADD CONSTRAINT [posts_authorId_users_id_fk] FOREIGN KEY ([authorId]) REFERENCES [users]([id]);`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// exactly 128 symbols fk, fk name explicit +test('fk #2', async () => { + const users = mssqlTable('123456789_123456789_123456789_123456789_123456789_12_users', { + id3: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users.id3), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const e = [ + `CREATE TABLE [123456789_123456789_123456789_123456789_123456789_12_users] (\n\t[id3] int,\n\t[id2] int,\n\tCONSTRAINT [123456789_123456789_123456789_123456789_123456789_12_users_pkey] PRIMARY KEY([id3])\n);\n`, + 'ALTER TABLE [123456789_123456789_123456789_123456789_123456789_12_users] ADD CONSTRAINT [123456789_123456789_123456789_123456789_123456789_12_users_id2_123456789_123456789_123456789_123456789_123456789_12_users_id3_fk] FOREIGN KEY ([id2]) REFERENCES [123456789_123456789_123456789_123456789_123456789_12_users]([id3]);', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// 130 symbols fkey, fkey = table_hash_fkey +test('fk #3', async () => { + const users = mssqlTable('123456789_123456789_123456789_123456789_123456789_123_users', { + id3: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users.id3), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const e = [ + `CREATE TABLE [123456789_123456789_123456789_123456789_123456789_123_users] (\n\t[id3] int,\n\t[id2] int,\n\tCONSTRAINT [123456789_123456789_123456789_123456789_123456789_123_users_pkey] PRIMARY KEY([id3])\n);\n`, + 'ALTER TABLE [123456789_123456789_123456789_123456789_123456789_123_users] ADD CONSTRAINT [123456789_123456789_123456789_123456789_123456789_123_users_RqTNlAl1EEx0_fk] FOREIGN KEY ([id2]) REFERENCES [123456789_123456789_123456789_123456789_123456789_123_users]([id3]);', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// >=110 length table name, fkey = hash_fkey +test('fk #4', async () => { + const users = mssqlTable( + '1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_123456_users', + { + id: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users.id), + }, + ); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const e = [ + `CREATE TABLE [1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_123456_users] (\n\t[id] int,\n\t[id2] int,\n\tCONSTRAINT [1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_123456_users_pkey] PRIMARY KEY([id])\n);\n`, + 'ALTER TABLE [1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_123456_users] ADD CONSTRAINT [1roIIPOipLA5_fk] FOREIGN KEY ([id2]) REFERENCES [1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_1234567890_123456_users]([id]);', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #5', async () => { + const users = mssqlTable('users', { + id: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users.id), + }); + + const users2 = mssqlTable('users2', { + id: int('id3').primaryKey(), + id2: int().references((): AnyMsSqlColumn => users2.id), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['dbo.users->dbo.users2', 'dbo.users2.id->dbo.users2.id3']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, renames, schemas: ['dbo'] }); + + const e = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id3], 'COLUMN';`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #7', async () => { + const users = mssqlTable('users', { + id1: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users.id1), + }); + + const users2 = mssqlTable('users', { + id1: int().primaryKey(), + id2: int(), + }, (t) => [foreignKey({ name: 'id2_id1_fk', columns: [t.id2], foreignColumns: [t.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['dbo.users.users_id2_users_id1_fk->dbo.users.id2_id1_fk']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, renames, schemas: ['dbo'] }); + + const e = [ + `EXEC sp_rename 'users_id2_users_id1_fk', [id2_id1_fk], 'OBJECT';`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #8', async () => { + const users = mssqlTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int().references((): AnyMsSqlColumn => users.id1), + }); + + const users2 = mssqlTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int().references((): AnyMsSqlColumn => users.id2), + }); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const e = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_id3_users_id1_fk];\n', + 'ALTER TABLE [users] ADD CONSTRAINT [users_id3_users_id2_fk] FOREIGN KEY ([id3]) REFERENCES [users]([id2]);', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #9', async () => { + const users = mssqlTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int(), + }, (t) => [foreignKey({ name: 'fk1', columns: [t.id3], foreignColumns: [t.id1] })]); + + const users2 = mssqlTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int(), + }, (t) => [foreignKey({ name: 'fk1', columns: [t.id3], foreignColumns: [t.id2] })]); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, schemas: ['dbo'] }); + + const e = [ + `ALTER TABLE [users] DROP CONSTRAINT [fk1];\n`, + `ALTER TABLE [users] ADD CONSTRAINT [fk1] FOREIGN KEY ([id3]) REFERENCES [users]([id2]);`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #10', async () => { + const users = mssqlTable('users', { + id1: int().primaryKey(), + }); + + const users2 = mssqlTable('users2', { + id1: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users2.id1), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['dbo.users->dbo.users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, renames, schemas: ['dbo'] }); + + const e = [ + `EXEC sp_rename 'users', [users2];`, + 'ALTER TABLE [users2] ADD [id2] int;', + 'ALTER TABLE [users2] ADD CONSTRAINT [users2_id2_users2_id1_fk] FOREIGN KEY ([id2]) REFERENCES [users2]([id1]);', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #11', async () => { + const users = mssqlTable('users', { + id1: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users.id1), + }); + + const users2 = mssqlTable('users2', { + id1: int().primaryKey(), + id2: int(), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['dbo.users->dbo.users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to, renames, schemas: ['dbo'] }); + + const e = [ + `EXEC sp_rename 'users', [users2];`, + 'ALTER TABLE [users2] DROP CONSTRAINT [users_id2_users_id1_fk];\n', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk multistep #1', async () => { + const users = mssqlTable('users', { + id: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users.id), + }); + + const users2 = mssqlTable('users2', { + id: int('id3').primaryKey(), + id2: int().references((): AnyMsSqlColumn => users2.id), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const e1 = [ + 'CREATE TABLE [users] (\n\t[id] int,\n\t[id2] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n', + 'ALTER TABLE [users] ADD CONSTRAINT [users_id2_users_id_fk] FOREIGN KEY ([id2]) REFERENCES [users]([id]);', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const renames = ['dbo.users->dbo.users2', 'dbo.users2.id->dbo.users2.id3']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.id', [id3], 'COLUMN';`, + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const users3 = mssqlTable('users2', { + id: int('id3').primaryKey(), + id2: int(), + }); + const sch3 = { users: users3 }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + expect(st4).toStrictEqual(['ALTER TABLE [users2] DROP CONSTRAINT [users_id2_users_id_fk];\n']); + expect(pst4).toStrictEqual(['ALTER TABLE [users2] DROP CONSTRAINT [users_id2_users_id_fk];\n']); +}); + +test('fk multistep #2', async () => { + const users = mssqlTable('users', { + id: int().primaryKey(), + id2: int().references((): AnyMsSqlColumn => users.id), + }); + + const users2 = mssqlTable('users2', { + id: int('id3').primaryKey(), + id2: int().references((): AnyMsSqlColumn => users2.id), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const e1 = [ + 'CREATE TABLE [users] (\n\t[id] int,\n\t[id2] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n', + 'ALTER TABLE [users] ADD CONSTRAINT [users_id2_users_id_fk] FOREIGN KEY ([id2]) REFERENCES [users]([id]);', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, []); + const { sqlStatements: pst2 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + const e2 = [ + 'CREATE TABLE [users2] (\n\t[id3] int,\n\t[id2] int,\n\tCONSTRAINT [users2_pkey] PRIMARY KEY([id3])\n);\n', + 'DROP TABLE [users];', + 'ALTER TABLE [users2] ADD CONSTRAINT [users2_id2_users2_id3_fk] FOREIGN KEY ([id2]) REFERENCES [users2]([id3]);', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456#issuecomment-3076042688 +test('fk multistep #3', async () => { + const foo = mssqlTable('foo', { + id: int().primaryKey(), + }); + + const bar = mssqlTable('bar', { + id: int().primaryKey(), + fooId: int().references(() => foo.id), + }); + + const schema1 = { foo, bar }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE [foo] (\n\t[id] int,\n\tCONSTRAINT [foo_pkey] PRIMARY KEY([id])\n);\n', + 'CREATE TABLE [bar] (\n\t[id] int,\n\t[fooId] int,\n\tCONSTRAINT [bar_pkey] PRIMARY KEY([id])\n);\n', + 'ALTER TABLE [bar] ADD CONSTRAINT [bar_fooId_foo_id_fk] FOREIGN KEY ([fooId]) REFERENCES [foo]([id]);', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + bar: mssqlTable('bar', { + id: int().primaryKey(), + fooId: int(), + }), + }; + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'ALTER TABLE [bar] DROP CONSTRAINT [bar_fooId_foo_id_fk];\n', + 'DROP TABLE [foo];', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('add check', async () => { + const schema1 = { + table: mssqlTable('table', { + id: int(), + }), + }; + + const schema2 = { + table: mssqlTable('table', { + id: int(), + }, (t) => [check('new_check', sql`${t.id} != 10`), check('new_check2', sql`${t.id} != 10`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [ + 'ALTER TABLE [table] ADD CONSTRAINT [new_check] CHECK ([table].[id] != 10);', + 'ALTER TABLE [table] ADD CONSTRAINT [new_check2] CHECK ([table].[id] != 10);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop check', async () => { + const schema1 = { + table: mssqlTable('table', { + id: int(), + }, (t) => [check('new_check', sql`${t.id} != 10`)]), + }; + + const schema2 = { + table: mssqlTable('table', { + id: int(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [ + 'ALTER TABLE [table] DROP CONSTRAINT [new_check];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table with check', async (t) => { + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to, schemas: ['dbo'] }); + + const st0 = [`CREATE TABLE [users] ( +\t[id] int, +\t[age] int, +\tCONSTRAINT [users_pkey] PRIMARY KEY([id]), +\tCONSTRAINT [some_check_name] CHECK ([users].[age] > 21) +);\n`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add check contraint to existing table', async (t) => { + const from = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }), + }; + + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [ + check('some_check_name', sql`${table.age} > 21`), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: to, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] ADD CONSTRAINT [some_check_name] CHECK ([users].[age] > 21);`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop check contraint in existing table', async (t) => { + const from = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: to, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] DROP CONSTRAINT [some_check_name];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('recreate check constraint (renamed)', async (t) => { + const from = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('new_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements } = await diff(from, to, []); + + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE [users] DROP CONSTRAINT [some_check_name];`, + `ALTER TABLE [users] ADD CONSTRAINT [new_check_name] CHECK ([users].[age] > 21);`, + ]); +}); + +test('rename check constraint', async (t) => { + const from = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('new_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff(from, to, ['dbo.users.some_check_name->dbo.users.new_check_name']); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: to, + schemas: ['dbo'], + renames: ['dbo.users.some_check_name->dbo.users.new_check_name'], + }); + + const st0 = [ + `EXEC sp_rename 'some_check_name', [new_check_name], 'OBJECT';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter check constraint (definition)', async (t) => { + const from = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 10`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: to, schemas: ['dbo'] }); + + expect(st).toStrictEqual([ + `ALTER TABLE [users] DROP CONSTRAINT [some_check_name];`, + `ALTER TABLE [users] ADD CONSTRAINT [some_check_name] CHECK ([users].[age] > 10);`, + ]); + expect(pst).toStrictEqual([]); +}); + +test('alter multiple check constraints (rename)', async (t) => { + const from = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey(), + age: int('age'), + name: varchar('name'), + }, + ( + table, + ) => [ + check('some_check_name_1', sql`${table.age} > 21`), + check('some_check_name_2', sql`${table.name} != 'Alex'`), + ], + ), + }; + + const to = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey(), + age: int('age'), + name: varchar('name'), + }, + ( + table, + ) => [ + check('some_check_name_3', sql`${table.age} > 21`), + check('some_check_name_4', sql`${table.name} != 'Alex'`), + ], + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: to, schemas: ['dbo'] }); + + const st0 = [ + `ALTER TABLE [users] DROP CONSTRAINT [some_check_name_1];`, + `ALTER TABLE [users] DROP CONSTRAINT [some_check_name_2];`, + `ALTER TABLE [users] ADD CONSTRAINT [some_check_name_3] CHECK ([users].[age] > 21);`, + `ALTER TABLE [users] ADD CONSTRAINT [some_check_name_4] CHECK ([users].[name] != 'Alex');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create checks with same names', async (t) => { + const to = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey(), + age: int('age'), + name: varchar('name'), + }, + ( + table, + ) => [check('some_check_name', sql`${table.age} > 21`), check('some_check_name', sql`${table.name} != 'Alex'`)], + ), + }; + + // 'constraint_name_duplicate' + await expect(diff({}, to, [])).rejects.toThrow(); + await expect(push({ db, to: to, schemas: ['dbo'] })).rejects.toThrow(); +}); + +test('rename table. Table has checks', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id'), + name: varchar(), + }, + (t) => [check('hello_world', sql`${t.name} != 'Alex'`)], + ), + }; + + const schema2 = { + users: mssqlTable('users2', { + id: int('id'), + name: varchar(), + }, (t) => [check('hello_world', sql`${t.name} != 'Alex'`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [`dbo.users->dbo.users2`]); + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'], renames: [`dbo.users->dbo.users2`] }); + + expect(st).toStrictEqual([ + `EXEC sp_rename 'users', [users2];`, + 'ALTER TABLE [users2] DROP CONSTRAINT [hello_world];', + "ALTER TABLE [users2] ADD CONSTRAINT [hello_world] CHECK ([users2].[name] != 'Alex');", + ]); + expect(pst).toStrictEqual([`EXEC sp_rename 'users', [users2];`]); // do not trigger on definition change when using push +}); + +test('add composite pks on existing table', async (t) => { + const schema1 = { + users: mssqlTable('users', { + id1: int('id1').notNull(), + id2: int('id2').notNull(), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id1: int('id1').notNull(), + id2: int('id2').notNull(), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = ['ALTER TABLE [users] ADD CONSTRAINT [compositePK] PRIMARY KEY ([id1],[id2]);']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('default #1', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).default('hey'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + `ALTER TABLE [users] ADD CONSTRAINT [users_name_default] DEFAULT ('hey') FOR [name];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('default #2', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).default('hey'), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).default('hey'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('default #3', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).default('hey'), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).default('hey1'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + renames: [], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP CONSTRAINT [users_name_default];', + "ALTER TABLE [users] ADD CONSTRAINT [users_name_default] DEFAULT ('hey1') FOR [name];", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('default #4', async () => { + const mySchema = mssqlSchema('my_schema'); + const from = { + mySchema, + users: mySchema.table('users', { + name: varchar({ length: 255 }).default('hey'), + }), + }; + const to = { + mySchema, + users: mySchema.table('users', { + name: varchar('name2', { length: 255 }).default('hey'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'my_schema.users.name->my_schema.users.name2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'my_schema.users.name->my_schema.users.name2', + ], + }); + + const st0 = [ + "EXEC sp_rename 'my_schema.users.name', [name2], 'COLUMN';", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* rename table */ +test('default #5', async () => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + email: varchar({ length: 255 }).unique(), + }), + }; + const to = { + users: mssqlTable('users2', { + name: varchar({ length: 255 }), + email: varchar({ length: 255 }).unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'dbo.users->dbo.users2', + ]); + + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + renames: [ + 'dbo.users->dbo.users2', + ], + }); + + const st0 = [ + `EXEC sp_rename 'users', [users2];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('default multistep #1', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).default('hey'), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const e1 = [ + "CREATE TABLE [users] (\n\t[name] varchar(255) CONSTRAINT [users_name_default] DEFAULT ('hey')\n);\n", + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }).default('hey'), + }), + }; + + const renames = ['dbo.users->dbo.users2', 'dbo.users2.name->dbo.users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + + const e3 = ['ALTER TABLE [users2] DROP CONSTRAINT [users_name_default];']; + + expect(pst4).toStrictEqual(e3); + expect(st4).toStrictEqual(e3); +}); + +test('default multistep #2', async () => { + const sch1 = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).default('hey'), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + expect(st1).toStrictEqual([ + "CREATE TABLE [users] (\n\t[name] varchar(255) CONSTRAINT [users_name_default] DEFAULT ('hey')\n);\n", + ]); + expect(pst1).toStrictEqual([ + "CREATE TABLE [users] (\n\t[name] varchar(255) CONSTRAINT [users_name_default] DEFAULT ('hey')\n);\n", + ]); + + const sch2 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }).default('hey'), + }), + }; + + const renames = ['dbo.users->dbo.users2', 'dbo.users2.name->dbo.users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames, schemas: ['dbo'] }); + + const e2 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }).default('hey1'), + }), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + + const e4 = [ + 'ALTER TABLE [users2] DROP CONSTRAINT [users_name_default];', + "ALTER TABLE [users2] ADD CONSTRAINT [users2_name2_default] DEFAULT ('hey1') FOR [name2];", + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: mssqlTable('users2', { + name: varchar('name2', { length: 255 }), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4, schemas: ['dbo'] }); + expect(st5).toStrictEqual(['ALTER TABLE [users2] DROP CONSTRAINT [users2_name2_default];']); + expect(pst5).toStrictEqual(['ALTER TABLE [users2] DROP CONSTRAINT [users2_name2_default];']); +}); + +test('unique duplicate name', async (t) => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + age: int(), + }), + users2: mssqlTable('users2', { + name: varchar({ length: 255 }), + age: int(), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + age: int(), + }, (t) => [unique('test').on(t.name)]), + users2: mssqlTable('users2', { + name: varchar({ length: 255 }), + age: int(), + }, (t) => [unique('test').on(t.name)]), + }; + + await push({ db, to: from }); + + await expect(diff(from, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('pk duplicate name', async (t) => { + const from = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + age: int(), + }), + users2: mssqlTable('users2', { + name: varchar({ length: 255 }), + age: int(), + }), + }; + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }), + age: int(), + }, (t) => [primaryKey({ name: 'test', columns: [t.name] })]), + users2: mssqlTable('users2', { + name: varchar({ length: 255 }), + age: int(), + }, (t) => [primaryKey({ name: 'test', columns: [t.name] })]), + }; + + await push({ db, to: from }); + + await expect(diff(from, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('fk duplicate name', async (t) => { + const users = mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + age: int().unique(), + }); + const from = { + users, + users2: mssqlTable('users2', { + name: varchar({ length: 255 }), + age: int(), + }), + }; + const to = { + users, + users2: mssqlTable( + 'users2', + { + name: varchar({ length: 255 }), + age: int(), + }, + ( + t, + ) => [ + foreignKey({ name: 'test', columns: [t.age], foreignColumns: [users.age] }), + foreignKey({ name: 'test', columns: [t.name], foreignColumns: [users.name] }), + ], + ), + }; + + await push({ db, to: from }); + + await expect(diff(from, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('index duplicate name', async (t) => { + const to = { + users: mssqlTable('users', { + name: varchar({ length: 255 }).primaryKey(), + age: int().unique(), + }, (t) => [index('test').on(t.age), index('test').on(t.name)]), + }; + + await expect(diff({}, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456 +test('drop column with pk and add pk to another column #1', async () => { + const schema1 = { + authors: mssqlTable('authors', { + publicationId: varchar('publication_id', { length: 64 }), + authorID: varchar('author_id', { length: 10 }), + }, (table) => [ + primaryKey({ columns: [table.publicationId, table.authorID] }), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE [authors] (\n\t[publication_id] varchar(64),\n\t[author_id] varchar(10),' + + '\n\tCONSTRAINT [authors_pkey] PRIMARY KEY([publication_id],[author_id])\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + authors: mssqlTable('authors', { + publicationId: varchar('publication_id', { length: 64 }), + authorID: varchar('author_id', { length: 10 }), + orcidId: varchar('orcid_id', { length: 64 }), + }, (table) => [ + primaryKey({ columns: [table.publicationId, table.authorID, table.orcidId] }), + ]), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE [authors] DROP CONSTRAINT [authors_pkey];', + /* + HAS TO BE NOT NULL, othervise: + + ALTER TABLE [authors] ADD CONSTRAINT [authors_pkey] PRIMARY KEY ([publication_id],[author_id],[orcid_id]); + Error: Could not create constraint or index. See previous errors. + */ + 'ALTER TABLE [authors] ADD [orcid_id] varchar(64) NOT NULL;', + 'ALTER TABLE [authors] ADD CONSTRAINT [authors_pkey] PRIMARY KEY ([publication_id],[author_id],[orcid_id]);', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); diff --git a/drizzle-kit/tests/mssql/defaults.test.ts b/drizzle-kit/tests/mssql/defaults.test.ts new file mode 100644 index 0000000000..fac89fdfaf --- /dev/null +++ b/drizzle-kit/tests/mssql/defaults.test.ts @@ -0,0 +1,1006 @@ +import { sql } from 'drizzle-orm'; +import { + bigint, + binary, + bit, + char, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + int, + nchar, + ntext, + numeric, + nvarchar, + real, + smallint, + text, + time, + tinyint, + varbinary, + varchar, +} from 'drizzle-orm/mssql-core'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, expect, test } from 'vitest'; +import { diffDefault, prepareTestDatabase, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +test('int', async () => { + const res1 = await diffDefault(_, int().default(10), '((10))'); + const res2 = await diffDefault(_, int().default(0), '((0))'); + const res3 = await diffDefault(_, int().default(-10), '((-10))'); + const res4 = await diffDefault(_, int().default(1e4), '((10000))'); + const res5 = await diffDefault(_, int().default(-1e4), '((-10000))'); + + const res6 = await diffDefault(_, int().default(sql`10`), '(10)'); + const res7 = await diffDefault(_, int().default(sql`((10))`), '((10))'); + const res8 = await diffDefault(_, int().default(sql`'10'`), "('10')"); + const res9 = await diffDefault(_, int().default(sql`('10')`), "('10')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); +}); + +test('smallint', async () => { + // 2^15 - 1 + const res1 = await diffDefault(_, smallint().default(32767), '((32767))'); + // -2^15 + const res2 = await diffDefault(_, smallint().default(-32768), '((-32768))'); + + const res3 = await diffDefault(_, smallint().default(sql`10`), '(10)'); + const res4 = await diffDefault(_, smallint().default(sql`(10)`), '(10)'); + const res5 = await diffDefault(_, smallint().default(sql`'10'`), "('10')"); + const res6 = await diffDefault(_, smallint().default(sql`('10')`), "('10')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); +}); + +test('tinyint', async () => { + const res1 = await diffDefault(_, tinyint().default(123), '((123))'); + const res2 = await diffDefault(_, tinyint().default(0), '((0))'); + const res3 = await diffDefault(_, tinyint().default(1), '((1))'); + const res4 = await diffDefault(_, tinyint().default(sql`10`), '(10)'); + const res5 = await diffDefault(_, tinyint().default(sql`(10)`), '(10)'); + const res6 = await diffDefault(_, tinyint().default(sql`'10'`), "('10')"); + const res7 = await diffDefault(_, tinyint().default(sql`('10')`), "('10')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); +}); + +test('bigint', async () => { + const res0 = await diffDefault(_, bigint({ mode: 'number' }).default(2147483647), '((2147483647))'); + // 2^53 + const res1 = await diffDefault(_, bigint({ mode: 'number' }).default(9007199254740991), '((9007199254740991))'); + const res2 = await diffDefault(_, bigint({ mode: 'number' }).default(-9007199254740991), '((-9007199254740991))'); + // 2^63 - 1; + const res3 = await diffDefault( + _, + bigint({ mode: 'bigint' }).default(9223372036854775807n), + '((9223372036854775807))', + ); + // -2^63 + const res4 = await diffDefault( + _, + bigint({ mode: 'bigint' }).default(-9223372036854775808n), + '((-9223372036854775808))', + ); + + const res5 = await diffDefault(_, bigint({ mode: 'number' }).default(sql`9007199254740991`), '(9007199254740991)'); + const res6 = await diffDefault(_, bigint({ mode: 'number' }).default(sql`-9007199254740991`), '(-9007199254740991)'); + + const res9 = await diffDefault( + _, + bigint({ mode: 'bigint' }).default(sql`-9223372036854775808`), + '(-9223372036854775808)', + ); + + expect.soft(res0).toStrictEqual([]); + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); +}); + +test('numeric', async () => { + const res1 = await diffDefault(_, numeric().default('10.123'), '((10.123))'); + + const res2 = await diffDefault( + _, + numeric({ mode: 'bigint', precision: 19 }).default(9223372036854775807n), + '((9223372036854775807))', + ); + const res3 = await diffDefault(_, numeric({ mode: 'number' }).default(9007199254740991), '((9007199254740991))'); + const res4 = await diffDefault(_, numeric({ mode: 'string' }).default('10.123'), '((10.123))'); + + const res5 = await diffDefault(_, numeric({ precision: 6 }).default('10.123'), '((10.123))'); + const res6 = await diffDefault(_, numeric({ precision: 6, scale: 2 }).default('10.123'), '((10.123))'); + const res7 = await diffDefault(_, numeric({ precision: 6, scale: 3 }).default('10.12'), '((10.12))'); + + const res8 = await diffDefault(_, numeric({ mode: 'string', scale: 2 }).default('10.123'), '((10.123))'); + const res9 = await diffDefault(_, numeric({ mode: 'string', precision: 6 }).default('10.123'), '((10.123))'); + const res10 = await diffDefault( + _, + numeric({ mode: 'string', precision: 6, scale: 2 }).default('10.123'), + '((10.123))', + ); + const res11 = await diffDefault(_, numeric({ mode: 'string', precision: 6, scale: 3 }).default('10.12'), '((10.12))'); + + const res12 = await diffDefault( + _, + numeric({ mode: 'bigint', precision: 19 }).default(9223372036854775807n), + '((9223372036854775807))', + ); + const res13 = await diffDefault(_, numeric({ mode: 'number', precision: 6, scale: 2 }).default(10.123), '((10.123))'); + const res14 = await diffDefault(_, numeric({ mode: 'number', scale: 2 }).default(10.123), '((10.123))'); + const res15 = await diffDefault(_, numeric({ mode: 'number', precision: 6 }).default(10.123), '((10.123))'); + + const res16 = await diffDefault(_, numeric().default(sql`10.123`), '(10.123)'); + const res17 = await diffDefault(_, numeric().default(sql`(10.123)`), '(10.123)'); + const res18 = await diffDefault(_, numeric().default(sql`'10.123'`), "('10.123')"); + const res19 = await diffDefault(_, numeric().default(sql`('10.123')`), "('10.123')"); + const res20 = await diffDefault(_, numeric().default(sql`('9007199254740991')`), "('9007199254740991')"); + const res21 = await diffDefault(_, numeric().default(sql`9007199254740991`), '(9007199254740991)'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); + expect.soft(res16).toStrictEqual([]); + expect.soft(res17).toStrictEqual([]); + expect.soft(res18).toStrictEqual([]); + expect.soft(res19).toStrictEqual([]); + expect.soft(res20).toStrictEqual([]); + expect.soft(res21).toStrictEqual([]); +}); + +test('decimal', async () => { + const res1 = await diffDefault(_, decimal().default('10.123'), '((10.123))'); + + const res2 = await diffDefault( + _, + decimal({ mode: 'bigint', precision: 19 }).default(9223372036854775807n), + '((9223372036854775807))', + ); + const res3 = await diffDefault(_, decimal({ mode: 'number' }).default(9007199254740991), '((9007199254740991))'); + const res4 = await diffDefault(_, decimal({ mode: 'string' }).default('10.123'), '((10.123))'); + + const res5 = await diffDefault(_, decimal({ precision: 6 }).default('10.123'), '((10.123))'); + const res6 = await diffDefault(_, decimal({ precision: 6, scale: 2 }).default('10.123'), '((10.123))'); + const res7 = await diffDefault(_, decimal({ precision: 6, scale: 3 }).default('10.12'), '((10.12))'); + + const res8 = await diffDefault(_, decimal({ mode: 'string', scale: 2 }).default('10.123'), '((10.123))'); + const res9 = await diffDefault(_, decimal({ mode: 'string', precision: 6 }).default('10.123'), '((10.123))'); + const res10 = await diffDefault( + _, + decimal({ mode: 'string', precision: 6, scale: 2 }).default('10.123'), + '((10.123))', + ); + const res11 = await diffDefault(_, decimal({ mode: 'string', precision: 6, scale: 3 }).default('10.12'), '((10.12))'); + + const res12 = await diffDefault( + _, + decimal({ mode: 'bigint', precision: 19 }).default(9223372036854775807n), + '((9223372036854775807))', + ); + const res13 = await diffDefault(_, decimal({ mode: 'number', precision: 6, scale: 2 }).default(10.123), '((10.123))'); + const res14 = await diffDefault(_, decimal({ mode: 'number', scale: 2 }).default(10.123), '((10.123))'); + const res15 = await diffDefault(_, decimal({ mode: 'number', precision: 6 }).default(10.123), '((10.123))'); + + const res16 = await diffDefault(_, decimal().default(sql`10.123`), '(10.123)'); + const res17 = await diffDefault(_, decimal().default(sql`(10.123)`), '(10.123)'); + const res18 = await diffDefault(_, decimal().default(sql`'10.123'`), "('10.123')"); + const res19 = await diffDefault(_, decimal().default(sql`('10.123')`), "('10.123')"); + const res20 = await diffDefault(_, decimal().default(sql`('9007199254740991')`), "('9007199254740991')"); + const res21 = await diffDefault(_, decimal().default(sql`9007199254740991`), '(9007199254740991)'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); + expect.soft(res16).toStrictEqual([]); + expect.soft(res17).toStrictEqual([]); + expect.soft(res18).toStrictEqual([]); + expect.soft(res19).toStrictEqual([]); + expect.soft(res20).toStrictEqual([]); + expect.soft(res21).toStrictEqual([]); +}); + +test('real', async () => { + const res1 = await diffDefault(_, real().default(1000.123), '((1000.123))'); + const res2 = await diffDefault(_, real().default(1000), '((1000))'); + const res3 = await diffDefault(_, real().default(2147483647), '((2147483647))'); + const res4 = await diffDefault(_, real().default(2147483648), '((2147483648))'); + const res5 = await diffDefault(_, real().default(-2147483648), '((-2147483648))'); + const res6 = await diffDefault(_, real().default(-2147483649), '((-2147483649))'); + const res7 = await diffDefault(_, real().default(sql`10`), '(10)'); + const res8 = await diffDefault(_, real().default(sql`(10)`), '(10)'); + const res9 = await diffDefault(_, real().default(sql`'10'`), "('10')"); + const res10 = await diffDefault(_, real().default(sql`('10')`), "('10')"); + + const res11 = await diffDefault(_, real().default(sql`'10.123'`), "('10.123')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); +}); + +test('float', async () => { + const res1 = await diffDefault(_, float().default(10000.123), '((10000.123))'); + const res1_0 = await diffDefault(_, float().default(10000), '((10000))'); + const res1_1 = await diffDefault(_, float().default(2147483647), '((2147483647))'); + const res1_2 = await diffDefault(_, float().default(2147483648), '((2147483648))'); + const res1_3 = await diffDefault(_, float().default(-2147483648), '((-2147483648))'); + const res1_4 = await diffDefault(_, float().default(-2147483649), '((-2147483649))'); + + const res2 = await diffDefault(_, float({ precision: 45 }).default(10000.123), '((10000.123))'); + const res2_0 = await diffDefault(_, float({ precision: 45 }).default(10000), '((10000))'); + const res2_1 = await diffDefault(_, float({ precision: 45 }).default(2147483647), '((2147483647))'); + const res2_2 = await diffDefault(_, float({ precision: 45 }).default(2147483648), '((2147483648))'); + const res2_3 = await diffDefault(_, float({ precision: 45 }).default(-2147483648), '((-2147483648))'); + const res2_4 = await diffDefault(_, float({ precision: 45 }).default(-2147483649), '((-2147483649))'); + + const res3 = await diffDefault(_, float({ precision: 10 }).default(10000.123), '((10000.123))'); + const res3_0 = await diffDefault(_, float({ precision: 10 }).default(10000), '((10000))'); + const res3_1 = await diffDefault(_, float({ precision: 10 }).default(2147483647), '((2147483647))'); + const res3_2 = await diffDefault(_, float({ precision: 10 }).default(2147483648), '((2147483648))'); + const res3_3 = await diffDefault(_, float({ precision: 10 }).default(-2147483648), '((-2147483648))'); + const res3_4 = await diffDefault(_, float({ precision: 10 }).default(-2147483649), '((-2147483649))'); + + const res4 = await diffDefault(_, float({ precision: 10 }).default(sql`(10000.123)`), '(10000.123)'); + const res4_0 = await diffDefault(_, float({ precision: 10 }).default(sql`(2147483648)`), '(2147483648)'); + const res4_1 = await diffDefault(_, float({ precision: 10 }).default(sql`-2147483649`), '(-2147483649)'); + + const res5 = await diffDefault(_, float({ precision: 45 }).default(sql`'10000.123'`), "('10000.123')"); + const res5_0 = await diffDefault(_, float({ precision: 45 }).default(sql`(2147483648)`), '(2147483648)'); + const res5_1 = await diffDefault(_, float({ precision: 45 }).default(sql`-2147483649`), '(-2147483649)'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res1_0).toStrictEqual([]); + expect.soft(res1_1).toStrictEqual([]); + expect.soft(res1_2).toStrictEqual([]); + expect.soft(res1_3).toStrictEqual([]); + expect.soft(res1_4).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res2_0).toStrictEqual([]); + expect.soft(res2_1).toStrictEqual([]); + expect.soft(res2_2).toStrictEqual([]); + expect.soft(res2_3).toStrictEqual([]); + expect.soft(res2_4).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res3_0).toStrictEqual([]); + expect.soft(res3_1).toStrictEqual([]); + expect.soft(res3_2).toStrictEqual([]); + expect.soft(res3_3).toStrictEqual([]); + expect.soft(res3_4).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res4_0).toStrictEqual([]); + expect.soft(res4_1).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res5_0).toStrictEqual([]); + expect.soft(res5_1).toStrictEqual([]); +}); + +test('bit', async () => { + const res1 = await diffDefault(_, bit().default(true), '((1))'); + const res2 = await diffDefault(_, bit().default(false), '((0))'); + const res3 = await diffDefault(_, bit().default(sql`1`), '(1)'); + const res4 = await diffDefault(_, bit().default(sql`1.`), '(1.)'); + const res5 = await diffDefault(_, bit().default(sql`'1'`), "('1')"); + + const res6 = await diffDefault(_, bit().default(sql`'2'`), "('2')"); + const res7 = await diffDefault(_, bit().default(sql`2`), '(2)'); + + const res8 = await diffDefault(_, bit().default(sql`TRY_CAST('true' AS [bit])`), "(TRY_CAST('true' AS [bit]))"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); +}); + +test('char', async () => { + const res1 = await diffDefault(_, char({ length: 256 }).default('text'), `('text')`); + const res2 = await diffDefault(_, char({ length: 256 }).default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, char({ length: 256 }).default('text\'text"'), "('text''text\"')"); + const res4 = await diffDefault(_, char({ length: 256, enum: ['one', 'two', 'three'] }).default('one'), "('one')"); + const res5 = await diffDefault( + _, + char({ length: 256, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).default( + `mo''",\`}{od`, + ), + `('mo''''\",\`}{od')`, + ); + + const res6 = await diffDefault(_, char({ length: 10 }).default(sql`'text'`), `('text')`); + const res7 = await diffDefault(_, char({ length: 10 }).default(sql`('text')`), `('text')`); + + const res8 = await diffDefault(_, char({ length: 10 }).default(''), `('')`); + const res9 = await diffDefault(_, char({ length: 10 }).default('""'), `('""')`); + const res10 = await diffDefault(_, char({ length: 10 }).default(sql`''`), `('')`); + + const res11 = await diffDefault(_, char({ length: 10 }).default(sql`'text'+'text'`), `('text'+'text')`); + + const res12 = await diffDefault(_, char({ length: 10 }).default("'"), `('''')`); + const res13 = await diffDefault(_, char({ length: 10 }).default('"'), `('"')`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); +}); + +test('varchar', async () => { + const res0 = await diffDefault(_, varchar({ length: 100 }).default('text'), `('text')`); + const res01 = await diffDefault(_, varchar({ length: 'max' }).default('text'), `('text')`); + const res1 = await diffDefault(_, varchar({ length: 256 }).default('text'), `('text')`); + const res2 = await diffDefault(_, varchar({ length: 256 }).default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, varchar({ length: 256 }).default('text\'text"'), "('text''text\"')"); + const res4 = await diffDefault(_, varchar({ length: 256, enum: ['one', 'two', 'three'] }).default('one'), "('one')"); + const res5 = await diffDefault( + _, + varchar({ length: 256, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).default( + `mo''",\`}{od`, + ), + `('mo''''",\`}{od')`, + ); + + const res6 = await diffDefault(_, varchar({ length: 10 }).default(sql`'text'`), `('text')`); + const res7 = await diffDefault(_, varchar({ length: 10 }).default(sql`('text')`), `('text')`); + + const res8 = await diffDefault(_, varchar({ length: 10 }).default(''), `('')`); + const res9 = await diffDefault(_, varchar({ length: 10 }).default(sql`''`), `('')`); + + const res10 = await diffDefault(_, varchar({ length: 10 }).default(sql`'text'+'text'`), `('text'+'text')`); + + const res11 = await diffDefault(_, varchar({ length: 10 }).default("'"), `('''')`); + const res12 = await diffDefault(_, varchar({ length: 10 }).default('"'), `('"')`); + + expect.soft(res0).toStrictEqual([]); + expect.soft(res01).toStrictEqual([]); + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); +}); + +test('text', async () => { + const res1 = await diffDefault(_, text().default('text'), `('text')`); + const res2 = await diffDefault(_, text().default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, text().default('text\'text"'), "('text''text\"')"); + const res4 = await diffDefault(_, text({ enum: ['one', 'two', 'three'] }).default('one'), "('one')"); + const res5 = await diffDefault( + _, + text({ enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).default(`mo''",\`}{od`), + `('mo''''",\`}{od')`, + ); + + const res6 = await diffDefault(_, text().default(sql`'text'`), `('text')`); + const res7 = await diffDefault(_, text().default(sql`('text')`), `('text')`); + + const res8 = await diffDefault(_, text().default(''), `('')`); + const res9 = await diffDefault(_, text().default(sql`''`), `('')`); + + const res10 = await diffDefault(_, text().default(sql`'text'+'text'`), `('text'+'text')`); + + const res11 = await diffDefault(_, text().default("'"), `('''')`); + const res12 = await diffDefault(_, text().default('"'), `('"')`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); +}); + +test('nchar ', async () => { + const res0 = await diffDefault(_, nchar({ length: 10 }).default('text'), `('text')`); + const res1 = await diffDefault(_, nchar({ length: 256 }).default('text'), `('text')`); + const res2 = await diffDefault(_, nchar({ length: 256 }).default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, nchar({ length: 256 }).default('text\'text"'), "('text''text\"')"); + const res4 = await diffDefault(_, nchar({ length: 256, enum: ['one', 'two', 'three'] }).default('one'), "('one')"); + const res5 = await diffDefault( + _, + nchar({ length: 256, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).default( + `mo''",\`}{od`, + ), + `('mo''''\",\`}{od')`, + ); + + const res6 = await diffDefault(_, nchar({ length: 10 }).default(sql`'text'`), `('text')`); + const res7 = await diffDefault(_, nchar({ length: 10 }).default(sql`('text')`), `('text')`); + + const res8 = await diffDefault(_, nchar({ length: 10 }).default(''), `('')`); + const res9 = await diffDefault(_, nchar({ length: 10 }).default(sql`''`), `('')`); + + const res10 = await diffDefault(_, nchar({ length: 10 }).default(sql`'text'+'text'`), `('text'+'text')`); + + const res11 = await diffDefault(_, nchar({ length: 10 }).default("'"), `('''')`); + const res12 = await diffDefault(_, nchar({ length: 10 }).default('"'), `('"')`); + + expect.soft(res0).toStrictEqual([]); + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); +}); + +test('nvarchar', async () => { + const res0 = await diffDefault(_, nvarchar({ length: 10 }).default('text'), `('text')`); + const res1 = await diffDefault(_, nvarchar({ length: 256 }).default('text'), `('text')`); + const res2 = await diffDefault(_, nvarchar({ length: 256 }).default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, nvarchar({ length: 256 }).default('text\'text"'), "('text''text\"')"); + const res4 = await diffDefault(_, nvarchar({ length: 256, enum: ['one', 'two', 'three'] }).default('one'), "('one')"); + const res5 = await diffDefault( + _, + nvarchar({ length: 256, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).default( + `mo''",\`}{od`, + ), + `('mo''''",\`}{od')`, + ); + + const res6 = await diffDefault(_, nvarchar({ length: 10 }).default(sql`'text'`), `('text')`); + const res7 = await diffDefault(_, nvarchar({ length: 10 }).default(sql`('text')`), `('text')`); + + const res8 = await diffDefault(_, nvarchar({ length: 10 }).default(''), `('')`); + const res9 = await diffDefault(_, nvarchar({ length: 10 }).default(sql`''`), `('')`); + + const res10 = await diffDefault(_, nvarchar({ length: 10 }).default(sql`'text'+'text'`), `('text'+'text')`); + + const res11 = await diffDefault( + _, + nvarchar({ mode: 'json', length: 'max' }).default({ key: 'value' }), + `('{"key":"value"}')`, + ); + const res12 = await diffDefault( + _, + nvarchar({ mode: 'json', length: 'max' }).default({ key: 9223372036854775807n }), + `('{"key":9223372036854775807}')`, + ); + const res13 = await diffDefault( + _, + nvarchar({ mode: 'json', length: 'max' }).default(sql`'{"key":9223372036854775807}'`), + `('{"key":9223372036854775807}')`, + ); + const res14 = await diffDefault( + _, + nvarchar({ mode: 'json', length: 'max' }).default([9223372036854775807n, 9223372036854775806n]), + `('[9223372036854775807,9223372036854775806]')`, + ); + const res15 = await diffDefault( + _, + nvarchar({ mode: 'json', length: 'max' }).default({ key: 'value\\\'"' }), + `('{"key":"value\\\\''\\""}')`, + ); + + const res16 = await diffDefault(_, nvarchar({ length: 10 }).default("'"), `('''')`); + const res17 = await diffDefault(_, nvarchar({ length: 10 }).default('"'), `('"')`); + + expect.soft(res0).toStrictEqual([]); + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); + expect.soft(res16).toStrictEqual([]); + expect.soft(res17).toStrictEqual([]); +}); + +test('ntext', async () => { + const res1 = await diffDefault(_, ntext().default('text'), `('text')`); + const res2 = await diffDefault(_, ntext().default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, ntext().default('text\'text"'), "('text''text\"')"); + const res4 = await diffDefault(_, ntext({ enum: ['one', 'two', 'three'] }).default('one'), "('one')"); + const res5 = await diffDefault( + _, + ntext({ enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).default(`mo''",\`}{od`), + `('mo''''",\`}{od')`, + ); + + const res6 = await diffDefault(_, ntext().default(sql`'text'`), `('text')`); + const res7 = await diffDefault(_, ntext().default(sql`('text')`), `('text')`); + + const res8 = await diffDefault(_, ntext().default(''), `('')`); + const res9 = await diffDefault(_, ntext().default(sql`''`), `('')`); + + const res10 = await diffDefault(_, ntext().default(sql`'text'+'text'`), `('text'+'text')`); + + const res11 = await diffDefault(_, ntext().default("'"), `('''')`); + const res12 = await diffDefault(_, ntext().default('"'), `('"')`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); +}); + +test('datetime', async () => { + const res1 = await diffDefault( + _, + datetime({ mode: 'date' }).default(new Date('2025-05-23T12:53:53.111Z')), + `('2025-05-23 12:53:53.111')`, + ); + const res2 = await diffDefault( + _, + datetime({ mode: 'string' }).default('2025-05-23T12:53:53.112Z'), + `('2025-05-23T12:53:53.112Z')`, + ); + const res3 = await diffDefault( + _, + datetime({ mode: 'string' }).default(sql`'2025-05-23T12:53:53.113Z'`), + `('2025-05-23T12:53:53.113Z')`, + ); + + const res4 = await diffDefault(_, datetime().defaultGetDate(), `(getdate())`); + const res5 = await diffDefault(_, datetime().default(sql`getdate()`), `(getdate())`); + + const res6 = await diffDefault( + _, + datetime({ mode: 'date' }).default(sql`dateadd(day,(7),getdate())`), + `(dateadd(day,(7),getdate()))`, + ); + + const res7 = await diffDefault( + _, + datetime({ mode: 'string' }).default(`2025-05-23`), + `('2025-05-23')`, + ); + const res8 = await diffDefault( + _, + datetime({ mode: 'string' }).default(`12:53:53.113`), + `('12:53:53.113')`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); +}); + +test('datetime2', async () => { + const res1 = await diffDefault( + _, + datetime2({ mode: 'date' }).default(new Date('2025-05-23T12:53:53.115Z')), + `('2025-05-23 12:53:53.115')`, + ); + const res10 = await diffDefault( + _, + datetime2({ mode: 'date', precision: 4 }).default(new Date('2025-05-23T12:53:53.115Z')), + `('2025-05-23 12:53:53.115')`, + ); + const res2 = await diffDefault( + _, + datetime2({ mode: 'string' }).default('2025-05-23T12:53:53.115Z'), + `('2025-05-23T12:53:53.115Z')`, + ); + const res2_0 = await diffDefault( + _, + datetime2({ mode: 'string', precision: 4 }).default('2025-05-23T12:53:53.115Z'), + `('2025-05-23T12:53:53.115Z')`, + ); + const res2_1 = await diffDefault( + _, + datetime2({ mode: 'string', precision: 4 }).default('2025-05-23 12:53:53.115'), + `('2025-05-23 12:53:53.115')`, + ); + const res3 = await diffDefault( + _, + datetime2({ mode: 'string', precision: 4 }).default(sql`('2025-05-23T12:53:53.115Z')`), + `('2025-05-23T12:53:53.115Z')`, + ); + const res4 = await diffDefault(_, datetime2().defaultGetDate(), `(getdate())`); + const res40 = await diffDefault(_, datetime2({ precision: 4 }).defaultGetDate(), `(getdate())`); + const res5 = await diffDefault(_, datetime2().default(sql`getdate()`), `(getdate())`); + const res50 = await diffDefault(_, datetime2({ precision: 4 }).default(sql`getdate()`), `(getdate())`); + + const res6 = await diffDefault( + _, + datetime2({ mode: 'date' }).default(sql`dateadd(day,(7),getdate())`), + `(dateadd(day,(7),getdate()))`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + + expect.soft(res10).toStrictEqual([]); + expect.soft(res2_0).toStrictEqual([]); + expect.soft(res2_1).toStrictEqual([]); + expect.soft(res40).toStrictEqual([]); + expect.soft(res50).toStrictEqual([]); +}); + +test('datetimeoffset', async () => { + const res1 = await diffDefault( + _, + datetimeoffset({ mode: 'date' }).default(new Date('2025-05-23T12:53:53.115Z')), + `('2025-05-23T12:53:53.115Z')`, + ); + const res2 = await diffDefault( + _, + datetimeoffset({ mode: 'date', precision: 3 }).default(new Date('2025-05-23T12:53:53.115Z')), + `('2025-05-23T12:53:53.115Z')`, + ); + const res3 = await diffDefault( + _, + datetimeoffset({ mode: 'string' }).default('2025-05-23T12:53:53.115+03:00'), + `('2025-05-23T12:53:53.115+03:00')`, + ); + const res4 = await diffDefault( + _, + datetimeoffset({ mode: 'string', precision: 3 }).default('2025-05-23 12:53:53.115'), + `('2025-05-23 12:53:53.115')`, + ); + const res5 = await diffDefault(_, datetimeoffset().defaultGetDate(), `(getdate())`); + + const res30 = await diffDefault( + _, + datetimeoffset({ mode: 'string' }).default(sql`'2025-05-23T12:53:53.115+03:00'`), + `('2025-05-23T12:53:53.115+03:00')`, + ); + const res40 = await diffDefault( + _, + datetimeoffset({ mode: 'string', precision: 3 }).default(sql`('2025-05-23 12:53:53.115')`), + `('2025-05-23 12:53:53.115')`, + ); + + const res6 = await diffDefault( + _, + datetimeoffset({ mode: 'date' }).default(sql`dateadd(day,(7),getdate())`), + `(dateadd(day,(7),getdate()))`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res30).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res40).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); +}); + +test('time', async () => { + const res1 = await diffDefault(_, time().default(new Date('2025-05-23T12:53:53.115Z')), `('12:53:53.115')`); + const res10 = await diffDefault( + _, + time({ mode: 'string', precision: 2 }).default('15:50:33.12342'), + `('15:50:33.12342')`, + ); + const res2 = await diffDefault( + _, + time({ mode: 'string', precision: 2 }).default('2025-05-23T12:53:53.115Z'), + `('2025-05-23T12:53:53.115Z')`, + ); + + const res3 = await diffDefault( + _, + time({ mode: 'string', precision: 2 }).default(sql`('15:50:33.12342')`), + `('15:50:33.12342')`, + ); + const res4 = await diffDefault( + _, + time({ mode: 'string', precision: 2 }).default(sql`('2025-05-23T12:53:53.115Z')`), + `('2025-05-23T12:53:53.115Z')`, + ); + + const res5 = await diffDefault( + _, + time({ mode: 'date' }).default(sql`dateadd(day,(7),getdate())`), + `(dateadd(day,(7),getdate()))`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); +}); + +test('date', async () => { + const res1 = await diffDefault(_, date({ mode: 'string' }).default('2025-05-23'), `('2025-05-23')`); + const res10 = await diffDefault( + _, + date({ mode: 'string' }).default('2025-05-23T12:53:53.115Z'), + `('2025-05-23T12:53:53.115Z')`, + ); + const res2 = await diffDefault(_, date({ mode: 'date' }).default(new Date('2025-05-23')), `('2025-05-23')`); + const res3 = await diffDefault(_, date({ mode: 'string' }).defaultGetDate(), `(getdate())`); + const res30 = await diffDefault(_, date({ mode: 'date' }).defaultGetDate(), `(getdate())`); + + const res4 = await diffDefault(_, date({ mode: 'date' }).default(sql`getdate()`), `(getdate())`); + const res6 = await diffDefault(_, date({ mode: 'string' }).default(sql`'2025-05-23'`), `('2025-05-23')`); + const res7 = await diffDefault(_, date({ mode: 'date' }).default(sql`'2025-05-23'`), `('2025-05-23')`); + + const res8 = await diffDefault( + _, + date({ mode: 'date' }).default(sql`dateadd(day,(7),getdate())`), + `(dateadd(day,(7),getdate()))`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res30).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); +}); + +function toBinary(str: string) { + return '(' + '0x' + Buffer.from(str, 'utf8').toString('hex').toUpperCase() + ')'; +} +test('binary + varbinary', async () => { + const res1 = await diffDefault( + _, + binary({ length: 100 }).default(Buffer.from('hello world')), + toBinary('hello world'), + ); + const res1_1 = await diffDefault( + _, + varbinary({ length: 100 }).default(Buffer.from('hello world')), + toBinary('hello world'), + ); + const res1_2 = await diffDefault( + _, + binary({ length: 100 }).default(sql`hashbytes('SHA1','password')`), + "(hashbytes('SHA1','password'))", + ); + const res1_3 = await diffDefault(_, binary({ length: 100 }).default(sql`0xFF`), '(0xFF)'); + const res1_4 = await diffDefault( + _, + varbinary({ length: 100 }).default(sql`hashbytes('SHA1','password')`), + "(hashbytes('SHA1','password'))", + ); + const res1_5 = await diffDefault(_, varbinary({ length: 100 }).default(sql`0xFF`), '(0xFF)'); + + const res2 = await diffDefault( + _, + binary({ length: 100 }).default(Buffer.from('hello world')), + toBinary('hello world'), + ); + const res2_1 = await diffDefault( + _, + varbinary({ length: 'max' }).default(Buffer.from('hello world')), + toBinary('hello world'), + ); + const res2_2 = await diffDefault( + _, + varbinary({ length: 100 }).default(Buffer.from('hello world')), + toBinary('hello world'), + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res1_1).toStrictEqual([]); + expect.soft(res1_2).toStrictEqual([]); + expect.soft(res1_3).toStrictEqual([]); + expect.soft(res1_4).toStrictEqual([]); + expect.soft(res1_5).toStrictEqual([]); + + expect.soft(res2).toStrictEqual([]); + expect.soft(res2_1).toStrictEqual([]); + expect.soft(res2_2).toStrictEqual([]); +}); + +// Probably most of the tests should be handled in `push.test.ts` +// User-friendly warning should be shown if there are changes in default expressions +// This just needs to be handled right for typescript (values must be in sql``, not .default()) +test.skip('corner cases', async () => { + const res1 = await diffDefault(_, char().default(sql`('text' + 'text')`), `('text' + 'text')`); + const res2 = await diffDefault(_, char().default(sql`(CONVERT([char](2),N'A+'))`), `(CONVERT([char](2),N'A+'))`); + const res3 = await diffDefault( + _, + char().default(sql`IIF(DAY(GETDATE()) % 2 = 0, 'Y', 'N')`), + `(IIF(DAY(GETDATE()) % 2 = 0, 'Y', 'N'))`, + ); + const res4 = await diffDefault( + _, + char().default(sql`CASE + WHEN DATEPART(HOUR, GETDATE()) < 12 THEN 'M' + ELSE 'A' + END`), + `(CASE + WHEN DATEPART(HOUR, GETDATE()) < 12 THEN 'M' + ELSE 'A' + END)`, + ); + + const res5 = await diffDefault(_, int().default(sql`10 + 10`), '10 + 10'); + const res6 = await diffDefault(_, int().default(sql`(10) + 10`), '(10) + 10'); + const res7 = await diffDefault(_, int().default(sql`((10) + 10)`), '((10) + 10)'); + const res8 = await diffDefault( + _, + int().default(sql`(10) + (10 + 15) + 9007199254740992`), + '(10) + (10 + 15) + 9007199254740992', + ); + const res9 = await diffDefault( + _, + int().default(sql`(10) + (10 + 15) / 9007199254740992 - '11'`), + "(10) + (10 + 15) / 9007199254740992 - '11'", + ); + + const res10 = await diffDefault( + _, + bigint({ mode: 'number' }).default(sql`'9007199254740991.'`), + "('9007199254740991.')", + ); + const res11 = await diffDefault(_, bigint({ mode: 'number' }).default(sql`9007199254740991.`), '(9007199254740991.)'); + + const res12 = await diffDefault(_, numeric({ mode: 'number', precision: 6 }).default(10), '10.'); + const res13 = await diffDefault(_, numeric({ mode: 'number' }).default(sql`'6.73' + '4.2'`), "'6.73' + '4.2'"); + const res14 = await diffDefault(_, numeric({ mode: 'number' }).default(sql`(6.73 + 4.)`), '6.73 + 4.'); + const res15 = await diffDefault(_, numeric({ mode: 'number' }).default(sql`'6.73' + '4.2'`), "'6.73' + '4.2'"); + + const res16 = await diffDefault(_, real().default(sql`('10.')`), "('10.')"); + const res17 = await diffDefault(_, real().default(sql`(10.)`), '(10.)'); + const res18 = await diffDefault(_, real().default(sql`10.`), '(10.)'); + const res19 = await diffDefault(_, real().default(sql`10.123`), '(10.123)'); + + const res20 = await diffDefault(_, float().default(sql`10000.`), '(10000.)'); + const res21 = await diffDefault(_, float().default(sql`'10000.'`), "('10000.')"); + const res22 = await diffDefault(_, float({ precision: 45 }).default(sql`10000.`), '(10000.)'); + const res23 = await diffDefault(_, float({ precision: 10 }).default(sql`(10000.)`), '(10000.)'); + + const res24 = await diffDefault(_, bit().default(sql`TRY_CAST('true' AS [bit])`), "(TRY_CAST('true' AS [bit]))"); + const res25 = await diffDefault( + _, + bit().default(sql`CASE WHEN 1 + 1 - 1 + 1= 2 THEN 1 ELSE 0 END`), + 'CASE WHEN 1 + 1 - 1 + 1= 2 THEN 1 ELSE 0 END', + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); + expect.soft(res16).toStrictEqual([]); + expect.soft(res17).toStrictEqual([]); + expect.soft(res18).toStrictEqual([]); + expect.soft(res19).toStrictEqual([]); + expect.soft(res20).toStrictEqual([]); + expect.soft(res21).toStrictEqual([]); + expect.soft(res22).toStrictEqual([]); + expect.soft(res23).toStrictEqual([]); + expect.soft(res24).toStrictEqual([]); + expect.soft(res25).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/mssql/generated.test.ts b/drizzle-kit/tests/mssql/generated.test.ts new file mode 100644 index 0000000000..1e3e11b28a --- /dev/null +++ b/drizzle-kit/tests/mssql/generated.test.ts @@ -0,0 +1,1040 @@ +import { SQL, sql } from 'drizzle-orm'; +import { int, mssqlSchema, mssqlTable, text, varchar } from 'drizzle-orm/mssql-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('generated as callback: add column with generated constraint', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} + 'hello'`, + { mode: 'persisted' }, + ), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello') PERSISTED;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: add generated constraint to an exisiting column as PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} + 'to add'`, { + mode: 'persisted', + }), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'to add') PERSISTED;", + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs((): SQL => sql`${from.users.name} + 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'to add');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: drop generated constraint as PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} + 'to delete'`, + { mode: 'persisted' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = ['ALTER TABLE [users] DROP COLUMN [gen_name];', 'ALTER TABLE [users] ADD [gen_name] text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: drop generated constraint as virtual', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name} + 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = ['ALTER TABLE [users] DROP COLUMN [gen_name];', 'ALTER TABLE [users] ADD [gen_name] text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: change generated constraint type from virtual to PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} + 'hello'`, + { mode: 'persisted' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello') PERSISTED;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); // push is triggered cause mode changed +}); + +test('generated as callback: change generated constraint type from PERSISTED to virtual', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + { mode: 'persisted' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} + 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); // push will not be ignored cause type changed +}); + +test('generated as callback: change generated constraint', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${from.users.name}`, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${to.users.name} + 'hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // push will be ignored cause type was not changed +}); + +// --- + +test('generated as sql: add column with generated constraint', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name] + 'hello'`, + { mode: 'persisted' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = ["ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello') PERSISTED;"]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as sql: add generated constraint to an exisiting column as PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`[users].[name] + 'to add'`, { + mode: 'persisted', + }), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'to add') PERSISTED;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(sql`[users].[name] + 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'to add');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as sql: drop generated constraint as PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name] + 'to delete'`, + { mode: 'persisted' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = ['ALTER TABLE [users] DROP COLUMN [gen_name];', 'ALTER TABLE [users] ADD [gen_name] text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as sql: drop generated constraint as virtual', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name] + 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + 'ALTER TABLE [users] ADD [gen_name] text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as sql: change generated constraint type from virtual to PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name]`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name] + 'hello'`, + { mode: 'persisted' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello') PERSISTED;", + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as sql: change generated constraint type from PERSISTED to virtual', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name]`, + { mode: 'persisted' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name] + 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as sql: change generated constraint', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name]`, + { mode: 'persisted' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + sql`[users].[name] + 'hello'`, + { mode: 'persisted' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello') PERSISTED;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); +}); + +// --- + +test('generated as string: add column with generated constraint', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + `[users].[name] + 'hello'`, + { mode: 'persisted' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello') PERSISTED;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as string: add generated constraint to an exisiting column as PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`[users].[name] + 'to add'`, { + mode: 'persisted', + }), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'to add') PERSISTED;", + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as string: add generated constraint to an exisiting column as virtual', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').notNull(), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name') + .notNull() + .generatedAlwaysAs(`[users].[name] + 'to add'`, { + mode: 'virtual', + }), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'to add');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as string: drop generated constraint as PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + `[users].[name] + 'to delete'`, + { mode: 'persisted' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + 'ALTER TABLE [users] ADD [gen_name] text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as string: drop generated constraint as virtual', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + `[users].[name] + 'to delete'`, + { mode: 'virtual' }, + ), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName1: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + 'ALTER TABLE [users] ADD [gen_name] text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as string: change generated constraint type from virtual to PERSISTED', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs(`[users].[name]`, { + mode: 'virtual', + }), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + `[users].[name] + 'hello'`, + { mode: 'persisted' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello') PERSISTED;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as string: change generated constraint type from PERSISTED to virtual', async () => { + const newSchema = mssqlSchema('new_schema'); + const from = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs(`[users].[name]`, { mode: 'persisted' }), + }), + }; + const to = { + newSchema, + users: newSchema.table('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + `[users].[name] + 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE [new_schema].[users] DROP COLUMN [gen_name];', + "ALTER TABLE [new_schema].[users] ADD [gen_name] AS ([users].[name] + 'hello');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as string: change generated constraint', async () => { + const from = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs(`[users].[name]`), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: varchar('name', { length: 255 }), + generatedName: text('gen_name').generatedAlwaysAs( + `[users].[name] + 'hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + await push({ db, to: from, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to, + schemas: ['dbo'], + }); + + const st0 = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello');", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // push ignores definition changes +}); + +test('alter generated constraint', async () => { + const schema1 = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), + }), + }; + const schema2 = { + users: mssqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} + 'hello'`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0: string[] = [ + 'ALTER TABLE [users] DROP COLUMN [gen_name];', + "ALTER TABLE [users] ADD [gen_name] AS ([users].[name] + 'hello');", + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // push ignores definition changes +}); diff --git a/drizzle-kit/tests/mssql/indexes.test.ts b/drizzle-kit/tests/mssql/indexes.test.ts new file mode 100644 index 0000000000..2c9be05af6 --- /dev/null +++ b/drizzle-kit/tests/mssql/indexes.test.ts @@ -0,0 +1,235 @@ +import { sql } from 'drizzle-orm'; +import { bit, index, int, mssqlTable, text, varchar } from 'drizzle-orm/mssql-core'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('indexes #0', async (t) => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey(), + name: varchar('name', { length: 3000 }), + }, + ( + t, + ) => [ + index('changeName').on(t.name), + index('removeColumn').on(t.name, t.id), + index('addColumn').on(t.name), + index('removeWhere').on(t.name).where(sql`${t.name} != 'name'`), + index('addWhere').on(t.name), + ], + ), + }; + + const schema2 = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey(), + name: varchar('name', { length: 3000 }), + }, + (t) => [ + index('newName').on(t.name), + index('removeColumn').on(t.name), + index('addColumn').on(t.name, t.id), + index('removeWhere').on(t.name), + index('addWhere').on(t.name).where(sql`${t.name} != 'name'`), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + expect(st).toStrictEqual([ + 'DROP INDEX [changeName] ON [users];', + 'DROP INDEX [removeColumn] ON [users];', + 'DROP INDEX [addColumn] ON [users];', + 'DROP INDEX [removeWhere] ON [users];', + 'DROP INDEX [addWhere] ON [users];', + 'CREATE INDEX [newName] ON [users] ([name]);', + 'CREATE INDEX [removeColumn] ON [users] ([name]);', + 'CREATE INDEX [addColumn] ON [users] ([name],[id]);', + 'CREATE INDEX [removeWhere] ON [users] ([name]);', + "CREATE INDEX [addWhere] ON [users] ([name]) WHERE [users].[name] != 'name';", + ]); + expect(pst).toStrictEqual([ + 'DROP INDEX [changeName] ON [users];', + 'DROP INDEX [addColumn] ON [users];', + 'DROP INDEX [addWhere] ON [users];', + 'DROP INDEX [removeColumn] ON [users];', + 'DROP INDEX [removeWhere] ON [users];', + 'CREATE INDEX [newName] ON [users] ([name]);', + 'CREATE INDEX [addColumn] ON [users] ([name],[id]);', + "CREATE INDEX [addWhere] ON [users] ([name]) WHERE [users].[name] != 'name';", + 'CREATE INDEX [removeColumn] ON [users] ([name]);', + 'CREATE INDEX [removeWhere] ON [users] ([name]);', + ]); +}); + +test('adding basic indexes', async () => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 1000 }), + }), + }; + + const schema2 = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey(), + name: varchar('name', { length: 1000 }), + }, + (t) => [ + index('indx1') + .on(t.name) + .where(sql`name != 'alex'`), + index('indx2').on(t.id), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [ + `CREATE INDEX [indx1] ON [users] ([name]) WHERE name != 'alex';`, + `CREATE INDEX [indx2] ON [users] ([id]);`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('dropping basic index', async () => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: int('id').primaryKey(), + name: varchar('name', { length: 100 }), + }, + (t) => [index('indx1').on(t.name, t.id)], + ), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 100 }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0 = [`DROP INDEX [indx1] ON [users];`]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('indexes test case #1', async () => { + const schema1 = { + users: mssqlTable( + 'users', + { + id: varchar('id').primaryKey(), + name: text('name').notNull(), + description: text('description'), + imageUrl: text('image_url'), + inStock: bit('in_stock').default(true), + }, + (t) => [ + index('indx').on(t.id), + index('indx4').on(t.id), + ], + ), + }; + + const schema2 = { + users: mssqlTable( + 'users', + { + id: varchar('id').primaryKey(), + name: text('name').notNull(), + description: text('description'), + imageUrl: text('image_url'), + inStock: bit('in_stock').default(true), + }, + (t) => [ + index('indx').on(t.id), + index('indx4').on(t.id), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('Alter where property', async () => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 1000 }), + }, (t) => [ + index('indx2').on(t.name).where(sql`name != 'alex'`), + ]), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 1000 }), + }, (t) => [ + index('indx2').on(t.name).where(sql`name != 'alex2'`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas: ['dbo'] }); + + expect(st).toStrictEqual([ + 'DROP INDEX [indx2] ON [users];', + "CREATE INDEX [indx2] ON [users] ([name]) WHERE name != 'alex2';", + ]); + expect(pst).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/mssql/mocks.ts b/drizzle-kit/tests/mssql/mocks.ts new file mode 100644 index 0000000000..032abd834b --- /dev/null +++ b/drizzle-kit/tests/mssql/mocks.ts @@ -0,0 +1,505 @@ +import { is } from 'drizzle-orm'; +import { int, MsSqlColumnBuilder, MsSqlSchema, MsSqlTable, mssqlTable, MsSqlView } from 'drizzle-orm/mssql-core'; +import { CasingType } from 'src/cli/validations/common'; +import { interimToDDL, MssqlDDL, SchemaError } from 'src/dialects/mssql/ddl'; +import { ddlDiff, ddlDiffDry } from 'src/dialects/mssql/diff'; +import { defaultFromColumn, fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/mssql/drizzle'; +import { mockResolver } from 'src/utils/mocks'; +import '../../src/@types/utils'; +import Docker from 'dockerode'; +import { existsSync, mkdirSync, rmSync, writeFileSync } from 'fs'; +import getPort from 'get-port'; +import mssql from 'mssql'; +import { introspect } from 'src/cli/commands/pull-mssql'; +import { EmptyProgressView } from 'src/cli/views'; +import { createDDL } from 'src/dialects/mssql/ddl'; +import { defaultNameForDefault } from 'src/dialects/mssql/grammar'; +import { fromDatabaseForDrizzle } from 'src/dialects/mssql/introspect'; +import { ddlToTypeScript } from 'src/dialects/mssql/typescript'; +import { DB } from 'src/utils'; +import { v4 as uuid } from 'uuid'; +import 'zx/globals'; +import { suggestions } from 'src/cli/commands/push-mssql'; +import { EntitiesFilter, EntitiesFilterConfig } from 'src/cli/validations/cli'; +import { hash } from 'src/dialects/common'; +import { extractMssqlExisting } from 'src/dialects/drizzle'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { tsc } from 'tests/utils'; +import { expect } from 'vitest'; + +export type MssqlDBSchema = Record< + string, + | MsSqlTable + | MsSqlSchema + | MsSqlView +>; + +class MockError extends Error { + constructor(readonly errors: SchemaError[]) { + super(); + } +} + +export const drizzleToDDL = ( + schema: MssqlDBSchema, + casing: CasingType | undefined, + filterConfig: EntitiesFilterConfig = { + schemas: undefined, + tables: undefined, + entities: undefined, + extensions: undefined, + }, +) => { + const tables = Object.values(schema).filter((it) => is(it, MsSqlTable)) as MsSqlTable[]; + const schemas = Object.values(schema).filter((it) => is(it, MsSqlSchema)) as MsSqlSchema[]; + const views = Object.values(schema).filter((it) => is(it, MsSqlView)) as MsSqlView[]; + + const existing = extractMssqlExisting(schemas, views); + const filter = prepareEntityFilter('mssql', filterConfig, existing); + const { schema: res, errors } = fromDrizzleSchema( + { schemas, tables, views }, + casing, + filter, + ); + + if (errors.length > 0) { + throw new Error(); + } + + return { ...interimToDDL(res), existing }; +}; + +// 2 schemas -> 2 ddls -> diff +export const diff = async ( + left: MssqlDBSchema | MssqlDDL, + right: MssqlDBSchema, + renamesArr: string[], + casing?: CasingType | undefined, +) => { + const { ddl: ddl1, errors: err1 } = 'entities' in left && '_' in left + ? { ddl: left as MssqlDDL, errors: [] } + : drizzleToDDL(left, casing); + const { ddl: ddl2, errors: err2 } = drizzleToDDL(right, casing); + + if (err1.length > 0 || err2.length > 0) { + throw new MockError([...err1, ...err2]); + } + + const renames = new Set(renamesArr); + + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), // uniques + mockResolver(renames), // indexes + mockResolver(renames), // checks + mockResolver(renames), // pks + mockResolver(renames), // fks + mockResolver(renames), // defaults + 'default', + ); + + return { sqlStatements, statements, groupedStatements, next: ddl2 }; +}; + +export const diffIntrospect = async ( + db: DB, + initSchema: MssqlDBSchema, + testName: string, + schemas: string[] = [], + entities?: EntitiesFilter, + casing?: CasingType | undefined, +) => { + const filterConfig: EntitiesFilterConfig = { + schemas, + entities, + tables: [], + extensions: [], + }; + + const { ddl: initDDL, existing } = drizzleToDDL(initSchema, casing, filterConfig); + const { sqlStatements: init } = await ddlDiffDry(createDDL(), initDDL, 'default'); + + for (const st of init) await db.query(st); + + const filter = prepareEntityFilter('mssql', filterConfig, existing); + + const schema = await fromDatabaseForDrizzle(db, filter); + + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel'); + + const filePath = `tests/mssql/tmp/${testName}.ts`; + + writeFileSync(filePath, file.file); + await tsc(file.file); + + const typeCheckResult = await $`pnpm exec tsc --noEmit --skipLibCheck ${filePath}`.nothrow(); + if (typeCheckResult.exitCode !== 0) { + throw new Error(typeCheckResult.stderr || typeCheckResult.stdout); + } + + // generate snapshot from ts file + const response = await prepareFromSchemaFiles([ + filePath, + ]); + + const { schema: schema2, errors: e2 } = fromDrizzleSchema(response, casing, filter); + const { ddl: ddl2, errors: e3 } = interimToDDL(schema2); + + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await ddlDiffDry(ddl1, ddl2, 'push'); + + rmSync(`tests/mssql/tmp/${testName}.ts`); + + return { + introspectDDL: ddl1, + fromFileDDL: ddl2, + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; + +// init schema flush to db -> introspect db to ddl -> compare ddl with destination schema +export const push = async (config: { + db: DB; + to: MssqlDBSchema | MssqlDDL; + renames?: string[]; + schemas?: string[]; + casing?: CasingType; + log?: 'statements' | 'none'; + force?: boolean; + expectError?: boolean; + ignoreSubsequent?: boolean; +}) => { + const { db, to, force, expectError, log } = config; + const casing = config.casing ?? 'camelCase'; + + const filterConfig: EntitiesFilterConfig = { + schemas: config.schemas, + entities: undefined, + tables: [], + extensions: [], + }; + const { ddl: ddl2, errors: err2, existing } = 'entities' in to && '_' in to + ? { ddl: to as MssqlDDL, errors: [], existing: [] } + : drizzleToDDL(to, casing, filterConfig); + + const filter = prepareEntityFilter('mssql', filterConfig, existing); + + const { schema } = await introspect(db, filter, new EmptyProgressView()); + + const { ddl: ddl1, errors: err3 } = interimToDDL(schema); + + if (err2.length > 0) { + throw new MockError(err2); + } + + if (err3.length > 0) { + throw new MockError(err3); + } + + const renames = new Set(config.renames ?? []); + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), // views + mockResolver(renames), // uniques + mockResolver(renames), // indexes + mockResolver(renames), // checks + mockResolver(renames), // pks + mockResolver(renames), // fks + mockResolver(renames), // defaults + 'push', + ); + + const { hints, losses } = await suggestions(db, statements, ddl2); + + if (force) { + for (const st of losses) { + await db.query(st); + } + } + + let error: Error | null = null; + for (const sql of sqlStatements) { + if (log === 'statements') console.log(sql); + try { + await db.query(sql); + } catch (e) { + if (!expectError) throw e; + error = e as Error; + break; + } + } + + // subsequent push + if (!config.ignoreSubsequent) { + { + const { schema } = await introspect(db, filter, new EmptyProgressView()); + const { ddl: ddl1, errors: err3 } = interimToDDL(schema); + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + if (sqlStatements.length > 0) { + console.error('---- subsequent push is not empty ----'); + expect(sqlStatements.join('\n')).toBe(''); + } + } + } + + return { sqlStatements, statements, hints, losses, error }; +}; + +export type TestDatabase = { + db: DB; + close: () => Promise; + clear: () => Promise; +}; + +export const diffDefault = async ( + kit: TestDatabase, + builder: T, + expectedDefault: string, + pre: MssqlDBSchema | null = null, +) => { + await kit.clear(); + + const config = (builder as any).config; + const def = config['default']; + const tableName = 'table'; + const column = mssqlTable(tableName, { column: builder }).column; + const sqlType = column.getSQLType(); + + const columnDefault = defaultFromColumn(column, 'camelCase'); + + const res = [] as string[]; + if (columnDefault !== expectedDefault) { + res.push(`Unexpected sql: \n${columnDefault}\n${expectedDefault}`); + } + + const init = { + ...pre, + table: mssqlTable(tableName, { column: builder }), + }; + + const { db, clear } = kit; + if (pre) await push({ db, to: pre, ignoreSubsequent: true }); + const { sqlStatements: st1 } = await push({ db, to: init, ignoreSubsequent: true }); + const { sqlStatements: st2 } = await push({ db, to: init, ignoreSubsequent: true }); + + const expectedInit = `CREATE TABLE [${tableName}] (\n\t[${column.name}] ${sqlType} CONSTRAINT [${ + defaultNameForDefault(tableName, column.name) + }] DEFAULT ${expectedDefault}\n);\n`; + if (st1.length !== 1 || st1[0] !== expectedInit) res.push(`Unexpected init:\n${st1}\n\n${expectedInit}`); + if (st2.length > 0) res.push(`Unexpected subsequent init:\n${st2}`); + + await db.query('INSERT INTO [table] ([column]) VALUES (default);'); + + // introspect to schema + const schema = await fromDatabaseForDrizzle(db, () => true); + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel'); + mkdirSync(`tests/mssql/tmp`, { recursive: true }); + const path = `tests/mssql/tmp/temp-${hash(String(Math.random()))}.ts`; + + if (existsSync(path)) rmSync(path); + writeFileSync(path, file.file); + await tsc(file.file); + + const response = await prepareFromSchemaFiles([path]); + const { schema: sch, errors: e2 } = fromDrizzleSchema(response, 'camelCase', () => true); + const { ddl: ddl2, errors: e3 } = interimToDDL(sch); + + const { sqlStatements: afterFileSqlStatements } = await ddlDiffDry(ddl1, ddl2, 'push'); + if (afterFileSqlStatements.length === 0) { + rmSync(path); + } else { + console.log(afterFileSqlStatements); + console.log(`./${path}`); + res.push(`Default type mismatch after diff:\n${`./${path}`}`); + } + + await clear(); + + config.hasDefault = false; + config.default = undefined; + const schema1 = { + ...pre, + table: mssqlTable('table', { column: builder }), + }; + + config.hasDefault = true; + config.default = def; + const schema2 = { + ...pre, + table: mssqlTable('table', { column: builder }), + }; + + if (pre) await push({ db, to: pre, ignoreSubsequent: true }); + await push({ db, to: schema1, ignoreSubsequent: true }); + const { sqlStatements: st3 } = await push({ db, to: schema2, ignoreSubsequent: true }); + + const expectedAlter = `ALTER TABLE [${tableName}] ADD CONSTRAINT [${ + defaultNameForDefault(tableName, column.name) + }] DEFAULT ${expectedDefault} FOR [${column.name}];`; + if (st3.length !== 1 || st3[0] !== expectedAlter) res.push(`Unexpected default alter:\n${st3}\n\n${expectedAlter}`); + + await clear(); + + const schema3 = { + ...pre, + table: mssqlTable('table', { id: int().identity() }), + }; + + const schema4 = { + ...pre, + table: mssqlTable('table', { id: int().identity(), column: builder }), + }; + + if (pre) await push({ db, to: pre, ignoreSubsequent: true }); + await push({ db, to: schema3, ignoreSubsequent: true }); + const { sqlStatements: st4 } = await push({ db, to: schema4, ignoreSubsequent: true }); + + const expectedAddColumn = `ALTER TABLE [${tableName}] ADD [${column.name}] ${sqlType} CONSTRAINT [${ + defaultNameForDefault(tableName, column.name) + }] DEFAULT ${expectedDefault};`; + if (st4.length !== 1 || st4[0] !== expectedAddColumn) { + res.push(`Unexpected add column:\n${st4[0]}\n\n${expectedAddColumn}`); + } + + return res; +}; + +export function parseMssqlUrl(urlString: string) { + const url = new URL(urlString); + return { + user: url.username, + password: url.password, + server: url.hostname, + port: parseInt(url.port, 10), + database: url.pathname.replace(/^\//, ''), + options: { + encrypt: url.searchParams.get('encrypt') === 'true', + trustServerCertificate: url.searchParams.get('trustServerCertificate') === 'true', + }, + }; +} + +export const prepareTestDatabase = async (): Promise => { + const envUrl = process.env.MSSQL_CONNECTION_STRING; + const { url, container } = envUrl ? { url: envUrl, container: null } : await createDockerDB(); + const params = parseMssqlUrl(url); + + const sleep = 1000; + let timeLeft = 20000; + do { + try { + const client = await mssql.connect({ + ...params, + pool: { max: 1 }, + requestTimeout: 30_000, + }); + + await client.query(`use [master];`); + await client.query(`drop database if exists [drizzle];`); + await client.query(`create database [drizzle];`); + await client.query(`use [drizzle];`); + + let tx = client.transaction(); + let req = new mssql.Request(tx); + await tx.begin(); + + const db = { + query: async (sql: string, params: any[] = []) => { + const res = await req.query(sql).catch((e) => { + throw new Error(e.message); + }); + return res.recordset as any[]; + }, + }; + const close = async () => { + await tx.rollback().catch((e) => {}); + await client?.close().catch(console.error); + await container?.stop().catch(console.error); + }; + + const clear = async () => { + try { + await tx.rollback(); + await tx.begin(); + } catch { + tx = client.transaction(); + await tx.begin(); + req = new mssql.Request(tx); + } + }; + return { db, close, clear }; + } catch (e) { + console.error(e); + throw e; + // await new Promise((resolve) => setTimeout(resolve, sleep)); + // timeLeft -= sleep; + } + } while (timeLeft > 0); + + throw new Error(); +}; + +export async function createDockerDB(): Promise< + { container: Docker.Container; url: string } +> { + let mssqlContainer: Docker.Container; + + const docker = new Docker(); + const port = await getPort({ port: 1433 }); + const image = 'mcr.microsoft.com/azure-sql-edge'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + mssqlContainer = await docker.createContainer({ + Image: image, + Env: ['ACCEPT_EULA=1', 'MSSQL_SA_PASSWORD=drizzle123PASSWORD!'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '1433/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await mssqlContainer.start(); + return { + url: 'mssql://SA:drizzle123PASSWORD!@127.0.0.1:1433?encrypt=true&trustServerCertificate=true', + container: mssqlContainer, + }; +} diff --git a/drizzle-kit/tests/mssql/pull.test.ts b/drizzle-kit/tests/mssql/pull.test.ts new file mode 100644 index 0000000000..365ead8f2f --- /dev/null +++ b/drizzle-kit/tests/mssql/pull.test.ts @@ -0,0 +1,454 @@ +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + binary, + bit, + char, + check, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + index, + int, + mssqlSchema, + mssqlTable, + mssqlView, + nchar, + ntext, + numeric, + nvarchar, + real, + smallint, + text, + time, + tinyint, + uniqueIndex, + varbinary, + varchar, +} from 'drizzle-orm/mssql-core'; +import fs from 'fs'; +import { DB } from 'src/utils'; +import { diffIntrospect, prepareTestDatabase, TestDatabase } from 'tests/mssql/mocks'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; + +// @vitest-environment-options {"max-concurrency":1} + +if (!fs.existsSync('tests/mssql/tmp')) { + fs.mkdirSync(`tests/mssql/tmp`, { recursive: true }); +} + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('basic introspect test', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id').notNull(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'basic-introspect'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic identity always test', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id').identity({ increment: 1, seed: 2 }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'basic-identity-always-introspect'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic identity by default test', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id').identity({ increment: 1, seed: 2 }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-identity-default-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic index test', async () => { + const schema = { + users: mssqlTable('users', { + firstName: nvarchar('first_name', { length: 244 }), + lastName: nvarchar('last_name', { length: 244 }), + data: nvarchar('data', { mode: 'json' }), + }, (table) => [ + index('single_column').on(table.firstName), + index('multi_column').on(table.firstName, table.lastName), + ]), + }; + + const { sqlStatements } = await diffIntrospect( + db, + schema, + 'basic-index-introspect', + ); + + expect(sqlStatements).toStrictEqual([]); +}); + +test('identity always test: few params', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id').identity({ + seed: 100, + increment: 1, + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'identity-always-few-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('identity by default test: few params', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id').identity({ + seed: 10000, + increment: 1, + }), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'identity-default-few-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('identity always test: all params', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id').identity(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'identity-always-all-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('identity by default test: all params', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id').identity(), + email: text('email'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'identity-default-all-params-introspect', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('generated column: link to another column', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id').identity(), + email: varchar({ length: 255 }), + generatedEmail: varchar('generatedEmail').generatedAlwaysAs( + (): SQL => sql`[email]`, + ), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'generated-link-column', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect all column types', async () => { + const schema = { + columns: mssqlTable('columns', { + bigint: bigint({ mode: 'number' }).default(1), + bigint1: bigint({ mode: 'bigint' }).default(BigInt(1)), + bigint2: bigint({ mode: 'string' }).default('1'), + + binary: binary({ length: 123 }).default(Buffer.from('hello, world')), + + bit: bit().default(false), + bit1: bit().default(true), + + char: char({ length: 2 }).default('1'), + nChar: nchar({ length: 2 }).default('1'), + + date: date({ mode: 'date' }).default(new Date()), + date1: date({ mode: 'string' }).default('2023-05-05'), + date2: date({ mode: 'string' }).defaultGetDate(), + + datetime: datetime({ mode: 'date' }).default(new Date()), + datetime1: datetime({ mode: 'string' }).default('2023-05-05'), + datetime12: datetime({ mode: 'string' }).defaultGetDate(), + + datetime2: datetime2({ mode: 'date' }).default(new Date()), + datetime21: datetime2({ mode: 'string' }).default('2023-05-05'), + datetime22: datetime2({ mode: 'string' }).defaultGetDate(), + + datetimeoffset: datetimeoffset({ mode: 'date' }).default(new Date()), + datetimeoffset1: datetimeoffset({ mode: 'string' }).default('2023-05-05'), + datetimeoffset2: datetimeoffset({ mode: 'string' }).defaultGetDate(), + + decimal: decimal({ precision: 3, scale: 1 }).default('32.1'), + + float: float({ precision: 3 }).default(32.1), + + int: int().default(32), + + numeric: numeric({ precision: 3, scale: 1 }).default('32.1'), + + real: real().default(32.4), + + smallint: smallint().default(3), + + text: text().default('hey'), + nText: ntext().default('hey'), + + time: time({ mode: 'date', precision: 2 }).default(new Date()), + time1: time({ mode: 'string', precision: 2 }).default('14:53:00.000'), + + tinyint: tinyint().default(123), + + varbinary: varbinary({ length: 213 }).default(Buffer.from('hey')), + + varchar: varchar({ length: 213 }).default('hey'), + nvarchar: nvarchar({ length: 213 }).default('hey'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-all-columns-types', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect columns with name with non-alphanumeric characters', async () => { + const schema = { + users: mssqlTable('users', { + 'not:allowed': int('not:allowed'), + 'nuh--uh': int('nuh-uh'), + '1_nope': int('1_nope'), + valid: int('valid'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-column-with-name-with-non-alphanumeric-characters', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect strings with single quotes', async () => { + const schema = { + columns: mssqlTable('columns', { + text: text('text').default('escape\'s quotes " '), + varchar: varchar('varchar').default('escape\'s quotes " '), + ntext: ntext('ntext').default('escape\'s quotes " '), + nvarchar: nvarchar('nvarchar').default('escape\'s quotes " '), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-strings-with-single-quotes', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect checks', async () => { + const schema = { + users: mssqlTable('users', { + id: int('id'), + name: varchar('name'), + age: int('age'), + }, (table) => [check('some_check', sql`${table.age} > 21`)]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-checks', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect checks from different schemas with same names', async () => { + const mySchema = mssqlSchema('schema2'); + const schema = { + mySchema, + users: mssqlTable('users', { + id: int('id'), + age: int('age'), + }, (table) => [check('some_check', sql`${table.age} > 21`)]), + usersInMySchema: mySchema.table('users', { + id: int('id'), + age: int('age'), + }, (table) => [check('some_check', sql`${table.age} < 1`)]), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-checks-diff-schema-same-names', + ['dbo', 'schema2'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect view #1', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = mssqlView('some_view').as((qb) => qb.select().from(users)); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-view', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect view #2', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + name: varchar('users'), + }); + + const view = mssqlView('some_view', { id: int('asd') }).with({ checkOption: true }).as( + sql`SELECT * FROM ${users}`, + ); + const schema = { + view, + users, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-view-2', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect primary key with unqiue', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('users'), + }, (t) => [ + index('some_name').on(t.name), + uniqueIndex('some_name1').on(t.name), + ]); + + const schema = { + users, + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-pk', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect empty db', async () => { + const { introspectDDL } = await diffIntrospect( + db, + {}, + 'introspect-empty-db', + ); + + expect(introspectDDL.entities.list().length).toBe(0); +}); diff --git a/drizzle-kit/tests/mssql/push.test.ts b/drizzle-kit/tests/mssql/push.test.ts new file mode 100644 index 0000000000..dd3a7f61c8 --- /dev/null +++ b/drizzle-kit/tests/mssql/push.test.ts @@ -0,0 +1,1194 @@ +import { + bigint, + check, + foreignKey, + int, + mssqlSchema, + mssqlTable, + mssqlView, + smallint, + text, + varchar, +} from 'drizzle-orm/mssql-core'; +import { eq, sql } from 'drizzle-orm/sql'; +// import { suggestions } from 'src/cli/commands/push-mssql'; +import { DB } from 'src/utils'; +import { diff, prepareTestDatabase, push, TestDatabase } from 'tests/mssql/mocks'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +// identity push tests +test('create table: identity - no params', async () => { + const schema1 = {}; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').identity(), + id1: bigint('id1', { mode: 'number' }), + id2: smallint('id2'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = [ + `CREATE TABLE [users] ( +\t[id] int IDENTITY(1, 1), +\t[id1] bigint, +\t[id2] smallint +);\n`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('view encryption', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const schema = { + users, + view: mssqlView('some_view').with({ encryption: true }) + .as(( + qb, + ) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(schema, schema, []); + + await push({ db, to: schema }); + const { sqlStatements: pst } = await push({ db, to: schema }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('create table: identity always/by default - with params', async () => { + const schema1 = {}; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').identity({ + increment: 4, + seed: 3, + }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = [ + `CREATE TABLE [users] ( +\t[id] int IDENTITY(3, 4) +);\n`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('no diff: identity always/by default - no params', async () => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').identity(), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').identity(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('no diff: identity always/by default - all params', async () => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').identity({ + seed: 1, + increment: 1, + }), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id').identity({ + seed: 1, + increment: 1, + }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from a column - no params', async () => { + const schema1 = { + users: mssqlTable('users', { + id: int('id').identity(), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + id: int('id'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = [ + `EXEC sp_rename 'users.id', [__old_id], 'COLUMN';`, + `ALTER TABLE [users] ADD [id] int;`, + `INSERT INTO [users] ([id]) SELECT [__old_id] FROM [users];`, + `ALTER TABLE [users] DROP COLUMN [__old_id];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add column with identity - no params', async () => { + const schema1 = { + users: mssqlTable('users', { + email: text('email'), + }), + }; + + const schema2 = { + users: mssqlTable('users', { + email: text('email'), + id: int('id').identity(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = [ + 'ALTER TABLE [users] ADD [id] int IDENTITY(1, 1);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create view', async () => { + const table = mssqlTable('test', { + id: int('id').primaryKey(), + }); + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: mssqlView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = [ + 'CREATE VIEW [view] AS (select distinct [id] from [test]);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add check constraint to table', async () => { + const schema1 = { + test: mssqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(2), + }), + }; + const schema2 = { + test: mssqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(2), + }, (table) => [ + check('some_check1', sql`${table.values} < 100`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = [ + 'ALTER TABLE [test] ADD CONSTRAINT [some_check1] CHECK ([test].[values] < 100);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop check constraint', async () => { + const schema1 = { + test: mssqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => [ + check('some_check', sql`${table.values} < 100`), + ]), + }; + const schema2 = { + test: mssqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = [ + 'ALTER TABLE [test] DROP CONSTRAINT [some_check];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter check constraint', async () => { + const schema1 = { + test: mssqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => [ + check('some_check', sql`${table.values} < 100`), + ]), + }; + const schema2 = { + test: mssqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => [ + check('some_check', sql`${table.values} < 10`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + // Only diff should find changes + expect(st).toStrictEqual([ + 'ALTER TABLE [test] DROP CONSTRAINT [some_check];', + 'ALTER TABLE [test] ADD CONSTRAINT [some_check] CHECK ([test].[values] < 10);', + ]); + expect(pst).toStrictEqual([]); +}); + +test('db has checks. Push with same names', async () => { + const schema1 = { + test: mssqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => [check('some_check', sql`${table.values} < 100`)]), + }; + const schema2 = { + test: mssqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => [check('some_check', sql`1=1`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + expect(st).toStrictEqual([ + `ALTER TABLE [test] DROP CONSTRAINT [some_check];`, + `ALTER TABLE [test] ADD CONSTRAINT [some_check] CHECK (1=1);`, + ]); + expect(pst).toStrictEqual([]); +}); + +test('drop view', async () => { + const table = mssqlTable('test', { + id: int('id').primaryKey(), + }); + const schema1 = { + test: table, + view: mssqlView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + const st0: string[] = [ + 'DROP VIEW [view];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter view definition', async () => { + const table = mssqlTable('test', { + id: int('id').primaryKey(), + }); + const schema1 = { + test: table, + view: mssqlView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: mssqlView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + expect(st).toStrictEqual([ + `ALTER VIEW [view] AS (select distinct [id] from [test] where [test].[id] = 1);`, + ]); + expect(pst).toStrictEqual([]); +}); + +test('drop view with data', async () => { + const table = mssqlTable('table', { + id: int('id').primaryKey(), + }); + const schema1 = { + test: table, + view: mssqlView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const seedStatements = [`INSERT INTO [table] ([id]) VALUES (1), (2), (3)`]; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, schemas: ['dbo'] }); + const { sqlStatements: pst, hints: phints } = await push({ + db, + to: schema2, + schemas: ['dbo'], + }); + + // seeding + for (const seedSt of seedStatements) { + await db.query(seedSt); + } + + const st0: string[] = [ + `DROP VIEW [view];`, + ]; + // const hints0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + // expect(phints).toStrictEqual(hints0); +}); + +test('fk multistep #1', async (t) => { + const refTable = mssqlTable('ref', { + id: int().identity(), + name: varchar().unique(), + }); + const sch1 = { + refTable, + users: mssqlTable('users', { + name: varchar().unique().references(() => refTable.name), + }), + }; + + const { sqlStatements: diffSt1 } = await diff({}, sch1, []); + const { sqlStatements: st1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const st01 = [ + 'CREATE TABLE [ref] (\n\t[id] int IDENTITY(1, 1),\n\t[name] varchar,\n\tCONSTRAINT [ref_name_key] UNIQUE([name])\n);\n', + 'CREATE TABLE [users] (\n\t[name] varchar,\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n', + 'ALTER TABLE [users] ADD CONSTRAINT [users_name_ref_name_fk] FOREIGN KEY ([name]) REFERENCES [ref]([name]);', + ]; + + expect(st1).toStrictEqual(st01); + expect(diffSt1).toStrictEqual(st01); + + const sch2 = { + refTable, + users: mssqlTable('users2', { + name: varchar('name2').unique().references(() => refTable.name), + }), + }; + + const renames = ['dbo.users->dbo.users2', 'dbo.users2.name->dbo.users2.name2']; + const { sqlStatements: diffSt2 } = await diff(sch1, sch2, renames); + const { sqlStatements: st2 } = await push({ + db, + to: sch2, + renames, + schemas: ['dbo'], + }); + + const st02 = [ + `EXEC sp_rename 'users', [users2];`, + `EXEC sp_rename 'users2.name', [name2], 'COLUMN';`, + ]; + + expect(st2).toStrictEqual(st02); + expect(diffSt2).toStrictEqual(st02); + + const { sqlStatements: diffSt3 } = await diff(sch2, sch2, []); + const { sqlStatements: st3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(diffSt3).toStrictEqual([]); + + const sch3 = { + refTable, + users: mssqlTable('users2', { + name: varchar('name2').unique(), + }), + }; + + // TODO should we check diff here? + // const { sqlStatements: diffSt4 } = await diff(sch2, sch3, []); + const { sqlStatements: st4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + + const st04 = ['ALTER TABLE [users2] DROP CONSTRAINT [users_name_ref_name_fk];\n']; + + expect(st4).toStrictEqual(st04); + // expect(diffSt4).toStrictEqual(st04); +}); + +test('fk multistep #2', async (t) => { + const refTable = mssqlTable('ref', { + id: int().identity(), + name: varchar().unique(), + }); + const sch1 = { + refTable, + users: mssqlTable('users', { + name: varchar().unique().references(() => refTable.name), + }), + }; + + const { sqlStatements: diffSt1 } = await diff({}, sch1, []); + const { sqlStatements: st1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const st01 = [ + 'CREATE TABLE [ref] (\n\t[id] int IDENTITY(1, 1),\n\t[name] varchar,\n\tCONSTRAINT [ref_name_key] UNIQUE([name])\n);\n', + 'CREATE TABLE [users] (\n\t[name] varchar,\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n', + 'ALTER TABLE [users] ADD CONSTRAINT [users_name_ref_name_fk] FOREIGN KEY ([name]) REFERENCES [ref]([name]);', + ]; + + expect(st1).toStrictEqual(st01); + expect(diffSt1).toStrictEqual(st01); + + const refTableRenamed = mssqlTable('ref2', { + id: int().identity(), + name: varchar('name2').unique(), + }); + const sch2 = { + refTable: refTableRenamed, + users: mssqlTable('users', { + name: varchar().unique().references(() => refTableRenamed.name), + }), + }; + + const renames = ['dbo.ref->dbo.ref2', 'dbo.ref2.name->dbo.ref2.name2']; + const { sqlStatements: diffSt2 } = await diff(sch1, sch2, renames); + const { sqlStatements: st2 } = await push({ + db, + to: sch2, + renames, + schemas: ['dbo'], + }); + + const st02 = [ + `EXEC sp_rename 'ref', [ref2];`, + `EXEC sp_rename 'ref2.name', [name2], 'COLUMN';`, + ]; + + expect(st2).toStrictEqual(st02); + expect(diffSt2).toStrictEqual(st02); + + const { sqlStatements: diffSt3 } = await diff(sch2, sch2, []); + const { sqlStatements: st3 } = await push({ db, to: sch2, schemas: ['dbo'] }); + + expect(st3).toStrictEqual([]); + expect(diffSt3).toStrictEqual([]); + + const sch3 = { + refTable: refTableRenamed, + users: mssqlTable('users', { + name: varchar('name').unique(), + }), + }; + + // TODO should we check diff here? + // const { sqlStatements: diffSt4 } = await diff(sch2, sch3, []); + const { sqlStatements: st4 } = await push({ db, to: sch3, schemas: ['dbo'] }); + + const st04 = ['ALTER TABLE [users] DROP CONSTRAINT [users_name_ref_name_fk];\n']; + + expect(st4).toStrictEqual(st04); + // expect(diffSt4).toStrictEqual(st04); +}); + +test('rename fk', async (t) => { + const refTable = mssqlTable('ref', { + id: int().identity(), + name: varchar().unique(), + }); + + const sch1 = { + refTable, + users: mssqlTable('users', { + name: varchar().unique(), + }, (t) => [foreignKey({ name: 'some', columns: [t.name], foreignColumns: [refTable.name] })]), + }; + + const { sqlStatements: diffSt1 } = await diff({}, sch1, []); + const { sqlStatements: st1 } = await push({ db, to: sch1, schemas: ['dbo'] }); + + const st01 = [ + 'CREATE TABLE [ref] (\n\t[id] int IDENTITY(1, 1),\n\t[name] varchar,\n\tCONSTRAINT [ref_name_key] UNIQUE([name])\n);\n', + 'CREATE TABLE [users] (\n\t[name] varchar,\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n', + 'ALTER TABLE [users] ADD CONSTRAINT [some] FOREIGN KEY ([name]) REFERENCES [ref]([name]);', + ]; + + expect(st1).toStrictEqual(st01); + expect(diffSt1).toStrictEqual(st01); + + const sch2 = { + refTable, + users: mssqlTable('users', { + name: varchar().unique(), + }, (t) => [foreignKey({ name: 'some_new', columns: [t.name], foreignColumns: [refTable.name] })]), // renamed fk + }; + + const renames = ['dbo.users.some->dbo.users.some_new']; + const { sqlStatements: diffSt2 } = await diff(sch1, sch2, renames); + const { sqlStatements: st2 } = await push({ + db, + to: sch2, + renames, + schemas: ['dbo'], + }); + + const st02 = [ + `EXEC sp_rename 'some', [some_new], 'OBJECT';`, + ]; + + expect(st2).toStrictEqual(st02); + expect(diffSt2).toStrictEqual(st02); +}); + +test('hints + losses: drop table that is not empty', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, from, []); + const { sqlStatements: pst1 } = await push({ db, to: from }); + + const st_01 = [ + 'CREATE TABLE [users] (\n\t[id] int,\n\t[name] varchar(200)\n);\n', + ]; + + expect(st1).toStrictEqual(st_01); + expect(pst1).toStrictEqual(st_01); + + await db.query(`INSERT INTO [users] ([id], [name]) VALUES (1, 'Alex'), (2, 'Andrew');`); + + const { sqlStatements: st2 } = await diff(n1, {}, []); + const { sqlStatements: pst2, hints, losses, error } = await push({ db, to: {} }); + + const st_02 = [ + 'DROP TABLE [users];', + ]; + + expect(st2).toStrictEqual(st_02); + expect(pst2).toStrictEqual(st_02); + expect(hints).toStrictEqual(["· You're about to delete non-empty [users] table"]); + expect(error).toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: drop column that is not empty', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, from, []); + const { sqlStatements: pst1 } = await push({ db, to: from }); + + const st_01 = [ + 'CREATE TABLE [users] (\n\t[id] int,\n\t[name] varchar(200)\n);\n', + ]; + + expect(st1).toStrictEqual(st_01); + expect(pst1).toStrictEqual(st_01); + + await db.query(`INSERT INTO [users] ([id], [name]) VALUES (1, 'Alex'), (2, 'Andrew');`); + + const to = { + users: mssqlTable('users', { + id: int(), + }), + }; + + const { sqlStatements: st2 } = await diff(from, to, []); + const { sqlStatements: pst2, hints, losses, error } = await push({ db, to: to }); + + const st_02 = [ + 'ALTER TABLE [users] DROP COLUMN [name];', + ]; + + expect(st2).toStrictEqual(st_02); + expect(pst2).toStrictEqual(st_02); + expect(hints).toStrictEqual(["· You're about to delete non-empty [name] column in [users] table"]); + expect(error).toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: drop column that is empty', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, from, []); + const { sqlStatements: pst1 } = await push({ db, to: from }); + + const st_01 = [ + 'CREATE TABLE [users] (\n\t[id] int,\n\t[name] varchar(200)\n);\n', + ]; + + expect(st1).toStrictEqual(st_01); + expect(pst1).toStrictEqual(st_01); + + const to = { + users: mssqlTable('users', { + id: int(), + }), + }; + + const { sqlStatements: st2 } = await diff(from, to, []); + const { sqlStatements: pst2, hints, losses, error } = await push({ db, to: to }); + + const st_02 = [ + 'ALTER TABLE [users] DROP COLUMN [name];', + ]; + + expect(st2).toStrictEqual(st_02); + expect(pst2).toStrictEqual(st_02); + expect(hints).toStrictEqual([]); + expect(error).toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: drop schema', async (t) => { + const users = mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }); + const from = { + mySchema: mssqlSchema('my_schema'), + users, + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, from, []); + const { sqlStatements: pst1 } = await push({ db, to: from }); + + const st_01 = [ + `CREATE SCHEMA [my_schema];\n`, + 'CREATE TABLE [users] (\n\t[id] int,\n\t[name] varchar(200)\n);\n', + ]; + + expect(st1).toStrictEqual(st_01); + expect(pst1).toStrictEqual(st_01); + + const to = { + users, + }; + + const { sqlStatements: st2 } = await diff(from, to, []); + const { sqlStatements: pst2, hints, losses, error } = await push({ db, to: to }); + + const st_02 = [ + 'DROP SCHEMA [my_schema];\n', + ]; + + expect(st2).toStrictEqual(st_02); + expect(pst2).toStrictEqual(st_02); + expect(hints).toStrictEqual([]); + expect(error).toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: drop schema with tables', async (t) => { + // user has a schema in db with table + await db.query(`CREATE SCHEMA test;`); + await db.query(`CREATE TABLE test.test(id int);`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + const { sqlStatements: pst1, hints, losses, error } = await push({ db, to: to }); + + const st_01 = [ + `CREATE TABLE [users] (\n\t[id] int,\n\t[name] varchar(200)\n);\n`, + 'DROP TABLE [test].[test];', + `DROP SCHEMA [test];\n`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(hints).toStrictEqual([`· You're about to delete [test] schema with 1 table`]); + expect(error).toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: add column', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id], [name]) VALUES (1, 'Alex'), (2, 'Andrew');`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + age: int(), + }), + }; + const { sqlStatements: pst1, hints, losses, error } = await push({ db, to: to }); + + const st_01 = [ + `ALTER TABLE [users] ADD [age] int;`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(hints).toStrictEqual([]); + expect(error).toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: add column with not null without default', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id], [name]) VALUES (1, 'Alex'), (2, 'Andrew');`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + age: int().notNull(), + }), + }; + const { sqlStatements: pst1, hints, losses, error } = await push({ + db, + to: to, + expectError: true, + ignoreSubsequent: true, + }); + + const st_01 = [ + `ALTER TABLE [users] ADD [age] int NOT NULL;`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(hints).toStrictEqual([ + `· You're about to add not-null [age] column without default value to a non-empty [users] table`, + ]); + expect(error).not.toBeNull(); + expect(losses).toStrictEqual([`DELETE FROM [users];`]); + + // await expect(push({ db, to: to, force: true, ignoreSubsequent: true })).resolves.not.toThrowError(); +}); +test('hints + losses: add column with not null without default #2', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id], [name]) VALUES (1, 'Alex'), (2, 'Andrew');`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + age: int().notNull(), + }), + }; + // const { sqlStatements: pst1, hints, losses, error } = await push({ + // db, + // to: to, + // expectError: true, + // ignoreSubsequent: true, + // }); + + // const st_01 = [ + // `ALTER TABLE [users] ADD [age] int NOT NULL;`, + // ]; + + // expect(pst1).toStrictEqual(st_01); + // expect(hints).toStrictEqual([ + // `· You're about to add not-null [age] column without default value to a non-empty [users] table`, + // ]); + // expect(error).not.toBeNull(); + // expect(losses).toStrictEqual([`DELETE FROM [users];`]); + + await expect(push({ db, to: to, force: true, ignoreSubsequent: true })).resolves.not.toThrowError(); +}); + +test('hints + losses: add column with not null with default', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id], [name]) VALUES (1, 'Alex'), (2, 'Andrew');`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + age: int().notNull().default(1), + }), + }; + const { sqlStatements: pst1, hints, losses, error } = await push({ db, to: to }); + + const st_01 = [ + `ALTER TABLE [users] ADD [age] int NOT NULL CONSTRAINT [users_age_default] DEFAULT ((1));`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(hints).toStrictEqual([]); + expect(error).toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: alter column add not null without default', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id]) VALUES (1), (2);`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }).notNull(), + }), + }; + const { sqlStatements: pst1, hints, losses, error } = await push({ + db, + to: to, + expectError: true, + ignoreSubsequent: true, + }); + + const st_01 = [ + `ALTER TABLE [users] ALTER COLUMN [name] varchar(200) NOT NULL;`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(hints).toStrictEqual([ + `· You're about to add not-null to [name] column without default value to a non-empty [users] table`, + ]); + expect(error).not.toBeNull(); + expect(losses).toStrictEqual([`DELETE FROM [users];`]); + + // await expect(push({ db, to: to, force: true, ignoreSubsequent: true })).resolves.not.toThrowError(); +}); + +test('hints + losses: alter column add not null without default #2', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id]) VALUES (1), (2);`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }).notNull(), + }), + }; + // const { sqlStatements: pst1, hints, losses, error } = await push({ + // db, + // to: to, + // expectError: true, + // ignoreSubsequent: true, + // }); + + // const st_01 = [ + // `ALTER TABLE [users] ALTER COLUMN [name] varchar(200) NOT NULL;`, + // ]; + + // expect(pst1).toStrictEqual(st_01); + // expect(hints).toStrictEqual([ + // `· You're about to add not-null to [name] column without default value to a non-empty [users] table`, + // ]); + // expect(error).not.toBeNull(); + // expect(losses).toStrictEqual([`DELETE FROM [users];`]); + + await expect(push({ db, to: to, force: true, ignoreSubsequent: true })).resolves.not.toThrowError(); +}); + +// TODO +// this should definitely fail +// MSSQL does not support altering column for adding default +// not possible +// !CONSTRAINT DEFAULT ...!; +// +// Even if to try change data type + add default + add not null +// MSSQL will not update existing NULLS to defaults, so this will not work +// Should add hints i believe for generate and push +test('hints + losses: alter column add not null with default', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id]) VALUES (1), (2);`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }).notNull().default('1'), + }), + }; + const { sqlStatements: pst1, hints, losses, error } = await push({ + db, + to: to, + expectError: true, + ignoreSubsequent: true, + }); + + const st_01 = [ + `ALTER TABLE [users] ALTER COLUMN [name] varchar(200) NOT NULL;`, + `ALTER TABLE [users] ADD CONSTRAINT [users_name_default] DEFAULT ('1') FOR [name];`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(hints).toStrictEqual([]); + expect(error).not.toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: add unique to column #1', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id], [name]) VALUES (1, 'Alex'), (2, 'Andrew');`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }).unique(), + }), + }; + const { sqlStatements: pst1, hints, losses, error } = await push({ db, to: to }); + + const st_01 = [ + `ALTER TABLE [users] ADD CONSTRAINT [users_name_key] UNIQUE([name]);`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(error).toBeNull(); + expect(losses).toStrictEqual([]); +}); + +test('hints + losses: add unique to column #2', async (t) => { + const from = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }), + }), + }; + + await push({ db, to: from }); + + await db.query(`INSERT INTO [users] ([id], [name]) VALUES (1, 'Alex'), (2, 'Alex');`); + + const to = { + users: mssqlTable('users', { + id: int(), + name: varchar({ length: 200 }).unique(), + }), + }; + const { sqlStatements: pst1, hints, losses, error } = await push({ + db, + to: to, + expectError: true, + ignoreSubsequent: true, + }); + + const st_01 = [ + `ALTER TABLE [users] ADD CONSTRAINT [users_name_key] UNIQUE([name]);`, + ]; + + expect(pst1).toStrictEqual(st_01); + expect(error).not.toBeNull(); + expect(losses).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/mssql/schemas.test.ts b/drizzle-kit/tests/mssql/schemas.test.ts new file mode 100644 index 0000000000..c267961a78 --- /dev/null +++ b/drizzle-kit/tests/mssql/schemas.test.ts @@ -0,0 +1,137 @@ +import { mssqlSchema } from 'drizzle-orm/mssql-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('add schema #1', async () => { + const to = { + devSchema: mssqlSchema('dev'), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = ['CREATE SCHEMA [dev];\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add schema #2', async () => { + const from = { + devSchema: mssqlSchema('dev'), + }; + const to = { + devSchema: mssqlSchema('dev'), + devSchema2: mssqlSchema('dev2'), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = ['CREATE SCHEMA [dev2];\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('delete schema #1', async () => { + const from = { + devSchema: mssqlSchema('dev'), + }; + + const { sqlStatements: st } = await diff(from, {}, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: {} }); + + const st0 = ['DROP SCHEMA [dev];\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('delete schema #2', async () => { + const from = { + devSchema: mssqlSchema('dev'), + devSchema2: mssqlSchema('dev2'), + }; + const to = { + devSchema: mssqlSchema('dev'), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = ['DROP SCHEMA [dev2];\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(pst); +}); + +// TODO add log to console that it is not possible? +test('rename schema #1', async () => { + const from = { + devSchema: mssqlSchema('dev'), + }; + + const to = { + devSchema2: mssqlSchema('dev2'), + }; + + const { sqlStatements: st } = await diff(from, to, ['dev->dev2']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dev->dev2'], ignoreSubsequent: true }); + + const st0 = [`/** + * ⚠️ Renaming schemas is not supported in SQL Server (MSSQL), + * and therefore is not supported in Drizzle ORM at this time + * + * SQL Server does not provide a built-in command to rename a schema directly. + * Workarounds involve creating a new schema and migrating objects manually + */`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename schema #2', async () => { + const from = { + devSchema: mssqlSchema('dev'), + devSchema1: mssqlSchema('dev1'), + }; + const to = { + devSchema: mssqlSchema('dev'), + devSchema2: mssqlSchema('dev2'), + }; + + const { sqlStatements: st } = await diff(from, to, ['dev1->dev2']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dev1->dev2'], ignoreSubsequent: true }); + + const st0 = [`/** + * ⚠️ Renaming schemas is not supported in SQL Server (MSSQL), + * and therefore is not supported in Drizzle ORM at this time + * + * SQL Server does not provide a built-in command to rename a schema directly. + * Workarounds involve creating a new schema and migrating objects manually + */`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/mssql/tables.test.ts b/drizzle-kit/tests/mssql/tables.test.ts new file mode 100644 index 0000000000..41a001f368 --- /dev/null +++ b/drizzle-kit/tests/mssql/tables.test.ts @@ -0,0 +1,953 @@ +import { sql } from 'drizzle-orm'; +import { + foreignKey, + index, + int, + mssqlSchema, + mssqlTable, + mssqlTableCreator, + primaryKey, + text, + unique, + uniqueIndex, + varchar, +} from 'drizzle-orm/mssql-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('add table #1', async () => { + const to = { + users: mssqlTable('users', { id: int() }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = ['CREATE TABLE [users] (\n\t[id] int\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #2', async () => { + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + 'CREATE TABLE [users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #3', async () => { + const to = { + users: mssqlTable('users', { + id: int('id'), + }, (t) => [primaryKey({ name: 'users_pk', columns: [t.id] })]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + 'CREATE TABLE [users] (\n' + + '\t[id] int,\n' + + '\tCONSTRAINT [users_pk] PRIMARY KEY([id])\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #4', async () => { + const to = { + users: mssqlTable('users', { id: int() }), + posts: mssqlTable('posts', { id: int() }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + 'CREATE TABLE [users] (\n\t[id] int\n);\n', + 'CREATE TABLE [posts] (\n\t[id] int\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #5', async () => { + const schema = mssqlSchema('folder'); + const from = { + schema, + }; + + const to = { + schema, + users: schema.table('users', { + id: int(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + 'CREATE TABLE [folder].[users] (\n\t[id] int\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #6', async () => { + const from = { + users1: mssqlTable('users1', { id: int() }), + }; + + const to = { + users2: mssqlTable('users2', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + 'CREATE TABLE [users2] (\n\t[id] int\n);\n', + 'DROP TABLE [users1];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #7', async () => { + const from = { + users1: mssqlTable('users1', { id: int() }), + }; + + const to = { + users: mssqlTable('users', { id: int() }), + users2: mssqlTable('users2', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'dbo.users1->dbo.users2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.users1->dbo.users2'] }); + + const st0 = [ + 'CREATE TABLE [users] (\n\t[id] int\n);\n', + `EXEC sp_rename 'users1', [users2];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique inline */ +test('add table #9', async () => { + const to = { + users: mssqlTable('users', { + name: varchar().unique(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + 'CREATE TABLE [users] (\n' + + '\t[name] varchar,\n' + + '\tCONSTRAINT [users_name_key] UNIQUE([name])\n' + + ');\n', + ]); + expect(pst).toStrictEqual([ + 'CREATE TABLE [users] (\n' + + '\t[name] varchar,\n' + + '\tCONSTRAINT [users_name_key] UNIQUE([name])\n' + + ');\n', + ]); +}); + +/* unique inline named */ +test('add table #10', async () => { + const from = {}; + const to = { + users: mssqlTable('users', { + name: varchar().unique('name_unique'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + `CREATE TABLE [users] (\n\t[name] varchar,\n\tCONSTRAINT [name_unique] UNIQUE([name])\n);\n`, + ]); + expect(pst).toStrictEqual([ + `CREATE TABLE [users] (\n\t[name] varchar,\n\tCONSTRAINT [name_unique] UNIQUE([name])\n);\n`, + ]); +}); + +/* unique default-named */ +test('add table #13', async () => { + const to = { + users: mssqlTable('users', { + name: varchar(), + }, (t) => [unique('users_name_key').on(t.name)]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + `CREATE TABLE [users] (\n\t[name] varchar,\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n`, + ]); + expect(pst).toStrictEqual([ + `CREATE TABLE [users] (\n\t[name] varchar,\n\tCONSTRAINT [users_name_key] UNIQUE([name])\n);\n`, + ]); +}); + +// reference +test('add table #14', async () => { + const company = mssqlTable('company', { + id: int().primaryKey(), + name: text(), + }); + + const to = { + company, + users: mssqlTable('users', { + company_id: int().references(() => company.id), + name: text(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, [], 'snake_case'); + const { sqlStatements: pst } = await push({ db, to: to, casing: 'snake_case' }); + + const st0 = [ + `CREATE TABLE [company] ( +\t[id] int, +\t[name] text, +\tCONSTRAINT [company_pkey] PRIMARY KEY([id]) +);\n`, + `CREATE TABLE [users] ( +\t[company_id] int, +\t[name] text +);\n`, + `ALTER TABLE [users] ADD CONSTRAINT [users_company_id_company_id_fk] FOREIGN KEY ([company_id]) REFERENCES [company]([id]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('multiproject schema add table #1', async () => { + const table = mssqlTableCreator((name) => `prefix_${name}`); + + const to = { + users: table('users', { + id: int('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + 'CREATE TABLE [prefix_users] (\n\t[id] int,\n\tCONSTRAINT [prefix_users_pkey] PRIMARY KEY([id])\n);\n', + ]); + expect(pst).toStrictEqual([ + 'CREATE TABLE [prefix_users] (\n\t[id] int,\n\tCONSTRAINT [prefix_users_pkey] PRIMARY KEY([id])\n);\n', + ]); +}); + +test('multiproject schema drop table #1', async () => { + const table = mssqlTableCreator((name) => `prefix_${name}`); + + const from = { + users: table('users', { + id: int('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(from, {}, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: {} }); + + expect(st).toStrictEqual(['DROP TABLE [prefix_users];']); + expect(pst).toStrictEqual(['DROP TABLE [prefix_users];']); +}); + +test('multiproject schema alter table name #1', async () => { + const table = mssqlTableCreator((name) => `prefix_${name}`); + + const from = { + users: table('users', { + id: int('id').primaryKey(), + }), + }; + const to = { + users1: table('users1', { + id: int('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'dbo.prefix_users->dbo.prefix_users1', + ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.prefix_users->dbo.prefix_users1'] }); + + const st0 = [ + "EXEC sp_rename 'prefix_users', [prefix_users1];", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add schema + table #1', async () => { + const schema = mssqlSchema('folder'); + + const to = { + schema, + users: schema.table('users', { + id: int(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + 'CREATE SCHEMA [folder];\n', + 'CREATE TABLE [folder].[users] (\n\t[id] int\n);\n', + ]); + expect(pst).toStrictEqual([ + 'CREATE SCHEMA [folder];\n', + 'CREATE TABLE [folder].[users] (\n\t[id] int\n);\n', + ]); +}); + +test('change schema with tables #1', async () => { + const schema = mssqlSchema('folder'); + const schema2 = mssqlSchema('folder2'); + const from = { + schema, + users: schema.table('users', { id: int() }), + }; + const to = { + schema2, + users: schema2.table('users', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, ['folder->folder2']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['folder->folder2'], ignoreSubsequent: true }); + + expect(st).toStrictEqual([`/** + * ⚠️ Renaming schemas is not supported in SQL Server (MSSQL), + * and therefore is not supported in Drizzle ORM at this time + * + * SQL Server does not provide a built-in command to rename a schema directly. + * Workarounds involve creating a new schema and migrating objects manually + */`]); + expect(pst).toStrictEqual([`/** + * ⚠️ Renaming schemas is not supported in SQL Server (MSSQL), + * and therefore is not supported in Drizzle ORM at this time + * + * SQL Server does not provide a built-in command to rename a schema directly. + * Workarounds involve creating a new schema and migrating objects manually + */`]); +}); + +test('change table schema #1', async () => { + const schema = mssqlSchema('folder'); + const from = { + schema, + users: mssqlTable('users', { id: int() }), + }; + const to = { + schema, + users: schema.table('users', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'dbo.users->folder.users', + ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.users->folder.users'] }); + + expect(st).toStrictEqual([`ALTER SCHEMA [folder] TRANSFER [dbo].[users];\n`]); + expect(pst).toStrictEqual([`ALTER SCHEMA [folder] TRANSFER [dbo].[users];\n`]); +}); + +test('change table schema #2', async () => { + const schema = mssqlSchema('folder'); + const from = { + schema, + users: schema.table('users', { id: int() }), + }; + const to = { + schema, + users: mssqlTable('users', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'folder.users->dbo.users', + ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['folder.users->dbo.users'] }); + + expect(st).toStrictEqual(['ALTER SCHEMA [dbo] TRANSFER [folder].[users];\n']); + expect(pst).toStrictEqual(['ALTER SCHEMA [dbo] TRANSFER [folder].[users];\n']); +}); + +test('change table schema #3', async () => { + const schema1 = mssqlSchema('folder1'); + const schema2 = mssqlSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', { id: int() }), + }; + const to = { + schema1, + schema2, + users: schema2.table('users', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'folder1.users->folder2.users', + ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['folder1.users->folder2.users'] }); + + expect(st).toStrictEqual(['ALTER SCHEMA [folder2] TRANSFER [folder1].[users];\n']); + expect(pst).toStrictEqual(['ALTER SCHEMA [folder2] TRANSFER [folder1].[users];\n']); +}); + +test('change table schema #4', async () => { + const schema1 = mssqlSchema('folder1'); + const schema2 = mssqlSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', { id: int() }), + }; + const to = { + schema1, + schema2, // add schema + users: schema2.table('users', { id: int() }), // move table + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'folder1.users->folder2.users', + ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['folder1.users->folder2.users'] }); + + expect(st).toStrictEqual([ + 'CREATE SCHEMA [folder2];\n', + 'ALTER SCHEMA [folder2] TRANSFER [folder1].[users];\n', + ]); + expect(pst).toStrictEqual([ + 'CREATE SCHEMA [folder2];\n', + 'ALTER SCHEMA [folder2] TRANSFER [folder1].[users];\n', + ]); +}); + +test('change table schema #5', async () => { + const schema1 = mssqlSchema('folder1'); + const schema2 = mssqlSchema('folder2'); + const from = { + schema1, // remove schema + users: schema1.table('users', { id: int() }), + }; + const to = { + schema2, // add schema + users: schema2.table('users', { id: int() }), // move table + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'folder1.users->folder2.users', + ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['folder1.users->folder2.users'] }); + + expect(st).toStrictEqual([ + 'CREATE SCHEMA [folder2];\n', + 'ALTER SCHEMA [folder2] TRANSFER [folder1].[users];\n', + 'DROP SCHEMA [folder1];\n', + ]); + expect(pst).toStrictEqual([ + 'CREATE SCHEMA [folder2];\n', + 'ALTER SCHEMA [folder2] TRANSFER [folder1].[users];\n', + 'DROP SCHEMA [folder1];\n', + ]); +}); + +test('change table schema #6', async () => { + const schema1 = mssqlSchema('folder1'); + const schema2 = mssqlSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', { id: int() }), + }; + const to = { + schema1, + schema2, + users: schema2.table('users2', { id: int() }), // rename and move table + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'folder1.users->folder2.users2', + ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['folder1.users->folder2.users2'] }); + + expect(st).toStrictEqual([ + `EXEC sp_rename 'folder1.users', [users2];`, + `ALTER SCHEMA [folder2] TRANSFER [folder1].[users2];\n`, + ]); + expect(pst).toStrictEqual([ + `EXEC sp_rename 'folder1.users', [users2];`, + `ALTER SCHEMA [folder2] TRANSFER [folder1].[users2];\n`, + ]); +}); + +test('change table schema #7', async () => { + const schema1 = mssqlSchema('folder1'); + const schema2 = mssqlSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', { id: int() }), + }; + const to = { + schema2, // rename schema + users: schema2.table('users2', { id: int() }), // rename table + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'folder1->folder2', + 'folder2.users->folder2.users2', + ]); + await push({ db, to: from }); + + expect(st).toStrictEqual([ + `/** + * ⚠️ Renaming schemas is not supported in SQL Server (MSSQL), + * and therefore is not supported in Drizzle ORM at this time + * + * SQL Server does not provide a built-in command to rename a schema directly. + * Workarounds involve creating a new schema and migrating objects manually + */`, + `EXEC sp_rename 'folder2.users', [users2];`, + ]); + await expect(push({ + db, + to: to, + renames: ['folder1->folder2', 'folder2.users->folder2.users2'], + })).rejects.toThrowError(); // no folder2.users to rename +}); + +test('drop table + rename schema #1', async () => { + const schema1 = mssqlSchema('folder1'); + const schema2 = mssqlSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', { id: int() }), + }; + const to = { + schema2, // rename schema + // drop table + }; + + const { sqlStatements: st } = await diff(from, to, ['folder1->folder2']); + await push({ db, to: from }); + + expect(st).toStrictEqual([ + `/** + * ⚠️ Renaming schemas is not supported in SQL Server (MSSQL), + * and therefore is not supported in Drizzle ORM at this time + * + * SQL Server does not provide a built-in command to rename a schema directly. + * Workarounds involve creating a new schema and migrating objects manually + */`, + `DROP TABLE [folder2].[users];`, + ]); + await expect(push({ + db, + to: to, + renames: ['folder1->folder2', 'folder2.users->folder2.users2'], + })).rejects.toThrowError(); // no folder2.users to drop +}); + +test('drop tables with fk constraint', async () => { + const table1 = mssqlTable('table1', { + column1: int().primaryKey(), + }); + const table2 = mssqlTable('table2', { + column1: int().primaryKey(), + column2: int().references(() => table1.column1), + }); + const schema1 = { table1, table2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE [table1] (\n\t[column1] int,\n\tCONSTRAINT [table1_pkey] PRIMARY KEY([column1])\n);\n', + 'CREATE TABLE [table2] (\n\t[column1] int,\n\t[column2] int,\n\tCONSTRAINT [table2_pkey] PRIMARY KEY([column1])\n);\n', + 'ALTER TABLE [table2] ADD CONSTRAINT [table2_column2_table1_column1_fk] FOREIGN KEY ([column2]) REFERENCES [table1]([column1]);', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, {}, []); + const { sqlStatements: pst2 } = await push({ db, to: {} }); + + const expectedSt2 = [ + 'ALTER TABLE [table2] DROP CONSTRAINT [table2_column2_table1_column1_fk];\n', + 'DROP TABLE [table1];', + 'DROP TABLE [table2];', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('composite primary key', async () => { + const from = {}; + const to = { + table: mssqlTable('works_to_creators', { + workId: int('work_id').notNull(), + creatorId: int('creator_id').notNull(), + classification: varchar('classification').notNull(), + }, (t) => [ + primaryKey({ columns: [t.workId, t.creatorId, t.classification] }), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + 'CREATE TABLE [works_to_creators] (\n\t[work_id] int,\n\t[creator_id] int,\n\t[classification] varchar,\n\tCONSTRAINT [works_to_creators_pkey] PRIMARY KEY([work_id],[creator_id],[classification])\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add column before creating unique constraint', async () => { + const from = { + table: mssqlTable('table', { + id: int('id').primaryKey(), + }), + }; + const to = { + table: mssqlTable('table', { + id: int('id').primaryKey(), + name: varchar('name', { length: 255 }).notNull(), + }, (t) => [unique('uq').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + 'ALTER TABLE [table] ADD [name] varchar(255) NOT NULL;', + 'ALTER TABLE [table] ADD CONSTRAINT [uq] UNIQUE([name]);', + ]); + + expect(pst).toStrictEqual([ + 'ALTER TABLE [table] ADD [name] varchar(255) NOT NULL;', + 'ALTER TABLE [table] ADD CONSTRAINT [uq] UNIQUE([name]);', + ]); +}); + +test('alter composite primary key', async () => { + const from = { + table: mssqlTable('table', { + col1: int('col1').notNull(), + col2: int('col2').notNull(), + col3: varchar('col3').notNull(), + }, (t) => [ + primaryKey({ + name: 'table_pk', + columns: [t.col1, t.col2], + }), + ]), + }; + const to = { + table: mssqlTable('table', { + col1: int('col1').notNull(), + col2: int('col2').notNull(), + col3: varchar('col3').notNull(), + }, (t) => [ + primaryKey({ + name: 'table_pk', + columns: [t.col2, t.col3], + }), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + 'ALTER TABLE [table] DROP CONSTRAINT [table_pk];', + 'ALTER TABLE [table] ADD CONSTRAINT [table_pk] PRIMARY KEY ([col2],[col3]);', + ]); + expect(pst).toStrictEqual([ + 'ALTER TABLE [table] DROP CONSTRAINT [table_pk];', + 'ALTER TABLE [table] ADD CONSTRAINT [table_pk] PRIMARY KEY ([col2],[col3]);', + ]); +}); + +test('add index', async () => { + const from = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 255 }).notNull(), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 255 }).notNull(), + }, (t) => [index('some_index_name').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + 'CREATE INDEX [some_index_name] ON [users] ([name]);', + ]); + expect(pst).toStrictEqual([ + 'CREATE INDEX [some_index_name] ON [users] ([name]);', + ]); +}); + +test('add unique index', async () => { + const from = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 255 }).notNull(), + }), + }; + const to = { + users: mssqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 255 }).notNull(), + }, (t) => [uniqueIndex('some_index_name').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([ + 'CREATE UNIQUE INDEX [some_index_name] ON [users] ([name]);', + ]); + expect(pst).toStrictEqual([ + 'CREATE UNIQUE INDEX [some_index_name] ON [users] ([name]);', + ]); +}); + +test('optional db aliases (snake case)', async () => { + const from = {}; + + const t1 = mssqlTable( + 't1', + { + t1Id1: int().notNull().primaryKey(), + t1Col2: int().notNull(), + t1Col3: int().notNull(), + t2Ref: int().notNull().references(() => t2.t2Id), + t1Uni: int().notNull(), + t1UniIdx: int().notNull(), + t1Idx: int().notNull(), + }, + (table) => [ + unique('t1_uni').on(table.t1Uni), + uniqueIndex('t1_uni_idx').on(table.t1UniIdx), + index('t1_idx').on(table.t1Idx).where(sql`${table.t1Idx} > 0`), + foreignKey({ + columns: [table.t1Col2, table.t1Col3], + foreignColumns: [t3.t3Id1, t3.t3Id2], + }), + ], + ); + + const t2 = mssqlTable( + 't2', + { + t2Id: int().primaryKey(), + }, + ); + + const t3 = mssqlTable( + 't3', + { + t3Id1: int(), + t3Id2: int(), + }, + (table) => [primaryKey({ columns: [table.t3Id1, table.t3Id2] })], + ); + + const to = { + t1, + t2, + t3, + }; + + const { sqlStatements: st } = await diff(from, to, [], 'snake_case'); + await push({ db, to: from, casing: 'snake_case' }); + const { sqlStatements: pst } = await push({ db, to: to, casing: 'snake_case' }); + + const st1 = `CREATE TABLE [t1] ( + [t1_id1] int, + [t1_col2] int NOT NULL, + [t1_col3] int NOT NULL, + [t2_ref] int NOT NULL, + [t1_uni] int NOT NULL, + [t1_uni_idx] int NOT NULL, + [t1_idx] int NOT NULL, + CONSTRAINT [t1_pkey] PRIMARY KEY([t1_id1]), + CONSTRAINT [t1_uni] UNIQUE([t1_uni]) +); +`; + + const st2 = `CREATE TABLE [t2] ( + [t2_id] int, + CONSTRAINT [t2_pkey] PRIMARY KEY([t2_id]) +); +`; + + const st3 = `CREATE TABLE [t3] ( + [t3_id1] int, + [t3_id2] int, + CONSTRAINT [t3_pkey] PRIMARY KEY([t3_id1],[t3_id2]) +); +`; + + const st4 = + `ALTER TABLE [t1] ADD CONSTRAINT [t1_t2_ref_t2_t2_id_fk] FOREIGN KEY ([t2_ref]) REFERENCES [t2]([t2_id]);`; + const st5 = + `ALTER TABLE [t1] ADD CONSTRAINT [t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fk] FOREIGN KEY ([t1_col2],[t1_col3]) REFERENCES [t3]([t3_id1],[t3_id2]);`; + + const st6 = `CREATE UNIQUE INDEX [t1_uni_idx] ON [t1] ([t1_uni_idx]);`; + + const st7 = `CREATE INDEX [t1_idx] ON [t1] ([t1_idx]) WHERE [t1].[t1_idx] > 0;`; + + expect(st).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]); + expect(pst).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]); +}); + +test('optional db aliases (camel case)', async () => { + const from = {}; + + const t1 = mssqlTable('t1', { + t1_id1: int().notNull().primaryKey(), + t1_col2: int().notNull(), + t1_col3: int().notNull(), + t2_ref: int().notNull().references(() => t2.t2_id), + t1_uni: int().notNull(), + t1_uni_idx: int().notNull(), + t1_idx: int().notNull(), + }, (table) => [ + unique('t1Uni').on(table.t1_uni), + uniqueIndex('t1UniIdx').on(table.t1_uni_idx), + index('t1Idx').on(table.t1_idx).where(sql`${table.t1_idx} > 0`), + foreignKey({ + columns: [table.t1_col2, table.t1_col3], + foreignColumns: [t3.t3_id1, t3.t3_id2], + }), + ]); + + const t2 = mssqlTable('t2', { + t2_id: int().primaryKey(), + }); + + const t3 = mssqlTable('t3', { + t3_id1: int(), + t3_id2: int(), + }, (table) => [primaryKey({ columns: [table.t3_id1, table.t3_id2] })]); + + const to = { + t1, + t2, + t3, + }; + + const { sqlStatements: st } = await diff(from, to, [], 'camelCase'); + await push({ db, to: from, casing: 'camelCase' }); + const { sqlStatements: pst } = await push({ db, to: to, casing: 'camelCase' }); + + const st1 = `CREATE TABLE [t1] ( + [t1Id1] int, + [t1Col2] int NOT NULL, + [t1Col3] int NOT NULL, + [t2Ref] int NOT NULL, + [t1Uni] int NOT NULL, + [t1UniIdx] int NOT NULL, + [t1Idx] int NOT NULL, + CONSTRAINT [t1_pkey] PRIMARY KEY([t1Id1]), + CONSTRAINT [t1Uni] UNIQUE([t1Uni]) +); +`; + + const st2 = `CREATE TABLE [t2] ( + [t2Id] int, + CONSTRAINT [t2_pkey] PRIMARY KEY([t2Id]) +); +`; + + const st3 = `CREATE TABLE [t3] ( + [t3Id1] int, + [t3Id2] int, + CONSTRAINT [t3_pkey] PRIMARY KEY([t3Id1],[t3Id2]) +); +`; + + const st4 = `ALTER TABLE [t1] ADD CONSTRAINT [t1_t2Ref_t2_t2Id_fk] FOREIGN KEY ([t2Ref]) REFERENCES [t2]([t2Id]);`; + const st5 = + `ALTER TABLE [t1] ADD CONSTRAINT [t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fk] FOREIGN KEY ([t1Col2],[t1Col3]) REFERENCES [t3]([t3Id1],[t3Id2]);`; + + const st6 = `CREATE UNIQUE INDEX [t1UniIdx] ON [t1] ([t1UniIdx]);`; + + const st7 = `CREATE INDEX [t1Idx] ON [t1] ([t1Idx]) WHERE [t1].[t1Idx] > 0;`; + + expect(st).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]); + expect(pst).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]); +}); diff --git a/drizzle-kit/tests/mssql/views.test.ts b/drizzle-kit/tests/mssql/views.test.ts new file mode 100644 index 0000000000..9ec215776e --- /dev/null +++ b/drizzle-kit/tests/mssql/views.test.ts @@ -0,0 +1,892 @@ +import { sql } from 'drizzle-orm'; +import { int, mssqlSchema, mssqlTable, mssqlView } from 'drizzle-orm/mssql-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create table and view #1', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: mssqlView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + `CREATE TABLE [users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [some_view] AS (select [id] from [users]);`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #2', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + `CREATE TABLE [users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [some_view] AS (SELECT * FROM [users]);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #3', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: mssqlView('some_view1', { id: int('id') }).with({ + checkOption: true, + encryption: true, + schemaBinding: true, + viewMetadata: true, + }).as(sql`SELECT ${users.id} FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to, ignoreSubsequent: true }); // because of encryption + + const st0 = [ + `CREATE TABLE [users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [some_view1]\nWITH ENCRYPTION, SCHEMABINDING, VIEW_METADATA AS (SELECT [users].[id] FROM [dbo].[users])\nWITH CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #3_1', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: mssqlView('some_view1', { id: int('id') }).with({ + checkOption: true, + schemaBinding: true, + viewMetadata: true, + }).as(sql`SELECT ${users.id} FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to, ignoreSubsequent: true }); // because of encryption + + const st0 = [ + `CREATE TABLE [users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [some_view1]\nWITH SCHEMABINDING, VIEW_METADATA AS (SELECT [users].[id] FROM [dbo].[users])\nWITH CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #4', async () => { + const schema = mssqlSchema('new_schema'); + + const users = schema.table('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + schema, + users: users, + view1: schema.view('some_view1', { id: int('id') }).with({ + checkOption: true, + encryption: true, + schemaBinding: true, + viewMetadata: true, + }).as(sql`SELECT ${users.id} FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to, ignoreSubsequent: true }); // because of encryption + + const st0 = [ + `CREATE SCHEMA [new_schema];\n`, + `CREATE TABLE [new_schema].[users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [new_schema].[some_view1]\nWITH ENCRYPTION, SCHEMABINDING, VIEW_METADATA AS (SELECT [new_schema].[users].[id] FROM [new_schema].[users])\nWITH CHECK OPTION;`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #4_1', async () => { + const schema = mssqlSchema('new_schema'); + + const users = schema.table('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + schema, + users: users, + view1: schema.view('some_view1', { id: int('id') }).with({ + checkOption: true, + schemaBinding: true, + viewMetadata: true, + }).as(sql`SELECT ${users.id} FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to, ignoreSubsequent: true }); // because of encryption + + const st0 = [ + `CREATE SCHEMA [new_schema];\n`, + `CREATE TABLE [new_schema].[users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [new_schema].[some_view1]\nWITH SCHEMABINDING, VIEW_METADATA AS (SELECT [new_schema].[users].[id] FROM [new_schema].[users])\nWITH CHECK OPTION;`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #4', async () => { + const schema = mssqlSchema('new_schema'); + + const users = schema.table('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + schema, + users: users, + view1: schema.view('some_view1', { id: int('id') }).with({ + checkOption: true, + encryption: true, + schemaBinding: true, + viewMetadata: true, + }).as(sql`SELECT ${users.id} FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to, ignoreSubsequent: true }); + + const st0 = [ + `CREATE SCHEMA [new_schema];\n`, + `CREATE TABLE [new_schema].[users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [new_schema].[some_view1]\nWITH ENCRYPTION, SCHEMABINDING, VIEW_METADATA AS (SELECT [new_schema].[users].[id] FROM [new_schema].[users])\nWITH CHECK OPTION;`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #4_1', async () => { + const schema = mssqlSchema('new_schema'); + + const users = schema.table('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + schema, + users: users, + view1: schema.view('some_view1', { id: int('id') }).with({ + checkOption: true, + schemaBinding: true, + viewMetadata: true, + }).as(sql`SELECT ${users.id} FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to, ignoreSubsequent: true }); + + const st0 = [ + `CREATE SCHEMA [new_schema];\n`, + `CREATE TABLE [new_schema].[users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [new_schema].[some_view1]\nWITH SCHEMABINDING, VIEW_METADATA AS (SELECT [new_schema].[users].[id] FROM [new_schema].[users])\nWITH CHECK OPTION;`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #5', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * FROM ${users}`), + view2: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * FROM ${users}`), + }; + + // view_name_duplicate + await expect(diff({}, to, [])).rejects.toThrow(); + await expect(push({ db, to: to })).rejects.toThrow(); +}); + +test('create table and view #6', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: mssqlView('some_view', { id: int('id') }).with({ checkOption: true }).as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [ + `CREATE TABLE [users] (\n\t[id] int,\n\tCONSTRAINT [users_pkey] PRIMARY KEY([id])\n);\n`, + `CREATE VIEW [some_view] AS (SELECT * FROM [users])\nWITH CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create view with existing flag', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + }; + + const to = { + users: users, + view1: mssqlView('some_view', { id: int('id') }).with({ checkOption: true }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('drop view #1', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`DROP VIEW [some_view];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view with existing flag', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).existing(), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('rename view #1', async () => { + const users = mssqlTable('users', { id: int() }); + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * FROM [users]`), + }; + + const to = { + users, + view: mssqlView('new_some_view', { id: int('id') }).as(sql`SELECT * FROM [users]`), + }; + + const { sqlStatements: st } = await diff(from, to, ['dbo.some_view->dbo.new_some_view']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.some_view->dbo.new_some_view'] }); + + const st0 = [`EXEC sp_rename 'some_view', [new_some_view];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename view with existing flag', async () => { + const from = { + view: mssqlView('some_view', { id: int('id') }).existing(), + }; + + const to = { + view: mssqlView('new_some_view', { id: int('id') }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, ['dbo.some_view->dbo.new_some_view']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.some_view->dbo.new_some_view'] }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('view alter schema', async () => { + const schema = mssqlSchema('new_schema'); + const users = mssqlTable('users', { id: int() }); + + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * FROM [users]`), + }; + + const to = { + users, + schema, + view: schema.view('some_view', { id: int('id') }).as(sql`SELECT * FROM [users]`), + }; + + const { sqlStatements: st } = await diff(from, to, ['dbo.some_view->new_schema.some_view']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.some_view->new_schema.some_view'] }); + + const st0 = [`CREATE SCHEMA [new_schema];\n`, `ALTER SCHEMA [new_schema] TRANSFER [some_view];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('view alter schema with existing flag', async () => { + const schema = mssqlSchema('new_schema'); + + const from = { + view: mssqlView('some_view', { id: int('id') }).existing(), + }; + + const to = { + schema, + view: schema.view('some_view', { id: int('id') }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, ['dbo.some_view->new_schema.some_view']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.some_view->new_schema.some_view'] }); + + const st0 = [`CREATE SCHEMA [new_schema];\n`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with option to view #1', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view').as((qb) => qb.select().from(users)), + }; + + const to = { + users, + view: mssqlView('some_view').with({ encryption: true }).as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`ALTER VIEW [some_view]\nWITH ENCRYPTION AS (select [id] from [users]);`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with option to view with existing flag', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', {}).existing(), + }; + + const to = { + users, + view: mssqlView('some_view', {}).with({ schemaBinding: true }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('drop with option from view #1', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view').with({ checkOption: true, schemaBinding: true }).as(( + qb, + ) => qb.select().from(users)), + }; + + const to = { + users, + view: mssqlView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`ALTER VIEW [some_view] AS (select [id] from [users]);`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter definition', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view').with().as(( + qb, + ) => qb.select().from(users)), + }; + + const to = { + users, + view: mssqlView('some_view').as((qb) => qb.select().from(users).where(sql`1=1`)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`ALTER VIEW [some_view] AS (select [id] from [users] where 1=1);`]; + expect(st).toStrictEqual(st0); + // no changes on definition alter for push + expect(pst).toStrictEqual([]); +}); + +test('alter options multistep', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view').with({ checkOption: true, schemaBinding: true, viewMetadata: true }) + .as(( + qb, + ) => qb.select().from(users)), + }; + + const to = { + users, + view: mssqlView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st, next: n1 } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`ALTER VIEW [some_view] AS (select [id] from [users]);`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + const to2 = { + users, + view: mssqlView('some_view').with({ checkOption: true, schemaBinding: true, viewMetadata: true }) + .as(( + qb, + ) => qb.select().from(users)), + }; + const { sqlStatements: st_2, next: n2 } = await diff(n1, to2, []); + const { sqlStatements: pst_2 } = await push({ db, to: to2 }); + + const st2 = [ + `ALTER VIEW [some_view]\nWITH SCHEMABINDING, VIEW_METADATA AS (select [id] from [dbo].[users])\nWITH CHECK OPTION;`, + ]; + expect(st_2).toStrictEqual(st2); + expect(pst_2).toStrictEqual(st2); + + // Alter definition + const to3 = { + users, + view: mssqlView('some_view').with({ checkOption: true, schemaBinding: true, viewMetadata: true }) + .as(( + qb, + ) => qb.select().from(users).where(sql`1=1`)), + }; + const { sqlStatements: st_3 } = await diff(n2, to3, []); + const { sqlStatements: pst_3 } = await push({ db, to: to3 }); + + const st3 = [ + `ALTER VIEW [some_view]\nWITH SCHEMABINDING, VIEW_METADATA AS (select [id] from [dbo].[users] where 1=1)\nWITH CHECK OPTION;`, + ]; + expect(st_3).toStrictEqual(st3); + expect(pst_3).toStrictEqual([]); +}); + +test('alter view_metadata', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view').with({ viewMetadata: true }) + .as(( + qb, + ) => qb.select().from(users)), + }; + + const to = { + users, + view: mssqlView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st, next: n1 } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`ALTER VIEW [some_view] AS (select [id] from [users]);`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(n1.views.list()).toStrictEqual([ + { + checkOption: false, + definition: 'select [id] from [users]', + encryption: false, + entityType: 'views', + name: 'some_view', + schema: 'dbo', + schemaBinding: false, + viewMetadata: false, + }, + ]); + + const to2 = { + users, + view: mssqlView('some_view').with({ viewMetadata: true }) + .as(( + qb, + ) => qb.select().from(users)), + }; + const { sqlStatements: st_2, next: n2 } = await diff(n1, to2, []); + const { sqlStatements: pst_2 } = await push({ db, to: to2 }); + + const st2 = [ + `ALTER VIEW [some_view]\nWITH VIEW_METADATA AS (select [id] from [users]);`, + ]; + expect(st_2).toStrictEqual(st2); + expect(pst_2).toStrictEqual(st2); + expect(n2.views.list()).toStrictEqual([{ + checkOption: false, + definition: 'select [id] from [users]', + encryption: false, + entityType: 'views', + name: 'some_view', + schema: 'dbo', + schemaBinding: false, + viewMetadata: true, + }]); +}); + +test('drop with option from view with existing flag', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', {}).with({ encryption: true }) + .existing(), + }; + + const to = { + users, + view: mssqlView('some_view', {}).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('alter with option in view #1', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view').with({ checkOption: true, viewMetadata: true }).as((qb) => qb.select().from(users)), + }; + + const to = { + users, + view: mssqlView('some_view').with({ checkOption: true }).as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`ALTER VIEW [some_view] AS (select [id] from [users])\nWITH CHECK OPTION;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter with option in view with existing flag', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', {}).with({ checkOption: true, schemaBinding: true }).existing(), + }; + + const to = { + users, + view: mssqlView('some_view', {}).with({ checkOption: true }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('alter with option in view #2', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view').with({ checkOption: true }).as((qb) => qb.selectDistinct().from(users)), + }; + + const to = { + users, + view: mssqlView('some_view').with({ checkOption: false }).as(( + qb, + ) => qb.selectDistinct().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`ALTER VIEW [some_view] AS (select distinct [id] from [users]);`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter view ".as" value', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).with().as(sql`SELECT * from ${users}`), + }; + + const to = { + users, + view: mssqlView('some_view', { id: int('id') }).with().as(sql`SELECT [id] from ${users}`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = ['ALTER VIEW [some_view] AS (SELECT [id] from [users]);']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // do not trigger on push +}); + +test('existing flag', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).with().existing(), + }; + + const to = { + users, + view: mssqlView('some_view', { id: int('id') }).with().existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('drop existing flag', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).existing(), + }; + + const to = { + users, + view: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * from [users]`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`CREATE VIEW [some_view] AS (SELECT * from [users]);`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('set existing', async () => { + const users = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * from [users]`), + }; + + const to = { + users, + view: mssqlView('some_view', { id: int('id') }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to }); + + const st0 = [`DROP VIEW [some_view];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename view and alter view', async () => { + const users = mssqlTable('users', { + id: int(), + }); + + const from = { + users, + view: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * FROM [users]`), + }; + + const to = { + users, + view: mssqlView('new_some_view', { id: int('id') }).with({ checkOption: true }).as( + sql`SELECT * FROM [users]`, + ), + }; + + const { sqlStatements: st } = await diff(from, to, ['dbo.some_view->dbo.new_some_view']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.some_view->dbo.new_some_view'] }); + + const st0 = [ + `EXEC sp_rename 'some_view', [new_some_view];`, + `ALTER VIEW [new_some_view] AS (SELECT * FROM [users])\nWITH CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('moved schema and alter view', async () => { + const schema = mssqlSchema('my_schema'); + const users = mssqlTable('users', { + id: int(), + }); + + const from = { + users, + schema, + view: mssqlView('some_view', { id: int('id') }).as(sql`SELECT * FROM [users]`), + }; + + const to = { + users, + schema, + view: schema.view('some_view', { id: int('id') }).with({ checkOption: true }).as( + sql`SELECT * FROM [users]`, + ), + }; + + const { sqlStatements: st } = await diff(from, to, ['dbo.some_view->my_schema.some_view']); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['dbo.some_view->my_schema.some_view'] }); + + const st0 = [ + `ALTER SCHEMA [my_schema] TRANSFER [some_view];`, + `ALTER VIEW [my_schema].[some_view] AS (SELECT * FROM [users])\nWITH CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/mysql-checks.test.ts b/drizzle-kit/tests/mysql-checks.test.ts deleted file mode 100644 index 82e7a51047..0000000000 --- a/drizzle-kit/tests/mysql-checks.test.ts +++ /dev/null @@ -1,291 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { check, int, mysqlTable, serial, varchar } from 'drizzle-orm/mysql-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasMysql } from './schemaDiffer'; - -test('create table with check', async (t) => { - const to = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasMysql({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'serial', - notNull: true, - primaryKey: false, - autoincrement: true, - }, - { - name: 'age', - type: 'int', - notNull: false, - primaryKey: false, - autoincrement: false, - }, - ], - compositePKs: [ - 'users_id;id', - ], - checkConstraints: ['some_check_name;\`users\`.\`age\` > 21'], - compositePkName: 'users_id', - uniqueConstraints: [], - schema: undefined, - internals: { - tables: {}, - indexes: {}, - }, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( -\t\`id\` serial AUTO_INCREMENT NOT NULL, -\t\`age\` int, -\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`), -\tCONSTRAINT \`some_check_name\` CHECK(\`users\`.\`age\` > 21) -);\n`); -}); - -test('add check contraint to existing table', async (t) => { - const from = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }), - }; - - const to = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_check_constraint', - tableName: 'users', - data: 'some_check_name;\`users\`.\`age\` > 21', - schema: '', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name\` CHECK (\`users\`.\`age\` > 21);`, - ); -}); - -test('drop check contraint in existing table', async (t) => { - const to = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }), - }; - - const from = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'delete_check_constraint', - tableName: 'users', - schema: '', - constraintName: 'some_check_name', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, - ); -}); - -test('rename check constraint', async (t) => { - const from = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('new_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - constraintName: 'some_check_name', - schema: '', - tableName: 'users', - type: 'delete_check_constraint', - }); - expect(statements[1]).toStrictEqual({ - data: 'new_check_name;\`users\`.\`age\` > 21', - schema: '', - tableName: 'users', - type: 'create_check_constraint', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`users\` ADD CONSTRAINT \`new_check_name\` CHECK (\`users\`.\`age\` > 21);`, - ); -}); - -test('alter check constraint', async (t) => { - const from = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('new_check_name', sql`${table.age} > 10`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - constraintName: 'some_check_name', - schema: '', - tableName: 'users', - type: 'delete_check_constraint', - }); - expect(statements[1]).toStrictEqual({ - data: 'new_check_name;\`users\`.\`age\` > 10', - schema: '', - tableName: 'users', - type: 'create_check_constraint', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`users\` ADD CONSTRAINT \`new_check_name\` CHECK (\`users\`.\`age\` > 10);`, - ); -}); - -test('alter multiple check constraints', async (t) => { - const from = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - name: varchar('name', { length: 255 }), - }, (table) => ({ - checkConstraint1: check('some_check_name_1', sql`${table.age} > 21`), - checkConstraint2: check('some_check_name_2', sql`${table.name} != 'Alex'`), - })), - }; - - const to = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - name: varchar('name', { length: 255 }), - }, (table) => ({ - checkConstraint1: check('some_check_name_3', sql`${table.age} > 21`), - checkConstraint2: check('some_check_name_4', sql`${table.name} != 'Alex'`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); - expect(statements.length).toBe(4); - expect(statements[0]).toStrictEqual({ - constraintName: 'some_check_name_1', - schema: '', - tableName: 'users', - type: 'delete_check_constraint', - }); - expect(statements[1]).toStrictEqual({ - constraintName: 'some_check_name_2', - schema: '', - tableName: 'users', - type: 'delete_check_constraint', - }); - expect(statements[2]).toStrictEqual({ - data: 'some_check_name_3;\`users\`.\`age\` > 21', - schema: '', - tableName: 'users', - type: 'create_check_constraint', - }); - expect(statements[3]).toStrictEqual({ - data: "some_check_name_4;\`users\`.\`name\` != 'Alex'", - schema: '', - tableName: 'users', - type: 'create_check_constraint', - }); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name_1\`;`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name_2\`;`, - ); - expect(sqlStatements[2]).toBe( - `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name_3\` CHECK (\`users\`.\`age\` > 21);`, - ); - expect(sqlStatements[3]).toBe( - `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name_4\` CHECK (\`users\`.\`name\` != \'Alex\');`, - ); -}); - -test('create checks with same names', async (t) => { - const to = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - age: int('age'), - name: varchar('name', { length: 255 }), - }, (table) => ({ - checkConstraint1: check('some_check_name', sql`${table.age} > 21`), - checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), - })), - }; - - await expect(diffTestSchemasMysql({}, to, [])).rejects.toThrowError(); -}); diff --git a/drizzle-kit/tests/mysql-schemas.test.ts b/drizzle-kit/tests/mysql-schemas.test.ts deleted file mode 100644 index 6776700e3e..0000000000 --- a/drizzle-kit/tests/mysql-schemas.test.ts +++ /dev/null @@ -1,156 +0,0 @@ -import { mysqlSchema, mysqlTable } from 'drizzle-orm/mysql-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasMysql } from './schemaDiffer'; - -// We don't manage databases(schemas) in MySQL with Drizzle Kit -test('add schema #1', async () => { - const to = { - devSchema: mysqlSchema('dev'), - }; - - const { statements } = await diffTestSchemasMysql({}, to, []); - - expect(statements.length).toBe(0); -}); - -test('add schema #2', async () => { - const from = { - devSchema: mysqlSchema('dev'), - }; - const to = { - devSchema: mysqlSchema('dev'), - devSchema2: mysqlSchema('dev2'), - }; - - const { statements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(0); -}); - -test('delete schema #1', async () => { - const from = { - devSchema: mysqlSchema('dev'), - }; - - const { statements } = await diffTestSchemasMysql(from, {}, []); - - expect(statements.length).toBe(0); -}); - -test('delete schema #2', async () => { - const from = { - devSchema: mysqlSchema('dev'), - devSchema2: mysqlSchema('dev2'), - }; - const to = { - devSchema: mysqlSchema('dev'), - }; - - const { statements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(0); -}); - -test('rename schema #1', async () => { - const from = { - devSchema: mysqlSchema('dev'), - }; - const to = { - devSchema2: mysqlSchema('dev2'), - }; - - const { statements } = await diffTestSchemasMysql(from, to, ['dev->dev2']); - - expect(statements.length).toBe(0); -}); - -test('rename schema #2', async () => { - const from = { - devSchema: mysqlSchema('dev'), - devSchema1: mysqlSchema('dev1'), - }; - const to = { - devSchema: mysqlSchema('dev'), - devSchema2: mysqlSchema('dev2'), - }; - - const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); - - expect(statements.length).toBe(0); -}); - -test('add table to schema #1', async () => { - const dev = mysqlSchema('dev'); - const from = {}; - const to = { - dev, - users: dev.table('users', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); - - expect(statements.length).toBe(0); -}); - -test('add table to schema #2', async () => { - const dev = mysqlSchema('dev'); - const from = { dev }; - const to = { - dev, - users: dev.table('users', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); - - expect(statements.length).toBe(0); -}); - -test('add table to schema #3', async () => { - const dev = mysqlSchema('dev'); - const from = { dev }; - const to = { - dev, - usersInDev: dev.table('users', {}), - users: mysqlTable('users', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - uniqueConstraints: [], - internals: { - tables: {}, - indexes: {}, - }, - compositePkName: '', - compositePKs: [], - checkConstraints: [], - }); -}); - -test('remove table from schema #1', async () => { - const dev = mysqlSchema('dev'); - const from = { dev, users: dev.table('users', {}) }; - const to = { - dev, - }; - - const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); - - expect(statements.length).toBe(0); -}); - -test('remove table from schema #2', async () => { - const dev = mysqlSchema('dev'); - const from = { dev, users: dev.table('users', {}) }; - const to = {}; - - const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); - - expect(statements.length).toBe(0); -}); diff --git a/drizzle-kit/tests/mysql-views.test.ts b/drizzle-kit/tests/mysql-views.test.ts deleted file mode 100644 index 39cd6c09e1..0000000000 --- a/drizzle-kit/tests/mysql-views.test.ts +++ /dev/null @@ -1,553 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { int, mysqlTable, mysqlView } from 'drizzle-orm/mysql-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasMysql } from './schemaDiffer'; - -test('create view #1', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - }; - const to = { - users: users, - view: mysqlView('some_view').as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'mysql_create_view', - name: 'some_view', - algorithm: 'undefined', - replace: false, - definition: 'select `id` from `users`', - withCheckOption: undefined, - sqlSecurity: 'definer', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE ALGORITHM = undefined -SQL SECURITY definer -VIEW \`some_view\` AS (select \`id\` from \`users\`);`); -}); - -test('create view #2', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - }; - const to = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'mysql_create_view', - name: 'some_view', - algorithm: 'merge', - replace: false, - definition: 'SELECT * FROM \`users\`', - withCheckOption: 'cascaded', - sqlSecurity: 'definer', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE ALGORITHM = merge -SQL SECURITY definer -VIEW \`some_view\` AS (SELECT * FROM \`users\`) -WITH cascaded CHECK OPTION;`); -}); - -test('create view with existing flag', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - }; - const to = { - users: users, - view: mysqlView('some_view', {}).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop view', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'drop_view', - name: 'some_view', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP VIEW \`some_view\`;`); -}); - -test('drop view with existing flag', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').existing(), - }; - const to = { - users: users, - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('rename view', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ - 'public.some_view->public.new_some_view', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_view', - nameFrom: 'some_view', - nameTo: 'new_some_view', - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); -}); - -test('rename view and alter meta options', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - view: mysqlView('new_some_view', {}).sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ - 'public.some_view->public.new_some_view', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'rename_view', - nameFrom: 'some_view', - nameTo: 'new_some_view', - }); - expect(statements[1]).toStrictEqual({ - algorithm: 'undefined', - columns: {}, - definition: 'SELECT * FROM `users`', - isExisting: false, - name: 'new_some_view', - sqlSecurity: 'definer', - type: 'alter_mysql_view', - withCheckOption: 'cascaded', - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); - expect(sqlStatements[1]).toBe(`ALTER ALGORITHM = undefined -SQL SECURITY definer -VIEW \`new_some_view\` AS SELECT * FROM \`users\` -WITH cascaded CHECK OPTION;`); -}); - -test('rename view with existing flag', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').existing(), - }; - const to = { - users: users, - view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ - 'public.some_view->public.new_some_view', - ]); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('add meta to view', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - algorithm: 'merge', - columns: {}, - definition: 'SELECT * FROM `users`', - isExisting: false, - name: 'some_view', - sqlSecurity: 'definer', - type: 'alter_mysql_view', - withCheckOption: 'cascaded', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = merge -SQL SECURITY definer -VIEW \`some_view\` AS SELECT * FROM \`users\` -WITH cascaded CHECK OPTION;`); -}); - -test('add meta to view with existing flag', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).existing(), - }; - const to = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('alter meta to view', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - algorithm: 'merge', - columns: {}, - definition: 'SELECT * FROM `users`', - isExisting: false, - name: 'some_view', - sqlSecurity: 'definer', - type: 'alter_mysql_view', - withCheckOption: 'cascaded', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = merge -SQL SECURITY definer -VIEW \`some_view\` AS SELECT * FROM \`users\` -WITH cascaded CHECK OPTION;`); -}); - -test('alter meta to view with existing flag', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').existing(), - }; - const to = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop meta from view', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - view: mysqlView('some_view', {}).as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - algorithm: 'undefined', - columns: {}, - definition: 'SELECT * FROM `users`', - isExisting: false, - name: 'some_view', - sqlSecurity: 'definer', - type: 'alter_mysql_view', - withCheckOption: undefined, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = undefined -SQL SECURITY definer -VIEW \`some_view\` AS SELECT * FROM \`users\`;`); -}); - -test('drop meta from view existing flag', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - - view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') - .withCheckOption('cascaded').existing(), - }; - const to = { - users: users, - view: mysqlView('some_view', {}).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('alter view ".as" value', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - algorithm: 'temptable', - definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', - name: 'some_view', - sqlSecurity: 'invoker', - type: 'mysql_create_view', - withCheckOption: 'cascaded', - replace: true, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE OR REPLACE ALGORITHM = temptable -SQL SECURITY invoker -VIEW \`some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) -WITH cascaded CHECK OPTION;`); -}); - -test('rename and alter view ".as" value', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ - 'public.some_view->public.new_some_view', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - nameFrom: 'some_view', - nameTo: 'new_some_view', - type: 'rename_view', - }); - expect(statements[1]).toStrictEqual({ - algorithm: 'temptable', - definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', - name: 'new_some_view', - sqlSecurity: 'invoker', - type: 'mysql_create_view', - withCheckOption: 'cascaded', - replace: true, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); - expect(sqlStatements[1]).toBe(`CREATE OR REPLACE ALGORITHM = temptable -SQL SECURITY invoker -VIEW \`new_some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) -WITH cascaded CHECK OPTION;`); -}); - -test('set existing', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), - }; - const to = { - users: users, - view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ - 'public.some_view->public.new_some_view', - ]); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop existing', async () => { - const users = mysqlTable('users', { - id: int('id').primaryKey().notNull(), - }); - - const from = { - users: users, - view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').existing(), - }; - const to = { - users: users, - view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') - .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), - }; - - const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ - 'public.some_view->public.new_some_view', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - name: 'new_some_view', - type: 'drop_view', - }); - expect(statements[1]).toStrictEqual({ - algorithm: 'temptable', - definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', - name: 'new_some_view', - sqlSecurity: 'invoker', - type: 'mysql_create_view', - withCheckOption: 'cascaded', - replace: false, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`DROP VIEW \`new_some_view\`;`); - expect(sqlStatements[1]).toBe(`CREATE ALGORITHM = temptable -SQL SECURITY invoker -VIEW \`new_some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) -WITH cascaded CHECK OPTION;`); -}); diff --git a/drizzle-kit/tests/mysql.test.ts b/drizzle-kit/tests/mysql.test.ts deleted file mode 100644 index 23781f41d6..0000000000 --- a/drizzle-kit/tests/mysql.test.ts +++ /dev/null @@ -1,898 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { - foreignKey, - index, - int, - json, - mysqlEnum, - mysqlSchema, - mysqlTable, - primaryKey, - serial, - text, - unique, - uniqueIndex, - varchar, -} from 'drizzle-orm/mysql-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasMysql } from './schemaDiffer'; - -test('add table #1', async () => { - const to = { - users: mysqlTable('users', {}), - }; - - const { statements } = await diffTestSchemasMysql({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - compositePKs: [], - internals: { - tables: {}, - indexes: {}, - }, - uniqueConstraints: [], - compositePkName: '', - checkConstraints: [], - }); -}); - -test('add table #2', async () => { - const to = { - users: mysqlTable('users', { - id: serial('id').primaryKey(), - }), - }; - - const { statements } = await diffTestSchemasMysql({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [ - { - name: 'id', - notNull: true, - primaryKey: false, - type: 'serial', - autoincrement: true, - }, - ], - compositePKs: ['users_id;id'], - compositePkName: 'users_id', - uniqueConstraints: [], - checkConstraints: [], - internals: { - tables: {}, - indexes: {}, - }, - }); -}); - -test('add table #3', async () => { - const to = { - users: mysqlTable( - 'users', - { - id: serial('id'), - }, - (t) => { - return { - pk: primaryKey({ - name: 'users_pk', - columns: [t.id], - }), - }; - }, - ), - }; - - const { statements } = await diffTestSchemasMysql({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [ - { - name: 'id', - notNull: true, - primaryKey: false, - type: 'serial', - autoincrement: true, - }, - ], - compositePKs: ['users_pk;id'], - uniqueConstraints: [], - compositePkName: 'users_pk', - checkConstraints: [], - internals: { - tables: {}, - indexes: {}, - }, - }); -}); - -test('add table #4', async () => { - const to = { - users: mysqlTable('users', {}), - posts: mysqlTable('posts', {}), - }; - - const { statements } = await diffTestSchemasMysql({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - internals: { - tables: {}, - indexes: {}, - }, - compositePKs: [], - uniqueConstraints: [], - compositePkName: '', - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'create_table', - tableName: 'posts', - schema: undefined, - columns: [], - compositePKs: [], - internals: { - tables: {}, - indexes: {}, - }, - uniqueConstraints: [], - compositePkName: '', - checkConstraints: [], - }); -}); - -test('add table #5', async () => { - const schema = mysqlSchema('folder'); - const from = { - schema, - }; - - const to = { - schema, - users: schema.table('users', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(0); -}); - -test('add table #6', async () => { - const from = { - users1: mysqlTable('users1', {}), - }; - - const to = { - users2: mysqlTable('users2', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users2', - schema: undefined, - columns: [], - internals: { - tables: {}, - indexes: {}, - }, - compositePKs: [], - uniqueConstraints: [], - compositePkName: '', - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'drop_table', - policies: [], - tableName: 'users1', - schema: undefined, - }); -}); - -test('add table #7', async () => { - const from = { - users1: mysqlTable('users1', {}), - }; - - const to = { - users: mysqlTable('users', {}), - users2: mysqlTable('users2', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'public.users1->public.users2', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - compositePKs: [], - uniqueConstraints: [], - internals: { - tables: {}, - indexes: {}, - }, - compositePkName: '', - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'rename_table', - tableNameFrom: 'users1', - tableNameTo: 'users2', - fromSchema: undefined, - toSchema: undefined, - }); -}); - -test('add schema + table #1', async () => { - const schema = mysqlSchema('folder'); - - const to = { - schema, - users: schema.table('users', {}), - }; - - const { statements } = await diffTestSchemasMysql({}, to, []); - - expect(statements.length).toBe(0); -}); - -test('change schema with tables #1', async () => { - const schema = mysqlSchema('folder'); - const schema2 = mysqlSchema('folder2'); - const from = { - schema, - users: schema.table('users', {}), - }; - const to = { - schema2, - users: schema2.table('users', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'folder->folder2', - ]); - - expect(statements.length).toBe(0); -}); - -test('change table schema #1', async () => { - const schema = mysqlSchema('folder'); - const from = { - schema, - users: mysqlTable('users', {}), - }; - const to = { - schema, - users: schema.table('users', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'public.users->folder.users', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'drop_table', - policies: [], - tableName: 'users', - schema: undefined, - }); -}); - -test('change table schema #2', async () => { - const schema = mysqlSchema('folder'); - const from = { - schema, - users: schema.table('users', {}), - }; - const to = { - schema, - users: mysqlTable('users', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'folder.users->public.users', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - uniqueConstraints: [], - compositePkName: '', - compositePKs: [], - checkConstraints: [], - internals: { - tables: {}, - indexes: {}, - }, - }); -}); - -test('change table schema #3', async () => { - const schema1 = mysqlSchema('folder1'); - const schema2 = mysqlSchema('folder2'); - const from = { - schema1, - schema2, - users: schema1.table('users', {}), - }; - const to = { - schema1, - schema2, - users: schema2.table('users', {}), - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'folder1.users->folder2.users', - ]); - - expect(statements.length).toBe(0); -}); - -test('change table schema #4', async () => { - const schema1 = mysqlSchema('folder1'); - const schema2 = mysqlSchema('folder2'); - const from = { - schema1, - users: schema1.table('users', {}), - }; - const to = { - schema1, - schema2, // add schema - users: schema2.table('users', {}), // move table - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'folder1.users->folder2.users', - ]); - - expect(statements.length).toBe(0); -}); - -test('change table schema #5', async () => { - const schema1 = mysqlSchema('folder1'); - const schema2 = mysqlSchema('folder2'); - const from = { - schema1, // remove schema - users: schema1.table('users', {}), - }; - const to = { - schema2, // add schema - users: schema2.table('users', {}), // move table - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'folder1.users->folder2.users', - ]); - - expect(statements.length).toBe(0); -}); - -test('change table schema #5', async () => { - const schema1 = mysqlSchema('folder1'); - const schema2 = mysqlSchema('folder2'); - const from = { - schema1, - schema2, - users: schema1.table('users', {}), - }; - const to = { - schema1, - schema2, - users: schema2.table('users2', {}), // rename and move table - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'folder1.users->folder2.users2', - ]); - - expect(statements.length).toBe(0); -}); - -test('change table schema #6', async () => { - const schema1 = mysqlSchema('folder1'); - const schema2 = mysqlSchema('folder2'); - const from = { - schema1, - users: schema1.table('users', {}), - }; - const to = { - schema2, // rename schema - users: schema2.table('users2', {}), // rename table - }; - - const { statements } = await diffTestSchemasMysql(from, to, [ - 'folder1->folder2', - 'folder2.users->folder2.users2', - ]); - - expect(statements.length).toBe(0); -}); - -test('add table #10', async () => { - const to = { - users: mysqlTable('table', { - json: json('json').default({}), - }), - }; - - const { sqlStatements } = await diffTestSchemasMysql({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - "CREATE TABLE `table` (\n\t`json` json DEFAULT ('{}')\n);\n", - ); -}); - -test('add table #11', async () => { - const to = { - users: mysqlTable('table', { - json: json('json').default([]), - }), - }; - - const { sqlStatements } = await diffTestSchemasMysql({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[]')\n);\n", - ); -}); - -test('add table #12', async () => { - const to = { - users: mysqlTable('table', { - json: json('json').default([1, 2, 3]), - }), - }; - - const { sqlStatements } = await diffTestSchemasMysql({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[1,2,3]')\n);\n", - ); -}); - -test('add table #13', async () => { - const to = { - users: mysqlTable('table', { - json: json('json').default({ key: 'value' }), - }), - }; - - const { sqlStatements } = await diffTestSchemasMysql({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value"}\')\n);\n', - ); -}); - -test('add table #14', async () => { - const to = { - users: mysqlTable('table', { - json: json('json').default({ - key: 'value', - arr: [1, 2, 3], - }), - }), - }; - - const { sqlStatements } = await diffTestSchemasMysql({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value","arr":[1,2,3]}\')\n);\n', - ); -}); - -test('drop index', async () => { - const from = { - users: mysqlTable( - 'table', - { - name: text('name'), - }, - (t) => { - return { - idx: index('name_idx').on(t.name), - }; - }, - ), - }; - - const to = { - users: mysqlTable('table', { - name: text('name'), - }), - }; - - const { sqlStatements } = await diffTestSchemasMysql(from, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe('DROP INDEX `name_idx` ON `table`;'); -}); - -test('drop unique constraint', async () => { - const from = { - users: mysqlTable( - 'table', - { - name: text('name'), - }, - (t) => { - return { - uq: unique('name_uq').on(t.name), - }; - }, - ), - }; - - const to = { - users: mysqlTable('table', { - name: text('name'), - }), - }; - - const { sqlStatements } = await diffTestSchemasMysql(from, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe('ALTER TABLE `table` DROP INDEX `name_uq`;'); -}); - -test('add table with indexes', async () => { - const from = {}; - - const to = { - users: mysqlTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - email: text('email'), - }, - (t) => ({ - uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), - indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), - indexExprMultiple: index('indexExprMultiple').on( - sql`(lower(${t.email}))`, - sql`(lower(${t.email}))`, - ), - - uniqueCol: uniqueIndex('uniqueCol').on(t.email), - indexCol: index('indexCol').on(t.email), - indexColMultiple: index('indexColMultiple').on(t.email, t.email), - - indexColExpr: index('indexColExpr').on( - sql`(lower(${t.email}))`, - t.email, - ), - }), - ), - }; - - const { sqlStatements } = await diffTestSchemasMysql(from, to, []); - expect(sqlStatements.length).toBe(6); - expect(sqlStatements).toStrictEqual([ - `CREATE TABLE \`users\` (\n\t\`id\` serial AUTO_INCREMENT NOT NULL,\n\t\`name\` text,\n\t\`email\` text,\n\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`),\n\tCONSTRAINT \`uniqueExpr\` UNIQUE((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE(\`email\`) -); -`, - 'CREATE INDEX `indexExpr` ON `users` ((lower(`email`)));', - 'CREATE INDEX `indexExprMultiple` ON `users` ((lower(`email`)),(lower(`email`)));', - 'CREATE INDEX `indexCol` ON `users` (`email`);', - 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', - 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);', - ]); -}); - -test('varchar and text default values escape single quotes', async (t) => { - const schema1 = { - table: mysqlTable('table', { - id: serial('id').primaryKey(), - }), - }; - - const schem2 = { - table: mysqlTable('table', { - id: serial('id').primaryKey(), - enum: mysqlEnum('enum', ["escape's quotes", "escape's quotes 2"]).default("escape's quotes"), - text: text('text').default("escape's quotes"), - varchar: varchar('varchar', { length: 255 }).default("escape's quotes"), - }), - }; - - const { sqlStatements } = await diffTestSchemasMysql(schema1, schem2, []); - - expect(sqlStatements.length).toBe(3); - expect(sqlStatements[0]).toStrictEqual( - "ALTER TABLE `table` ADD `enum` enum('escape''s quotes','escape''s quotes 2') DEFAULT 'escape''s quotes';", - ); - expect(sqlStatements[1]).toStrictEqual( - "ALTER TABLE `table` ADD `text` text DEFAULT ('escape''s quotes');", - ); - expect(sqlStatements[2]).toStrictEqual( - "ALTER TABLE `table` ADD `varchar` varchar(255) DEFAULT 'escape''s quotes';", - ); -}); - -test('composite primary key', async () => { - const from = {}; - const to = { - table: mysqlTable('works_to_creators', { - workId: int('work_id').notNull(), - creatorId: int('creator_id').notNull(), - classification: text('classification').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.workId, t.creatorId, t.classification], - }), - })), - }; - - const { sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `works_to_creators` (\n\t`work_id` int NOT NULL,\n\t`creator_id` int NOT NULL,\n\t`classification` text NOT NULL,\n\tCONSTRAINT `works_to_creators_work_id_creator_id_classification_pk` PRIMARY KEY(`work_id`,`creator_id`,`classification`)\n);\n', - ]); -}); - -test('add column before creating unique constraint', async () => { - const from = { - table: mysqlTable('table', { - id: serial('id').primaryKey(), - }), - }; - const to = { - table: mysqlTable('table', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }, (t) => ({ - uq: unique('uq').on(t.name), - })), - }; - - const { sqlStatements } = await diffTestSchemasMysql(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `table` ADD `name` text NOT NULL;', - 'ALTER TABLE `table` ADD CONSTRAINT `uq` UNIQUE(`name`);', - ]); -}); - -test('optional db aliases (snake case)', async () => { - const from = {}; - - const t1 = mysqlTable( - 't1', - { - t1Id1: int().notNull().primaryKey(), - t1Col2: int().notNull(), - t1Col3: int().notNull(), - t2Ref: int().notNull().references(() => t2.t2Id), - t1Uni: int().notNull(), - t1UniIdx: int().notNull(), - t1Idx: int().notNull(), - }, - (table) => ({ - uni: unique('t1_uni').on(table.t1Uni), - uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx), - idx: index('t1_idx').on(table.t1Idx), - fk: foreignKey({ - columns: [table.t1Col2, table.t1Col3], - foreignColumns: [t3.t3Id1, t3.t3Id2], - }), - }), - ); - - const t2 = mysqlTable( - 't2', - { - t2Id: serial().primaryKey(), - }, - ); - - const t3 = mysqlTable( - 't3', - { - t3Id1: int(), - t3Id2: int(), - }, - (table) => ({ - pk: primaryKey({ - columns: [table.t3Id1, table.t3Id2], - }), - }), - ); - - const to = { - t1, - t2, - t3, - }; - - const { sqlStatements } = await diffTestSchemasMysql(from, to, [], false, 'snake_case'); - - const st1 = `CREATE TABLE \`t1\` ( - \`t1_id1\` int NOT NULL, - \`t1_col2\` int NOT NULL, - \`t1_col3\` int NOT NULL, - \`t2_ref\` int NOT NULL, - \`t1_uni\` int NOT NULL, - \`t1_uni_idx\` int NOT NULL, - \`t1_idx\` int NOT NULL, - CONSTRAINT \`t1_t1_id1\` PRIMARY KEY(\`t1_id1\`), - CONSTRAINT \`t1_uni\` UNIQUE(\`t1_uni\`), - CONSTRAINT \`t1_uni_idx\` UNIQUE(\`t1_uni_idx\`) -); -`; - - const st2 = `CREATE TABLE \`t2\` ( - \`t2_id\` serial AUTO_INCREMENT NOT NULL, - CONSTRAINT \`t2_t2_id\` PRIMARY KEY(\`t2_id\`) -); -`; - - const st3 = `CREATE TABLE \`t3\` ( - \`t3_id1\` int NOT NULL, - \`t3_id2\` int NOT NULL, - CONSTRAINT \`t3_t3_id1_t3_id2_pk\` PRIMARY KEY(\`t3_id1\`,\`t3_id2\`) -); -`; - - const st4 = - `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t2_ref_t2_t2_id_fk\` FOREIGN KEY (\`t2_ref\`) REFERENCES \`t2\`(\`t2_id\`) ON DELETE no action ON UPDATE no action;`; - - const st5 = - `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fk\` FOREIGN KEY (\`t1_col2\`,\`t1_col3\`) REFERENCES \`t3\`(\`t3_id1\`,\`t3_id2\`) ON DELETE no action ON UPDATE no action;`; - - const st6 = `CREATE INDEX \`t1_idx\` ON \`t1\` (\`t1_idx\`);`; - - expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]); -}); - -test('optional db aliases (camel case)', async () => { - const from = {}; - - const t1 = mysqlTable( - 't1', - { - t1_id1: int().notNull().primaryKey(), - t1_col2: int().notNull(), - t1_col3: int().notNull(), - t2_ref: int().notNull().references(() => t2.t2_id), - t1_uni: int().notNull(), - t1_uni_idx: int().notNull(), - t1_idx: int().notNull(), - }, - (table) => ({ - uni: unique('t1Uni').on(table.t1_uni), - uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx), - idx: index('t1Idx').on(table.t1_idx), - fk: foreignKey({ - columns: [table.t1_col2, table.t1_col3], - foreignColumns: [t3.t3_id1, t3.t3_id2], - }), - }), - ); - - const t2 = mysqlTable( - 't2', - { - t2_id: serial().primaryKey(), - }, - ); - - const t3 = mysqlTable( - 't3', - { - t3_id1: int(), - t3_id2: int(), - }, - (table) => ({ - pk: primaryKey({ - columns: [table.t3_id1, table.t3_id2], - }), - }), - ); - - const to = { - t1, - t2, - t3, - }; - - const { sqlStatements } = await diffTestSchemasMysql(from, to, [], false, 'camelCase'); - - const st1 = `CREATE TABLE \`t1\` ( - \`t1Id1\` int NOT NULL, - \`t1Col2\` int NOT NULL, - \`t1Col3\` int NOT NULL, - \`t2Ref\` int NOT NULL, - \`t1Uni\` int NOT NULL, - \`t1UniIdx\` int NOT NULL, - \`t1Idx\` int NOT NULL, - CONSTRAINT \`t1_t1Id1\` PRIMARY KEY(\`t1Id1\`), - CONSTRAINT \`t1Uni\` UNIQUE(\`t1Uni\`), - CONSTRAINT \`t1UniIdx\` UNIQUE(\`t1UniIdx\`) -); -`; - - const st2 = `CREATE TABLE \`t2\` ( - \`t2Id\` serial AUTO_INCREMENT NOT NULL, - CONSTRAINT \`t2_t2Id\` PRIMARY KEY(\`t2Id\`) -); -`; - - const st3 = `CREATE TABLE \`t3\` ( - \`t3Id1\` int NOT NULL, - \`t3Id2\` int NOT NULL, - CONSTRAINT \`t3_t3Id1_t3Id2_pk\` PRIMARY KEY(\`t3Id1\`,\`t3Id2\`) -); -`; - - const st4 = - `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t2Ref_t2_t2Id_fk\` FOREIGN KEY (\`t2Ref\`) REFERENCES \`t2\`(\`t2Id\`) ON DELETE no action ON UPDATE no action;`; - - const st5 = - `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fk\` FOREIGN KEY (\`t1Col2\`,\`t1Col3\`) REFERENCES \`t3\`(\`t3Id1\`,\`t3Id2\`) ON DELETE no action ON UPDATE no action;`; - - const st6 = `CREATE INDEX \`t1Idx\` ON \`t1\` (\`t1Idx\`);`; - - expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]); -}); - -test('add table with ts enum', async () => { - enum Test { - value = 'value', - } - const to = { - users: mysqlTable('users', { - enum: mysqlEnum(Test), - }), - }; - - const { statements } = await diffTestSchemasMysql({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [{ - autoincrement: false, - name: 'enum', - notNull: false, - primaryKey: false, - type: "enum('value')", - }], - compositePKs: [], - internals: { - tables: {}, - indexes: {}, - }, - uniqueConstraints: [], - compositePkName: '', - checkConstraints: [], - }); -}); diff --git a/drizzle-kit/tests/mysql/commutativity.integration.test.ts b/drizzle-kit/tests/mysql/commutativity.integration.test.ts new file mode 100644 index 0000000000..057c4281e8 --- /dev/null +++ b/drizzle-kit/tests/mysql/commutativity.integration.test.ts @@ -0,0 +1,245 @@ +import { sql } from 'drizzle-orm'; +import { check, index, mysqlTable, primaryKey, unique } from 'drizzle-orm/mysql-core'; +import { describe, expect, test } from 'vitest'; +import { conflictsFromSchema } from './mocks'; + +describe('conflict rule coverage (statement pairs)', () => { + test('column: create vs drop (same-resource-different-op)', async () => { + const parent = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }), + })), + }; + + const child1 = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }), + d: t.varchar({ length: 255 }), + })), + }; + + const child2 = { + t: mysqlTable('t', (t) => ({})), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).toBeUndefined(); + }); + + test('column: alter vs alter (same-resource-same-op)', async () => { + const parent = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }), + })), + }; + + const child1 = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }).notNull(), + })), + }; + + const child2 = { + t: mysqlTable('t', (t) => ({ + c: t.int(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('table drop vs child index', async () => { + const parent = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }), + })), + }; + + const child1 = {}; + + const child2 = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }), + }), (table) => [index('test_idx').on(table.c)]), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('pk: alter vs drop', async () => { + const parent = { + t: mysqlTable('t', (t) => ({ + id: t.int().primaryKey(), + c: t.varchar({ length: 255 }), + })), + }; + + const child1 = { + t: mysqlTable('t', (t) => ({ + id: t.int(), + c: t.varchar({ length: 255 }), + }), (table) => [primaryKey({ columns: [table.id, table.c] })]), + }; + + const child2 = { + t: mysqlTable('t', (t) => ({ + id: t.int(), + c: t.varchar({ length: 255 }), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('unique: create vs drop', async () => { + const parent = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }).unique(), + })), + }; + + const child1 = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }).unique(), + d: t.varchar({ length: 255 }).unique(), + })), + }; + + const child2 = { + t: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('fk: recreate vs drop', async () => { + // postpone cc: @AndriiSherman + if (Date.now() < +new Date('2025-12-15')) return; + + const p = mysqlTable('p', (t) => ({ + id: t.int().primaryKey(), + })); + + const parent = { + p, + t: mysqlTable('t', (t) => ({ + id: t.int().primaryKey(), + pId: t.int().references(() => p.id), + })), + }; + + const child1 = { + p, + t: mysqlTable('t', (t) => ({ + id: t.int().primaryKey(), + pId: t.int().references(() => p.id, { onDelete: 'cascade' }), + })), + }; + + const child2 = { + p, + t: mysqlTable('t', (t) => ({ + id: t.int().primaryKey(), + pId: t.int(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('check: alter vs drop', async () => { + // postpone cc: @AndriiSherman + if (Date.now() < +new Date('2025-12-15')) return; + + const parent = { + t: mysqlTable('t', (t) => ({ + c: t.int(), + }), (table) => [check('chk', sql`${table.c} > 0`)]), + }; + + const child1 = { + t: mysqlTable('t', (t) => ({ + c: t.int(), + }), (table) => [check('chk', sql`${table.c} > 5`)]), + }; + + const child2 = { + t: mysqlTable('t', (t) => ({ + c: t.int(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('explainConflicts returns reason for table drop vs column alter', async () => { + // postpone cc: @AndriiSherman + if (Date.now() < +new Date('2025-12-15')) return; + + const parent = { + c: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }), + })), + }; + + const child1 = {}; + const child2 = { + c: mysqlTable('t', (t) => ({ + c: t.varchar({ length: 255 }).notNull(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + expect(conflicts?.leftStatement.type).toBe('alter_column'); + expect(conflicts?.rightStatement.type).toBe('drop_table'); + }); +}); diff --git a/drizzle-kit/tests/mysql/commutativity.test.ts b/drizzle-kit/tests/mysql/commutativity.test.ts new file mode 100644 index 0000000000..ee6df8c0b0 --- /dev/null +++ b/drizzle-kit/tests/mysql/commutativity.test.ts @@ -0,0 +1,785 @@ +import { createDDL } from 'src/dialects/mysql/ddl'; +import type { MysqlSnapshot } from 'src/dialects/mysql/snapshot'; +import { detectNonCommutative } from 'src/utils/commutativity'; +import { describe, expect, test } from 'vitest'; + +const baseId = '00000000-0000-0000-0000-000000000000'; + +function makeSnapshot(id: string, prevIds: string[], ddlEntities: any[] = []): MysqlSnapshot { + return { + version: '6', + dialect: 'mysql', + id, + prevIds, + ddl: ddlEntities, + renames: [], + } as any; +} + +function writeTempSnapshot(dir: string, tag: string, snap: MysqlSnapshot) { + const fs = require('fs'); + const path = require('path'); + const folder = path.join(dir, tag); + fs.mkdirSync(folder, { recursive: true }); + fs.writeFileSync(path.join(folder, 'snapshot.json'), JSON.stringify(snap, null, 2)); + return path.join(folder, 'snapshot.json'); +} + +const ORIGIN = '00000000-0000-0000-0000-000000000000'; + +function mkTmp(): { tmp: string; fs: any; path: any; os: any } { + const fs = require('fs'); + const path = require('path'); + const os = require('os'); + const tmp = fs.mkdtempSync(path.join(os.tmpdir(), 'dk-comm-int-mysql-')); + return { tmp, fs, path, os } as any; +} + +describe('commutativity integration (mysql)', () => { + test('Parent not empty: detects conflict when first migration of branch A has a conflict with the last migration of branch B', async () => { + // postpone cc: @AndriiSherman + if (Date.now() < +new Date('2025-12-15')) return; + + const parentDDL = createDDL(); + parentDDL.tables.push({ name: 'users' }); + parentDDL.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const parent = makeSnapshot('p1', [baseId], parentDDL.entities.list()); + + const A = createDDL(); + A.tables.push({ name: 'users' }); + A.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafA = makeSnapshot('a1', ['p1'], A.entities.list()); + + const A2 = createDDL(); + A2.tables.push({ name: 'users' }); + A2.columns.push({ + table: 'users', + name: 'email2', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafA2 = makeSnapshot('a2', ['a1'], A2.entities.list()); + + const B = createDDL(); + B.tables.push({ name: 'users' }); + B.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + B.tables.push({ name: 'posts' }); + B.columns.push({ + table: 'posts', + name: 'content', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafB = makeSnapshot('b1', ['p1'], B.entities.list()); + + const B2 = createDDL(); + B2.tables.push({ name: 'users' }); + B2.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + B2.tables.push({ name: 'posts' }); + B2.columns.push({ + table: 'posts', + name: 'content', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafB2 = makeSnapshot('b2', ['b1'], B2.entities.list()); + + const B3 = createDDL(); + B3.tables.push({ name: 'posts' }); + B3.columns.push({ + table: 'posts', + name: 'content', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafB3 = makeSnapshot('b3', ['b2'], B3.entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-mysql-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const a2Path = writeTempSnapshot(tmp, '001_leafA2', leafA2); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + const b2Path = writeTempSnapshot(tmp, '002_leafB2', leafB2); + const b3Path = writeTempSnapshot(tmp, '002_leafB3', leafB3); + + const report = await detectNonCommutative([pPath, aPath, bPath, b2Path, b3Path, a2Path], 'mysql'); + expect(report.conflicts.length).toBeGreaterThan(0); + expect(report.conflicts[0].parentId).toBe('p1'); + }); + + test('Parent empty: detects conflict when last migration of branch A has a conflict with a first migration of branch B', async () => { + const parent = makeSnapshot('p1', [baseId], createDDL().entities.list()); + + const A = createDDL(); + A.tables.push({ name: 'users' }); + A.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafA = makeSnapshot('a1', ['p1'], A.entities.list()); + + const A2 = createDDL(); + A2.tables.push({ name: 'posts' }); + A2.columns.push({ + table: 'posts', + name: 'description', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafA2 = makeSnapshot('a2', ['a1'], A2.entities.list()); + + const B = createDDL(); + B.tables.push({ name: 'posts' }); + B.columns.push({ + table: 'users', + name: 'content', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafB = makeSnapshot('b1', ['p1'], B.entities.list()); + + const B2 = createDDL(); + B2.tables.push({ name: 'posts' }); + B2.columns.push({ + table: 'users', + name: 'content', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafB2 = makeSnapshot('b2', ['b1'], B2.entities.list()); + + const B3 = createDDL(); + B3.tables.push({ name: 'posts' }); + B3.columns.push({ + table: 'users', + name: 'content', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + B3.tables.push({ name: 'media' }); + B3.columns.push({ + table: 'media', + name: 'url', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafB3 = makeSnapshot('b3', ['b2'], B3.entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-mysql-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const a2Path = writeTempSnapshot(tmp, '002_leafA2', leafA2); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + const b2Path = writeTempSnapshot(tmp, '003_leafB2', leafB2); + const b3Path = writeTempSnapshot(tmp, '004_leafB3', leafB3); + + const report = await detectNonCommutative([pPath, aPath, a2Path, bPath, b2Path, b3Path], 'mysql'); + expect(report.conflicts.length).toBeGreaterThan(0); + expect(report.conflicts[0].parentId).toBe('p1'); + }); + + test('detects conflict when drop table in one branch and add column in other', async () => { + // postpone cc: @AndriiSherman + if (Date.now() < +new Date('2025-12-15')) return; + + const parentDDL = createDDL(); + parentDDL.tables.push({ name: 'users' }); + parentDDL.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const parent = makeSnapshot('p1', [baseId], parentDDL.entities.list()); + + const A = createDDL(); + A.tables.push({ name: 'users' }); + A.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafA = makeSnapshot('a1', ['p1'], A.entities.list()); + + const leafB = makeSnapshot('b1', ['p1'], createDDL().entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-mysql-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + + const report = await detectNonCommutative([pPath, aPath, bPath], 'mysql'); + expect(report.conflicts.length).toBeGreaterThan(0); + expect(report.conflicts[0].parentId).toBe('p1'); + }); + + test('detects conflict when both branches alter same column', async () => { + const parent = makeSnapshot('p1', [baseId], createDDL().entities.list()); + + const A = createDDL(); + A.tables.push({ name: 'users' }); + A.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafA = makeSnapshot('a1', ['p1'], A.entities.list()); + + const B = createDDL(); + B.tables.push({ name: 'users' }); + B.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const leafB = makeSnapshot('b1', ['p1'], B.entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-mysql-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + + const report = await detectNonCommutative([pPath, aPath, bPath], 'mysql'); + expect(report.conflicts.length).toBeGreaterThan(0); + expect(report.conflicts[0].parentId).toBe('p1'); + }); + + test('no conflict when branches touch different tables', async () => { + const parent = makeSnapshot('p2', [baseId], createDDL().entities.list()); + + const A = createDDL(); + A.tables.push({ name: 'users' }); + const leafA = makeSnapshot('a2', ['p2'], A.entities.list()); + + const B = createDDL(); + B.tables.push({ name: 'posts' }); + const leafB = makeSnapshot('b2', ['p2'], B.entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-mysql-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + + const report = await detectNonCommutative([pPath, aPath, bPath], 'mysql'); + expect(report.conflicts.length).toBe(0); + }); + + test('column conflict: both branches change same column', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ name: 'users' }); + const p = makeSnapshot('p_col', [ORIGIN], parent.entities.list()); + + const a = createDDL(); + a.tables.push({ name: 'users' }); + a.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + const b = createDDL(); + b.tables.push({ name: 'users' }); + b.columns.push({ + table: 'users', + name: 'email', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + files.push( + writeTempSnapshot(tmp, '000_p_col', p), + writeTempSnapshot(tmp, '001_a_col', makeSnapshot('a_col', ['p_col'], a.entities.list())), + writeTempSnapshot(tmp, '002_b_col', makeSnapshot('b_col', ['p_col'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'mysql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('table drop vs child column alter', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ name: 't1' }); + parent.columns.push({ + table: 't1', + name: 'c1', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + const p = makeSnapshot('p_drop', [ORIGIN], parent.entities.list()); + + const a = createDDL(); // dropping table in branch A (no t1) + const b = createDDL(); + b.tables.push({ name: 't1' }); + b.columns.push({ + table: 't1', + name: 'c1', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + files.push( + writeTempSnapshot(tmp, '010_p_drop', p), + writeTempSnapshot(tmp, '011_a_drop', makeSnapshot('a_drop', ['p_drop'], a.entities.list())), + writeTempSnapshot(tmp, '012_b_drop', makeSnapshot('b_drop', ['p_drop'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'mysql'); + expect(report.conflicts.length).toBe(1); + expect(report.conflicts[0].branchA.headId).toStrictEqual('a_drop'); + expect(report.conflicts[0].branchB.headId).toStrictEqual('b_drop'); + }); + + test('unique constraint same name on same table', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ name: 't2' }); + const p = makeSnapshot('p_uq', [ORIGIN], parent.entities.list()); + + const a = createDDL(); + a.tables.push({ name: 't2' }); + a.indexes.push({ + table: 't2', + nameExplicit: true, + name: 't2_uq', + columns: [{ value: 'c', isExpression: false }], + isUnique: true, + using: null, + algorithm: null, + lock: null, + } as any); + + const b = createDDL(); + b.tables.push({ name: 't2' }); + b.indexes.push({ + table: 't2', + nameExplicit: true, + name: 't2_uq', + columns: [{ value: 'c', isExpression: false }], + isUnique: true, + using: null, + algorithm: null, + lock: null, + } as any); + + files.push( + writeTempSnapshot(tmp, '020_p_uq', p), + writeTempSnapshot(tmp, '021_a_uq', makeSnapshot('a_uq', ['p_uq'], a.entities.list())), + writeTempSnapshot(tmp, '022_b_uq', makeSnapshot('b_uq', ['p_uq'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'mysql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('view: same name in both branches', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const p = makeSnapshot('p_view', [ORIGIN], createDDL().entities.list()); + const a = createDDL(); + a.views.push({ + name: 'v1', + definition: 'select 1', + algorithm: 'undefined', + sqlSecurity: 'definer', + withCheckOption: null, + } as any); + + const b = createDDL(); + b.views.push({ + name: 'v1', + definition: 'select 1', + algorithm: 'undefined', + sqlSecurity: 'definer', + withCheckOption: null, + } as any); + + files.push( + writeTempSnapshot(tmp, '030_p_view', p), + writeTempSnapshot(tmp, '031_a_view', makeSnapshot('a_view', ['p_view'], a.entities.list())), + writeTempSnapshot(tmp, '032_b_view', makeSnapshot('b_view', ['p_view'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'mysql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('three-way branch: A,B,C from same parent', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ name: 't' }); + const p = makeSnapshot('p_three', [ORIGIN], parent.entities.list()); + + const a = createDDL(); + a.tables.push({ name: 't' }); + a.columns.push({ + table: 't', + name: 'a', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + const b = createDDL(); + b.tables.push({ name: 't' }); + b.columns.push({ + table: 't', + name: 'a', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + const c = createDDL(); + c.tables.push({ name: 't' }); + c.columns.push({ + table: 't', + name: 'b', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + files.push( + writeTempSnapshot(tmp, '100_p_three', p), + writeTempSnapshot(tmp, '101_a_three', makeSnapshot('a_three', ['p_three'], a.entities.list())), + writeTempSnapshot(tmp, '102_b_three', makeSnapshot('b_three', ['p_three'], b.entities.list())), + writeTempSnapshot(tmp, '103_c_three', makeSnapshot('c_three', ['p_three'], c.entities.list())), + ); + + const report = await detectNonCommutative(files, 'mysql'); + // At least A vs B should conflict; C may or may not depending on overlap + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('nested branching: parent -> A -> A1 and parent -> B', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const root = createDDL(); + root.tables.push({ name: 't' }); + const p = makeSnapshot('p_nested', [ORIGIN], root.entities.list()); + + const A = createDDL(); + A.tables.push({ name: 't' }); + A.columns.push({ + table: 't', + name: 'c', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + const A1 = createDDL(); + A1.tables.push({ name: 't' }); + A1.columns.push({ + table: 't', + name: 'c', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + const B = createDDL(); + B.tables.push({ name: 't' }); + B.columns.push({ + table: 't', + name: 'd', + type: 'varchar(255)', + notNull: false, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + + files.push( + writeTempSnapshot(tmp, '110_p_nested', p), + writeTempSnapshot(tmp, '111_A', makeSnapshot('A', ['p_nested'], A.entities.list())), + writeTempSnapshot(tmp, '112_A1', makeSnapshot('A1', ['A'], A1.entities.list())), + writeTempSnapshot(tmp, '113_B', makeSnapshot('B', ['p_nested'], B.entities.list())), + ); + + const report = await detectNonCommutative(files, 'mysql'); + expect(report.conflicts.length).toBeGreaterThanOrEqual(0); + }); + + test('complex mixed: multiple tables and views diverging', async () => { + // postpone cc: @AndriiSherman + if (Date.now() < +new Date('2025-12-15')) return; + + const { tmp } = mkTmp(); + const files: string[] = []; + + const base = createDDL(); + base.tables.push({ name: 'u' }); + base.tables.push({ name: 'p' }); + const p = makeSnapshot('p_mix', [ORIGIN], base.entities.list()); + + // Branch X: alter u.email, create view v_users + const X = createDDL(); + X.tables.push({ name: 'u' }); + X.columns.push({ + table: 'u', + name: 'email', + type: 'varchar(255)', + notNull: true, + autoIncrement: false, + default: null, + onUpdateNow: false, + onUpdateNowFsp: null, + charSet: null, + collation: null, + generated: null, + } as any); + X.views.push({ + name: 'v_users', + definition: 'select * from u', + algorithm: 'undefined', + sqlSecurity: 'definer', + withCheckOption: null, + } as any); + + // Branch Y: drop table u (conflicts with X's column/view touching u) + const Y = createDDL(); + Y.tables.push({ name: 'p' }); + // no table u -> implies drop vs X touching u + + files.push( + writeTempSnapshot(tmp, '120_p_mix', p), + writeTempSnapshot(tmp, '121_X', makeSnapshot('X', ['p_mix'], X.entities.list())), + writeTempSnapshot(tmp, '122_Y', makeSnapshot('Y', ['p_mix'], Y.entities.list())), + ); + + const report = await detectNonCommutative(files, 'mysql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); +}); diff --git a/drizzle-kit/tests/mysql/constraints.test.ts b/drizzle-kit/tests/mysql/constraints.test.ts new file mode 100644 index 0000000000..87dc140e09 --- /dev/null +++ b/drizzle-kit/tests/mysql/constraints.test.ts @@ -0,0 +1,938 @@ +import { desc, sql } from 'drizzle-orm'; +import { + AnyMySqlColumn, + bigint, + binary, + blob, + char, + date, + datetime, + decimal, + double, + float, + foreignKey, + index, + int, + json, + longblob, + longtext, + mediumblob, + mediumint, + mediumtext, + mysqlEnum, + mysqlSchema, + mysqlTable, + primaryKey, + serial, + smallint, + text, + time, + timestamp, + tinyblob, + tinyint, + tinytext, + unique, + uniqueIndex, + varbinary, + varchar, + year, +} from 'drizzle-orm/mysql-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +// TODO: add simple .unique(), etc. To discuss with @OleksiiKH0240 +test('#1', async () => { + const users3 = mysqlTable('users3', { + c1: varchar({ length: 100 }), + }, (t) => [ + unique().on(t.c1), + ]); + + const users4 = mysqlTable('users4', { + c1: varchar({ length: 100 }).unique().references(() => users3.c1), + c2: varchar({ length: 100 }).references((): AnyMySqlColumn => users4.c1), + }); + const to = { + users3, + users4, + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users3` (\n\t`c1` varchar(100),\n\tCONSTRAINT `c1_unique` UNIQUE INDEX(`c1`)\n);\n', + 'CREATE TABLE `users4` (\n\t`c1` varchar(100),\n\t`c2` varchar(100),\n\tCONSTRAINT `c1_unique` UNIQUE INDEX(`c1`)\n);\n', + 'ALTER TABLE `users4` ADD CONSTRAINT `users4_c1_users3_c1_fkey` FOREIGN KEY (`c1`) REFERENCES `users3`(`c1`);', + 'ALTER TABLE `users4` ADD CONSTRAINT `users4_c2_users4_c1_fkey` FOREIGN KEY (`c2`) REFERENCES `users4`(`c1`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// TODO: implement geometry types +test('unique constraint errors #1', async () => { + const to = { + table: mysqlTable('table', { + column1: text().unique(), + column2: tinytext().unique(), + column3: mediumtext().unique(), + column4: longtext().unique(), + column5: blob().unique(), + column6: tinyblob().unique(), + column7: mediumblob().unique(), + column8: longblob().unique(), + column9: json().unique(), + column10: varchar({ length: 769 }).unique(), // 768 max depends on mysql version and engine (4 bytes per character for last version) + // column11: geometry().unique(), + }), + }; + + const { sqlStatements: st, ddl1Err, ddl2Err, mappedErrors1, mappedErrors2 } = await diff({}, to, []); + + expect(ddl1Err).toStrictEqual([]); + expect(ddl2Err).toStrictEqual([ + { + columns: ['column1'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column2'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column3'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column4'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column5'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column6'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column7'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column8'], + table: 'table', + type: 'column_unsupported_unique', + }, + ]); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('unique constraint errors #2', async () => { + const to = { + table: mysqlTable('table', { + column1: text(), + column2: tinytext(), + column3: mediumtext(), + column4: longtext(), + column5: blob(), + column6: tinyblob(), + column7: mediumblob(), + column8: longblob(), + column9: json(), + column10: varchar({ length: 769 }), // 768 max depends on mysql version and engine (4 bytes per character for last version) + // column11: geometry(), + }, (table) => [ + unique().on(table.column1), + unique().on(table.column2), + unique().on(table.column3), + unique().on(table.column4), + unique().on(table.column5), + unique().on(table.column6), + unique().on(table.column7), + unique().on(table.column8), + unique().on(table.column9), + unique().on(table.column10), + // unique().on(table.column11), + ]), + }; + + const { sqlStatements: st, ddl1Err, ddl2Err, mappedErrors1, mappedErrors2 } = await diff({}, to, []); + + expect(ddl1Err).toStrictEqual([]); + expect(ddl2Err).toStrictEqual( + [ + { + columns: ['column1'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column2'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column3'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column4'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column5'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column6'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column7'], + table: 'table', + type: 'column_unsupported_unique', + }, + { + columns: ['column8'], + table: 'table', + type: 'column_unsupported_unique', + }, + ], + ); + expect(mappedErrors1).toStrictEqual([]); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('unique constraint errors #3', async () => { + const to = { + table: mysqlTable('table', { + column1: text(), + column2: tinytext(), + column3: mediumtext(), + column4: longtext(), + column5: blob(), + column6: tinyblob(), + column7: mediumblob(), + column8: longblob(), + column9: json(), + column10: varchar({ length: 769 }), // 768 max depends on mysql version and engine (4 bytes per character for last version) + // column11: geometry(), + }, (table) => [ + unique().on( + table.column1, + table.column2, + table.column3, + table.column4, + table.column5, + table.column6, + table.column7, + table.column8, + table.column9, + table.column10, + ), + ]), + }; + + const { sqlStatements: st, ddl1Err, ddl2Err, mappedErrors1, mappedErrors2 } = await diff({}, to, []); + expect(ddl1Err).toStrictEqual([]); + expect(ddl2Err).toStrictEqual( + [ + { + columns: ['column1', 'column2', 'column3', 'column4', 'column5', 'column6', 'column7', 'column8'], + table: 'table', + type: 'column_unsupported_unique', + }, + ], + ); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('unique, fk constraints order #1', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int(), + column2: varchar({ length: 256 }), + }), + table2: mysqlTable('table2', { + column1: int(), + column2: varchar({ length: 256 }), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int,\n\t`column2` varchar(256)\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int,\n\t`column2` varchar(256)\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const table1 = mysqlTable('table1', { + column1: int(), + column2: varchar({ length: 256 }).unique(), + }); + const table2 = mysqlTable('table2', { + column1: int(), + column2: varchar({ length: 256 }).references(() => table1.column2), + }); + const schema2 = { table1, table2 }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'CREATE UNIQUE INDEX `column2_unique` ON `table1` (`column2`);', + 'ALTER TABLE `table2` ADD CONSTRAINT `table2_column2_table1_column2_fkey` FOREIGN KEY (`column2`) REFERENCES `table1`(`column2`);', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('unique, fk constraints order #2', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int(), + column2: varchar({ length: 256 }), + }), + table2: mysqlTable('table2', { + column1: int(), + column2: varchar({ length: 256 }), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int,\n\t`column2` varchar(256)\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int,\n\t`column2` varchar(256)\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const table1 = mysqlTable('table1', { + column1: int(), + column2: varchar({ length: 256 }), + }, (table) => [ + unique().on(table.column1, table.column2), + ]); + const table2 = mysqlTable('table2', { + column1: int(), + column2: varchar({ length: 256 }), + }, (table) => [ + foreignKey({ + columns: [table.column1, table.column2], + foreignColumns: [table1.column1, table1.column2], + name: 'custom_fk', // TODO: revise: should there be any migrations if user change schema to omit name of constraint? + }), + ]); + const schema2 = { table1, table2 }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'CREATE UNIQUE INDEX `column1_column2_unique` ON `table1` (`column1`,`column2`);', + 'ALTER TABLE `table2` ADD CONSTRAINT `custom_fk` FOREIGN KEY (`column1`,`column2`) REFERENCES `table1`(`column1`,`column2`);', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/2236 +// https://github.com/drizzle-team/drizzle-orm/issues/3329 +test('add column before creating unique constraint', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int(), + }), + table2: mysqlTable('table2', { + column1: int(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + table1: mysqlTable('table1', { + column1: int(), + column2: varchar({ length: 256 }), + }, (table) => [ + unique().on(table.column1, table.column2), + ]), + table2: mysqlTable('table2', { + column1: int(), + column2: varchar({ length: 256 }).unique(), + }), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'ALTER TABLE `table1` ADD `column2` varchar(256);', + 'ALTER TABLE `table2` ADD `column2` varchar(256);', + 'CREATE UNIQUE INDEX `column2_unique` ON `table2` (`column2`);', + 'CREATE UNIQUE INDEX `column1_column2_unique` ON `table1` (`column1`,`column2`);', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('primary key, fk constraint order #1', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int(), + }), + table2: mysqlTable('table2', { + column1: int(), + column2: int(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int,\n\t`column2` int\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const table1 = mysqlTable('table1', { + column1: int().primaryKey(), + }); + const table2 = mysqlTable('table2', { + column1: int(), + column2: int().references(() => table1.column1), + }); + const schema2 = { table1, table2 }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2 = [ + 'ALTER TABLE `table1` ADD PRIMARY KEY (`column1`);', + 'ALTER TABLE `table2` ADD CONSTRAINT `table2_column2_table1_column1_fkey` FOREIGN KEY (`column2`) REFERENCES `table1`(`column1`);', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('primary key, fk constraint order #2', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int(), + column2: varchar({ length: 256 }), + }), + table2: mysqlTable('table2', { + column1: int(), + column2: int(), + column3: varchar({ length: 256 }), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int,\n\t`column2` varchar(256)\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int,\n\t`column2` int,\n\t`column3` varchar(256)\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const table1 = mysqlTable('table1', { + column1: int(), + column2: varchar({ length: 256 }), + }, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), + ]); + const table2 = mysqlTable('table2', { + column1: int(), + column2: int(), + column3: varchar({ length: 256 }), + }, (table) => [ + foreignKey({ + columns: [table.column2, table.column3], + foreignColumns: [table1.column1, table1.column2], + name: 'custom_fk', + }), + ]); + const schema2 = { table1, table2 }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2 = [ + 'ALTER TABLE `table1` ADD PRIMARY KEY (`column1`,`column2`);', + 'ALTER TABLE `table2` ADD CONSTRAINT `custom_fk` FOREIGN KEY (`column2`,`column3`) REFERENCES `table1`(`column1`,`column2`);', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4704 +test('index with sort', async () => { + const to = { + table: mysqlTable('table', { + column1: int(), + column2: int(), + column3: int(), + }, (table) => ({ + tableCompositeIdx: index('table_composite_idx').on( + table.column1, + table.column2, + desc(table.column3), // Attempting to sort by column3 DESC + ), + })), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + const expectedSt = [ + 'CREATE TABLE `table` (\n\t`column1` int,\n\t`column2` int,\n\t`column3` int\n);\n', + 'CREATE INDEX `table_composite_idx` ON `table` (`column1`,`column2`,`column3` desc);', + ]; + + expect(st).toStrictEqual(expectedSt); + expect(pst).toStrictEqual(expectedSt); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4221 +test('fk on char column', async () => { + function column1() { + return char('column1', { length: 24 }).primaryKey().$defaultFn(() => '1'); + } + const table1 = mysqlTable( + 'table1', + { + column1: column1(), + }, + ); + const table2 = mysqlTable( + 'table2', + { + column1: column1(), + column2: char('column2', { length: 24 }).references(() => table1.column1).notNull(), + }, + ); + const to = { table1, table2 }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + const expectedSt: string[] = [ + 'CREATE TABLE `table1` (\n\t`column1` char(24) PRIMARY KEY\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` char(24) PRIMARY KEY,\n\t`column2` char(24) NOT NULL\n);\n', + 'ALTER TABLE `table2` ADD CONSTRAINT `table2_column2_table1_column1_fkey` FOREIGN KEY (`column2`) REFERENCES `table1`(`column1`);', + ]; + + expect(st).toStrictEqual(expectedSt); + expect(pst).toStrictEqual(expectedSt); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/486 +// https://github.com/drizzle-team/drizzle-orm/issues/3244 +test('fk name is too long', async () => { + const table1 = mysqlTable( + 'table1_loooooong', + { + column1: int('column1_looooong').primaryKey(), + }, + ); + const table2 = mysqlTable( + 'table2_loooooong', + { + column1: int('column1_looooong').references(() => table1.column1).notNull(), + }, + ); + const to = { table1, table2 }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + const expectedSt: string[] = [ + 'CREATE TABLE `table1_loooooong` (\n\t`column1_looooong` int PRIMARY KEY\n);\n', + 'CREATE TABLE `table2_loooooong` (\n\t`column1_looooong` int NOT NULL\n);\n', + 'ALTER TABLE `table2_loooooong` ADD CONSTRAINT `table2_loooooong_KObGFnvgHDVg_fkey` FOREIGN KEY (`column1_looooong`) REFERENCES `table1_loooooong`(`column1_looooong`);', + ]; + + expect(st).toStrictEqual(expectedSt); + expect(pst).toStrictEqual(expectedSt); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456#issuecomment-3076042688 +test('fk multistep #1', async () => { + const foo = mysqlTable('foo', { + id: int().primaryKey(), + }); + + const bar = mysqlTable('bar', { + id: int().primaryKey(), + fooId: int().references(() => foo.id), + }); + + const schema1 = { foo, bar }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `foo` (\n\t`id` int PRIMARY KEY\n);\n', + 'CREATE TABLE `bar` (\n\t`id` int PRIMARY KEY,\n\t`fooId` int\n);\n', + 'ALTER TABLE `bar` ADD CONSTRAINT `bar_fooId_foo_id_fkey` FOREIGN KEY (`fooId`) REFERENCES `foo`(`id`);', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + bar: mysqlTable('bar', { + id: int().primaryKey(), + fooId: int(), + }), + }; + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'ALTER TABLE `bar` DROP CONSTRAINT `bar_fooId_foo_id_fkey`;', + 'DROP INDEX `bar_fooId_foo_id_fkey` ON `bar`', + 'DROP TABLE `foo`;', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/265 +// https://github.com/drizzle-team/drizzle-orm/issues/3293 +// https://github.com/drizzle-team/drizzle-orm/issues/2018 +test('adding on delete to 2 fks', async () => { + const table1 = mysqlTable('table1', { + column1: int().primaryKey(), + }); + const table2 = mysqlTable('table2', { + column1: int().primaryKey(), + column2: int().references(() => table1.column1).notNull(), + column3: int().references(() => table1.column1).notNull(), + }); + const schema1 = { table1, table2 }; + + const { next: n1, sqlStatements: st1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1: string[] = [ + 'CREATE TABLE `table1` (\n\t`column1` int PRIMARY KEY\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int PRIMARY KEY,\n\t`column2` int NOT NULL,\n\t`column3` int NOT NULL\n);\n', + 'ALTER TABLE `table2` ADD CONSTRAINT `table2_column2_table1_column1_fkey` FOREIGN KEY (`column2`) REFERENCES `table1`(`column1`);', + 'ALTER TABLE `table2` ADD CONSTRAINT `table2_column3_table1_column1_fkey` FOREIGN KEY (`column3`) REFERENCES `table1`(`column1`);', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const table3 = mysqlTable('table1', { + column1: int().primaryKey(), + }); + const table4 = mysqlTable('table2', { + column1: int().primaryKey(), + column2: int().references(() => table1.column1, { onDelete: 'cascade' }).notNull(), + column3: int().references(() => table1.column1, { onDelete: 'cascade' }).notNull(), + }); + const schema2 = { table3, table4 }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE `table2` DROP CONSTRAINT `table2_column2_table1_column1_fkey`;', + 'ALTER TABLE `table2` DROP CONSTRAINT `table2_column3_table1_column1_fkey`;', + 'ALTER TABLE `table2` ADD CONSTRAINT `table2_column2_table1_column1_fkey` FOREIGN KEY (`column2`) REFERENCES `table1`(`column1`) ON DELETE CASCADE;', + 'ALTER TABLE `table2` ADD CONSTRAINT `table2_column3_table1_column1_fkey` FOREIGN KEY (`column3`) REFERENCES `table1`(`column1`) ON DELETE CASCADE;', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('adding autoincrement to table with pk #1', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int().primaryKey(), + }), + }; + + const { next: n1, sqlStatements: st1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1: string[] = [ + 'CREATE TABLE `table1` (\n\t`column1` int PRIMARY KEY\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + table1: mysqlTable('table1', { + column1: int().autoincrement().primaryKey(), + }), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE `table1` MODIFY COLUMN `column1` int AUTO_INCREMENT NOT NULL;', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('adding autoincrement to table with pk #2', async () => { + // TODO: revise: I can successfully run all the queries manually, but somehow it throws error in the test + const schema1 = { + table1: mysqlTable('table1', { + column1: int().notNull(), + column2: int(), + }, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), + ]), + }; + + const { next: n1, sqlStatements: st1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1: string[] = [ + 'CREATE TABLE `table1` (\n\t`column1` int NOT NULL,\n\t`column2` int,\n\tCONSTRAINT `PRIMARY` PRIMARY KEY(`column1`,`column2`)\n);\n', + ]; + + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + table1: mysqlTable('table1', { + column1: int().notNull().autoincrement(), + column2: int().default(1), + }, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), + ]), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE `table1` MODIFY COLUMN `column1` int AUTO_INCREMENT NOT NULL;', + 'ALTER TABLE `table1` MODIFY COLUMN `column2` int DEFAULT 1;', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('adding autoincrement to table with unique #1', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int().unique(), + }), + }; + + const { next: n1 } = await diff({}, schema1, []); + await push({ db, to: schema1 }); + + const schema2 = { + table1: mysqlTable('table1', { + column1: int().autoincrement().unique(), + }), + }; + + const { sqlStatements: st } = await diff(n1, schema2, []); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const expectedSt: string[] = [ + 'ALTER TABLE `table1` MODIFY COLUMN `column1` int AUTO_INCREMENT;', + ]; + + expect(st).toStrictEqual(expectedSt); + expect(pst).toStrictEqual(expectedSt); +}); + +test('adding autoincrement to table with unique #2', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int(), + column2: int(), + }, (table) => [ + unique().on(table.column1, table.column2), + ]), + }; + + const { next: n1 } = await diff({}, schema1, []); + await push({ db, to: schema1 }); + + const schema2 = { + table1: mysqlTable('table1', { + column1: int().autoincrement(), + column2: int(), + }, (table) => [ + unique().on(table.column1, table.column2), + ]), + }; + + const { sqlStatements: st } = await diff(n1, schema2, []); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const expectedSt: string[] = [ + 'ALTER TABLE `table1` MODIFY COLUMN `column1` int AUTO_INCREMENT;', + ]; + + expect(st).toStrictEqual(expectedSt); + expect(pst).toStrictEqual(expectedSt); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3471 +test('drop column with pk and add pk to another column #1', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: varchar({ length: 256 }).primaryKey(), + column2: varchar({ length: 256 }).notNull(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` varchar(256) PRIMARY KEY,\n\t`column2` varchar(256) NOT NULL\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + table1: mysqlTable('table1', { + column2: varchar({ length: 256 }).primaryKey(), + }), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE `table1` DROP PRIMARY KEY;', + 'ALTER TABLE `table1` ADD PRIMARY KEY (`column2`);', + 'ALTER TABLE `table1` DROP COLUMN `column1`;', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('drop column with pk and add pk to another column #2', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: varchar({ length: 256 }), + column2: varchar({ length: 256 }), + column3: varchar({ length: 256 }).notNull(), + column4: varchar({ length: 256 }).notNull(), + }, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` varchar(256),\n\t`column2` varchar(256),' + + '\n\t`column3` varchar(256) NOT NULL,\n\t`column4` varchar(256) NOT NULL,' + + '\n\tCONSTRAINT `PRIMARY` PRIMARY KEY(`column1`,`column2`)\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + table1: mysqlTable('table1', { + column3: varchar({ length: 256 }), + column4: varchar({ length: 256 }), + }, (table) => [ + primaryKey({ columns: [table.column3, table.column4] }), + ]), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE `table1` DROP PRIMARY KEY;', + 'ALTER TABLE `table1` ADD PRIMARY KEY (`column3`,`column4`);', + 'ALTER TABLE `table1` DROP COLUMN `column1`;', + 'ALTER TABLE `table1` DROP COLUMN `column2`;', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456 +test('drop column with pk and add pk to another column #3', async () => { + const schema1 = { + authors: mysqlTable('authors', { + publicationId: varchar('publication_id', { length: 64 }), + authorID: varchar('author_id', { length: 10 }), + }, (table) => [primaryKey({ columns: [table.publicationId, table.authorID] })]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `authors` (\n\t`publication_id` varchar(64),\n\t`author_id` varchar(10),' + + '\n\tCONSTRAINT `PRIMARY` PRIMARY KEY(`publication_id`,`author_id`)\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + authors: mysqlTable('authors', { + publicationId: varchar('publication_id', { length: 64 }), + authorID: varchar('author_id', { length: 10 }), + orcidId: varchar('orcid_id', { length: 64 }), + }, (table) => [ + primaryKey({ columns: [table.publicationId, table.authorID, table.orcidId] }), + ]), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE `authors` DROP PRIMARY KEY;', + 'ALTER TABLE `authors` ADD `orcid_id` varchar(64);', + 'ALTER TABLE `authors` ADD PRIMARY KEY (`publication_id`,`author_id`,`orcid_id`);', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); diff --git a/drizzle-kit/tests/mysql/grammar.test.ts b/drizzle-kit/tests/mysql/grammar.test.ts new file mode 100644 index 0000000000..3a7bdb491d --- /dev/null +++ b/drizzle-kit/tests/mysql/grammar.test.ts @@ -0,0 +1,60 @@ +import { int, mysqlTable, varchar } from 'drizzle-orm/mysql-core'; +import { Decimal, parseEnum } from 'src/dialects/mysql/grammar'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diffIntrospect, prepareTestDatabase, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +if (!fs.existsSync('tests/mysql/tmp')) { + fs.mkdirSync('tests/mysql/tmp', { recursive: true }); +} + +test('enum', () => { + expect(parseEnum("enum('one','two','three')")).toStrictEqual(['one', 'two', 'three']); +}); + +test('numeric|decimal', () => { + expect.soft(Decimal.is('decimal')).true; + expect.soft(Decimal.is('numeric')).true; + expect.soft(Decimal.is('decimal(7)')).true; + expect.soft(Decimal.is('numeric(7)')).true; + expect.soft(Decimal.is('decimal (7)')).true; + expect.soft(Decimal.is('numeric (7)')).true; + expect.soft(Decimal.is('decimal(7, 4)')).true; + expect.soft(Decimal.is('decimal(7, 0)')).true; + expect.soft(Decimal.is('decimal(7, 0) ZEROFILL')).true; + expect.soft(Decimal.is('decimal(7, 0) unsigned')).true; + expect.soft(Decimal.is('DECIMAL(7, 0) UNSIGNED')).true; + expect.soft(Decimal.is('DECIMAL(7, 0) UNSIGNED ZEROFILL')).true; +}); + +test('column name + options', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + sortKey: varchar('sortKey__!@#', { length: 255 }).default('0'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'default-value-varchar'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/mysql/mocks.ts b/drizzle-kit/tests/mysql/mocks.ts new file mode 100644 index 0000000000..e70ff089a7 --- /dev/null +++ b/drizzle-kit/tests/mysql/mocks.ts @@ -0,0 +1,480 @@ +import Docker, { Container } from 'dockerode'; +import { is } from 'drizzle-orm'; +import { int, MySqlColumnBuilder, MySqlSchema, MySqlTable, mysqlTable, MySqlView } from 'drizzle-orm/mysql-core'; +import { existsSync, mkdirSync, rmSync, writeFileSync } from 'fs'; +import getPort from 'get-port'; +import { Connection, createConnection } from 'mysql2/promise'; +import { + MySqlSchema as MySqlSchemaOld, + MySqlTable as MysqlTableOld, + MySqlView as MysqlViewOld, +} from 'orm044/mysql-core'; +import { v4 as uuid } from 'uuid'; +import { introspect } from '../../src/cli/commands/pull-mysql'; +import { suggestions } from '../../src/cli/commands/push-mysql'; +import { upToV6 } from '../../src/cli/commands/up-mysql'; +import { CasingType } from '../../src/cli/validations/common'; +import { mysqlSchemaError as schemaError } from '../../src/cli/views'; +import { EmptyProgressView } from '../../src/cli/views'; +import { hash } from '../../src/dialects/common'; +import { MysqlDDL, MysqlEntity } from '../../src/dialects/mysql/ddl'; +import { createDDL, interimToDDL } from '../../src/dialects/mysql/ddl'; +import { ddlDiff, ddlDiffDry } from '../../src/dialects/mysql/diff'; +import { defaultFromColumn } from '../../src/dialects/mysql/drizzle'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from '../../src/dialects/mysql/drizzle'; +import { fromDatabaseForDrizzle } from '../../src/dialects/mysql/introspect'; +import { ddlToTypeScript } from '../../src/dialects/mysql/typescript'; +import { diff as legacyDiff } from '../../src/legacy/mysql-v5/mysqlDiff'; +import { serializeMysql } from '../../src/legacy/mysql-v5/serializer'; +import { DB } from '../../src/utils'; +import { mockResolver } from '../../src/utils/mocks'; +import { tsc } from '../utils'; +import 'zx/globals'; +import { expect } from 'vitest'; + +mkdirSync('tests/mysql/tmp', { recursive: true }); + +export type MysqlSchema = Record< + string, + MySqlTable | MySqlSchema | MySqlView +>; + +export type MysqlSchemaOld = Record< + string, + MysqlTableOld | MySqlSchemaOld | MysqlViewOld +>; + +export const fromEntities = (entities: MysqlEntity[]) => { + const ddl = createDDL(); + for (const it of entities) { + ddl.entities.push(it); + } + return ddl; +}; + +export const drizzleToDDL = (sch: MysqlSchema, casing?: CasingType | undefined) => { + const tables = Object.values(sch).filter((it) => is(it, MySqlTable)) as MySqlTable[]; + const views = Object.values(sch).filter((it) => is(it, MySqlView)) as MySqlView[]; + return interimToDDL(fromDrizzleSchema(tables, views, casing)); +}; + +export const diff = async ( + left: MysqlSchema | MysqlDDL, + right: MysqlSchema | MysqlDDL, + renamesArr: string[], + casing?: CasingType | undefined, +) => { + const { ddl: ddl1, errors: err1 } = 'entities' in left && '_' in left + ? { ddl: left as MysqlDDL, errors: [] } + : drizzleToDDL(left, casing); + const { ddl: ddl2, errors: err2 } = 'entities' in right && '_' in right + ? { ddl: right as MysqlDDL, errors: [] } + : drizzleToDDL(right, casing); + + const renames = new Set(renamesArr); + + const mappedErrors1 = err1.map((it: any) => schemaError(it)); + const mappedErrors2 = err2.map((it: any) => schemaError(it)); + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'default', + ); + + return { sqlStatements, statements, next: ddl2, ddl1Err: err1, ddl2Err: err2, mappedErrors1, mappedErrors2 }; +}; + +export const diffIntrospect = async ( + db: DB, + initSchema: MysqlSchema, + testName: string, + casing?: CasingType | undefined, +) => { + const { ddl: initDDL } = drizzleToDDL(initSchema, casing); + const { sqlStatements: init } = await ddlDiffDry(createDDL(), initDDL); + for (const st of init) await db.query(st); + + // introspect to schema + const schema = await fromDatabaseForDrizzle(db, 'drizzle'); + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const filePath = `tests/mysql/tmp/${testName}.ts`; + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel', 'mysql'); + + writeFileSync(filePath, file.file); + await tsc(file.file); + + // generate snapshot from ts file + const response = await prepareFromSchemaFiles([ + filePath, + ]); + + const interim = fromDrizzleSchema( + response.tables, + response.views, + casing, + ); + + const { ddl: ddl2, errors: e3 } = interimToDDL(interim); + + // TODO: handle errors + const renames = new Set(); + + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + + rmSync(`tests/mysql/tmp/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + ddlAfterPull: ddl1, + }; +}; + +export const push = async (config: { + db: DB; + to: MysqlSchema | MysqlDDL; + renames?: string[]; + casing?: CasingType; + log?: 'statements'; + ignoreSubsequent?: boolean; +}) => { + const { db, to, log } = config; + const casing = config.casing ?? 'camelCase'; + + const { schema } = await introspect({ + db, + database: 'drizzle', + filter: () => true, + progress: new EmptyProgressView(), + }); + const { ddl: ddl1, errors: err1 } = interimToDDL(schema); + const { ddl: ddl2, errors: err2 } = 'entities' in to && '_' in to + ? { ddl: to as MysqlDDL, errors: [] } + : drizzleToDDL(to, casing); + + if (err2.length > 0) { + for (const e of err2) { + console.error(`err2: ${JSON.stringify(e)}`); + } + throw new Error('Schema2 Interim Error'); + } + + if (err1.length > 0) { + for (const e of err1) { + console.error(`err: ${JSON.stringify(e)}`); + } + throw new Error('Schema1 Interim Error'); + } + + const renames = new Set(config.renames ?? []); + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + + const res = await suggestions(db, statements); + + for (const sql of sqlStatements) { + if (log === 'statements') console.log(sql); + await db.query(sql); + } + + // subsequent push + if (!config.ignoreSubsequent) { + { + const { schema } = await introspect({ + db, + database: 'drizzle', + filter: () => true, + progress: new EmptyProgressView(), + }); + const { ddl: ddl1, errors: err3 } = interimToDDL(schema); + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + if (sqlStatements.length > 0) { + console.error('---- subsequent push is not empty ----'); + expect(sqlStatements.join('\n')).toBe(''); + } + } + } + + return { sqlStatements, statements, hints: res }; +}; + +export const diffDefault = async ( + kit: TestDatabase, + builder: T, + expectedDefault: string, + pre: MysqlSchema | null = null, + override?: { + type?: string; + default?: string; + ignoreSubsequent?: boolean; + }, +) => { + await kit.clear(); + + const config = (builder as any).config; + const def = config['default']; + const column = mysqlTable('table', { column: builder }).column; + const type = override?.type ?? column.getSQLType().replace(', ', ','); // real(6, 3)->real(6,3) + const ignoreSubsequent = override?.ignoreSubsequent ?? false; + + const columnDefault = defaultFromColumn(column, 'camelCase'); + const defaultSql = override?.default ?? columnDefault; + + const res = [] as string[]; + if (defaultSql !== expectedDefault) { + res.push(`Unexpected sql: \n${defaultSql}\n${expectedDefault}`); + } + + const init = { + ...pre, + table: mysqlTable('table', { column: builder }), + }; + + const { db, clear } = kit; + if (pre) await push({ db, to: pre }); + const { sqlStatements: st1 } = await push({ db, to: init, ignoreSubsequent }); + const { sqlStatements: st2 } = await push({ db, to: init, ignoreSubsequent }); + + const expectedInit = `CREATE TABLE \`table\` (\n\t\`column\` ${type} DEFAULT ${expectedDefault}\n);\n`; + if (st1.length !== 1 || st1[0] !== expectedInit) res.push(`Unexpected init:\n${st1}\n\n${expectedInit}`); + if (st2.length > 0) res.push(`Unexpected subsequent init:\n${st2}`); + + // introspect to schema + const schema = await fromDatabaseForDrizzle(db, 'drizzle'); + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel', 'mysql'); + const path = `tests/mysql/tmp/temp-${hash(String(Math.random()))}.ts`; + + if (existsSync(path)) rmSync(path); + writeFileSync(path, file.file); + await tsc(file.file); + + const response = await prepareFromSchemaFiles([path]); + const sch = fromDrizzleSchema(response.tables, response.views, 'camelCase'); + const { ddl: ddl2, errors: e3 } = interimToDDL(sch); + + const { sqlStatements: afterFileSqlStatements } = await ddlDiffDry(ddl1, ddl2, 'push'); + if (afterFileSqlStatements.length === 0) { + rmSync(path); + } else { + console.log(afterFileSqlStatements); + console.log(`./${path}`); + } + + await clear(); + + config.hasDefault = false; + config.default = undefined; + const schema1 = { + ...pre, + table: mysqlTable('table', { column: builder }), + }; + + config.hasDefault = true; + config.default = def; + const schema2 = { + ...pre, + table: mysqlTable('table', { column: builder }), + }; + + if (pre) await push({ db, to: pre, ignoreSubsequent }); + await push({ db, to: schema1, ignoreSubsequent }); + const { sqlStatements: st3 } = await push({ db, to: schema2, ignoreSubsequent }); + const expectedAlter = `ALTER TABLE \`table\` MODIFY COLUMN \`column\` ${type} DEFAULT ${expectedDefault};`; + if (st3.length !== 1 || st3[0] !== expectedAlter) res.push(`Unexpected default alter:\n${st3}\n\n${expectedAlter}`); + + await clear(); + + const schema3 = { + ...pre, + table: mysqlTable('table', { id: int() }), + }; + + const schema4 = { + ...pre, + table: mysqlTable('table', { id: int(), column: builder }), + }; + + if (pre) await push({ db, to: pre, ignoreSubsequent }); + await push({ db, to: schema3, ignoreSubsequent }); + const { sqlStatements: st4 } = await push({ db, to: schema4, ignoreSubsequent }); + + const expectedAddColumn = `ALTER TABLE \`table\` ADD \`column\` ${type} DEFAULT ${expectedDefault};`; + if (st4.length !== 1 || st4[0] !== expectedAddColumn) { + res.push(`Unexpected add column:\n${st4[0]}\n\n${expectedAddColumn}`); + } + + return res; +}; + +export const createDockerDB = async (): Promise<{ url: string; container: Container }> => { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'mysql:8'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) + ); + + const mysqlContainer = await docker.createContainer({ + Image: image, + Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await mysqlContainer.start(); + + return { url: `mysql://root:mysql@127.0.0.1:${port}/drizzle`, container: mysqlContainer }; +}; + +export type TestDatabase = { + db: DB; + db_url: string; + close: () => Promise; + clear: () => Promise; +}; + +export const prepareTestDatabase = async (): Promise => { + const envUrl = process.env['MYSQL_CONNECTION_STRING']; + const { url, container } = envUrl ? { url: envUrl, container: null } : await createDockerDB(); + + const sleep = 1000; + let timeLeft = 20000; + do { + try { + const client: Connection = await createConnection(url); + await client.connect(); + + const db = { + query: async (sql: string, params: any[]) => { + const [res] = await client.query(sql).catch((e: Error) => { + const error = new Error(`query error: ${sql}\n\n${e.message}`); + throw error; + }); + return res as any[]; + }, + }; + const close = async () => { + await client?.end().catch(console.error); + await container?.stop().catch(console.error); + }; + const clear = async () => { + await client.query(`drop database if exists \`drizzle\`;`); + await client.query(`create database \`drizzle\`;`); + await client.query(`use \`drizzle\`;`); + }; + return { db, close, clear, db_url: url }; + } catch (e) { + console.error(e); + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + + throw new Error(); +}; + +export const diffSnapshotV5 = async (db: DB, schema: MysqlSchema, oldSchema: MysqlSchemaOld) => { + const res = await serializeMysql(oldSchema, 'camelCase'); + const { sqlStatements } = await legacyDiff({ right: res }); + + for (const st of sqlStatements) { + await db.query(st); + } + + const snapshot = upToV6(res); + const ddl = fromEntities(snapshot.ddl); + + const { sqlStatements: st, next } = await diff(ddl, schema, []); + const { sqlStatements: pst } = await push({ db, to: schema }); + const { sqlStatements: st1 } = await diff(next, ddl, []); + const { sqlStatements: pst1 } = await push({ db, to: schema }); + + return { + step1: st, + step2: pst, + step3: st1, + step4: pst1, + all: [...st, ...pst, ...st1, ...pst1], + }; +}; + +type SchemaShape = { + id: string; + prevId?: string; + schema: Record; +}; + +export async function conflictsFromSchema( + { parent, child1, child2 }: { + parent: SchemaShape; + child1: SchemaShape; + child2: SchemaShape; + }, +) { + const child1Interim = fromDrizzleSchema(Object.values(child1.schema), [], undefined); + + const child1Snapshot = { + version: '6', + dialect: 'mysql', + id: child1.id, + prevIds: child1.prevId ? [child1.prevId] : [], + ddl: interimToDDL(child1Interim).ddl.entities.list(), + renames: [], + } as any; + + const child2Interim = fromDrizzleSchema(Object.values(child2.schema), [], undefined); + + const child2Snapshot = { + version: '6', + dialect: 'mysql', + id: child2.id, + prevIds: child2.prevId ? [child2.prevId] : [], + ddl: interimToDDL(child2Interim).ddl.entities.list(), + renames: [], + } as any; + + const { statements: st1 } = await diff(parent.schema, child1.schema, []); + const { statements: st2 } = await diff(parent.schema, child2.schema, []); + + const { getReasonsFromStatements } = await import('src/dialects/mysql/commutativity'); + const r = await getReasonsFromStatements(st1, st2, child1Snapshot, child2Snapshot); + return r; +} diff --git a/drizzle-kit/tests/mysql/mysql-checks.test.ts b/drizzle-kit/tests/mysql/mysql-checks.test.ts new file mode 100644 index 0000000000..3624383180 --- /dev/null +++ b/drizzle-kit/tests/mysql/mysql-checks.test.ts @@ -0,0 +1,333 @@ +import { sql } from 'drizzle-orm'; +import { check, int, mysqlTable, serial, varchar } from 'drizzle-orm/mysql-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create table with check', async (t) => { + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => [ + check('some_check_name', sql`${table.age} > 21`), + ]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE TABLE \`users\` ( +\t\`id\` serial PRIMARY KEY, +\t\`age\` int, +\tCONSTRAINT \`some_check_name\` CHECK(\`users\`.\`age\` > 21) +);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add check constraint to existing table #1', async (t) => { + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }), + }; + + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => [ + check('some_check_name', sql`${table.age} > 21`), + ]), + }; + + const { sqlStatements: st, next } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name\` CHECK (\`users\`.\`age\` > 21);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add check constraint to existing table #2', async () => { + const schema1 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values'), + }), + }; + const schema2 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values'), + }, (table) => [check('some_check1', sql`${table.values} < 100`), check('some_check2', sql`'test' < 100`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE \`test\` ADD CONSTRAINT \`some_check1\` CHECK (\`test\`.\`values\` < 100);', + `ALTER TABLE \`test\` ADD CONSTRAINT \`some_check2\` CHECK ('test' < 100);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop check constraint in existing table #1', async (t) => { + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }), + }; + + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop check constraint in existing table #2', async () => { + const schema1 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values'), + }, (table) => [ + check('some_check1', sql`${table.values} < 100`), + check('some_check2', sql`'test' < 100`), + ]), + }; + const schema2 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE \`test\` DROP CONSTRAINT \`some_check1\`;', + `ALTER TABLE \`test\` DROP CONSTRAINT \`some_check2\`;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename check constraint', async (t) => { + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => [check('new_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, + `ALTER TABLE \`users\` ADD CONSTRAINT \`new_check_name\` CHECK (\`users\`.\`age\` > 21);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter check constraint', async (t) => { + const from = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + }, (table) => [check('new_check_name', sql`${table.age} > 10`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, + `ALTER TABLE \`users\` ADD CONSTRAINT \`new_check_name\` CHECK (\`users\`.\`age\` > 10);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter multiple check constraints', async (t) => { + const from = { + users: mysqlTable( + 'users', + { + id: serial('id').primaryKey(), + age: int('age'), + name: varchar('name', { length: 255 }), + }, + ( + table, + ) => [ + check('some_check_name_1', sql`${table.age} > 21`), + check('some_check_name_2', sql`${table.name} != 'Alex'`), + ], + ), + }; + + const to = { + users: mysqlTable( + 'users', + { + id: serial('id').primaryKey(), + age: int('age'), + name: varchar('name', { length: 255 }), + }, + ( + table, + ) => [ + check('some_check_name_3', sql`${table.age} > 21`), + check('some_check_name_4', sql`${table.name} != 'Alex'`), + ], + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name_1\`;`, + `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name_2\`;`, + `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name_3\` CHECK (\`users\`.\`age\` > 21);`, + `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name_4\` CHECK (\`users\`.\`name\` != \'Alex\');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create checks with same names', async (t) => { + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + age: int('age'), + name: varchar('name', { length: 255 }), + }, (table) => [ + check('some_check_name', sql`${table.age} > 21`), + check('some_check_name', sql`${table.name} != 'Alex'`), + ]), + }; + + await expect(diff({}, to, [])).rejects.toThrowError(); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +// TODO not possible to parse check definition +test.todo('create checks on serail or autoincrement', async (t) => { + const schema1 = { + table1: mysqlTable('table1', { + column1: serial(), + }, (table) => [ + check('some_check_name1', sql`${table.column1} > 21`), + ]), + }; + + await expect(diff({}, schema1, [])).rejects.toThrowError(); + await expect(push({ db, to: schema1 })).rejects.toThrowError(); + + const schema2 = { + table1: mysqlTable('table1', { + columnй: int().autoincrement(), + }, (table) => [ + check('some_check_name2', sql`${table.columnй} > 21`), + ]), + }; + + await expect(diff({}, schema2, [])).rejects.toThrowError(); + await expect(push({ db, to: schema2 })).rejects.toThrowError(); +}); + +test('db has checks. Push with same names', async () => { + const schema1 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => [ + check('some_check', sql`${table.values} < 100`), + ]), + }; + const schema2 = { + test: mysqlTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }, (table) => [ + check('some_check', sql`some new value`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/mysql/mysql-defaults.test.ts b/drizzle-kit/tests/mysql/mysql-defaults.test.ts new file mode 100644 index 0000000000..8ff6f299ed --- /dev/null +++ b/drizzle-kit/tests/mysql/mysql-defaults.test.ts @@ -0,0 +1,790 @@ +import { sql } from 'drizzle-orm'; +import { + bigint, + binary, + blob, + boolean, + char, + date, + datetime, + decimal, + double, + float, + int, + json, + longblob, + longtext, + mediumblob, + mediumint, + mediumtext, + mysqlEnum, + real, + serial, + smallint, + text, + time, + timestamp, + tinyblob, + tinyint, + tinytext, + varbinary, + varchar, + year, +} from 'drizzle-orm/mysql-core'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diffDefault, prepareTestDatabase, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('tinyint', async () => { + const res1 = await diffDefault(_, tinyint().default(-128), '-128'); + const res2 = await diffDefault(_, tinyint().default(0), '0'); + const res3 = await diffDefault(_, tinyint().default(127), '127'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); +}); + +test('smallint', async () => { + const res1 = await diffDefault(_, smallint().default(-32768), '-32768'); + const res2 = await diffDefault(_, smallint().default(0), '0'); + const res3 = await diffDefault(_, smallint().default(32767), '32767'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); +}); + +test('mediumint', async () => { + const res1 = await diffDefault(_, mediumint().default(-8388608), '-8388608'); + const res2 = await diffDefault(_, mediumint().default(0), '0'); + const res3 = await diffDefault(_, mediumint().default(8388607), '8388607'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); +}); + +test('int', async () => { + const res1 = await diffDefault(_, int().default(-2147483648), '-2147483648'); + const res2 = await diffDefault(_, int().default(0), '0'); + const res3 = await diffDefault(_, int().default(2147483647), '2147483647'); + const res4 = await diffDefault(_, int().default(1e4), '10000'); + const res5 = await diffDefault(_, int().default(-1e4), '-10000'); + + // expressions + const res6 = await diffDefault(_, int().default(sql`(1 + 1)`), '(1 + 1)'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); +}); + +test('bigint', async () => { + const res1 = await diffDefault(_, bigint({ mode: 'number' }).default(9007199254740991), '9007199254740991'); // 2^53 + const res2 = await diffDefault(_, bigint({ mode: 'number' }).default(-9007199254740991), '-9007199254740991'); + const res3 = await diffDefault(_, bigint({ mode: 'bigint' }).default(9223372036854775807n), '9223372036854775807'); // 2^63 - 1 + const res4 = await diffDefault(_, bigint({ mode: 'bigint' }).default(-9223372036854775808n), '-9223372036854775808'); // -2^63 + const res5 = await diffDefault( + _, + bigint({ mode: 'number', unsigned: true }).default(9007199254740991), + '9007199254740991', + ); + const res6 = await diffDefault( + _, + bigint({ mode: 'bigint', unsigned: true }).default(18446744073709551615n), + '18446744073709551615', // 2^64 max in Mysql + ); + + // expressions + const res7 = await diffDefault(_, bigint({ mode: 'number' }).default(sql`(1 + 1)`), '(1 + 1)'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); +}); + +test('decimal', async () => { + const res1 = await diffDefault(_, decimal().default('10.123'), '(10.123)'); + + const res2 = await diffDefault(_, decimal({ precision: 6 }).default('10.123'), '(10.123)'); + const res3 = await diffDefault(_, decimal({ precision: 6, scale: 2 }).default('10.123'), '(10.123)'); + + // string + const res4 = await diffDefault(_, decimal({ mode: 'string' }).default('10.123'), '(10.123)'); + + const res5 = await diffDefault(_, decimal({ mode: 'string', scale: 2 }).default('10.123'), '(10.123)'); + const res6 = await diffDefault(_, decimal({ mode: 'string', precision: 6 }).default('10.123'), '(10.123)'); + const res7 = await diffDefault( + _, + decimal({ mode: 'string', precision: 6, scale: 2 }).default('10.123'), + '(10.123)', + ); + + // number + // const res8 = await diffDefault(_, decimal({ mode: 'number' }).default(9007199254740991), '9007199254740991'); + const res9 = await diffDefault( + _, + decimal({ mode: 'number', precision: 16 }).default(9007199254740991), + '(9007199254740991)', + ); + + const res10 = await diffDefault(_, decimal({ mode: 'number', precision: 6, scale: 2 }).default(10.123), '(10.123)'); + const res11 = await diffDefault(_, decimal({ mode: 'number', scale: 2 }).default(10.123), '(10.123)'); + const res12 = await diffDefault(_, decimal({ mode: 'number', precision: 6 }).default(10.123), '(10.123)'); + + // TODO revise: maybe bigint mode should set the precision to a value appropriate for bigint, since the default precision (10) is insufficient. + // the line below will fail + const res13 = await diffDefault( + _, + decimal({ mode: 'bigint' }).default(9223372036854775807n), + '(9223372036854775807)', + ); + const res14 = await diffDefault( + _, + decimal({ mode: 'bigint', precision: 19 }).default(9223372036854775807n), + '(9223372036854775807)', + ); + + // expressions + const res15 = await diffDefault(_, decimal().default(sql`(1.10 + 1.20)`), '(1.10 + 1.20)'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + // expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); +}); + +test('real', async () => { + const res1 = await diffDefault(_, real().default(10.123), '10.123'); + // TODO: revise: It seems that the real type can’t be configured using only one property—precision or scale; both must be specified. + // The commented line below will fail + // const res2 = await diffDefault(_, real({ precision: 6 }).default(10.123), '10.123'); + const res3 = await diffDefault(_, real({ precision: 6, scale: 3 }).default(10.123), '10.123'); + const res4 = await diffDefault(_, real({ precision: 6, scale: 2 }).default(10.123), '10.123', null, { + ignoreSubsequent: true, + }); + + // expressions + const res5 = await diffDefault(_, decimal().default(sql`(1.10 + 1.20)`), '(1.10 + 1.20)'); + + expect.soft(res1).toStrictEqual([]); + // expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([ + 'Unexpected subsequent init:\n' + + 'ALTER TABLE `table` MODIFY COLUMN `column` real(6,2) DEFAULT 10.123;', // expected due to scale 2 + ]); + expect.soft(res5).toStrictEqual([]); +}); + +test('double', async () => { + const res1 = await diffDefault(_, double().default(10.123), '10.123'); + // TODO: revise: It seems that the double type can’t be configured using only one property precision or scale; both must be specified. + // The commented line below will fail + // const res2 = await diffDefault(_, double({ precision: 6 }).default(10.123), '10.123'); + const res3 = await diffDefault(_, double({ precision: 6, scale: 2 }).default(10.123), '10.123', null, { + ignoreSubsequent: true, + }); + const res4 = await diffDefault(_, double({ unsigned: true }).default(10.123), '10.123'); + const res5 = await diffDefault( + _, + double({ unsigned: true, precision: 6, scale: 2 }).default(10.123), + '10.123', + null, + { ignoreSubsequent: true }, + ); + + // expressions + const res6 = await diffDefault(_, decimal({ precision: 6, scale: 2 }).default(sql`(1.10 + 1.20)`), '(1.10 + 1.20)'); + + expect.soft(res1).toStrictEqual([]); + // expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([ + 'Unexpected subsequent init:\n' + + 'ALTER TABLE `table` MODIFY COLUMN `column` double(6,2) DEFAULT 10.123;', + ]); + expect.soft(res4).toStrictEqual([]); + + // + expect.soft(res5).toStrictEqual([ + 'Unexpected subsequent init:\n' + + 'ALTER TABLE `table` MODIFY COLUMN `column` double(6,2) unsigned DEFAULT 10.123;', + ]); + expect.soft(res6).toStrictEqual([]); +}); + +test('float', async () => { + const res1 = await diffDefault(_, float().default(10.123), '10.123'); + + const res2 = await diffDefault(_, float({ precision: 6 }).default(10.123), '10.123'); + const res3 = await diffDefault(_, float({ precision: 6, scale: 3 }).default(10.123), '10.123'); + + const res4 = await diffDefault(_, float({ unsigned: true }).default(10.123), '10.123'); + const res5 = await diffDefault(_, float({ unsigned: true, precision: 6, scale: 3 }).default(10.123), '10.123'); + const res6 = await diffDefault(_, float({ unsigned: true, precision: 6, scale: 2 }).default(10.123), '10.123', null, { + ignoreSubsequent: true, + }); + + // expressions + const res7 = await diffDefault(_, decimal({ precision: 6, scale: 2 }).default(sql`(1.10 + 1.20)`), '(1.10 + 1.20)'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([ + 'Unexpected subsequent init:\n' + + 'ALTER TABLE `table` MODIFY COLUMN `column` float(6,2) unsigned DEFAULT 10.123;', + ]); + expect.soft(res7).toStrictEqual([]); +}); + +test('boolean', async () => { + // sql`null` equals no default value, while we handle it properly + // it breaks on expected sql statements since they always expect DEFAULT + const res1 = await diffDefault(_, boolean().default(sql`null`), 'null', null, { ignoreSubsequent: true }); + const res2 = await diffDefault(_, boolean().default(true), 'true'); + const res3 = await diffDefault(_, boolean().default(false), 'false'); + const res4 = await diffDefault(_, boolean().default(sql`true`), '(true)'); + + // null vs { value: "null", type: "unknown" } + expect.soft(res1.length).greaterThan(0); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('char', async () => { + const res1 = await diffDefault(_, char({ length: 10 }).default('10'), `'10'`); + const res2 = await diffDefault(_, char({ length: 10 }).default("'"), `''''`); + const res3 = await diffDefault(_, char({ length: 10 }).default('"'), `'"'`); + const res4 = await diffDefault(_, char({ length: 10 }).default('text\'text"'), "'text''text\"'"); + + const res5 = await diffDefault(_, char({ length: 100 }).default(sql`('hello' + ' world')`), "('hello' + ' world')"); + const res6 = await diffDefault(_, char({ length: 100 }).default(sql`'hey'`), "('hey')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); +}); + +test('varchar', async () => { + const res1 = await diffDefault(_, varchar({ length: 10 }).default('text'), `'text'`); + const res2 = await diffDefault(_, varchar({ length: 10 }).default("text'text"), `'text''text'`); + const res3 = await diffDefault(_, varchar({ length: 10 }).default('text\'text"'), "'text''text\"'"); + + // expressions + const res4 = await diffDefault( + _, + varchar({ length: 100 }).default(sql`('hello' + ' world')`), + "('hello' + ' world')", + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('tinytext', async () => { + const res1 = await diffDefault(_, tinytext().default('text'), `('text')`); + const res2 = await diffDefault(_, tinytext().default("'"), `('''')`); + const res3 = await diffDefault(_, tinytext().default('"'), `('"')`); + const res4 = await diffDefault(_, tinytext().default("text'text"), `('text''text')`); + const res5 = await diffDefault(_, tinytext().default('text\'text"'), `('text''text"')`); + + // expressions + const res6 = await diffDefault(_, tinytext().default(sql`('hello' + ' world')`), "('hello' + ' world')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); +}); + +test('mediumtext', async () => { + const res1 = await diffDefault(_, mediumtext().default('text'), `('text')`); + const res2 = await diffDefault(_, mediumtext().default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, mediumtext().default('text\'text"'), `('text''text"')`); + + // expressions + const res4 = await diffDefault(_, mediumtext().default(sql`('hello' + ' world')`), "('hello' + ' world')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('text', async () => { + const res1 = await diffDefault(_, text().default('text'), `('text')`); + const res2 = await diffDefault(_, text().default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, text().default('text\'text"'), `('text''text"')`); + + // expressions + const res4 = await diffDefault(_, text().default(sql`('hello' + ' world')`), "('hello' + ' world')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('longtext', async () => { + const res1 = await diffDefault(_, longtext().default('text'), `('text')`); + const res2 = await diffDefault(_, longtext().default("text'text"), `('text''text')`); + const res3 = await diffDefault(_, longtext().default('text\'text"'), `('text''text"')`); + + // expressions + const res4 = await diffDefault(_, longtext().default(sql`('hello' + ' world')`), "('hello' + ' world')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('tinyblob', async () => { + const res1 = await diffDefault(_, tinyblob({ mode: 'string' }).default('text'), `('text')`); + const res1_1 = await diffDefault( + _, + tinyblob().default(Buffer.from('text')), + `(0x${Buffer.from('text').toString('hex')})`, + ); + const res2 = await diffDefault(_, tinyblob({ mode: 'string' }).default("text'text"), `('text''text')`); + const res2_1 = await diffDefault( + _, + tinyblob().default(Buffer.from("text't")), + `(0x${Buffer.from("text't").toString('hex')})`, + ); + const res3 = await diffDefault(_, tinyblob({ mode: 'string' }).default('text\'text"'), `('text''text"')`); + const res3_1 = await diffDefault( + _, + tinyblob().default(Buffer.from('text\'t"')), + `(0x${Buffer.from('text\'t"').toString('hex')})`, + ); + + // expressions + const res4 = await diffDefault(_, tinyblob().default(sql`('hello' + ' world')`), "('hello' + ' world')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res1_1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res2_1).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res3_1).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('mediumblob', async () => { + const res1 = await diffDefault(_, mediumblob({ mode: 'string' }).default('text'), `('text')`); + const res1_1 = await diffDefault( + _, + mediumblob().default(Buffer.from('text')), + `(0x${Buffer.from('text').toString('hex')})`, + ); + const res2 = await diffDefault(_, mediumblob({ mode: 'string' }).default("text'text"), `('text''text')`); + const res2_1 = await diffDefault( + _, + mediumblob().default(Buffer.from("text'text")), + `(0x${Buffer.from("text'text").toString('hex')})`, + ); + const res3 = await diffDefault(_, mediumblob({ mode: 'string' }).default('text\'text"'), `('text''text"')`); + const res3_1 = await diffDefault( + _, + mediumblob().default(Buffer.from('text\'text"')), + `(0x${Buffer.from('text\'text"').toString('hex')})`, + ); + + // expressions + const res4 = await diffDefault(_, mediumblob().default(sql`('hello' + ' world')`), "('hello' + ' world')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res1_1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res2_1).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res3_1).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('blob', async () => { + const res1 = await diffDefault(_, blob({ mode: 'string' }).default('text'), `('text')`); + const res1_1 = await diffDefault( + _, + blob().default(Buffer.from('text')), + `(0x${Buffer.from('text').toString('hex')})`, + ); + const res2 = await diffDefault(_, blob({ mode: 'string' }).default("text'text"), `('text''text')`); + const res2_1 = await diffDefault( + _, + blob().default(Buffer.from("text'text")), + `(0x${Buffer.from("text'text").toString('hex')})`, + ); + const res3 = await diffDefault(_, blob({ mode: 'string' }).default('text\'text"'), `('text''text"')`); + const res3_1 = await diffDefault( + _, + blob().default(Buffer.from('text\'text"')), + `(0x${Buffer.from('text\'text"').toString('hex')})`, + ); + + // expressions + const res4 = await diffDefault(_, blob().default(sql`('hello' + ' world')`), "('hello' + ' world')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res1_1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res2_1).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res3_1).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('longblob', async () => { + const res1 = await diffDefault(_, longblob({ mode: 'string' }).default('text'), `('text')`); + const res1_1 = await diffDefault( + _, + longblob().default(Buffer.from('text')), + `(0x${Buffer.from('text').toString('hex')})`, + ); + const res2 = await diffDefault(_, longblob({ mode: 'string' }).default("text'text"), `('text''text')`); + const res2_1 = await diffDefault( + _, + longblob().default(Buffer.from("text'text")), + `(0x${Buffer.from("text'text").toString('hex')})`, + ); + const res3 = await diffDefault(_, longblob({ mode: 'string' }).default('text\'text"'), `('text''text"')`); + const res3_1 = await diffDefault( + _, + longblob().default(Buffer.from('text\'text"')), + `(0x${Buffer.from('text\'text"').toString('hex')})`, + ); + + // expressions + const res4 = await diffDefault(_, longblob().default(sql`('hello' + ' world')`), "('hello' + ' world')"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res1_1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res2_1).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res3_1).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('enum', async () => { + const res1 = await diffDefault( + _, + mysqlEnum(['sad', 'ok', 'happy']).default('ok'), + `'ok'`, + null, + { + type: `enum('sad','ok','happy')`, + }, + ); + const res2 = await diffDefault(_, mysqlEnum(["'"]).default("'"), `''''`, null, { type: `enum('''')` }); + const res3 = await diffDefault(_, mysqlEnum(['"']).default('"'), `'"'`, null, { type: `enum('"')` }); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); +}); + +test('binary', async () => { + const res1 = await diffDefault(_, binary().default('binary'), `('binary')`); + const res2 = await diffDefault(_, binary({ length: 10 }).default('binary'), `('binary')`); + const res3 = await diffDefault(_, binary().default(sql`(lower('HELLO'))`), `(lower('HELLO'))`); + const res4 = await diffDefault(_, binary().default(sql`lower('HELLO')`), `(lower('HELLO'))`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('varbinary', async () => { + const res1 = await diffDefault(_, varbinary({ length: 10 }).default('binary'), `(0x62696e617279)`); + const res2 = await diffDefault(_, varbinary({ length: 16 }).default(sql`(lower('HELLO'))`), `(lower('HELLO'))`); + const res3 = await diffDefault(_, varbinary({ length: 16 }).default(sql`lower('HELLO')`), `(lower('HELLO'))`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); +}); + +test('json', async () => { + const res1 = await diffDefault(_, json().default({}), `('{}')`); + const res2 = await diffDefault(_, json().default([]), `('[]')`); + const res3 = await diffDefault(_, json().default([1, 2, 3]), `('[1,2,3]')`); + const res4 = await diffDefault(_, json().default({ key: 'value' }), `('{"key":"value"}')`); + const res5 = await diffDefault(_, json().default({ key: "val'ue" }), `('{"key":"val''ue"}')`); + const res7 = await diffDefault(_, json().default({ key1: { key2: 'value' } }), `('{"key1":{"key2":"value"}}')`); + + const res8 = await diffDefault(_, json().default({ key: 9223372036854775807n }), `('{"key":9223372036854775807}')`); + const res9 = await diffDefault( + _, + json().default(sql`'{"key":9223372036854775807}'`), + `('{"key":9223372036854775807}')`, + ); + const res10 = await diffDefault( + _, + json().default([9223372036854775807n, 9223372036854775806n]), + `('[9223372036854775807,9223372036854775806]')`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); +}); + +test('timestamp', async () => { + const res1 = await diffDefault(_, timestamp({ mode: 'date' }).defaultNow(), `(now())`); + const res2 = await diffDefault(_, timestamp({ mode: 'string' }).defaultNow(), `(now())`); + const res3 = await diffDefault( + _, + timestamp({ mode: 'date' }).default(new Date('2025-05-23T12:53:53.115Z')), + `'2025-05-23 12:53:53.115'`, + null, + { ignoreSubsequent: true }, + ); + const res4 = await diffDefault( + _, + timestamp({ mode: 'date', fsp: 3 }).default(new Date('2025-05-23T12:53:53.115Z')), + `'2025-05-23 12:53:53.115'`, + ); + + const res5 = await diffDefault( + _, + timestamp({ mode: 'string' }).default('2025-05-23 12:53:53.115'), + `'2025-05-23 12:53:53.115'`, + null, + { ignoreSubsequent: true }, + ); + const res6 = await diffDefault( + _, + timestamp({ mode: 'string', fsp: 3 }).default('2025-05-23 12:53:53.115'), + `'2025-05-23 12:53:53.115'`, + ); + const res7 = await diffDefault( + _, + timestamp({ mode: 'string', fsp: 6 }).default('2025-05-23 12:53:53.123456'), + `'2025-05-23 12:53:53.123456'`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([ + // without fsp timestamp column returns no .115 + 'Unexpected subsequent init:\n' + + "ALTER TABLE `table` MODIFY COLUMN `column` timestamp DEFAULT '2025-05-23 12:53:53.115';", + ]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([ + // without fsp timestamp column returns no .115 + 'Unexpected subsequent init:\n' + + "ALTER TABLE `table` MODIFY COLUMN `column` timestamp DEFAULT '2025-05-23 12:53:53.115';", + ]); + + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); +}); + +test('datetime', async () => { + const res1 = await diffDefault( + _, + datetime({ mode: 'date' }).default(new Date('2025-05-23T12:53:53.115Z')), + `'2025-05-23 12:53:53.115'`, + null, + { ignoreSubsequent: true }, + ); + const res2 = await diffDefault( + _, + datetime({ mode: 'date', fsp: 3 }).default(new Date('2025-05-23T12:53:53.115Z')), + `'2025-05-23 12:53:53.115'`, + ); + + const res3 = await diffDefault( + _, + datetime({ mode: 'string' }).default('2025-05-23 12:53:53.115'), + `'2025-05-23 12:53:53.115'`, + null, + { ignoreSubsequent: true }, + ); + const res4 = await diffDefault( + _, + datetime({ mode: 'string', fsp: 3 }).default('2025-05-23 12:53:53.115'), + `'2025-05-23 12:53:53.115'`, + ); + const res5 = await diffDefault( + _, + datetime({ mode: 'string', fsp: 6 }).default('2025-05-23 12:53:53.123456'), + `'2025-05-23 12:53:53.123456'`, + ); + + // database datetime without precision does not return .115 fraction + expect.soft(res1).toStrictEqual([ + 'Unexpected subsequent init:\n' + + "ALTER TABLE `table` MODIFY COLUMN `column` datetime DEFAULT '2025-05-23 12:53:53.115';", + ]); + expect.soft(res2).toStrictEqual([]); + + // database datetime without precision does not return .115 fraction + expect.soft(res3).toStrictEqual([ + 'Unexpected subsequent init:\n' + + "ALTER TABLE `table` MODIFY COLUMN `column` datetime DEFAULT '2025-05-23 12:53:53.115';", + ]); + + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); +}); + +test('time', async () => { + const res1 = await diffDefault(_, time().default('15:50:33'), `'15:50:33'`); + const res2 = await diffDefault( + _, + time({ fsp: 3 }).default('15:50:33.123'), + `'15:50:33.123'`, + ); + + const res3 = await diffDefault( + _, + time({ fsp: 6 }).default('15:50:33.123456'), + `'15:50:33.123456'`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); +}); + +test('date', async () => { + const res1 = await diffDefault(_, date({ mode: 'string' }).default('2025-05-23'), `'2025-05-23'`); + const res2 = await diffDefault(_, date({ mode: 'date' }).default(new Date('2025-05-23')), `'2025-05-23'`); + const res3 = await diffDefault( + _, + date({ mode: 'date' }).default(new Date('2025-05-23T12:53:53.115Z')), + `'2025-05-23'`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); +}); + +test('year', async () => { + const res1 = await diffDefault(_, year().default(2025), `2025`); + const res2 = await diffDefault(_, year().default(sql`2025`), `(2025)`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); +}); + +test.skip('corner cases', async () => { + await diffDefault(_, json().default({ key: 'mo",\\`}{od' }), `('{"key":"mo\\\\",\\\\\\\\\`}{od"}'))`); + await diffDefault( + _, + mysqlEnum(['sad', 'ok', 'happy', 'mo",\\`}{od']).default('mo",\\`}{od'), + `'mo",\\\\\`}{od'`, + null, + { type: `enum('sad','ok','happy','mo",\\\\\`}{od')` }, + ); + + await diffDefault(_, longtext().default('mo",\\`}{od'), `('mo",\\\`}{od')`); + + await diffDefault( + _, + longtext({ enum: ['one', 'two', 'three', 'mo",\\`}{od'] }).default('mo",\\`}{od'), + `('mo",\\\`}{od')`, + ); + + await diffDefault( + _, + text({ enum: ['one', 'two', 'three', 'mo",\\`}{od'] }).default('mo",\\`}{od'), + `('mo",\\\`}{od')`, + ); + await diffDefault(_, text().default('mo",\\`}{od'), `('mo",\\\`}{od')`); + + await diffDefault( + _, + mediumtext({ enum: ['one', 'two', 'three', 'mo",\\`}{od'] }).default('mo",\\`}{od'), + `('mo",\\\`}{od')`, + ); + await diffDefault(_, mediumtext().default('mo",\\`}{od'), `('mo",\\\`}{od')`); + + await diffDefault( + _, + tinytext({ enum: ['one', 'two', 'three', 'mo",\\`}{od'] }).default('mo",\\`}{od'), + `('mo",\\\`}{od')`, + ); + await diffDefault(_, tinytext().default('mo",\\`}{od'), `('mo",\\\`}{od')`); + + await diffDefault( + _, + varchar({ length: 15, enum: ['one', 'two', 'three', 'mo",\\`}{od'] }).default('mo",\\`}{od'), + `('mo",\\\`}{od')`, + ); + await diffDefault(_, varchar({ length: 15 }).default('mo",\\`}{od'), `('mo",\\\`}{od')`); + + await diffDefault( + _, + char({ length: 15, enum: ['one', 'two', 'three', 'mo",\\`}{od'] }).default('mo",\\`}{od'), + `('mo",\\\`}{od')`, + ); + await diffDefault(_, char({ length: 15 }).default('mo",\\`}{od'), `('mo",\\\`}{od')`); + + // raw sql for the line below: create table `table` (`column` json default ('{"key1":{"key2":"mo\\\",\\\\`}{od"}}')); + await diffDefault( + _, + json().default({ key1: { key2: 'mo",\\`}{od' } }), + `('{"key1":{"key2":"mo\\\\",\\\\\\\\\`}{od"}}')`, + ); +}); diff --git a/drizzle-kit/tests/mysql/mysql-enum.test.ts b/drizzle-kit/tests/mysql/mysql-enum.test.ts new file mode 100644 index 0000000000..dab5918ec6 --- /dev/null +++ b/drizzle-kit/tests/mysql/mysql-enum.test.ts @@ -0,0 +1,54 @@ +import { int, mysqlEnum, mysqlTable, varchar } from 'drizzle-orm/mysql-core'; +import { Decimal, parseEnum } from 'src/dialects/mysql/grammar'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, diffIntrospect, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +if (!fs.existsSync('tests/mysql/tmp')) { + fs.mkdirSync('tests/mysql/tmp', { recursive: true }); +} + +// https://github.com/drizzle-team/drizzle-orm/issues/3613 +test('enum', async () => { + const ORDER_STATUSES = [ + 'Ny', + 'Bestilling sendt', + 'Sendt til leverandør(er)', + 'Mottatt av leverandør(er)', + 'Behandlet av leverandør(er)', + 'Under behandling', + 'Noe gikk galt', + ] as const; + const schema1 = { + table: mysqlTable('table', { + status: mysqlEnum('status', ORDER_STATUSES).default('Sendt til leverandør(er)'), + }), + }; + + const { sqlStatements: st } = await diff({}, schema1, []); + const { sqlStatements: pst } = await push({ db, to: schema1 }); + + const st0: string[] = [ + "CREATE TABLE `table` (\n\t`status` enum('Ny','Bestilling sendt','Sendt til leverandør(er)','Mottatt av leverandør(er)','Behandlet av leverandør(er)','Under behandling','Noe gikk galt') DEFAULT 'Sendt til leverandør(er)'\n);\n", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/mysql-generated.test.ts b/drizzle-kit/tests/mysql/mysql-generated.test.ts similarity index 51% rename from drizzle-kit/tests/mysql-generated.test.ts rename to drizzle-kit/tests/mysql/mysql-generated.test.ts index 3531582d0b..e041097d64 100644 --- a/drizzle-kit/tests/mysql-generated.test.ts +++ b/drizzle-kit/tests/mysql/mysql-generated.test.ts @@ -1,9 +1,69 @@ import { SQL, sql } from 'drizzle-orm'; import { int, mysqlTable, text } from 'drizzle-orm/mysql-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasMysql } from './schemaDiffer'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; -test('generated as callback: add column with generated constraint', async () => { +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/2616 +test('generated as callback: create table with generated constraint #1', async () => { + const to = { + users: mysqlTable('users', { + name: text('name'), + generatedName: text('gen_name').notNull().generatedAlwaysAs( + (): SQL => sql`${to.users.name} || 'hello'`, + { mode: 'stored' }, + ), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + "CREATE TABLE `users` (\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED NOT NULL\n);\n", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// TODO +// why to use generated with literal? +// Looks like invalid use case +test.skip('generated as callback: create table with generated constraint #2', async () => { + const to = { + users: mysqlTable('users', { + name: text('name'), + generatedName: text('gen_name').notNull().generatedAlwaysAs('Default', { mode: 'stored' }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + "CREATE TABLE `users` (\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ('Default') STORED NOT NULL\n);\n", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: add column with generated constraint #1', async () => { const from = { users: mysqlTable('users', { id: int('id'), @@ -23,33 +83,93 @@ test('generated as callback: add column with generated constraint', async () => }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: add column with generated constraint #2', async () => { + const schema1 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + }), + }; + const schema2 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + "ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: add generated constraints to an exisiting columns', async () => { + const schema1 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name'), + generatedName1: text('gen_name1'), + }), + }; + const schema2 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name} || 'hello'`, + { mode: 'virtual' }, + ), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", + 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', + "ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: add generated constraint to an exisiting column as stored', async () => { @@ -74,33 +194,20 @@ test('generated as callback: add generated constraint to an exisiting column as }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'stored', - }, - columnAutoIncrement: false, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { @@ -125,34 +232,57 @@ test('generated as callback: add generated constraint to an exisiting column as }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('generated as callback: drop generated constraint', async () => { + const schema1 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name}`, + { mode: 'stored' }, + ), + generatedName1: text('gen_name1').generatedAlwaysAs( + (): SQL => sql`${schema2.users.name}`, + { mode: 'virtual' }, + ), + }), + }; + const schema2 = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name'), + generatedName1: text('gen_name1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', + 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', + 'ALTER TABLE `users` ADD `gen_name1` text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: drop generated constraint as stored', async () => { @@ -176,42 +306,14 @@ test('generated as callback: drop generated constraint as stored', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'stored', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', - ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['ALTER TABLE `users` MODIFY COLUMN `gen_name` text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: drop generated constraint as virtual', async () => { @@ -235,43 +337,21 @@ test('generated as callback: drop generated constraint as virtual', async () => }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: change generated constraint type from virtual to stored', async () => { @@ -298,34 +378,21 @@ test('generated as callback: change generated constraint type from virtual to st }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); }); test('generated as callback: change generated constraint type from stored to virtual', async () => { @@ -350,37 +417,20 @@ test('generated as callback: change generated constraint type from stored to vir }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); }); -test('generated as callback: change generated constraint', async () => { +test('generated as callback: change generated constraint #1', async () => { const from = { users: mysqlTable('users', { id: int('id'), @@ -402,34 +452,49 @@ test('generated as callback: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); +}); + +test('generated as callback: change generated constraint #2', async () => { + const schema1 = { + users: mysqlTable('users', { + id: int('id'), + gen1: text().generatedAlwaysAs((): SQL => sql`${schema1.users.id}`, { mode: 'stored' }), + gen2: text().generatedAlwaysAs((): SQL => sql`${schema1.users.id}`, { mode: 'virtual' }), + }), + }; + + const schema2 = { + users: mysqlTable('users', { + id: int('id'), + gen1: text().generatedAlwaysAs((): SQL => sql`${schema2.users.id} || 'hello'`, { mode: 'stored' }), + gen2: text().generatedAlwaysAs((): SQL => sql`${schema2.users.id} || 'hello'`, { mode: 'virtual' }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen1`;', + "ALTER TABLE `users` ADD `gen1` text GENERATED ALWAYS AS (`users`.`id` || 'hello') STORED;", + 'ALTER TABLE `users` DROP COLUMN `gen2`;', + "ALTER TABLE `users` ADD `gen2` text GENERATED ALWAYS AS (`users`.`id` || 'hello') VIRTUAL;", + ]; + expect.soft(st).toStrictEqual(st0); + expect.soft(pst).toStrictEqual([]); }); // --- @@ -454,33 +519,20 @@ test('generated as sql: add column with generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: add generated constraint to an exisiting column as stored', async () => { @@ -505,33 +557,20 @@ test('generated as sql: add generated constraint to an exisiting column as store }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'stored', - }, - columnAutoIncrement: false, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { @@ -556,34 +595,21 @@ test('generated as sql: add generated constraint to an exisiting column as virtu }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: drop generated constraint as stored', async () => { @@ -607,42 +633,18 @@ test('generated as sql: drop generated constraint as stored', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'stored', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', - ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['ALTER TABLE `users` MODIFY COLUMN `gen_name` text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: drop generated constraint as virtual', async () => { @@ -666,43 +668,21 @@ test('generated as sql: drop generated constraint as virtual', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: change generated constraint type from virtual to stored', async () => { @@ -729,34 +709,17 @@ test('generated as sql: change generated constraint type from virtual to stored' }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); }); test('generated as sql: change generated constraint type from stored to virtual', async () => { @@ -781,34 +744,17 @@ test('generated as sql: change generated constraint type from stored to virtual' }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); }); test('generated as sql: change generated constraint', async () => { @@ -833,34 +779,21 @@ test('generated as sql: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); }); // --- @@ -885,33 +818,20 @@ test('generated as string: add column with generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: add generated constraint to an exisiting column as stored', async () => { @@ -936,33 +856,20 @@ test('generated as string: add generated constraint to an exisiting column as st }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'stored', - }, - columnAutoIncrement: false, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: add generated constraint to an exisiting column as virtual', async () => { @@ -987,34 +894,21 @@ test('generated as string: add generated constraint to an exisiting column as vi }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: drop generated constraint as stored', async () => { @@ -1038,42 +932,20 @@ test('generated as string: drop generated constraint as stored', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'stored', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: drop generated constraint as virtual', async () => { @@ -1097,43 +969,21 @@ test('generated as string: drop generated constraint as virtual', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: change generated constraint type from virtual to stored', async () => { @@ -1159,34 +1009,17 @@ test('generated as string: change generated constraint type from virtual to stor }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); }); test('generated as string: change generated constraint type from stored to virtual', async () => { @@ -1209,34 +1042,17 @@ test('generated as string: change generated constraint type from stored to virtu }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); }); test('generated as string: change generated constraint', async () => { @@ -1259,32 +1075,42 @@ test('generated as string: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasMysql( - from, - to, - [], - ); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` drop column `gen_name`;', + const st0: string[] = [ + 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); +}); + +test('generated as string: with backslashes', async () => { + const to = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `'users\\\\hello'`, + ), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE TABLE \`users\` ( + \`id\` int, + \`id2\` int, + \`name\` text, + \`gen_name\` text GENERATED ALWAYS AS ('users\\\\hello') VIRTUAL +);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); diff --git a/drizzle-kit/tests/mysql/mysql-schemas.test.ts b/drizzle-kit/tests/mysql/mysql-schemas.test.ts new file mode 100644 index 0000000000..2a82b74f15 --- /dev/null +++ b/drizzle-kit/tests/mysql/mysql-schemas.test.ts @@ -0,0 +1,219 @@ +import { int, mysqlSchema, mysqlTable } from 'drizzle-orm/mysql-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +// We don't manage databases(schemas) in MySQL with Drizzle Kit +test('add schema #1', async () => { + const to = { + devSchema: mysqlSchema('dev'), + }; + + const { statements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add schema #2', async () => { + const from = { + devSchema: mysqlSchema('dev'), + }; + const to = { + devSchema: mysqlSchema('dev'), + devSchema2: mysqlSchema('dev2'), + }; + + const { statements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('delete schema #1', async () => { + const from = { + devSchema: mysqlSchema('dev'), + }; + + const { statements: st } = await diff(from, {}, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: {} }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('delete schema #2', async () => { + const from = { + devSchema: mysqlSchema('dev'), + devSchema2: mysqlSchema('dev2'), + }; + const to = { + devSchema: mysqlSchema('dev'), + }; + + const { statements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: {} }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename schema #1', async () => { + const from = { + devSchema: mysqlSchema('dev'), + }; + const to = { + devSchema2: mysqlSchema('dev2'), + }; + + const renames = ['dev->dev2']; + const { statements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename schema #2', async () => { + const from = { + devSchema: mysqlSchema('dev'), + devSchema1: mysqlSchema('dev1'), + }; + const to = { + devSchema: mysqlSchema('dev'), + devSchema2: mysqlSchema('dev2'), + }; + + const renames = ['dev->dev2']; + const { statements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table to schema #1', async () => { + const dev = mysqlSchema('dev'); + const from = {}; + const to = { + dev, + users: dev.table('users', {}), + }; + + const renames = ['dev->dev2']; + const { statements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table to schema #2', async () => { + const dev = mysqlSchema('dev'); + const from = { dev }; + const to = { + dev, + users: dev.table('users', {}), + }; + + const renames = ['dev->dev2']; + const { statements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table to schema #3', async () => { + const dev = mysqlSchema('dev'); + const from = { dev }; + const to = { + dev, + usersInDev: dev.table('users', {}), + users: mysqlTable('users', { id: int() }), + }; + + const renames = ['dev->dev2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['CREATE TABLE `users` (\n\t`id` int\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('remove table from schema #1', async () => { + const dev = mysqlSchema('dev'); + const from = { dev, users: dev.table('users', {}) }; + const to = { + dev, + }; + + const renames = ['dev->dev2']; + const { statements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('remove table from schema #2', async () => { + const dev = mysqlSchema('dev'); + const from = { dev, users: dev.table('users', {}) }; + const to = {}; + + const renames = ['dev->dev2']; + const { statements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/mysql/mysql-views.test.ts b/drizzle-kit/tests/mysql/mysql-views.test.ts new file mode 100644 index 0000000000..370c2b0abf --- /dev/null +++ b/drizzle-kit/tests/mysql/mysql-views.test.ts @@ -0,0 +1,680 @@ +import { eq, sql } from 'drizzle-orm'; +import { int, mysqlTable, mysqlView, text } from 'drizzle-orm/mysql-core'; +import { drizzle } from 'drizzle-orm/mysql2'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create view #1', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + }; + const to = { + users: users, + view: mysqlView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE ALGORITHM = undefined SQL SECURITY definer VIEW \`some_view\` AS (select \`id\` from \`users\`);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create view #2', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE ALGORITHM = merge SQL SECURITY definer VIEW \`some_view\` AS (SELECT * FROM \`users\`) WITH cascaded CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create view #3', async () => { + const users = mysqlTable('users', { + id: int().primaryKey().notNull(), + name: text(), + }); + const posts = mysqlTable('posts', { + id: int().primaryKey(), + content: text(), + userId: int().references(() => users.id), + }); + + const from = { users, posts }; + const to = { + users, + posts, + view: mysqlView('some_view').as((qb) => { + return qb.select({ userId: sql`${users.id}`.as('user'), postId: sql`${posts.id}`.as('post') }).from(users) + .leftJoin( + posts, + eq(posts.userId, users.id), + ); + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + + await db.query(`INSERT INTO \`users\` (\`id\`, \`name\`) VALUE (1, 'Alex'), (2, 'Andrew')`); + await db.query( + `INSERT INTO \`posts\` (\`id\`, \`content\`, \`userId\`) VALUE (1, 'alex-content', 1), (3, 'andrew-content', 2)`, + ); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE ALGORITHM = undefined SQL SECURITY definer VIEW \`some_view\` AS (select \`users\`.\`id\` as \`user\`, \`posts\`.\`id\` as \`post\` from \`users\` left join \`posts\` on \`posts\`.\`userId\` = \`users\`.\`id\`);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create view with existing flag', async () => { + const users = mysqlTable('users', { + id: int(), + }); + + const from = { + users: users, + }; + const to = { + users: users, + view: mysqlView('some_view', {}).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view #1', async () => { + const users = mysqlTable('users', { + id: int('id'), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const to = { users: users }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [`DROP VIEW \`some_view\`;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view #2', async () => { + const table = mysqlTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: mysqlView('view').as((qb) => qb.select().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = ['DROP VIEW \`view\`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id'), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).existing(), + }; + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const renames = ['some_view->new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = [`RENAME TABLE \`some_view\` TO \`new_some_view\`;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename view and alter meta options', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view1: mysqlView('view1', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + view2: mysqlView('view2', {}).algorithm('undefined').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('view1new', {}).sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + view2: mysqlView('view2new', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const renames = ['view1->view1new', 'view2->view2new']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + /* + UNDEFINED lets the server pick at execution time (often it still runs as a merge if the query is “mergeable”). + Specifying MERGE when it’s not possible causes MySQL to store UNDEFINED with a warning, + but the reverse (forcing UNDEFINED to overwrite MERGE) doesn’t happen via ALTER. + + https://dev.mysql.com/doc/refman/8.4/en/view-algorithms.html + */ + const st0: string[] = [ + 'RENAME TABLE `view1` TO `view1new`;', + 'RENAME TABLE `view2` TO `view2new`;', + `ALTER ALGORITHM = merge SQL SECURITY definer VIEW \`view2new\` AS SELECT * FROM \`users\` WITH cascaded CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + + const renames = ['some_view->new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add meta to view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER ALGORITHM = merge SQL SECURITY definer VIEW \`some_view\` AS SELECT * FROM \`users\` WITH cascaded CHECK OPTION;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add meta to view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).existing(), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('push: alter meta to view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER ALGORITHM = merge SQL SECURITY definer VIEW \`some_view\` AS SELECT * FROM \`users\` WITH cascaded CHECK OPTION;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('diff: alter meta to view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const st0: string[] = [ + 'ALTER ALGORITHM = merge SQL SECURITY definer VIEW \`some_view\` AS SELECT * FROM \`users\` WITH cascaded CHECK OPTION;', + ]; + expect(st).toStrictEqual(st0); +}); + +test('diff: alter meta to view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); +}); + +test('push: alter meta to view with existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('invoker') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop meta from view', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER ALGORITHM = undefined SQL SECURITY definer VIEW \`some_view\` AS SELECT * FROM \`users\`;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop meta from view existing flag', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') + .withCheckOption('cascaded').existing(), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('diff: alter view ".as" value', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const st0: string[] = [ + `CREATE OR REPLACE ALGORITHM = temptable SQL SECURITY invoker VIEW \`some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) WITH cascaded CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); +}); + +test('push: alter view ".as" value', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE OR REPLACE ALGORITHM = merge SQL SECURITY invoker VIEW \`some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) WITH cascaded CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // Do not trigger definition changes on push +}); + +test('alter view ".as"', async () => { + const table = mysqlTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: mysqlView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), + }; + + const schema2 = { + test: table, + view: mysqlView('view').as((qb) => qb.select().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'CREATE OR REPLACE ALGORITHM = undefined SQL SECURITY definer VIEW `view` AS (select `id` from `test`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // do not trigger definition changes on push +}); + +test('rename and alter view ".as" value', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), + }; + + const renames = ['some_view->new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = [ + `RENAME TABLE \`some_view\` TO \`new_some_view\`;`, + `CREATE OR REPLACE ALGORITHM = merge SQL SECURITY invoker VIEW \`new_some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) WITH cascaded CHECK OPTION;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(['RENAME TABLE \`some_view\` TO \`new_some_view\`;']); // do not trigger definition chages on push +}); + +test('set existing', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('invoker') + .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('invoker') + .withCheckOption('cascaded').existing(), + }; + + const { sqlStatements: st1 } = await diff(from, to, []); + const renames = [`some_view->new_some_view`]; + const { sqlStatements: st2 } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst1 } = await push({ db, to }); + + // TODO: revise + await _.clear(); + await push({ db, to: from }); + const { sqlStatements: pst2 } = await push({ db, to, renames }); + + const st0: string[] = [ + `DROP VIEW \`some_view\`;`, + ]; + expect(st1).toStrictEqual(st0); + expect(st2).toStrictEqual(st0); + expect(pst1).toStrictEqual(st0); + expect(pst2).toStrictEqual(st0); +}); + +test('drop existing', async () => { + const users = mysqlTable('users', { + id: int('id').primaryKey().notNull(), + }); + + const from = { + users: users, + view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('invoker').existing(), + }; + const to = { + users: users, + view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('invoker').as( + sql`SELECT * FROM ${users} WHERE ${users.id} = 1`, + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE ALGORITHM = merge SQL SECURITY invoker VIEW \`new_some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/mysql/mysql.test.ts b/drizzle-kit/tests/mysql/mysql.test.ts new file mode 100644 index 0000000000..7b10740adc --- /dev/null +++ b/drizzle-kit/tests/mysql/mysql.test.ts @@ -0,0 +1,2126 @@ +import { sql } from 'drizzle-orm'; +import { + bigint, + binary, + blob, + boolean, + char, + customType, + date, + datetime, + decimal, + double, + float, + foreignKey, + index, + int, + json, + longblob, + longtext, + mediumblob, + mediumint, + mediumtext, + mysqlEnum, + mysqlSchema, + mysqlTable, + primaryKey, + serial, + smallint, + text, + time, + timestamp, + tinyblob, + tinyint, + tinytext, + unique, + uniqueIndex, + varbinary, + varchar, + year, +} from 'drizzle-orm/mysql-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('add table #1', async () => { + const to = { + users: mysqlTable('users', { id: int() }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['CREATE TABLE `users` (\n\t`id` int\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #2', async () => { + const to = { + users: mysqlTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` serial PRIMARY KEY\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #3', async () => { + const to = { + users: mysqlTable('users', { + id: serial('id'), + test: varchar('test', { length: 1 }), + }, (t) => [ + primaryKey({ + columns: [t.id, t.test], + }), + ]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` serial,\n\t`test` varchar(1),\n\tCONSTRAINT `PRIMARY` PRIMARY KEY(`id`,`test`)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #4', async () => { + const to = { + users: mysqlTable('users', { id: int() }), + posts: mysqlTable('posts', { id: int() }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` int\n);\n', + 'CREATE TABLE `posts` (\n\t`id` int\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #5', async () => { + const schema = mysqlSchema('folder'); + const from = { + schema, + }; + + const to = { + schema, + users: schema.table('users', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #6', async () => { + const from = { + users1: mysqlTable('users1', { id: int() }), + }; + + const to = { + users2: mysqlTable('users2', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users2` (\n\t`id` int\n);\n', + 'DROP TABLE `users1`;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3539 +test('add table #7', async () => { + const from = { + users1: mysqlTable('users1', { id: int() }), + }; + + const to = { + users: mysqlTable('users', { id: int() }), + users2: mysqlTable('users2', { id: int() }), + }; + + const renames = ['users1->users2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` int\n);\n', + 'RENAME TABLE `users1` TO `users2`;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/2599 +test('drop + add table', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int().primaryKey(), + column2: int(), + }, (table) => [ + index('unique-index1').on(table.column2), + ]), + }; + + const schema2 = { + table2: mysqlTable('table2', { + column1: int().primaryKey(), + column2: int(), + }, (table) => [ + index('unique-index2').on(table.column2), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int PRIMARY KEY,\n\t`column2` int\n);\n', + 'CREATE INDEX `unique-index1` ON `table1` (`column2`);', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2 = [ + 'CREATE TABLE `table2` (\n\t`column1` int PRIMARY KEY,\n\t`column2` int\n);\n', + 'DROP TABLE `table1`;', + 'CREATE INDEX `unique-index2` ON `table2` (`column2`);', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456 +test('drop tables with fk constraint', async () => { + const table1 = mysqlTable('table1', { + column1: int().primaryKey(), + }); + const table2 = mysqlTable('table2', { + column1: int().primaryKey(), + column2: int().references(() => table1.column1), + }); + const schema1 = { table1, table2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int PRIMARY KEY\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int PRIMARY KEY,\n\t`column2` int\n);\n', + 'ALTER TABLE \`table2\` ADD CONSTRAINT `table2_column2_table1_column1_fkey` FOREIGN KEY (`column2`) REFERENCES `table1`(`column1`);', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, {}, []); + const { sqlStatements: pst2 } = await push({ db, to: {} }); + + const expectedSt2 = [ + 'ALTER TABLE `table2` DROP CONSTRAINT `table2_column2_table1_column1_fkey`;', + 'DROP TABLE `table1`;', + 'DROP TABLE `table2`;', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('add schema + table #1', async () => { + const schema = mysqlSchema('folder'); + + const to = { + schema, + users: schema.table('users', {}), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change schema with tables #1', async () => { + const schema = mysqlSchema('folder'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema2, + users: schema2.table('users', {}), + }; + + const renames = ['folder->folder2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #1', async () => { + const schema = mysqlSchema('folder'); + const from = { + schema, + users: mysqlTable('users', { id: int() }), + }; + const to = { + schema, + users: schema.table('users', { id: int() }), + }; + + const renames = ['users->folder.users']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['DROP TABLE `users`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #2', async () => { + const schema = mysqlSchema('folder'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema, + users: mysqlTable('users', { id: int() }), + }; + + const renames = ['folder.users->users']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['CREATE TABLE `users` (\n\t`id` int\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #3', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users', {}), + }; + + const renames = ['folder1.users->folder2.users']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #4', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const renames = ['folder1.users->folder2.users']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #5', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, // remove schema + users: schema1.table('users', {}), + }; + const to = { + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const renames = ['folder1.users->folder2.users']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #5', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users2', {}), // rename and move table + }; + + const renames = ['folder1.users->folder2.users2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #6', async () => { + const schema1 = mysqlSchema('folder1'); + const schema2 = mysqlSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', { id: int() }), + }; + const to = { + schema2, // rename schema + users: schema2.table('users2', { id: int() }), // rename table + }; + + const renames = ['folder1->folder2', 'folder2.users->folder2.users2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #10', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default({}), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('{}')\n);\n", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #11', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default([]), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[]')\n);\n", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #12', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default([1, 2, 3]), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[1,2,3]')\n);\n", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #13', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default({ key: 'value' }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value"}\')\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #14', async () => { + const to = { + users: mysqlTable('table', { + json: json('json').default({ + key: 'value', + arr: [1, 2, 3], + }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value","arr":[1,2,3]}\')\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/472 +// https://github.com/drizzle-team/drizzle-orm/issues/3373 +test('add table #15. timestamp + fsp + default now + on update now + fsp', async () => { + // TODO: revise: maybe .onUpdateNow should be able to get fsp from timestamp config. + // Because fsp in timestamp config and onUpdateNow config should be the same for query to run successfully. + // It might also be helpfull to add fsp field to .defaultNow config, + // since setting now() as default without specifying fsp caused an error on PlanetScale (issue 472). + const to = { + users: mysqlTable('table', { + createdAt: timestamp({ fsp: 4 }).defaultNow().onUpdateNow({ fsp: 4 }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `table` (\n\t`createdAt` timestamp(4) DEFAULT (now()) ON UPDATE CURRENT_TIMESTAMP(4)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #16. timestamp + on update now + fsp', async () => { + const to = { + users: mysqlTable('table', { + createdAt: timestamp().onUpdateNow({ fsp: 4 }), + }), + }; + + // TODO: revise: shouldn't diff also throw an error? + const { sqlStatements: st } = await diff({}, to, []); + + const st0: string[] = [ + 'CREATE TABLE `table` (\n\t`createdAt` timestamp ON UPDATE CURRENT_TIMESTAMP(4)\n);\n', + ]; + + expect(st).toStrictEqual(st0); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('add table #17. timestamp + fsp + on update now', async () => { + const to = { + users: mysqlTable('table', { + createdAt: timestamp({ fsp: 4 }).onUpdateNow(), + }), + }; + + // TODO: revise: shouldn't diff also throw an error? + const { sqlStatements: st } = await diff({}, to, []); + + const st0: string[] = [ + 'CREATE TABLE `table` (\n\t`createdAt` timestamp(4) ON UPDATE CURRENT_TIMESTAMP\n);\n', + ]; + + expect(st).toStrictEqual(st0); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/2180 +test('add table #18. serial + primary key, timestamp + default with sql``', async () => { + const to = { + table1: mysqlTable('table1', { + column1: serial().primaryKey(), + column2: timestamp().notNull().default(sql`CURRENT_TIMESTAMP`), + }), + }; + + // TODO: revise: the sql`` passed to .default() may not need parentheses + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + const expectedSt = [ + 'CREATE TABLE `table1` (\n\t`column1` serial PRIMARY KEY,\n\t`column2` timestamp NOT NULL DEFAULT (CURRENT_TIMESTAMP)\n);\n', + ]; + expect(st).toStrictEqual(expectedSt); + expect(pst).toStrictEqual(expectedSt); +}); + +test('add table #19. timestamp + default with sql``', async () => { + const to = { + table1: mysqlTable('table1', { + column1: timestamp().notNull().defaultNow().onUpdateNow(), + column2: timestamp().notNull().default(sql`(CURRENT_TIMESTAMP)`).onUpdateNow(), + // column3: timestamp().notNull().default(sql`CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP`), + }), + }; + + // TODO: revise: the sql`` passed to .default() may not need parentheses + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + const expectedSt = [ + 'CREATE TABLE `table1` (\n\t' + + '`column1` timestamp NOT NULL DEFAULT (now()) ON UPDATE CURRENT_TIMESTAMP,\n\t' + + '`column2` timestamp NOT NULL DEFAULT (CURRENT_TIMESTAMP) ON UPDATE CURRENT_TIMESTAMP\n);\n', + ]; + expect(st).toStrictEqual(expectedSt); + expect(pst).toStrictEqual(expectedSt); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/2599 +// https://github.com/drizzle-team/drizzle-orm/issues/3359 +// https://github.com/drizzle-team/drizzle-orm/issues/1413 +// https://github.com/drizzle-team/drizzle-orm/issues/3473 +// https://github.com/drizzle-team/drizzle-orm/issues/2815 +test('add table #20. table already exists; multiple pk defined', async () => { + const schema = { + table1: mysqlTable('table1', { + column1: int().autoincrement().primaryKey(), + column2: varchar({ length: 256 }).notNull().unique(), + }), + table2: mysqlTable('table2', { + column1: int().autoincrement(), + }, (table) => [ + primaryKey({ columns: [table.column1] }), + ]), + table3: mysqlTable('table3', { + column1: int(), + column2: int(), + }, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema, []); + const { sqlStatements: pst1 } = await push({ db, to: schema }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int AUTO_INCREMENT PRIMARY KEY,\n\t`column2` varchar(256) NOT NULL,' + + '\n\tCONSTRAINT `column2_unique` UNIQUE INDEX(`column2`)\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int AUTO_INCREMENT PRIMARY KEY\n);\n', + 'CREATE TABLE `table3` (\n\t`column1` int,\n\t`column2` int,\n\t' + + 'CONSTRAINT `PRIMARY` PRIMARY KEY(`column1`,`column2`)\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, schema, []); + const { sqlStatements: pst2 } = await push({ db, to: schema }); + + const expectedSt2: string[] = []; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/1742 +test('add table #21. table with hyphen in identifiers', async () => { + const schema1 = { + 'table-1': mysqlTable('table-1', { + 'column-1': int('column-1'), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table-1` (\n\t`column-1` int\n);\n', + ]; + + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + 'table-1': mysqlTable('table-1', { + 'column-1': int('column-1').notNull(), + }), + }; + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE `table-1` MODIFY COLUMN `column-1` int NOT NULL;', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/818 +test('add table #22. custom type; default', async () => { + interface Semver { + major: number; + minor: number; + patch: number; + } + const semver = customType<{ + data: Semver; + driverData: string; + config: { length: number }; + configRequired: true; + }>({ + dataType(config) { + return `varchar(${config.length})`; + }, + fromDriver(value: string): Semver { + const [major, minor, patch] = value.split('.'); + if (!major || !minor || !patch) { + throw new Error(`Invalid semver: ${value}`); + } + return { + major: parseInt(major), + minor: parseInt(minor), + patch: parseInt(patch), + }; + }, + toDriver(value: Semver): string { + return `${value.major}.${value.minor}.${value.patch}`; + }, + }); + const schema = { + table1: mysqlTable('table1', { + column1: semver({ length: 12 }).default({ major: 0, minor: 0, patch: 0 }), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema, []); + const { sqlStatements: pst1 } = await push({ db, to: schema }); + const expectedSt1 = [ + "CREATE TABLE `table1` (\n\t`column1` varchar(12) DEFAULT '0.0.0'\n);\n", + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, schema, []); + const { sqlStatements: pst2 } = await push({ db, to: schema }); + + const expectedSt2: string[] = []; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/364 +test('add column #1. timestamp + fsp + on update now + fsp', async () => { + const from = { + users: mysqlTable('table', { + id: int(), + }), + }; + const to = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp({ fsp: 4 }).onUpdateNow({ fsp: 4 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` ADD `createdAt` timestamp(4) ON UPDATE CURRENT_TIMESTAMP(4);', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add column #2. timestamp + on update now + fsp', async () => { + const from = { + users: mysqlTable('table', { + id: int(), + }), + }; + const to = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp().onUpdateNow({ fsp: 4 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + + const st0: string[] = [ + 'ALTER TABLE `table` ADD `createdAt` timestamp ON UPDATE CURRENT_TIMESTAMP(4);', + ]; + + expect(st).toStrictEqual(st0); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('add column #3. timestamp + fsp + on update now', async () => { + const from = { + users: mysqlTable('table', { + id: int(), + }), + }; + const to = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp({ fsp: 4 }).onUpdateNow(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + + const st0: string[] = [ + 'ALTER TABLE `table` ADD `createdAt` timestamp(4) ON UPDATE CURRENT_TIMESTAMP;', + ]; + + expect(st).toStrictEqual(st0); + await expect(push({ db, to })).rejects.toThrowError(); +}); + +test('modify on update now fsp #1', async () => { + const from = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp({ fsp: 4 }).onUpdateNow({ fsp: 4 }), + }), + }; + const to = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp().onUpdateNow(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + const { sqlStatements: pst } = await diff(from, to, []); + + const st0: string[] = [ + 'ALTER TABLE `table` MODIFY COLUMN `createdAt` timestamp ON UPDATE CURRENT_TIMESTAMP;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('modify on update now fsp #2', async () => { + const from = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp().onUpdateNow(), + }), + }; + const to = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp({ fsp: 4 }).onUpdateNow({ fsp: 4 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` MODIFY COLUMN `createdAt` timestamp(4) ON UPDATE CURRENT_TIMESTAMP(4);', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('modify on update now fsp #3', async () => { + const from = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp({ fsp: 2 }).onUpdateNow({ fsp: 2 }), + }), + }; + const to = { + users: mysqlTable('table', { + id: int(), + createdAt: timestamp({ fsp: 4 }).onUpdateNow({ fsp: 4 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` MODIFY COLUMN `createdAt` timestamp(4) ON UPDATE CURRENT_TIMESTAMP(4);', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/998 +test('drop index', async () => { + const from = { + users: mysqlTable('table', { + name: varchar({ length: 10 }), + }, (t) => [ + index('name_idx').on(t.name), + ]), + }; + + const to = { + users: mysqlTable('table', { + name: varchar({ length: 10 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['DROP INDEX `name_idx` ON `table`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop unique constraint', async () => { + const from = { + users: mysqlTable('table', { + name: varchar({ length: 10 }), + }, (t) => [unique('name_uq').on(t.name)]), + }; + + const to = { + users: mysqlTable('table', { + name: varchar({ length: 10 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'DROP INDEX `name_uq` ON `table`;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/1888 +test('add table with indexes', async () => { + const from = {}; + + const to = { + users: mysqlTable('users', { + id: serial().primaryKey(), + name: varchar({ length: 100 }), + email: varchar({ length: 100 }), + column4: varchar({ length: 100 }), + }, (t) => [ + uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), + index('indexExpr').on(sql`(lower(${t.email}))`), + index('indexExprMultiple').on(sql`(lower(${t.email}))`, sql`(lower(${t.email}))`), + uniqueIndex('uniqueCol').on(t.email), + index('indexCol').on(t.email), + index('indexColMultiple').on(t.email, t.name), + index('indexColExpr').on(sql`(lower(${t.email}))`, t.email), + index('indexCol4Hash').on(sql`(lower(${t.column4}))`).using('hash'), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE TABLE \`users\` (\n\t\`id\` serial PRIMARY KEY,` + + `\n\t\`name\` varchar(100),\n\t\`email\` varchar(100),\n\t\`column4\` varchar(100),` + + `\n\tCONSTRAINT \`uniqueExpr\` UNIQUE INDEX((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE INDEX(\`email\`)\n);\n`, + 'CREATE INDEX `indexExpr` ON `users` ((lower(`email`)));', + 'CREATE INDEX `indexExprMultiple` ON `users` ((lower(`email`)),(lower(`email`)));', + 'CREATE INDEX `indexCol` ON `users` (`email`);', + 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`name`);', + 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);', + 'CREATE INDEX `indexCol4Hash` ON `users` ((lower(`column4`)));', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/2122 +test('varchar and text default values escape single quotes', async (t) => { + const schema1 = { + table: mysqlTable('table', { + id: serial('id').primaryKey(), + }), + }; + + const schema2 = { + table: mysqlTable('table', { + id: serial('id').primaryKey(), + enum: mysqlEnum('enum', ["escape's quotes", "escape's quotes 2"]).default("escape's quotes"), + text: text('text').default("escape's quotes"), + varchar: varchar('varchar', { length: 255 }).default("escape's quotes"), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + "ALTER TABLE `table` ADD `enum` enum('escape''s quotes','escape''s quotes 2') DEFAULT 'escape''s quotes';", + "ALTER TABLE `table` ADD `text` text DEFAULT ('escape''s quotes');", + "ALTER TABLE `table` ADD `varchar` varchar(255) DEFAULT 'escape''s quotes';", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('default on serail', async (t) => { + const schema1 = { + table1: mysqlTable('table1', { + column1: serial().default(1), + }), + }; + + const { ddl1Err, ddl2Err, mappedErrors1, mappedErrors2 } = await diff({}, schema1, []); + expect(ddl1Err).toStrictEqual([]); + expect(ddl2Err).toStrictEqual([ + { + column: 'column1', + table: 'table1', + type: 'column_unsupported_default_on_autoincrement', + }, + ]); + await expect(push({ db, to: schema1 })).rejects.toThrowError(); +}); + +test('default on autoincrement', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int().autoincrement().default(1), + }), + }; + + const { ddl1Err, ddl2Err, mappedErrors1, mappedErrors2 } = await diff({}, schema1, []); + expect(ddl1Err).toStrictEqual([]); + expect(ddl2Err).toStrictEqual([ + { + column: 'column1', + table: 'table1', + type: 'column_unsupported_default_on_autoincrement', + }, + ]); + await expect(push({ db, to: schema1 })).rejects.toThrowError(); +}); + +test('composite primary key #1', async () => { + const from = {}; + const to = { + table: mysqlTable('works_to_creators', { + workId: int().notNull(), + creatorId: int().notNull(), + classification: varchar({ length: 10 }).notNull(), + }, (t) => [ + primaryKey({ + columns: [t.workId, t.creatorId, t.classification], + }), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `works_to_creators` (\n\t`workId` int NOT NULL,\n\t`creatorId` int NOT NULL,\n\t`classification` varchar(10) NOT NULL,\n\tCONSTRAINT `PRIMARY` PRIMARY KEY(`workId`,`creatorId`,`classification`)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('composite primary key #2', async () => { + const schema1 = {}; + + const schema2 = { + table: mysqlTable('table', { + col1: int('col1').notNull(), + col2: int('col2').notNull(), + }, (t) => [ + primaryKey({ + columns: [t.col1, t.col2], + }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'CREATE TABLE `table` (\n\t`col1` int NOT NULL,\n\t`col2` int NOT NULL,\n\tCONSTRAINT `PRIMARY` PRIMARY KEY(`col1`,`col2`)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table with composite primary key', async () => { + const productsCategoriesTable = (tableName: string) => { + return mysqlTable(tableName, { + productId: varchar('product_id', { length: 10 }).notNull(), + categoryId: varchar('category_id', { length: 10 }).notNull(), + }, (t) => [ + primaryKey({ + columns: [t.productId, t.categoryId], + }), + ]); + }; + + const schema1 = { + table: productsCategoriesTable('products_categories'), + }; + const schema2 = { + test: productsCategoriesTable('products_to_categories'), + }; + + const renames = ['products_categories->products_to_categories']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'RENAME TABLE `products_categories` TO `products_to_categories`;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/367 +test('optional db aliases (snake case)', async () => { + const from = {}; + + const t1 = mysqlTable('t1', { + t1Id1: int().notNull().primaryKey(), + t1Col2: int().notNull(), + t1Col3: int().notNull(), + t2Ref: bigint({ mode: 'number', unsigned: true }).references(() => t2.t2Id), + t1Uni: int().notNull(), + t1UniIdx: int().notNull(), + t1Idx: int().notNull(), + }, (table) => [ + unique('t1_uni').on(table.t1Uni), + uniqueIndex('t1_uni_idx').on(table.t1UniIdx), + index('t1_idx').on(table.t1Idx), + foreignKey({ + columns: [table.t1Col2, table.t1Col3], + foreignColumns: [t3.t3Id1, t3.t3Id2], + }), + ]); + + const t2 = mysqlTable('t2', { + t2Id: serial().primaryKey(), + }); + + const t3 = mysqlTable('t3', { + t3Id1: int(), + t3Id2: int(), + }, (table) => [primaryKey({ + columns: [table.t3Id1, table.t3Id2], + })]); + + const to = { t1, t2, t3 }; + + const casing = 'snake_case'; + const { sqlStatements: st } = await diff(from, to, [], casing); + const { sqlStatements: pst } = await push({ db, to, casing }); + + const st0: string[] = [ + `CREATE TABLE \`t1\` ( + \`t1_id1\` int PRIMARY KEY, + \`t1_col2\` int NOT NULL, + \`t1_col3\` int NOT NULL, + \`t2_ref\` bigint unsigned, + \`t1_uni\` int NOT NULL, + \`t1_uni_idx\` int NOT NULL, + \`t1_idx\` int NOT NULL, + CONSTRAINT \`t1_uni\` UNIQUE INDEX(\`t1_uni\`), + CONSTRAINT \`t1_uni_idx\` UNIQUE INDEX(\`t1_uni_idx\`) +);\n`, + `CREATE TABLE \`t2\` (\n\t\`t2_id\` serial PRIMARY KEY\n);\n`, + `CREATE TABLE \`t3\` ( + \`t3_id1\` int, + \`t3_id2\` int, + CONSTRAINT \`PRIMARY\` PRIMARY KEY(\`t3_id1\`,\`t3_id2\`) +);\n`, + `CREATE INDEX \`t1_idx\` ON \`t1\` (\`t1_idx\`);`, + 'ALTER TABLE `t1` ADD CONSTRAINT `t1_t2_ref_t2_t2_id_fkey` FOREIGN KEY (`t2_ref`) REFERENCES `t2`(`t2_id`);', + 'ALTER TABLE `t1` ADD CONSTRAINT `t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fkey` FOREIGN KEY (`t1_col2`,`t1_col3`) REFERENCES `t3`(`t3_id1`,`t3_id2`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('optional db aliases (camel case)', async () => { + const from = {}; + + const t1 = mysqlTable('t1', { + t1_id1: int().notNull().primaryKey(), + t1_col2: int().notNull(), + t1_col3: int().notNull(), + t2_ref: bigint({ mode: 'number', unsigned: true }).references(() => t2.t2_id), + t1_uni: int().notNull(), + t1_uni_idx: int().notNull(), + t1_idx: int().notNull(), + }, (table) => [ + unique('t1Uni').on(table.t1_uni), + uniqueIndex('t1UniIdx').on(table.t1_uni_idx), + index('t1Idx').on(table.t1_idx), + foreignKey({ + columns: [table.t1_col2, table.t1_col3], + foreignColumns: [t3.t3_id1, t3.t3_id2], + }), + ]); + + const t2 = mysqlTable('t2', { + t2_id: serial().primaryKey(), + }); + + const t3 = mysqlTable('t3', { + t3_id1: int(), + t3_id2: int(), + }, (table) => [primaryKey({ + columns: [table.t3_id1, table.t3_id2], + })]); + + const to = { + t1, + t2, + t3, + }; + + const casing = 'camelCase'; + const { sqlStatements: st } = await diff(from, to, [], casing); + const { sqlStatements: pst } = await push({ db, to, casing }); + + const st0: string[] = [ + `CREATE TABLE \`t1\` (\n\t\`t1Id1\` int PRIMARY KEY,\n\t\`t1Col2\` int NOT NULL,\n\t\`t1Col3\` int NOT NULL,\n` + + `\t\`t2Ref\` bigint unsigned,\n\t\`t1Uni\` int NOT NULL,\n\t\`t1UniIdx\` int NOT NULL,\n\t\`t1Idx\` int NOT NULL,\n` + + `\tCONSTRAINT \`t1Uni\` UNIQUE INDEX(\`t1Uni\`),\n` + + `\tCONSTRAINT \`t1UniIdx\` UNIQUE INDEX(\`t1UniIdx\`)\n` + + `);\n`, + `CREATE TABLE \`t2\` (\n\t\`t2Id\` serial PRIMARY KEY\n);\n`, + `CREATE TABLE \`t3\` (\n\t\`t3Id1\` int,\n\t\`t3Id2\` int,\n\tCONSTRAINT \`PRIMARY\` PRIMARY KEY(\`t3Id1\`,\`t3Id2\`)\n);\n`, + 'CREATE INDEX `t1Idx` ON `t1` (`t1Idx`);', + 'ALTER TABLE `t1` ADD CONSTRAINT `t1_t2Ref_t2_t2Id_fkey` FOREIGN KEY (`t2Ref`) REFERENCES `t2`(`t2Id`);', + 'ALTER TABLE `t1` ADD CONSTRAINT `t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fkey` FOREIGN KEY (`t1Col2`,`t1Col3`) REFERENCES `t3`(`t3Id1`,`t3Id2`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add+drop unique', async () => { + const state0 = {}; + const state1 = { + users: mysqlTable('users', { + id: int().unique(), + }), + }; + const state2 = { + users: mysqlTable('users', { + id: int(), + }), + }; + + // TODO: should I rewrite this test as multistep test? + // const { sqlStatements: st1, next: n1 } = await diff(state0, state1, []); + const { sqlStatements: st1 } = await diff(state0, state1, []); + const { sqlStatements: pst1 } = await push({ db, to: state1 }); + + const { sqlStatements: st2 } = await diff(state1, state2, []); + const { sqlStatements: pst2 } = await push({ db, to: state2 }); + + const st01: string[] = [ + 'CREATE TABLE `users` (\n\t`id` int,\n\tCONSTRAINT `id_unique` UNIQUE INDEX(`id`)\n);\n', + ]; + expect(st1).toStrictEqual(st01); + expect(pst1).toStrictEqual(st01); + + const st02: string[] = [ + 'DROP INDEX `id_unique` ON `users`;', + ]; + expect(st2).toStrictEqual(st02); + expect(pst2).toStrictEqual(st02); +}); + +test('fk #1', async () => { + const users = mysqlTable('users', { + id: int().unique(), + }); + const to = { + users, + places: mysqlTable('places', { + id: int(), + ref: int().references(() => users.id), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` int,\n\tCONSTRAINT `id_unique` UNIQUE INDEX(`id`)\n);\n', + 'CREATE TABLE `places` (\n\t`id` int,\n\t`ref` int\n);\n', + 'ALTER TABLE `places` ADD CONSTRAINT `places_ref_users_id_fkey` FOREIGN KEY (`ref`) REFERENCES `users`(`id`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/367 +test('fk #2', async () => { + const table1 = mysqlTable('table1', { + column1: serial().primaryKey(), + }); + const to = { + table1, + table2: mysqlTable('table2', { + column1: serial().primaryKey(), + column2: bigint({ mode: 'number', unsigned: true }).references(() => table1.column1).notNull(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `table1` (\n\t`column1` serial PRIMARY KEY\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` serial PRIMARY KEY,\n\t`column2` bigint unsigned NOT NULL\n);\n', + 'ALTER TABLE `table2` ADD CONSTRAINT `table2_column2_table1_column1_fkey` FOREIGN KEY (`column2`) REFERENCES `table1`(`column1`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table with ts enum', async () => { + enum Test { + value = 'value', + } + const to = { + users: mysqlTable('users', { + enum: mysqlEnum(Test), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ["CREATE TABLE `users` (\n\t`enum` enum('value')\n);\n"]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('all types', async () => { + const schema1 = { + allBigInts: mysqlTable('all_big_ints', { + simple: bigint('simple', { mode: 'number' }), + columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(), + columnDefault: bigint('column_default', { mode: 'number' }).default(12), + columnDefaultSql: bigint('column_default_sql', { mode: 'number' }).default(12), + }), + allBools: mysqlTable('all_bools', { + simple: tinyint('simple'), + columnNotNull: tinyint('column_not_null').notNull(), + columnDefault: tinyint('column_default').default(1), + }), + allChars: mysqlTable('all_chars', { + simple: char('simple', { length: 1 }), + columnNotNull: char('column_not_null', { length: 45 }).notNull(), + // columnDefault: char("column_default", { length: 1 }).default("h"), + columnDefaultSql: char('column_default_sql', { length: 1 }).default('h'), + }), + allDateTimes: mysqlTable('all_date_times', { + simple: datetime('simple', { mode: 'string', fsp: 1 }), + columnNotNull: datetime('column_not_null', { mode: 'string' }).notNull(), + columnDefault: datetime('column_default', { mode: 'string' }).default('2023-03-01 14:05:29'), + }), + allDates: mysqlTable('all_dates', { + simple: date('simple', { mode: 'string' }), + column_not_null: date('column_not_null', { mode: 'string' }).notNull(), + column_default: date('column_default', { mode: 'string' }).default('2023-03-01'), + }), + allDecimals: mysqlTable('all_decimals', { + simple: decimal('simple', { precision: 1, scale: 0 }), + columnNotNull: decimal('column_not_null', { precision: 45, scale: 3 }).notNull(), + columnDefault: decimal('column_default', { precision: 10, scale: 0 }).default('100'), + columnDefaultSql: decimal('column_default_sql', { precision: 10, scale: 0 }).default('101'), + }), + + allDoubles: mysqlTable('all_doubles', { + simple: double('simple'), + columnNotNull: double('column_not_null').notNull(), + columnDefault: double('column_default').default(100), + columnDefaultSql: double('column_default_sql').default(101), + }), + + allEnums: mysqlTable('all_enums', { + simple: mysqlEnum('simple', ['hi', 'hello']), + }), + + allEnums1: mysqlTable('all_enums1', { + simple: mysqlEnum('simple', ['hi', 'hello']).default('hi'), + }), + + allFloats: mysqlTable('all_floats', { + columnNotNull: float('column_not_null').notNull(), + columnDefault: float('column_default').default(100), + columnDefaultSql: float('column_default_sql').default(101), + }), + + allInts: mysqlTable('all_ints', { + simple: int('simple'), + columnNotNull: int('column_not_null').notNull(), + columnDefault: int('column_default').default(100), + columnDefaultSql: int('column_default_sql').default(101), + }), + + allIntsRef: mysqlTable('all_ints_ref', { + simple: int('simple'), + columnNotNull: int('column_not_null').notNull(), + columnDefault: int('column_default').default(100), + columnDefaultSql: int('column_default_sql').default(101), + }), + + allJsons: mysqlTable('all_jsons', { + columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: json('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + foo: 'bar', + fe: 23, + }), + column: json('column'), + }), + + allMInts: mysqlTable('all_m_ints', { + simple: mediumint('simple'), + columnNotNull: mediumint('column_not_null').notNull(), + columnDefault: mediumint('column_default').default(100), + columnDefaultSql: mediumint('column_default_sql').default(101), + }), + + allReals: mysqlTable('all_reals', { + simple: double('simple', { precision: 5, scale: 2 }), + columnNotNull: double('column_not_null').notNull(), + columnDefault: double('column_default').default(100), + columnDefaultSql: double('column_default_sql').default(101), + }), + + allSInts: mysqlTable('all_s_ints', { + simple: smallint('simple'), + columnNotNull: smallint('column_not_null').notNull(), + columnDefault: smallint('column_default').default(100), + columnDefaultSql: smallint('column_default_sql').default(101), + }), + + allSmallSerials: mysqlTable('all_small_serials', { + columnAll: serial('column_all').primaryKey().notNull(), + }), + + allTInts: mysqlTable('all_t_ints', { + simple: tinyint('simple'), + columnNotNull: tinyint('column_not_null').notNull(), + columnDefault: tinyint('column_default').default(10), + columnDefaultSql: tinyint('column_default_sql').default(11), + }), + + allTexts: mysqlTable('all_texts', { + simple: text('simple'), + columnNotNull: text('column_not_null').notNull(), + columnDefault: text('column_default').default('hello'), + columnDefaultSql: text('column_default_sql').default('hello'), + }), + + allTimes: mysqlTable('all_times', { + simple: time('simple', { fsp: 1 }), + columnNotNull: time('column_not_null').notNull(), + columnDefault: time('column_default').default('22:12:12'), + }), + + allTimestamps: mysqlTable('all_timestamps', { + columnDateNow: timestamp('column_date_now', { fsp: 1, mode: 'string' }).default(sql`(now())`), + columnAll: timestamp('column_all', { mode: 'string' }) + .default('2023-03-01 14:05:29') + .notNull(), + column: timestamp('column', { mode: 'string' }).default('2023-02-28 16:18:31'), + }), + + allVarChars: mysqlTable('all_var_chars', { + simple: varchar('simple', { length: 100 }), + columnNotNull: varchar('column_not_null', { length: 45 }).notNull(), + columnDefault: varchar('column_default', { length: 100 }).default('hello'), + columnDefaultSql: varchar('column_default_sql', { length: 100 }).default('hello'), + }), + + allVarbinaries: mysqlTable('all_varbinaries', { + simple: varbinary('simple', { length: 100 }), + columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(), + columnDefault: varbinary('column_default', { length: 12 }).default(sql`(uuid_to_bin(uuid()))`), + }), + + allYears: mysqlTable('all_years', { + simple: year('simple'), + columnNotNull: year('column_not_null').notNull(), + columnDefault: year('column_default').default(2022), + }), + + binafry: mysqlTable('binary', { + simple: binary('simple', { length: 1 }), + columnNotNull: binary('column_not_null', { length: 1 }).notNull(), + columnDefault: binary('column_default', { length: 12 }).default(sql`(uuid_to_bin(uuid()))`), + }), + + allTinyBlobs: mysqlTable('all_tiny_blobs', { + simple: tinyblob('simple'), + columnNotNull: tinyblob('column_not_null').notNull(), + columnDefault: tinyblob('column_default').default(Buffer.from('hello')), + columnDefaultSql: tinyblob('column_default_sql').default(sql`'hello'`), + stringSimple: tinyblob('string_simple', { mode: 'string' }), + stringColumnNotNull: tinyblob('string_column_not_null', { mode: 'string' }).notNull(), + stringColumnDefault: tinyblob('string_column_default', { mode: 'string' }).default('hello'), + stringColumnDefaultSql: tinyblob('string_column_default_sql', { mode: 'string' }).default(sql`'hello'`), + }), + allBlobs: mysqlTable('all_blobs', { + simple: blob('simple'), + columnNotNull: blob('column_not_null').notNull(), + columnDefault: blob('column_default').default(Buffer.from('hello')), + columnDefaultSql: blob('column_default_sql').default(sql`'hello'`), + stringSimple: blob('string_simple', { mode: 'string' }), + stringColumnNotNull: blob('string_column_not_null', { mode: 'string' }).notNull(), + stringColumnDefault: blob('string_column_default', { mode: 'string' }).default('hello'), + stringColumnDefaultSql: blob('string_column_default_sql', { mode: 'string' }).default(sql`('hello')`), + }), + allMediumBlobs: mysqlTable('all_medium_blobs', { + simple: mediumblob('simple'), + columnNotNull: mediumblob('column_not_null').notNull(), + columnDefault: mediumblob('column_default').default(Buffer.from('hello')), + columnDefaultSql: mediumblob('column_default_sql').default(sql`'hello'`), + stringSimple: mediumblob('string_simple', { mode: 'string' }), + stringColumnNotNull: mediumblob('string_column_not_null', { mode: 'string' }).notNull(), + stringColumnDefault: mediumblob('string_column_default', { mode: 'string' }).default('hello'), + stringColumnDefaultSql: mediumblob('string_column_default_sql', { mode: 'string' }).default(sql`'hello'`), + }), + allLongBlobs: mysqlTable('all_long_blobs', { + simple: longblob('simple'), + columnNotNull: longblob('column_not_null').notNull(), + columnDefault: longblob('column_default').default(Buffer.from('hello')), + columnDefaultSql: longblob('column_default_sql').default(sql`'hello'`), + stringSimple: longblob('string_simple', { mode: 'string' }), + stringColumnNotNull: longblob('string_column_not_null', { mode: 'string' }).notNull(), + stringColumnDefault: longblob('string_column_default', { mode: 'string' }).default('hello'), + stringColumnDefaultSql: longblob('string_column_default_sql', { mode: 'string' }).default(sql`'hello'`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema1, []); + + await push({ db, to: schema1 }); + const { sqlStatements: sbsqSt } = await push({ db, to: schema1 }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(sbsqSt).toStrictEqual(st0); +}); + +test('drop primary key', async () => { + const from = { + table: mysqlTable('table', { + id: int().primaryKey(), + }), + }; + const to = { + table: mysqlTable('table', { + id: int(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` DROP PRIMARY KEY;', + /* + when we drop pk from the column - we expect implicit not null constraint + to be dropped, though it's not. Thus we need to not only drop pk, + but a not null constraint too. + */ + 'ALTER TABLE `table` MODIFY COLUMN `id` int;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test(`create table with char set and collate`, async () => { + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).charSet('big5').collate('big5_bin'), + name2: char('name2').charSet('big5').collate('big5_bin'), + name3: text('name3').charSet('big5').collate('big5_bin'), + name4: tinytext('name4').charSet('big5').collate('big5_bin'), + name5: mediumtext('name5').charSet('big5').collate('big5_bin'), + name6: longtext('name6').charSet('big5').collate('big5_bin'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5').collate('big5_bin'), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE TABLE \`table\` ( + \`id\` int, + \`name1\` varchar(1) CHARACTER SET big5 COLLATE big5_bin, + \`name2\` char CHARACTER SET big5 COLLATE big5_bin, + \`name3\` text CHARACTER SET big5 COLLATE big5_bin, + \`name4\` tinytext CHARACTER SET big5 COLLATE big5_bin, + \`name5\` mediumtext CHARACTER SET big5 COLLATE big5_bin, + \`name6\` longtext CHARACTER SET big5 COLLATE big5_bin, + \`test_enum\` enum('1','2') CHARACTER SET big5 COLLATE big5_bin +);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test(`create table with char set and collate with default and not null`, async () => { + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 15 }).charSet('big5').collate('big5_bin').notNull().default('hey'), + name2: char('name2', { length: 10 }).charSet('big5').collate('big5_bin').notNull().default('hey'), + name3: text('name3').charSet('big5').collate('big5_bin').notNull().default('hey'), + name4: tinytext('name4').charSet('big5').collate('big5_bin').notNull().default('hey'), + name5: mediumtext('name5').charSet('big5').collate('big5_bin').notNull().default('hey'), + name6: longtext('name6').charSet('big5').collate('big5_bin').notNull().default('hey'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5').collate('big5_bin').notNull().default('1'), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE TABLE \`table\` ( + \`id\` int, + \`name1\` varchar(15) CHARACTER SET big5 COLLATE big5_bin NOT NULL DEFAULT 'hey', + \`name2\` char(10) CHARACTER SET big5 COLLATE big5_bin NOT NULL DEFAULT 'hey', + \`name3\` text CHARACTER SET big5 COLLATE big5_bin NOT NULL DEFAULT ('hey'), + \`name4\` tinytext CHARACTER SET big5 COLLATE big5_bin NOT NULL DEFAULT ('hey'), + \`name5\` mediumtext CHARACTER SET big5 COLLATE big5_bin NOT NULL DEFAULT ('hey'), + \`name6\` longtext CHARACTER SET big5 COLLATE big5_bin NOT NULL DEFAULT ('hey'), + \`test_enum\` enum('1','2') CHARACTER SET big5 COLLATE big5_bin NOT NULL DEFAULT '1' +);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test(`add column with char set and collate`, async () => { + const from = { + table: mysqlTable('table', { + id: int(), + }), + }; + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).charSet('big5').collate('big5_bin'), + name2: char('name2').charSet('big5').collate('big5_bin'), + name3: text('name3').charSet('big5').collate('big5_bin'), + name4: tinytext('name4').charSet('big5').collate('big5_bin'), + name5: mediumtext('name5').charSet('big5').collate('big5_bin'), + name6: longtext('name6').charSet('big5').collate('big5_bin'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5').collate('big5_bin'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` ADD `name1` varchar(1) CHARACTER SET big5 COLLATE big5_bin;', + 'ALTER TABLE `table` ADD `name2` char CHARACTER SET big5 COLLATE big5_bin;', + 'ALTER TABLE `table` ADD `name3` text CHARACTER SET big5 COLLATE big5_bin;', + 'ALTER TABLE `table` ADD `name4` tinytext CHARACTER SET big5 COLLATE big5_bin;', + 'ALTER TABLE `table` ADD `name5` mediumtext CHARACTER SET big5 COLLATE big5_bin;', + 'ALTER TABLE `table` ADD `name6` longtext CHARACTER SET big5 COLLATE big5_bin;', + "ALTER TABLE `table` ADD `test_enum` enum('1','2') CHARACTER SET big5 COLLATE big5_bin;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test(`update char set and collate`, async () => { + const from = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).charSet('big5').collate('big5_bin'), + name2: char('name2').charSet('big5').collate('big5_bin'), + name3: text('name3').charSet('big5').collate('big5_bin'), + name4: tinytext('name4').charSet('big5').collate('big5_bin'), + name5: mediumtext('name5').charSet('big5').collate('big5_bin'), + name6: longtext('name6').charSet('big5').collate('big5_bin'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5').collate('big5_bin'), + }), + }; + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).charSet('cp1250').collate('cp1250_bin'), + name2: char('name2').charSet('cp1250').collate('cp1250_bin'), + name3: text('name3').charSet('cp1250').collate('cp1250_bin'), + name4: tinytext('name4').charSet('cp1250').collate('cp1250_bin'), + name5: mediumtext('name5').charSet('cp1250').collate('cp1250_bin'), + name6: longtext('name6').charSet('cp1250').collate('cp1250_bin'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('cp1250').collate('cp1250_bin'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` MODIFY COLUMN `name1` varchar(1) CHARACTER SET cp1250 COLLATE cp1250_bin;', + 'ALTER TABLE `table` MODIFY COLUMN `name2` char CHARACTER SET cp1250 COLLATE cp1250_bin;', + 'ALTER TABLE `table` MODIFY COLUMN `name3` text CHARACTER SET cp1250 COLLATE cp1250_bin;', + 'ALTER TABLE `table` MODIFY COLUMN `name4` tinytext CHARACTER SET cp1250 COLLATE cp1250_bin;', + 'ALTER TABLE `table` MODIFY COLUMN `name5` mediumtext CHARACTER SET cp1250 COLLATE cp1250_bin;', + 'ALTER TABLE `table` MODIFY COLUMN `name6` longtext CHARACTER SET cp1250 COLLATE cp1250_bin;', + "ALTER TABLE `table` MODIFY COLUMN `test_enum` enum('1','2') CHARACTER SET cp1250 COLLATE cp1250_bin;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test(`update collate`, async () => { + const from = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).charSet('big5').collate('big5_bin'), + name2: char('name2').charSet('big5').collate('big5_bin'), + name3: text('name3').charSet('big5').collate('big5_bin'), + name4: tinytext('name4').charSet('big5').collate('big5_bin'), + name5: mediumtext('name5').charSet('big5').collate('big5_bin'), + name6: longtext('name6').charSet('big5').collate('big5_bin'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5').collate('big5_bin'), + }), + }; + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).charSet('big5').collate('big5_chinese_ci'), + name2: char('name2').charSet('big5').collate('big5_chinese_ci'), + name3: text('name3').charSet('big5').collate('big5_chinese_ci'), + name4: tinytext('name4').charSet('big5').collate('big5_chinese_ci'), + name5: mediumtext('name5').charSet('big5').collate('big5_chinese_ci'), + name6: longtext('name6').charSet('big5').collate('big5_chinese_ci'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5').collate('big5_chinese_ci'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` MODIFY COLUMN `name1` varchar(1) CHARACTER SET big5 COLLATE big5_chinese_ci;', + 'ALTER TABLE `table` MODIFY COLUMN `name2` char CHARACTER SET big5 COLLATE big5_chinese_ci;', + 'ALTER TABLE `table` MODIFY COLUMN `name3` text CHARACTER SET big5 COLLATE big5_chinese_ci;', + 'ALTER TABLE `table` MODIFY COLUMN `name4` tinytext CHARACTER SET big5 COLLATE big5_chinese_ci;', + 'ALTER TABLE `table` MODIFY COLUMN `name5` mediumtext CHARACTER SET big5 COLLATE big5_chinese_ci;', + 'ALTER TABLE `table` MODIFY COLUMN `name6` longtext CHARACTER SET big5 COLLATE big5_chinese_ci;', + "ALTER TABLE `table` MODIFY COLUMN `test_enum` enum('1','2') CHARACTER SET big5 COLLATE big5_chinese_ci;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test(`push-push: only char set is specified (default collation used for char set)`, async () => { + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).charSet('big5'), + name2: char('name2').charSet('big5'), + name3: text('name3').charSet('big5'), + name4: tinytext('name4').charSet('big5'), + name5: mediumtext('name5').charSet('big5'), + name6: longtext('name6').charSet('big5'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5'), + }), + }; + + await push({ db, to }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(pst).toStrictEqual(st0); +}); + +test(`push-push: only collation is specified (char set that is linked to this collation used)`, async () => { + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).collate('utf8mb3_slovak_ci'), + name2: char('name2').collate('ascii_bin'), + name3: text('name3').collate('cp1250_general_ci'), + name4: tinytext('name4').collate('cp1256_bin'), + name5: mediumtext('name5').collate('koi8u_bin'), + name6: longtext('name6').collate('utf16_danish_ci'), + name7: mysqlEnum('test_enum', ['1', '2']).collate('utf16_danish_ci'), + }), + }; + + await push({ db, to }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(pst).toStrictEqual(st0); +}); + +test(`push-push: no collation + no char set (db stores as collation: 'utf8mb4_0900_ai_ci', charSet: 'utf8mb4')`, async () => { + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }), + name2: char('name2'), + name3: text('name3'), + name4: tinytext('name4'), + name5: mediumtext('name5'), + name6: longtext('name6'), + name7: mysqlEnum('test_enum', ['1', '2']), + }), + }; + + await push({ db, to }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(pst).toStrictEqual(st0); +}); + +test(`push-push: collation char set`, async () => { + const to = { + table: mysqlTable('table', { + id: int(), + name1: varchar('name1', { length: 1 }).charSet('big5').collate('big5_chinese_ci'), + name2: char('name2').charSet('big5').collate('big5_chinese_ci'), + name3: text('name3').charSet('big5').collate('big5_chinese_ci'), + name4: tinytext('name4').charSet('big5').collate('big5_chinese_ci'), + name5: mediumtext('name5').charSet('big5').collate('big5_chinese_ci'), + name6: longtext('name6').charSet('big5').collate('big5_chinese_ci'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5').collate('big5_chinese_ci'), + }), + }; + + await push({ db, to }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(pst).toStrictEqual(st0); +}); + +test(`push-push: check on update now with fsp #1`, async () => { + const to = { + table: mysqlTable('table', { + id: int(), + created_at: timestamp().onUpdateNow(), + }), + }; + + await push({ db, to }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(pst).toStrictEqual(st0); +}); + +test(`push-push: check on update now with fsp #2`, async () => { + const to = { + table: mysqlTable('table', { + id: int(), + created_at: timestamp({ fsp: 3 }).onUpdateNow({ fsp: 3 }), + }), + }; + + await push({ db, to }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(pst).toStrictEqual(st0); +}); + +test('weird serial non-pk', async () => { + // old kit was generating serials with autoincrements which is wrong + db.query('create table `table`(c1 int not null, c2 serial auto_increment, CONSTRAINT `PRIMARY` PRIMARY KEY(`c1`));'); + + const table = mysqlTable('table', { + c1: int().primaryKey(), + c2: serial(), + }); + + const res1 = await push({ db, to: { table } }); + const res2 = await push({ db, to: { table } }); + + expect(res1.sqlStatements).toStrictEqual([]); + expect(res2.sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/2216 +test('rename column with pk on another column', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int().primaryKey(), + column2: int(), + }), + table2: mysqlTable('table2', { + column1: int(), + column2: int(), + column3: int(), + }, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int PRIMARY KEY,\n\t`column2` int\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int,\n\t`column2` int,\n\t`column3` int,\n\tCONSTRAINT `PRIMARY` PRIMARY KEY(`column1`,`column2`)\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + table1: mysqlTable('table1', { + column1: int().primaryKey(), + column2_renamed: int('column2_renamed').notNull(), + }), + table2: mysqlTable('table2', { + column1: int(), + column2: int(), + column3_renamed: int('column3_renamed').notNull(), + }, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), + ]), + }; + + const renames = [ + 'table1.column2->table1.column2_renamed', + 'table2.column3->table2.column3_renamed', + ]; + const { sqlStatements: st2 } = await diff(n1, schema2, renames); + const { sqlStatements: pst2 } = await push({ db, to: schema2, renames }); + const expectedSt2 = [ + 'ALTER TABLE `table1` RENAME COLUMN `column2` TO `column2_renamed`;', + 'ALTER TABLE `table2` RENAME COLUMN `column3` TO `column3_renamed`;', + 'ALTER TABLE `table1` MODIFY COLUMN `column2_renamed` int NOT NULL;', + 'ALTER TABLE `table2` MODIFY COLUMN `column3_renamed` int NOT NULL;', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/706 +test('add pk', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int(), + }), + table2: mysqlTable('table2', { + column1: int().unique(), + }), + table3: mysqlTable('table3', { + column1: int().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int\n);\n', + 'CREATE TABLE `table2` (\n\t`column1` int,\n\tCONSTRAINT `column1_unique` UNIQUE INDEX(`column1`)\n);\n', + 'CREATE TABLE `table3` (\n\t`column1` int,\n\tCONSTRAINT `column1_unique` UNIQUE INDEX(`column1`)\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + table1: mysqlTable('table1', { + column1: int().primaryKey(), + }), + table2: mysqlTable('table2', { + column1: int().unique().primaryKey(), + }), + table3: mysqlTable('table3', { + column1: int().primaryKey(), + }), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'DROP INDEX `column1_unique` ON `table3`;', + 'ALTER TABLE `table1` ADD PRIMARY KEY (`column1`);', + 'ALTER TABLE `table2` ADD PRIMARY KEY (`column1`);', + 'ALTER TABLE `table3` ADD PRIMARY KEY (`column1`);', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/2795 +test('add not null to column with default', async () => { + const schema1 = { + table1: mysqlTable('table1', { + column1: int().primaryKey(), + column2: boolean().default(true), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE `table1` (\n\t`column1` int PRIMARY KEY,\n\t`column2` boolean DEFAULT true\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + table1: mysqlTable('table1', { + column1: int().primaryKey(), + column2: boolean().default(true), + column3: boolean().default(false), + }), + }; + + const { sqlStatements: st2, next: n2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'ALTER TABLE `table1` ADD `column3` boolean DEFAULT false;', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); + + const schema3 = { + table1: mysqlTable('table1', { + column1: int().primaryKey(), + column2: boolean().default(true).notNull(), + column3: boolean().default(false).notNull(), + }), + }; + + const { sqlStatements: st3 } = await diff(n2, schema3, []); + const { sqlStatements: pst3 } = await push({ db, to: schema3 }); + const expectedSt3 = [ + 'ALTER TABLE `table1` MODIFY COLUMN `column2` boolean DEFAULT true NOT NULL;', + 'ALTER TABLE `table1` MODIFY COLUMN `column3` boolean DEFAULT false NOT NULL;', + ]; + expect(st3).toStrictEqual(expectedSt3); + expect(pst3).toStrictEqual(expectedSt3); +}); diff --git a/drizzle-kit/tests/mysql/pull.test.ts b/drizzle-kit/tests/mysql/pull.test.ts new file mode 100644 index 0000000000..2b8c7a92df --- /dev/null +++ b/drizzle-kit/tests/mysql/pull.test.ts @@ -0,0 +1,564 @@ +import 'dotenv/config'; +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + blob, + boolean, + char, + check, + customType, + decimal, + double, + float, + foreignKey, + index, + int, + json, + longblob, + longtext, + mediumblob, + mediumint, + mediumtext, + mysqlEnum, + mysqlTable, + mysqlView, + primaryKey, + serial, + smallint, + text, + tinyblob, + tinyint, + tinytext, + unique, + uniqueIndex, + varchar, +} from 'drizzle-orm/mysql-core'; +import * as fs from 'fs'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diffIntrospect, prepareTestDatabase, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +if (!fs.existsSync('tests/mysql/tmp')) { + fs.mkdirSync('tests/mysql/tmp', { recursive: true }); +} + +test('generated always column: link to another column', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`\`email\``, + ), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'generated-link'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated always column virtual: link to another column', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs( + (): SQL => sql`\`email\``, + { mode: 'virtual' }, + ), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'generated-link-virtual'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('Default value of character type column: char', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + sortKey: char('sortKey', { length: 255 }).default('0'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'default-value-char'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3318 +// https://github.com/drizzle-team/drizzle-orm/issues/1754 +test('Default value of character type column: varchar', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + sortKey: varchar('sortKey', { length: 255 }).default('0'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'default-value-varchar'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4620 +// https://github.com/drizzle-team/drizzle-orm/issues/4786 +test('Default value of character type column: enum', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + status: mysqlEnum(['0', '1', '2']).default('0'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'default-value-enum'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3559 +// https://github.com/drizzle-team/drizzle-orm/issues/4713 +test('Default value of empty string column: enum, char, varchar, text, tinytext, mediumtext, longtext', async () => { + const schema = { + table1: mysqlTable('table1', { + column1: mysqlEnum(['0', '1', '2', '']).default(''), + column2: char({ length: 50 }).default(''), + column3: varchar({ length: 50 }).default(''), + column4: text().default(''), + column5: tinytext().default(''), + column6: mediumtext().default(''), + column7: longtext().default(''), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'default-value-of-empty-string'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/1402 +test('introspect default with expression', async () => { + const table1 = mysqlTable('table1', { + id: int().primaryKey(), + url: text().notNull(), + // TODO: revise: it would be nice to use .default like below + // hash: char({ length: 32 }).charSet('utf8mb4').collate('utf8mb4_0900_ai_ci').notNull().default(() =>sql`md5(${table1.url})`), + hash: char({ length: 32 }).charSet('utf8mb4').collate('utf8mb4_0900_ai_ci').notNull().default(sql`md5(\`url\`)`), + }); + const schema = { table1 }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'default-with-expression'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('introspect checks', async () => { + const schema = { + users: mysqlTable('users', { + id: serial('id'), + name: varchar('name', { length: 255 }), + age: int('age'), + }, (table) => [check('some_check', sql`${table.age} > 21`)]), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'checks'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('view #1', async () => { + const users = mysqlTable('users', { id: int('id') }); + const testView = mysqlView('some_view', { id: int('id') }).as( + sql`select \`drizzle\`.\`users\`.\`id\` AS \`id\` from \`drizzle\`.\`users\``, + ); + + const schema = { + users: users, + testView, + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'view-1'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('view #2', async () => { + const users = mysqlTable('some_users', { id: int('id') }); + const testView = mysqlView('some_view', { id: int('id') }).algorithm('temptable').sqlSecurity('definer').as( + sql`SELECT * FROM ${users}`, + ); + + const schema = { + users: users, + testView, + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'view-2'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3285 +test('handle float type', async () => { + const schema = { + table: mysqlTable('table', { + col1: float(), + col2: float({ precision: 2 }), + col3: float({ precision: 2, scale: 1 }), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'float-type'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/258 +// https://github.com/drizzle-team/drizzle-orm/issues/1675 +// https://github.com/drizzle-team/drizzle-orm/issues/2950 +test('handle unsigned numerical types', async () => { + const schema = { + table: mysqlTable('table', { + col1: int({ unsigned: true }), + col2: tinyint({ unsigned: true }), + col3: smallint({ unsigned: true }), + col4: mediumint({ unsigned: true }), + col5: bigint({ mode: 'number', unsigned: true }), + col6: float({ unsigned: true }), + col7: float({ precision: 2, scale: 1, unsigned: true }), + col8: double({ unsigned: true }), + col9: double({ precision: 2, scale: 1, unsigned: true }), + col10: decimal({ unsigned: true }), + col11: decimal({ precision: 2, scale: 1, unsigned: true }), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'unsigned-numerical-types'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +test('instrospect strings with single quotes', async () => { + const schema = { + columns: mysqlTable('columns', { + enum: mysqlEnum('my_enum', ['escape\'s quotes "', 'escape\'s quotes 2 "']).default('escape\'s quotes "'), + text: text('text').default('escape\'s quotes " '), + varchar: varchar('varchar', { length: 255 }).default('escape\'s quotes " '), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'strings-with-single-quotes'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3297 +test('introspect varchar with \r\n in default, column name starts with number', async () => { + const schema = { + table1: mysqlTable('table1', { + column1: varchar({ length: 24 }).notNull().default(' aaa\r\nbbbb'), + '2column_': tinyint('2column_').default(0).notNull(), + column3: decimal({ precision: 2, scale: 1, unsigned: true }).notNull(), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-varchar-with-breakline'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/1928 +test('introspect column with colon/semicolon in its name', async () => { + const schema = { + table1: mysqlTable('table1', { + 'column:1': text('column:1'), + 'column;2': text('column;2'), + 'column;3': text(), + 'column;4': text(), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-column-with-colon'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('charSet and collate', async () => { + const schema = { + columns: mysqlTable('columns', { + name1: varchar('name1', { length: 1 }).charSet('big5').collate('big5_chinese_ci'), + name2: char('name2').charSet('big5').collate('big5_chinese_ci'), + name3: text('name3').charSet('big5').collate('big5_chinese_ci'), + name4: tinytext('name4').charSet('big5').collate('big5_chinese_ci'), + name5: mediumtext('name5').charSet('big5').collate('big5_chinese_ci'), + name6: longtext('name6').charSet('big5').collate('big5_chinese_ci'), + name7: mysqlEnum('test_enum', ['1', '2']).charSet('big5').collate('big5_chinese_ci'), + name8: text('name:first').charSet('utf8mb4').collate('utf8mb4_0900_ai_ci'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'charSet_and_collate'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/1020 +// https://github.com/drizzle-team/drizzle-orm/issues/3457 +// https://github.com/drizzle-team/drizzle-orm/issues/1871 +// https://github.com/drizzle-team/drizzle-orm/issues/2950 +// https://github.com/drizzle-team/drizzle-orm/issues/2988 +// https://github.com/drizzle-team/drizzle-orm/issues/4653 +test('introspect bigint, mediumint, int, smallint, tinyint', async () => { + const schema = { + columns: mysqlTable('columns', { + column1: tinyint(), + column2: smallint(), + column3: int(), + column4: mediumint(), + column5: bigint({ mode: 'bigint' }), + column6: bigint({ mode: 'number' }), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-int'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3290 +// https://github.com/drizzle-team/drizzle-orm/issues/1428 +// https://github.com/drizzle-team/drizzle-orm/issues/3552 +// https://github.com/drizzle-team/drizzle-orm/issues/4602 +test('introspect table with primary key and check', async () => { + const schema = { + table1: mysqlTable('table1', { + column1: int().autoincrement().primaryKey(), + }), + table2: mysqlTable('table2', { + column1: int().autoincrement(), + }, (table) => [ + primaryKey({ columns: [table.column1] }), + ]), + table3: mysqlTable('table3', { + column1: int(), + column2: int(), + }, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), + check('age_check1', sql`${table.column1} > 21`), + ]), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'table-with-primary-key-and-check'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4415 +test('introspect table with fk', async () => { + const table1 = mysqlTable('table1', { + column1: int().primaryKey(), + }); + const table2 = mysqlTable('table2', { + column1: int(), + column2: int().references(() => table1.column1), + }, (table) => [ + foreignKey({ columns: [table.column1], foreignColumns: [table1.column1], name: 'custom_fk' }), + ]); + const schema = { table1, table2 }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'table-with-fk'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4115 +test('introspect fk name with onDelete, onUpdate set', async () => { + const table1 = mysqlTable('table1', { + column1: int().primaryKey(), + }); + const table2 = mysqlTable('table2', { + column1: int(), + }, (table) => [ + foreignKey({ columns: [table.column1], foreignColumns: [table1.column1], name: 'custom_fk' }), + ]); + const schema = { table1, table2 }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'fk-with-on-delete-and-on-update'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4110 +test('introspect table with boolean(tinyint(1))', async () => { + const schema = { + table1: mysqlTable('table1', { + column1: boolean(), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'table-with-boolean'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3046 +// TODO: revise: seems like drizzle-kit can't do this right now +test('introspect index on json', async () => { + const schema = { + table1: mysqlTable('table1', { + column1: json(), + }, (table) => [ + index('custom_json_index').on( + sql`(((cast(json_unquote(json_extract(${table.column1}, _utf8mb4'$.data.nestedJsonProperty.')) as char(30) charset utf8mb4) collate utf8mb4_bin)))`, + ), + ]), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'index-on-json'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/1306 +// https://github.com/drizzle-team/drizzle-orm/issues/1512 +// https://github.com/drizzle-team/drizzle-orm/issues/1870 +// https://github.com/drizzle-team/drizzle-orm/issues/2525 +test('introspect index and fk with action', async () => { + const entity = mysqlTable('Entity', { + id: int('id').autoincrement().notNull(), + name: varchar('name', { length: 191 }).notNull(), + }, (table) => [ + primaryKey({ columns: [table.id] }), + ]); + + const entityTag = mysqlTable('EntityTag', { + id: int('id').autoincrement().notNull(), + name: varchar('name', { length: 191 }).notNull(), + }, (table) => [ + primaryKey({ columns: [table.id] }), + ]); + + const entityToEntityTag = mysqlTable('_EntityToEntityTag', { + a: int('A').notNull().references(() => entity.id, { onDelete: 'cascade', onUpdate: 'cascade' }), + b: int('B').notNull().references(() => entityTag.id, { onDelete: 'cascade', onUpdate: 'cascade' }), + }, (table) => { + return { + bIdx: index('_EntityToEntityTag_B_index').on(table.b), + entityToEntityTagAbUnique: uniqueIndex('_EntityToEntityTag_AB_unique').on(table.a, table.b), + }; + }); + + const schema = { entity, entityTag, entityToEntityTag }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-index'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect hash index', async () => { + const schema = { + table1: mysqlTable('table1', { + column1: int(), + column2: varchar({ length: 100 }), + }, (table) => [ + index('idx_name').on(table.column2).using('hash'), + ]), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-hash-index'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); +test('introspect blob, tinyblob, mediumblob, longblob', async () => { + const schema = { + columns: mysqlTable('columns', { + column1: tinyblob(), + column2: mediumblob(), + column3: blob(), + column4: mediumblob(), + column5: longblob(), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-blobs'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/3480 +test('introspect bit(1); custom type', async () => { + const schema = { + table1: mysqlTable('table1', { + column1: customType({ dataType: () => 'bit(1)' })().default("b'1'"), // this fails + column2: customType({ dataType: () => 'bit(1)' })().default(sql`b'1'`), // this works fine + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-bit(1)'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('generated as string: change generated constraint', async () => { + const schema = { + users: mysqlTable('users', { + id: int('id'), + id2: int('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs( + `'users\\\\hello'`, + ), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-generated-with-backslashes'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/mysql/snapshot-v5.test.ts b/drizzle-kit/tests/mysql/snapshot-v5.test.ts new file mode 100644 index 0000000000..1a78fb0be9 --- /dev/null +++ b/drizzle-kit/tests/mysql/snapshot-v5.test.ts @@ -0,0 +1,35 @@ +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diffSnapshotV5, prepareTestDatabase, TestDatabase } from './mocks'; +import * as s01old from './snapshots/schema01'; +import * as s01 from './snapshots/schema01new'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +// TODO +// author: @AlexSherman +// @AlexBlokh - I have added new fields in ddl. Just in case ping you +test('s01', async (t) => { + const res = await diffSnapshotV5(db, s01, s01old); + + // previous kit did generate `default '10.123'` for decimals which results in introspected '10' trimmed value + expect(res.all).toStrictEqual([ + 'ALTER TABLE `all_data_types` MODIFY COLUMN `decimal` decimal DEFAULT (10.123);', + 'ALTER TABLE `all_data_types` MODIFY COLUMN `decimal_precision` decimal(6) DEFAULT (10.123);', + 'ALTER TABLE `all_data_types` MODIFY COLUMN `decimal_precision_scale` decimal(6,2) DEFAULT (10.123);', + ]); +}); diff --git a/drizzle-kit/tests/mysql/snapshots/schema01.ts b/drizzle-kit/tests/mysql/snapshots/schema01.ts new file mode 100644 index 0000000000..e93a76216f --- /dev/null +++ b/drizzle-kit/tests/mysql/snapshots/schema01.ts @@ -0,0 +1,194 @@ +import { eq, SQL, sql } from 'orm044'; +import { + AnyMySqlColumn, + bigint, + binary, + boolean, + char, + check, + date, + datetime, + decimal, + double, + float, + foreignKey, + index, + int, + json, + longtext, + mediumint, + mediumtext, + mysqlEnum, + mysqlSchema, + mysqlTable, + mysqlView, + primaryKey, + real, + serial, + smallint, + text, + time, + timestamp, + tinyint, + tinytext, + unique, + uniqueIndex, + varbinary, + varchar, + year, +} from 'orm044/mysql-core'; + +// TODO: extend massively cc: @OleksiiKH0240 +export const allDataTypes = mysqlTable('all_data_types', { + int: int('int').default(2147483647), + intScientific: int('int_scientific').default(1e4), + intExpression: int('int_expression').default(sql`(1 + 1)`), + tinyint: tinyint('tinyint').default(127), + smallint: smallint('smallint').default(32767), + mediumint: mediumint('mediumint').default(8388607), + bigintUnsigned: bigint('bigint_unsigned', { mode: 'bigint', unsigned: true }), + bigint53: bigint('bigint_53', { mode: 'number' }).default(9007199254740991), + bigint63: bigint('bigint_63', { mode: 'bigint' }).default(sql`9223372036854775807`), + real: real('real').default(10.123), + realPrecisionScale: real('real_precision_scale', { precision: 6, scale: 2 }).default(10.12), + decimal: decimal('decimal').default('10.123'), + decimalPrecision: decimal('decimal_precision', { precision: 6 }).default('10.123'), + decimalPrecisionScale: decimal('decimal_precision_scale', { precision: 6, scale: 2 }).default('10.123'), + decimalBigint: decimal('decimal_bigint', { precision: 19 }).default(sql`'9223372036854775807'`), + double: double('double').default(10.123), + doublePrecisionScale: double('double_precision_scale', { precision: 6, scale: 2 }).default(10.12), + doubleUnsigned: double('double_unsigned', { unsigned: true }).default(10.123), + float: float('float').default(10.123), + floatPrecision: float('float_precision', { precision: 6 }).default(10.123), + floatPrecisionScale: float('float_precision_scale', { precision: 6, scale: 2 }).default(10.12), + floatUnsigned: float('floatUnsigned', { unsigned: true }).default(10.123), + serial: serial('serial').primaryKey(), + binary: binary('binary', { length: 10 }).default('binary'), + binaryExpression: binary('binary_expression', { length: 10 }).default(sql`(lower('HELLO'))`), + varbinary: varbinary('varbinary', { length: 10 }).default('binary'), + varbinaryExpression: varbinary('varbinary_expression', { length: 10 }).default(sql`(lower('HELLO'))`), + char: char('char', { length: 255 }).default(`text'"\`:[]{},text`), + varchar: varchar('varchar', { length: 256 }).default(`text'"\`:[]{},text`), + text: text('text').default(`text'"\`:[]{},text`), + tinytext: tinytext('tinytext').default(sql`('text''"\`:[]{},text')`), + mediumtext: mediumtext('mediumtext').default(sql`('text''"\`:[]{},text')`), + longtext: longtext('longtext').default(sql`('text''"\`:[]{},text')`), + boolean: boolean('boolean').default(true), + booleanNull: boolean('boolean_null').default(sql`null`), + date: date('date', { mode: 'date' }), + datetime: datetime('datetime', { mode: 'date' }).default(new Date('2025-05-23T12:53:53.000Z')), + datetimeFsp: datetime('datetime_fsp', { mode: 'date', fsp: 3 }).default(new Date('2025-05-23T12:53:53.115Z')), + time: time('time').default('15:50:33'), + timeFsp: time('time_fsp', { fsp: 3 }).default('15:50:33.123'), + year: year('year').default(2025), + timestamp: timestamp('timestamp', { mode: 'date' }).default(new Date('2025-05-23T12:53:53.000Z')), + timestampNow: timestamp('timestamp_now', { mode: 'date' }).defaultNow(), + timestampFsp: timestamp('timestamp_fsp', { mode: 'date', fsp: 3 }).default(new Date('2025-05-23T12:53:53.115Z')), + jsonArray: json('json_array').default(sql`('[9223372036854775807, 9223372036854775806]')`), + json: json('json').default({ key: `text[]{},text` }), + mysqlEnum: mysqlEnum('popularity', ['unknown', 'known', 'popular']).default( + `popular`, + ), +}); + +// constraints +// unique +export const uniqueTable = mysqlTable('unique_table', { + column1: int().primaryKey(), + column2: serial(), + column3: int().unique(), + column4: int().unique('column4_custom_unique_name'), + column5: int(), + column6: int(), +}, (table) => [ + unique().on(table.column5), + unique('custom_unique').on(table.column5, table.column6), +]); + +// primary +export const compositePrimaryKey = mysqlTable('composite_primary_key', { + column1: int(), + column2: varchar({ length: 10 }), +}, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), +]); + +export const compositePrimaryKeyCustomName = mysqlTable('composite_primary_key_custom_name', { + column1: int(), + column2: varchar({ length: 10 }), +}, (table) => [ + primaryKey({ columns: [table.column1, table.column2], name: 'composite_primary_key_custom_name_' }), +]); + +// references +export const referencingTable = mysqlTable('referencing_table', { + column0: int(), + column1: int().unique().references(() => uniqueTable.column1, { onDelete: 'cascade', onUpdate: 'cascade' }), + column2: int(), + column3: int(), + column4: int(), + column5: varchar({ length: 10 }), + column6: int().references((): AnyMySqlColumn => referencingTable.column0), +}, (table) => [ + primaryKey({ columns: [table.column0] }), + foreignKey({ + name: 'referencing_table_custom_fk1', + columns: [table.column2, table.column3], + foreignColumns: [uniqueTable.column5, uniqueTable.column6], + }), + foreignKey({ + name: 'referencing_table_custom_fk2', + columns: [table.column4, table.column5], + foreignColumns: [compositePrimaryKey.column1, compositePrimaryKey.column2], + }), +]); + +// generatedAlwaysAs, check, index, not null, auto increment +export const table1 = mysqlTable('table1', { + column1: varchar({ length: 256 }).generatedAlwaysAs("'Default'"), + column2: varchar({ length: 256 }).generatedAlwaysAs((): SQL => sql`(concat(${table1.column1}, 'hello'))`, { + mode: 'stored', + }), + column3: varchar({ length: 256 }).generatedAlwaysAs((): SQL => sql`(concat(${table1.column1}, 'hello'))`, { + mode: 'virtual', + }), + column4: int().notNull().autoincrement().primaryKey(), + column5: int(), + column6: varchar({ length: 256 }), +}, (table) => [ + check('age_check1', sql`${table.column5} > 0`), + index('table1_column4_index').on(table.column4), + uniqueIndex('table1_column4_unique_index').on(table.column4), + index('table1_composite_index').on(table.column5, table.column6), + uniqueIndex('table1_composite_unique_index').on(table.column5, table.column6), +]); + +// view +export const table1View1 = mysqlView('table1_view1').as((qb) => qb.select().from(table1)); +export const table1View2 = mysqlView('table1_view2', { + column4: int().notNull().autoincrement(), +}).as( + sql`select column4 from ${table1} where ${eq(table1.column4, 3)}`, +); + +export const users = mysqlTable('users1', { + id: int().unique(), + id1: int(), + id2: int(), +}, (t) => [ + primaryKey({ columns: [t.id1, t.id2] }), +]); + +export const analytics = mysqlSchema('analytics'); + +export const analyticsEvents = analytics.table( + 'events', + { + id: serial('id').primaryKey(), + userId: int('user_id').references(() => users.id, { onDelete: 'set null' }), + type: varchar('type', { length: 64 }).notNull(), + payload: json('payload').default({}), + occurredAt: timestamp('occurred_at', { fsp: 3 }).notNull().defaultNow(), + }, + (t) => [index('idx_analytics_events_user_time').on(t.userId, t.occurredAt)], +); diff --git a/drizzle-kit/tests/mysql/snapshots/schema01new.ts b/drizzle-kit/tests/mysql/snapshots/schema01new.ts new file mode 100644 index 0000000000..b36582ddfc --- /dev/null +++ b/drizzle-kit/tests/mysql/snapshots/schema01new.ts @@ -0,0 +1,194 @@ +import { eq, SQL, sql } from 'drizzle-orm'; +import { + AnyMySqlColumn, + bigint, + binary, + boolean, + char, + check, + date, + datetime, + decimal, + double, + float, + foreignKey, + index, + int, + json, + longtext, + mediumint, + mediumtext, + mysqlEnum, + mysqlSchema, + mysqlTable, + mysqlView, + primaryKey, + real, + serial, + smallint, + text, + time, + timestamp, + tinyint, + tinytext, + unique, + uniqueIndex, + varbinary, + varchar, + year, +} from 'drizzle-orm/mysql-core'; + +// TODO: extend massively cc: @OleksiiKH0240 +export const allDataTypes = mysqlTable('all_data_types', { + int: int('int').default(2147483647), + intScientific: int('int_scientific').default(1e4), + intExpression: int('int_expression').default(sql`(1 + 1)`), + tinyint: tinyint('tinyint').default(127), + smallint: smallint('smallint').default(32767), + mediumint: mediumint('mediumint').default(8388607), + bigintUnsigned: bigint('bigint_unsigned', { mode: 'bigint', unsigned: true }), + bigint53: bigint('bigint_53', { mode: 'number' }).default(9007199254740991), + bigint63: bigint('bigint_63', { mode: 'bigint' }).default(sql`9223372036854775807`), + real: real('real').default(10.123), + realPrecisionScale: real('real_precision_scale', { precision: 6, scale: 2 }).default(10.12), + decimal: decimal('decimal').default('10.123'), + decimalPrecision: decimal('decimal_precision', { precision: 6 }).default('10.123'), + decimalPrecisionScale: decimal('decimal_precision_scale', { precision: 6, scale: 2 }).default('10.123'), + decimalBigint: decimal('decimal_bigint', { precision: 19, mode: 'bigint' }).default(9223372036854775807n), + double: double('double').default(10.123), + doublePrecisionScale: double('double_precision_scale', { precision: 6, scale: 2 }).default(10.12), + doubleUnsigned: double('double_unsigned', { unsigned: true }).default(10.123), + float: float('float').default(10.123), + floatPrecision: float('float_precision', { precision: 6 }).default(10.123), + floatPrecisionScale: float('float_precision_scale', { precision: 6, scale: 2 }).default(10.12), + floatUnsigned: float('floatUnsigned', { unsigned: true }).default(10.123), + serial: serial('serial').primaryKey(), + binary: binary('binary', { length: 10 }).default('binary'), + binaryExpression: binary('binary_expression', { length: 10 }).default(sql`(lower('HELLO'))`), + varbinary: varbinary('varbinary', { length: 10 }).default('binary'), + varbinaryExpression: varbinary('varbinary_expression', { length: 10 }).default(sql`(lower('HELLO'))`), + char: char('char', { length: 255 }).default(`text'"\`:[]{},text`), + varchar: varchar('varchar', { length: 256 }).default(`text'"\`:[]{},text`), + text: text('text').default(`text'"\`:[]{},text`), + tinytext: tinytext('tinytext').default(sql`('text''"\`:[]{},text')`), + mediumtext: mediumtext('mediumtext').default(sql`('text''"\`:[]{},text')`), + longtext: longtext('longtext').default(sql`('text''"\`:[]{},text')`), + boolean: boolean('boolean').default(true), + booleanNull: boolean('boolean_null').default(sql`null`), + date: date('date', { mode: 'date' }), + datetime: datetime('datetime', { mode: 'date' }).default(new Date('2025-05-23T12:53:53.000Z')), + datetimeFsp: datetime('datetime_fsp', { mode: 'date', fsp: 3 }).default(new Date('2025-05-23T12:53:53.115Z')), + time: time('time').default('15:50:33'), + timeFsp: time('time_fsp', { fsp: 3 }).default('15:50:33.123'), + year: year('year').default(2025), + timestamp: timestamp('timestamp', { mode: 'date' }).default(new Date('2025-05-23T12:53:53.000Z')), + timestampNow: timestamp('timestamp_now', { mode: 'date' }).defaultNow(), + timestampFsp: timestamp('timestamp_fsp', { mode: 'date', fsp: 3 }).default(new Date('2025-05-23T12:53:53.115Z')), + jsonArray: json('json_array').default(sql`('[9223372036854775807, 9223372036854775806]')`), + json: json('json').default({ key: `text[]{},text` }), + mysqlEnum: mysqlEnum('popularity', ['unknown', 'known', 'popular']).default( + `popular`, + ), +}); + +// constraints +// unique +export const uniqueTable = mysqlTable('unique_table', { + column1: int().primaryKey(), + column2: serial(), + column3: int().unique(), + column4: int().unique('column4_custom_unique_name'), + column5: int(), + column6: int(), +}, (table) => [ + unique().on(table.column5), + unique('custom_unique').on(table.column5, table.column6), +]); + +// primary +export const compositePrimaryKey = mysqlTable('composite_primary_key', { + column1: int(), + column2: varchar({ length: 10 }), +}, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), +]); + +export const compositePrimaryKeyCustomName = mysqlTable('composite_primary_key_custom_name', { + column1: int(), + column2: varchar({ length: 10 }), +}, (table) => [ + primaryKey({ columns: [table.column1, table.column2] }), +]); + +// references +export const referencingTable = mysqlTable('referencing_table', { + column0: int(), + column1: int().unique().references(() => uniqueTable.column1, { onDelete: 'cascade', onUpdate: 'cascade' }), + column2: int(), + column3: int(), + column4: int(), + column5: varchar({ length: 10 }), + column6: int().references((): AnyMySqlColumn => referencingTable.column0), +}, (table) => [ + primaryKey({ columns: [table.column0] }), + foreignKey({ + name: 'referencing_table_custom_fk1', + columns: [table.column2, table.column3], + foreignColumns: [uniqueTable.column5, uniqueTable.column6], + }), + foreignKey({ + name: 'referencing_table_custom_fk2', + columns: [table.column4, table.column5], + foreignColumns: [compositePrimaryKey.column1, compositePrimaryKey.column2], + }), +]); + +// generatedAlwaysAs, check, index, not null, auto increment +export const table1 = mysqlTable('table1', { + column1: varchar({ length: 256 }).generatedAlwaysAs("'Default'"), + column2: varchar({ length: 256 }).generatedAlwaysAs((): SQL => sql`(concat(${table1.column1}, 'hello'))`, { + mode: 'stored', + }), + column3: varchar({ length: 256 }).generatedAlwaysAs((): SQL => sql`(concat(${table1.column1}, 'hello'))`, { + mode: 'virtual', + }), + column4: int().notNull().autoincrement().primaryKey(), + column5: int(), + column6: varchar({ length: 256 }), +}, (table) => [ + check('age_check1', sql`${table.column5} > 0`), + index('table1_column4_index').on(table.column4), + uniqueIndex('table1_column4_unique_index').on(table.column4), + index('table1_composite_index').on(table.column5, table.column6), + uniqueIndex('table1_composite_unique_index').on(table.column5, table.column6), +]); + +// view +export const table1View1 = mysqlView('table1_view1').as((qb) => qb.select().from(table1)); +export const table1View2 = mysqlView('table1_view2', { + column4: int().notNull().autoincrement(), +}).as( + sql`select column4 from ${table1} where ${eq(table1.column4, 3)}`, +); + +export const users = mysqlTable('users1', { + id: int().unique(), + id1: int(), + id2: int(), +}, (t) => [ + primaryKey({ columns: [t.id1, t.id2] }), +]); + +export const analytics = mysqlSchema('analytics'); + +export const analyticsEvents = analytics.table( + 'events', + { + id: serial('id').primaryKey(), + userId: int('user_id').references(() => users.id, { onDelete: 'set null' }), + type: varchar('type', { length: 64 }).notNull(), + payload: json('payload').default({}), + occurredAt: timestamp('occurred_at', { fsp: 3 }).notNull().defaultNow(), + }, + (t) => [index('idx_analytics_events_user_time').on(t.userId, t.occurredAt)], +); diff --git a/drizzle-kit/tests/other/bin.test.ts b/drizzle-kit/tests/other/bin.test.ts new file mode 100644 index 0000000000..302db77aa6 --- /dev/null +++ b/drizzle-kit/tests/other/bin.test.ts @@ -0,0 +1,255 @@ +import chalk from 'chalk'; +import { assert, test } from 'vitest'; +import { analyzeImports, ChainLink } from '../../imports-checker/checker'; + +const chainToString = (chains: ChainLink[]) => { + if (chains.length === 0) throw new Error(); + + let out = chains[0]!.file + '\n'; + let indentation = 0; + for (let chain of chains) { + out += ' '.repeat(indentation) + + '└' + + chain.import + + ` ${chalk.gray(chain.file)}\n`; + indentation += 1; + } + return out; +}; + +test('imports-issues', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: [ + '@drizzle-team/brocli', + '@ewoudenberg/difflib', + 'path', + 'fs', + 'fs/*', + 'url', + 'zod', + 'node:*', + 'hono', + 'glob', + 'hono/*', + 'hono/**/*', + '@hono/*', + 'crypto', + 'hanji', + 'chalk', + 'dotenv/config', + 'camelcase', + 'semver', + 'env-paths', + '@js-temporal/polyfill', + 'ohm-js', + ], + entry: 'src/cli/index.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +// test('imports-issues2', () => { +// const issues = analyzeImports({ +// basePath: '.', +// localPaths: ['src'], +// whiteList: [ +// 'zod', +// // 'hanji', +// // 'chalk', +// // '@ewoudenberg/difflib', +// ], +// entry: 'src/utils/studio.ts', +// logger: true, +// ignoreTypes: true, +// }).issues; + +// console.log(); +// for (const issue of issues) { +// console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); +// console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); +// } + +// assert.equal(issues.length, 0); +// }); + +test('check imports api-postgres', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['@js-temporal/polyfill', 'ohm-js'], + entry: 'src/ext/api-postgres.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +test('check imports api-mysql', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['@js-temporal/polyfill', 'ohm-js'], + entry: 'src/ext/api-mysql.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +test('check imports api-sqlite', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['@js-temporal/polyfill', 'ohm-js'], + entry: 'src/ext/api-sqlite.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +test('check imports api-singlestore', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['@js-temporal/polyfill', 'ohm-js'], + entry: 'src/ext/api-singlestore.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +test('check imports sqlite-studio', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['@js-temporal/polyfill', 'ohm-js'], + entry: 'src/ext/studio-sqlite.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +test('check imports postgres-studio', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['camelcase', 'ohm-js', '@js-temporal/polyfill'], + entry: 'src/ext/studio-postgres.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +test('check imports mysql-studio', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['camelcase', 'ohm-js', '@js-temporal/polyfill'], + entry: 'src/ext/studio-mysql.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +test('check imports postgres-mover', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['camelcase', 'ohm-js', '@js-temporal/polyfill'], + entry: 'src/ext/mover-postgres.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); + +test('check imports mysql-mover', () => { + const issues = analyzeImports({ + basePath: '.', + localPaths: ['src'], + whiteList: ['@js-temporal/polyfill', 'ohm-js'], + entry: 'src/ext/mover-mysql.ts', + logger: true, + ignoreTypes: true, + }).issues; + + console.log(); + for (const issue of issues) { + console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); + console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); + } + + assert.equal(issues.length, 0); +}); diff --git a/drizzle-kit/tests/cli-export.test.ts b/drizzle-kit/tests/other/cli-export.test.ts similarity index 95% rename from drizzle-kit/tests/cli-export.test.ts rename to drizzle-kit/tests/other/cli-export.test.ts index 8719ddd6a7..a46bcbd836 100644 --- a/drizzle-kit/tests/cli-export.test.ts +++ b/drizzle-kit/tests/other/cli-export.test.ts @@ -1,6 +1,6 @@ import { test as brotest } from '@drizzle-team/brocli'; import { assert, expect, test } from 'vitest'; -import { exportRaw } from '../src/cli/schema'; +import { exportRaw } from '../../src/cli/schema'; // good: // #1 drizzle-kit export --dialect=postgresql --schema=schema.ts @@ -26,6 +26,7 @@ test('export #1', async (t) => { dialect: 'postgresql', schema: 'schema.ts', sql: true, + casing: undefined, }); }); @@ -37,6 +38,7 @@ test('export #2', async (t) => { dialect: 'postgresql', schema: './schema.ts', sql: true, + casing: undefined, }); }); @@ -49,6 +51,7 @@ test('export #3', async (t) => { dialect: 'sqlite', schema: './schema.ts', sql: true, + casing: undefined, }); }); diff --git a/drizzle-kit/tests/cli-generate.test.ts b/drizzle-kit/tests/other/cli-generate.test.ts similarity index 99% rename from drizzle-kit/tests/cli-generate.test.ts rename to drizzle-kit/tests/other/cli-generate.test.ts index a4adf979f2..d070e75575 100644 --- a/drizzle-kit/tests/cli-generate.test.ts +++ b/drizzle-kit/tests/other/cli-generate.test.ts @@ -1,6 +1,6 @@ import { test as brotest } from '@drizzle-team/brocli'; import { assert, expect, test } from 'vitest'; -import { generate } from '../src/cli/schema'; +import { generate } from '../../src/cli/schema'; // good: // #1 drizzle-kit generate --dialect=postgresql --schema=schema.ts diff --git a/drizzle-kit/tests/cli-migrate.test.ts b/drizzle-kit/tests/other/cli-migrate.test.ts similarity index 98% rename from drizzle-kit/tests/cli-migrate.test.ts rename to drizzle-kit/tests/other/cli-migrate.test.ts index 1425691f0b..06c0bac790 100644 --- a/drizzle-kit/tests/cli-migrate.test.ts +++ b/drizzle-kit/tests/other/cli-migrate.test.ts @@ -1,6 +1,6 @@ import { test as brotest } from '@drizzle-team/brocli'; import { assert, expect, test } from 'vitest'; -import { migrate } from '../src/cli/schema'; +import { migrate } from '../../src/cli/schema'; // good: // #1 drizzle-kit generate diff --git a/drizzle-kit/tests/cli-push.test.ts b/drizzle-kit/tests/other/cli-push.test.ts similarity index 77% rename from drizzle-kit/tests/cli-push.test.ts rename to drizzle-kit/tests/other/cli-push.test.ts index f5daf2bd05..3399b9aefd 100644 --- a/drizzle-kit/tests/cli-push.test.ts +++ b/drizzle-kit/tests/other/cli-push.test.ts @@ -1,6 +1,6 @@ import { test as brotest } from '@drizzle-team/brocli'; import { assert, expect, test } from 'vitest'; -import { push } from '../src/cli/schema'; +import { push } from '../../src/cli/schema'; // good: // #1 drizzle-kit push @@ -23,10 +23,13 @@ test('push #1', async (t) => { }, force: false, schemaPath: './schema.ts', - schemasFilter: ['public'], - tablesFilter: [], - entities: undefined, - strict: false, + explain: false, + filters: { + schemas: undefined, + tables: undefined, + entities: undefined, + extensions: undefined, + }, verbose: false, casing: undefined, }); @@ -43,9 +46,13 @@ test('push #2', async (t) => { }, force: false, schemaPath: './schema.ts', - schemasFilter: ['public'], - tablesFilter: [], - strict: false, + explain: false, + filters: { + schemas: undefined, + tables: undefined, + entities: undefined, + extensions: undefined, + }, verbose: false, casing: undefined, }); @@ -64,9 +71,13 @@ test('push #3', async (t) => { }, force: false, schemaPath: './schema.ts', - schemasFilter: ['public'], - tablesFilter: [], - strict: false, + explain: false, + filters: { + schemas: undefined, + tables: undefined, + entities: undefined, + extensions: undefined, + }, verbose: false, casing: undefined, }); @@ -85,11 +96,14 @@ test('push #4', async (t) => { user: 'postgresql', }, force: false, + explain: false, + filters: { + schemas: undefined, + tables: undefined, + entities: undefined, + extensions: undefined, + }, schemaPath: './schema.ts', - schemasFilter: ['public'], - tablesFilter: [], - entities: undefined, - strict: false, verbose: false, casing: undefined, }); @@ -98,7 +112,10 @@ test('push #4', async (t) => { // catched a bug test('push #5', async (t) => { const res = await brotest(push, '--config=postgres2.config.ts'); - if (res.type !== 'handler') assert.fail(res.type, 'handler'); + if (res.type !== 'handler') { + assert.fail(res.type, 'handler'); + } + expect(res.options).toStrictEqual({ dialect: 'postgresql', credentials: { @@ -109,10 +126,13 @@ test('push #5', async (t) => { user: 'postgresql', }, schemaPath: './schema.ts', - schemasFilter: ['public'], - tablesFilter: [], - strict: false, - entities: undefined, + explain: false, + filters: { + schemas: undefined, + tables: undefined, + entities: undefined, + extensions: undefined, + }, force: false, verbose: false, casing: undefined, diff --git a/drizzle-kit/tests/other/dialect.test.ts b/drizzle-kit/tests/other/dialect.test.ts new file mode 100644 index 0000000000..b630f13f90 --- /dev/null +++ b/drizzle-kit/tests/other/dialect.test.ts @@ -0,0 +1,2730 @@ +import { create, diff } from 'src/dialects/dialect'; +import { createDDL as pg } from 'src/dialects/postgres/ddl'; +import { beforeEach } from 'vitest'; +import { expect, expectTypeOf, test } from 'vitest'; + +const db = create({ + tables: {}, + columns: { + table: 'required', + type: 'string', + primaryKey: 'boolean', + notNull: 'boolean', + autoincrement: 'boolean?', + default: 'string?', + generated: { + type: 'string', + as: 'string', + }, + }, + indexes: { + table: 'required', + columns: [{ + value: 'string', + expression: 'boolean', + }], + isUnique: 'boolean', + where: 'string?', + }, + fks: { + table: 'required', + tableFrom: 'string', + columnsFrom: 'string[]', + tableTo: 'string', + columnsTo: 'string[]', + onUpdate: 'string?', + onDelete: 'string?', + }, + pks: { + table: 'required', + columns: 'string[]', + }, + uniques: { + table: 'required', + columns: 'string[]', + }, + checks: { + table: 'required', + value: 'string', + }, + views: { + definition: 'string?', + isExisting: 'boolean', + }, + viewColumns: {}, +}); + +beforeEach(() => { + db.entities.delete(); +}); + +test('Insert with custom conflict detection list', () => { + db.entities.push({ + entityType: 'checks', + name: 'a', + table: 't', + value: '2', + }, ['name']); + expect( + db.entities.push({ + entityType: 'checks', + name: 'b', + table: 't', + value: '2', + }, ['name']).status, + ).toStrictEqual('OK'); + expect( + db.entities.push({ + entityType: 'checks', + name: 'a', + table: 'tt', + value: '2', + }, ['name']).status, + ).toStrictEqual('CONFLICT'); +}); + +test('Insert & list multiple entities', () => { + const inFirst = db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + const inSecond = db.indexes.push({ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + }); + + expect(inFirst).toStrictEqual({ + status: 'OK', + data: { + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, + }); + + expect(inSecond).toStrictEqual({ + status: 'OK', + data: { + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }, + }); + + expect(db.entities.one()).toStrictEqual({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }); + + expect(db.pks.one()).toStrictEqual(null); + + expect(db.entities.list()).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, { + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }]); + + expect(db.columns.list()).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }]); + + expect(db.indexes.list()).toStrictEqual([{ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }]); + + expect(db.views.list()).toStrictEqual([]); +}); + +test('Insert & list multiple entities via common function', () => { + const inFirst = db.entities.push({ + entityType: 'columns', + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + const inSecond = db.entities.push({ + entityType: 'indexes', + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + }); + + expect(inFirst).toStrictEqual({ + status: 'OK', + data: { + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, + }); + + expect(inSecond).toStrictEqual({ + status: 'OK', + data: { + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }, + }); + + expect(db.entities.one()).toStrictEqual({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }); + + expect(db.pks.one()).toStrictEqual(null); + + expect(db.entities.list()).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, { + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }]); + + expect(db.columns.list()).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }]); + + expect(db.indexes.list()).toStrictEqual([{ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }]); + + expect(db.views.list()).toStrictEqual([]); +}); + +test('Insert with common hash conflict', () => { + const inFirst = db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + const inSecond = db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: null, + notNull: false, + primaryKey: false, + table: 'users', + type: 'text', + }); + + expect(inFirst).toStrictEqual({ + status: 'OK', + data: { + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, + }); + + expect(inSecond).toStrictEqual({ + status: 'CONFLICT', + data: { + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, + }); + + expect(db.entities.list()).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }]); + + expect(db.columns.list()).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }]); +}); + +test('Delete specific entities', () => { + db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.indexes.push({ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + }); + + db.indexes.push({ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + }); + + const delFirst = db.columns.delete(); + + const delSecond = db.indexes.delete({ + columns: { + CONTAINS: { + value: 'user_id', + expression: false, + }, + }, + }); + + expect(delFirst).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, { + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }]); + + expect(delSecond).toStrictEqual([{ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }]); + + expect(db.entities.list()).toStrictEqual([{ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + entityType: 'indexes', + }]); + + expect(db.columns.list()).toStrictEqual([]); + + expect(db.indexes.list()).toStrictEqual([{ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + entityType: 'indexes', + }]); +}); + +test('Delete specific entities via common function', () => { + db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.indexes.push({ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + }); + + db.indexes.push({ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + }); + + const delFirst = db.entities.delete({ + entityType: 'columns', + }); + + const delSecond = db.entities.delete({ + entityType: 'indexes', + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + }); + + expect(delFirst).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, { + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }]); + + expect(delSecond).toStrictEqual([{ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }]); + + expect(db.entities.list()).toStrictEqual([{ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + entityType: 'indexes', + }]); + + expect(db.columns.list()).toStrictEqual([]); + + expect(db.indexes.list()).toStrictEqual([{ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + entityType: 'indexes', + }]); +}); + +test('Update entities', () => { + db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.indexes.push({ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + }); + + db.indexes.push({ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + }); + + const updFirst = db.columns.update({ + set: { + type: 'bigint', + }, + }); + + const updSecond = db.indexes.update({ + set: { + where: 'whereExp', + columns: (c) => { + return { + ...c, + expression: true, + }; + }, + }, + where: { + columns: { + CONTAINS: { + value: 'user_id', + expression: false, + }, + }, + }, + }); + + expect(updFirst).toStrictEqual({ + status: 'OK', + data: [{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'bigint', + entityType: 'columns', + }, { + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'bigint', + entityType: 'columns', + }], + }); + + expect(updSecond).toStrictEqual({ + status: 'OK', + data: [{ + columns: [{ + value: 'user_id', + expression: true, + }, { + value: 'group_id', + expression: true, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: 'whereExp', + entityType: 'indexes', + }], + }); + + expect(db.entities.list()).toStrictEqual([ + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: { + as: 'identity', + type: 'always', + }, + name: 'id', + notNull: true, + primaryKey: true, + table: 'users', + type: 'bigint', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'name', + notNull: true, + primaryKey: true, + table: 'users', + type: 'bigint', + }, + { + columns: [ + { + expression: true, + value: 'user_id', + }, + { + expression: true, + value: 'group_id', + }, + ], + entityType: 'indexes', + isUnique: true, + name: 'utg_idx', + table: 'users_to_groups', + where: 'whereExp', + }, + { + columns: [ + { + expression: false, + value: 'group_id', + }, + ], + entityType: 'indexes', + isUnique: false, + name: 'utg_g_idx', + table: 'users_to_groups', + where: null, + }, + ]); + + expect(db.columns.list()).toStrictEqual( + [ + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: { + as: 'identity', + type: 'always', + }, + name: 'id', + notNull: true, + primaryKey: true, + table: 'users', + type: 'bigint', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'name', + notNull: true, + primaryKey: true, + table: 'users', + type: 'bigint', + }, + ], + ); + + expect(db.indexes.list()).toStrictEqual( + [ + { + columns: [ + { + expression: true, + value: 'user_id', + }, + { + expression: true, + value: 'group_id', + }, + ], + entityType: 'indexes', + isUnique: true, + name: 'utg_idx', + table: 'users_to_groups', + where: 'whereExp', + }, + { + columns: [ + { + expression: false, + value: 'group_id', + }, + ], + entityType: 'indexes', + isUnique: false, + name: 'utg_g_idx', + table: 'users_to_groups', + where: null, + }, + ], + ); +}); + +test('Update entities conflict - with filter', () => { + db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'avatar', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: false, + table: 'users', + type: 'string', + }); + + const upd = db.columns.update({ + set: { + name: 'id', + }, + where: { + name: 'name', + }, + }); + + expect(upd).toStrictEqual({ + status: 'CONFLICT', + data: [{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }], + }); + + expect(db.entities.list()).toStrictEqual([ + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: { + as: 'identity', + type: 'always', + }, + name: 'id', + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'name', + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'avatar', + notNull: true, + primaryKey: false, + table: 'users', + type: 'string', + }, + ]); + + expect(db.columns.list()).toStrictEqual( + [ + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: { + as: 'identity', + type: 'always', + }, + name: 'id', + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'name', + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'avatar', + notNull: true, + primaryKey: false, + table: 'users', + type: 'string', + }, + ], + ); + + expect(db.indexes.list()).toStrictEqual([]); +}); + +test('Update entities conflict - no filter', () => { + db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'avatar', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: false, + table: 'users', + type: 'string', + }); + + const upd = db.columns.update({ + set: { + name: 'id', + }, + }); + + expect(upd).toStrictEqual({ + status: 'CONFLICT', + data: [{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }], + }); + + expect(db.entities.list()).toStrictEqual([ + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: { + as: 'identity', + type: 'always', + }, + name: 'id', + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'name', + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'avatar', + notNull: true, + primaryKey: false, + table: 'users', + type: 'string', + }, + ]); + + expect(db.columns.list()).toStrictEqual( + [ + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: { + as: 'identity', + type: 'always', + }, + name: 'id', + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'name', + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }, + { + autoincrement: null, + default: null, + entityType: 'columns', + generated: null, + name: 'avatar', + notNull: true, + primaryKey: false, + table: 'users', + type: 'string', + }, + ], + ); + + expect(db.indexes.list()).toStrictEqual([]); +}); + +test('Update entities via common function', () => { + db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.indexes.push({ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + }); + + db.indexes.push({ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + }); + + const updFirst = db.entities.update({ + set: { + table: 'upd_tbl', + }, + }); + + const updSecond = db.entities.update({ + set: { + name: (n) => `${n}_upd`, + }, + where: { + columns: [ + { + expression: false, + value: 'user_id', + }, + { + expression: false, + value: 'group_id', + }, + ], + }, + }); + + expect(updFirst).toStrictEqual({ + status: 'OK', + data: [{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'upd_tbl', + type: 'string', + entityType: 'columns', + }, { + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'upd_tbl', + type: 'string', + entityType: 'columns', + }, { + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'upd_tbl', + isUnique: true, + name: 'utg_idx_upd', + where: null, + entityType: 'indexes', + }, { + columns: [ + { + expression: false, + value: 'group_id', + }, + ], + entityType: 'indexes', + isUnique: false, + name: 'utg_g_idx', + table: 'upd_tbl', + where: null, + }], + }); + + expect(updSecond).toStrictEqual({ + status: 'OK', + data: [{ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'upd_tbl', + isUnique: true, + name: 'utg_idx_upd', + where: null, + entityType: 'indexes', + }], + }); + + expect(db.entities.list()).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'upd_tbl', + type: 'string', + entityType: 'columns', + }, { + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'upd_tbl', + type: 'string', + entityType: 'columns', + }, { + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'upd_tbl', + isUnique: true, + name: 'utg_idx_upd', + where: null, + entityType: 'indexes', + }, { + columns: [ + { + expression: false, + value: 'group_id', + }, + ], + entityType: 'indexes', + isUnique: false, + name: 'utg_g_idx', + table: 'upd_tbl', + where: null, + }]); + + expect(db.columns.list()).toStrictEqual( + [ + { + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'upd_tbl', + type: 'string', + entityType: 'columns', + }, + { + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'upd_tbl', + type: 'string', + entityType: 'columns', + }, + ], + ); + + expect(db.indexes.list()).toStrictEqual( + [ + { + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'upd_tbl', + isUnique: true, + name: 'utg_idx_upd', + where: null, + entityType: 'indexes', + }, + { + columns: [ + { + expression: false, + value: 'group_id', + }, + ], + entityType: 'indexes', + isUnique: false, + name: 'utg_g_idx', + table: 'upd_tbl', + where: null, + }, + ], + ); +}); + +test('List with filters', () => { + db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.indexes.push({ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + }); + + db.indexes.push({ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + }); + + const listFirst = db.columns.list(); + + const listSecond = db.indexes.list({ + columns: { + CONTAINS: { + value: 'user_id', + expression: false, + }, + }, + }); + + expect(listFirst).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, { + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }]); + + expect(listSecond).toStrictEqual([{ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }]); +}); + +test('List via common function with filters', () => { + db.columns.push({ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.columns.push({ + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + }); + + db.indexes.push({ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + }); + + db.indexes.push({ + columns: [{ + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: false, + name: 'utg_g_idx', + where: null, + }); + + const listFirst = db.entities.list({ + entityType: 'columns', + }); + + const listSecond = db.entities.list({ + entityType: 'indexes', + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + }); + + expect(listFirst).toStrictEqual([{ + name: 'id', + autoincrement: null, + default: null, + generated: { + type: 'always', + as: 'identity', + }, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }, { + name: 'name', + autoincrement: null, + default: null, + generated: null, + notNull: true, + primaryKey: true, + table: 'users', + type: 'string', + entityType: 'columns', + }]); + + expect(listSecond).toStrictEqual([{ + columns: [{ + value: 'user_id', + expression: false, + }, { + value: 'group_id', + expression: false, + }], + table: 'users_to_groups', + isUnique: true, + name: 'utg_idx', + where: null, + entityType: 'indexes', + }]); +}); + +test('Validate', () => { + const junk = {}; + if (db.views.validate(junk)) { + expectTypeOf(junk).toEqualTypeOf, null>>(); + } + + if (db.entities.validate(junk)) { + expectTypeOf(junk).toEqualTypeOf, null>>(); + } + + const table: typeof db._.types.tables = { + entityType: 'tables', + name: 'tbl', + }; + + expect(db.entities.validate(table)).toStrictEqual(true); + expect(db.tables.validate(table)).toStrictEqual(true); + expect(db.views.validate(table)).toStrictEqual(false); + + const deformedTable = { + entityType: 'tables', + name: 'tbl', + schema: null, + }; + + expect(db.entities.validate(deformedTable)).toStrictEqual(false); + expect(db.tables.validate(deformedTable)).toStrictEqual(false); + expect(db.views.validate(deformedTable)).toStrictEqual(false); + + const deformedTable2 = { + entityType: 'tables', + name: 'tbl', + schema: 'sch', + }; + + expect(db.entities.validate(deformedTable2)).toStrictEqual(false); + expect(db.tables.validate(deformedTable2)).toStrictEqual(false); + expect(db.views.validate(deformedTable2)).toStrictEqual(false); + + const column: typeof db._.types.columns = { + autoincrement: false, + default: null, + entityType: 'columns', + generated: { as: 'as', type: 'type' }, + name: 'cn', + notNull: false, + primaryKey: false, + table: 'tt', + type: 'varchar', + }; + + expect(db.entities.validate(column)).toStrictEqual(true); + expect(db.columns.validate(column)).toStrictEqual(true); + expect(db.tables.validate(column)).toStrictEqual(false); + + const column2: typeof db._.types.columns = { + autoincrement: false, + default: null, + entityType: 'columns', + generated: null, + name: 'cn', + notNull: false, + primaryKey: false, + table: 'tt', + type: 'varchar', + }; + + expect(db.entities.validate(column2)).toStrictEqual(true); + expect(db.columns.validate(column2)).toStrictEqual(true); + expect(db.tables.validate(column2)).toStrictEqual(false); + + const columnDeformed = { + autoincrement: false, + default: null, + entityType: 'columns', + generated: { as: 'as', type: 'type', something: undefined }, + name: 'cn', + notNull: false, + primaryKey: false, + table: 'tt', + type: 'varchar', + }; + + expect(db.entities.validate(columnDeformed)).toStrictEqual(false); + expect(db.columns.validate(columnDeformed)).toStrictEqual(false); + expect(db.tables.validate(columnDeformed)).toStrictEqual(false); + + const columnDeformed2 = { + autoincrement: false, + default: null, + entityType: 'columns', + generated: 'wrong', + name: 'cn', + notNull: false, + primaryKey: false, + table: 'tt', + type: 'varchar', + }; + + expect(db.entities.validate(columnDeformed2)).toStrictEqual(false); + expect(db.columns.validate(columnDeformed2)).toStrictEqual(false); + expect(db.tables.validate(columnDeformed2)).toStrictEqual(false); + + const pk: typeof db._.types.pks = { + columns: [], + entityType: 'pks', + name: 'pk1', + table: 'tt', + }; + + expect(db.entities.validate(pk)).toStrictEqual(true); + expect(db.pks.validate(pk)).toStrictEqual(true); + expect(db.views.validate(pk)).toStrictEqual(false); + + const pk2: typeof db._.types.pks = { + columns: ['str', 'str2', 'str3'], + entityType: 'pks', + name: 'pk1', + table: 'tt', + }; + + expect(db.entities.validate(pk2)).toStrictEqual(true); + expect(db.pks.validate(pk2)).toStrictEqual(true); + expect(db.views.validate(pk2)).toStrictEqual(false); + + const pkDeformed = { + columns: ['str', null, 'str3'], + entityType: 'pks', + name: 'pk1', + table: 'tt', + }; + + expect(db.entities.validate(pkDeformed)).toStrictEqual(false); + expect(db.pks.validate(pkDeformed)).toStrictEqual(false); + expect(db.views.validate(pkDeformed)).toStrictEqual(false); + + const index: typeof db._.types.indexes = { + columns: [], + entityType: 'indexes', + isUnique: true, + name: 'idx', + table: 'tt', + where: null, + }; + + expect(db.entities.validate(index)).toStrictEqual(true); + expect(db.indexes.validate(index)).toStrictEqual(true); + expect(db.pks.validate(index)).toStrictEqual(false); + + const index2: typeof db._.types.indexes = { + columns: [{ + expression: true, + value: 'expr', + }], + entityType: 'indexes', + isUnique: true, + name: 'idx', + table: 'tt', + where: null, + }; + + expect(db.entities.validate(index2)).toStrictEqual(true); + expect(db.indexes.validate(index2)).toStrictEqual(true); + expect(db.pks.validate(index2)).toStrictEqual(false); + + const index3: typeof db._.types.indexes = { + columns: [{ + expression: true, + value: 'expr', + }, { + expression: false, + value: 'ex2', + }], + entityType: 'indexes', + isUnique: true, + name: 'idx', + table: 'tt', + where: null, + }; + + expect(db.entities.validate(index3)).toStrictEqual(true); + expect(db.indexes.validate(index3)).toStrictEqual(true); + expect(db.pks.validate(index3)).toStrictEqual(false); + + const indexDeformed = { + columns: 2, + entityType: 'indexes', + isUnique: true, + name: 'idx', + table: 'tt', + where: null, + }; + + expect(db.entities.validate(indexDeformed)).toStrictEqual(false); + expect(db.indexes.validate(indexDeformed)).toStrictEqual(false); + expect(db.pks.validate(indexDeformed)).toStrictEqual(false); + + const indexDeformed2 = { + columns: [{ + expression: true, + value: 'expr', + }, { + expression: false, + value: 'ex2', + }, 'who?'], + entityType: 'indexes', + isUnique: true, + name: 'idx', + table: 'tt', + where: null, + }; + + expect(db.entities.validate(indexDeformed2)).toStrictEqual(false); + expect(db.indexes.validate(indexDeformed2)).toStrictEqual(false); + expect(db.pks.validate(indexDeformed2)).toStrictEqual(false); + + const indexDeformed3 = { + columns: [null, { + expression: true, + value: 'expr', + }, { + expression: false, + value: 'ex2', + }], + entityType: 'indexes', + isUnique: true, + name: 'idx', + table: 'tt', + where: null, + }; + + expect(db.entities.validate(indexDeformed3)).toStrictEqual(false); + expect(db.indexes.validate(indexDeformed3)).toStrictEqual(false); + expect(db.pks.validate(indexDeformed3)).toStrictEqual(false); +}); + +test('diff: update', () => { + const cfg = { + column: { + type: 'string', + pk: 'boolean?', + table: 'required', + }, + } as const; + + const original = create(cfg); + const changed = create(cfg); + + original.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + }); + original.column.push({ + name: 'name', + type: 'varchar', + pk: false, + table: 'user', + }); + + changed.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + }); + changed.column.push({ + name: 'name', + type: 'text', + pk: false, + table: 'user', + }); + + const res = diff.alters(original, changed, 'column'); + + expect(diff.all(original, changed, 'column')).toStrictEqual([{ + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'name', + type: { + from: 'varchar', + to: 'text', + }, + $left: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'varchar', + }, + $right: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'text', + }, + }]); + expect(diff.all(original, changed)).toStrictEqual([{ + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'name', + type: { + from: 'varchar', + to: 'text', + }, + $left: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'varchar', + }, + $right: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'text', + }, + }]); + expect(diff.drops(original, changed, 'column')).toStrictEqual([]); + expect(diff.drops(original, changed)).toStrictEqual([]); + expect(diff.alters(original, changed, 'column')).toStrictEqual([{ + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'name', + type: { + from: 'varchar', + to: 'text', + }, + $left: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'varchar', + }, + $right: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'text', + }, + }]); + expect(diff.alters(original, changed)).toStrictEqual([{ + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'name', + type: { + from: 'varchar', + to: 'text', + }, + $left: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'varchar', + }, + $right: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'text', + }, + }]); + expect(diff.creates(original, changed, 'column')).toStrictEqual([]); + expect(diff.creates(original, changed)).toStrictEqual([]); + + expect(res).toStrictEqual([{ + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'name', + type: { + from: 'varchar', + to: 'text', + }, + $left: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'varchar', + }, + $right: { + entityType: 'column', + name: 'name', + pk: false, + table: 'user', + type: 'text', + }, + }]); +}); + +test('diff: update object', () => { + const cfg = { + column: { + type: 'string', + pk: 'boolean?', + table: 'required', + obj: { + subfield: 'string', + subArr: 'string[]', + }, + }, + } as const; + + const original = create(cfg); + const changed = create(cfg); + + original.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + obj: { + subArr: ['s3', 's4'], + subfield: 'sf_value_upd', + }, + }); + original.column.push({ + name: 'name', + type: 'varchar', + pk: false, + table: 'user', + obj: { + subArr: ['s1', 's2'], + subfield: 'sf_value', + }, + }); + + changed.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + obj: null, + }); + changed.column.push({ + name: 'name', + type: 'text', + pk: false, + table: 'user', + obj: { + subArr: ['s3', 's4'], + subfield: 'sf_value', + }, + }); + + const res = diff.alters(original, changed, 'column'); + + expect(res).toStrictEqual([{ + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'id', + obj: { + from: { + subArr: ['s3', 's4'], + subfield: 'sf_value_upd', + }, + to: null, + }, + $left: { + entityType: 'column', + name: 'id', + obj: { + subArr: [ + 's3', + 's4', + ], + subfield: 'sf_value_upd', + }, + pk: true, + table: 'user', + type: 'serial', + }, + $right: { + entityType: 'column', + name: 'id', + obj: null, + pk: true, + table: 'user', + type: 'serial', + }, + }, { + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'name', + type: { + from: 'varchar', + to: 'text', + }, + obj: { + from: { + subArr: ['s1', 's2'], + subfield: 'sf_value', + }, + to: { + subArr: ['s3', 's4'], + subfield: 'sf_value', + }, + }, + $left: { + entityType: 'column', + name: 'name', + obj: { + subArr: [ + 's1', + 's2', + ], + subfield: 'sf_value', + }, + pk: false, + table: 'user', + type: 'varchar', + }, + $right: { + entityType: 'column', + name: 'name', + obj: { + subArr: [ + 's3', + 's4', + ], + subfield: 'sf_value', + }, + pk: false, + table: 'user', + type: 'text', + }, + }]); +}); + +test('diff: update object array', () => { + const original = create({ + column: { + type: 'string', + pk: 'boolean?', + table: 'required', + obj: [{ + subfield: 'string', + subArr: 'string[]', + }], + }, + }); + const changed = create({ + column: { + type: 'string', + pk: 'boolean?', + table: 'required', + obj: [{ + subfield: 'string', + subArr: 'string[]', + }], + }, + }); + + original.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + obj: [{ + subArr: ['s3', 's4'], + subfield: 'sf_value', + }], + }); + original.column.push({ + name: 'name', + type: 'varchar', + pk: false, + table: 'user', + obj: [{ + subArr: ['s1', 's2'], + subfield: 'sf_value', + }], + }); + + changed.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + obj: [{ + subArr: ['s3', 's4'], + subfield: 'sf_value', + }, { + subArr: ['s1', 's2'], + subfield: 'sf_value', + }], + }); + changed.column.push({ + name: 'name', + type: 'text', + pk: false, + table: 'user', + obj: [{ + subArr: ['s1', 's2'], + subfield: 'sf_value_upd', + }], + }); + + const res = diff.alters(original, changed, 'column'); + + expect(res).toStrictEqual([{ + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'id', + obj: { + from: [{ + subArr: ['s3', 's4'], + subfield: 'sf_value', + }], + to: [{ + subArr: ['s3', 's4'], + subfield: 'sf_value', + }, { + subArr: ['s1', 's2'], + subfield: 'sf_value', + }], + }, + $left: { + entityType: 'column', + name: 'id', + obj: [ + { + subArr: [ + 's3', + 's4', + ], + subfield: 'sf_value', + }, + ], + pk: true, + table: 'user', + type: 'serial', + }, + $right: { + entityType: 'column', + name: 'id', + obj: [ + { + subArr: [ + 's3', + 's4', + ], + subfield: 'sf_value', + }, + { + subArr: [ + 's1', + 's2', + ], + subfield: 'sf_value', + }, + ], + pk: true, + table: 'user', + type: 'serial', + }, + }, { + $diffType: 'alter', + entityType: 'column', + table: 'user', + name: 'name', + type: { + from: 'varchar', + to: 'text', + }, + obj: { + from: [{ + subArr: ['s1', 's2'], + subfield: 'sf_value', + }], + to: [{ + subArr: ['s1', 's2'], + subfield: 'sf_value_upd', + }], + }, + $left: { + entityType: 'column', + name: 'name', + obj: [ + { + subArr: [ + 's1', + 's2', + ], + subfield: 'sf_value', + }, + ], + pk: false, + table: 'user', + type: 'varchar', + }, + $right: { + entityType: 'column', + name: 'name', + obj: [ + { + subArr: [ + 's1', + 's2', + ], + subfield: 'sf_value_upd', + }, + ], + pk: false, + table: 'user', + type: 'text', + }, + }]); +}); + +test('diff: insert', () => { + const cfg = { + column: { + type: 'string', + pk: 'boolean?', + table: 'required', + }, + } as const; + + const original = create(cfg); + const changed = create(cfg); + + original.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + }); + + changed.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + }); + changed.column.push({ + name: 'name', + type: 'varchar', + pk: false, + table: 'user', + }); + + const res = diff(original, changed, 'column'); + + expect(diff.all(original, changed, 'column')).toStrictEqual([{ + $diffType: 'create', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); + expect(diff.all(original, changed)).toStrictEqual([{ + $diffType: 'create', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); + expect(diff.drops(original, changed, 'column')).toStrictEqual([]); + expect(diff.drops(original, changed)).toStrictEqual([]); + expect(diff.alters(original, changed, 'column')).toStrictEqual([]); + expect(diff.alters(original, changed)).toStrictEqual([]); + expect(diff.creates(original, changed, 'column')).toStrictEqual([{ + $diffType: 'create', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); + expect(diff.creates(original, changed)).toStrictEqual([{ + $diffType: 'create', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); + + expect(res).toStrictEqual([{ + $diffType: 'create', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); +}); + +test('diff: delete', () => { + const cfg = { + column: { + type: 'string', + pk: 'boolean?', + table: 'required', + }, + } as const; + + const original = create(cfg); + const changed = create(cfg); + + original.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + }); + original.column.push({ + name: 'name', + type: 'varchar', + pk: false, + table: 'user', + }); + + changed.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + }); + const res = diff(original, changed, 'column'); + + expect(diff.all(original, changed, 'column')).toStrictEqual([{ + $diffType: 'drop', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); + expect(diff.all(original, changed)).toStrictEqual([{ + $diffType: 'drop', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); + expect(diff.drops(original, changed, 'column')).toStrictEqual([{ + $diffType: 'drop', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); + expect(diff.drops(original, changed)).toStrictEqual([{ + $diffType: 'drop', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); + expect(diff.alters(original, changed, 'column')).toStrictEqual([]); + expect(diff.alters(original, changed)).toStrictEqual([]); + expect(diff.creates(original, changed, 'column')).toStrictEqual([]); + expect(diff.creates(original, changed)).toStrictEqual([]); + + expect(res).toStrictEqual([{ + $diffType: 'drop', + entityType: 'column', + name: 'name', + table: 'user', + type: 'varchar', + pk: false, + }]); +}); + +test('indexes #1', () => { + const ddl1 = pg(); + const ddl2 = pg(); + + ddl1.indexes.push({ + schema: 'public', + table: 'users', + name: 'users_id_index', + columns: [{ value: 'id', isExpression: false, opclass: null, nullsFirst: false, asc: false }], + isUnique: false, + where: null, + with: '', + concurrently: false, + method: 'btree', + nameExplicit: true, + }); + + ddl1.indexes.push({ + schema: 'public', + table: 'users', + name: 'indx4', + columns: [{ value: 'id', isExpression: false, opclass: null, nullsFirst: false, asc: false }], + isUnique: false, + where: null, + with: '', + concurrently: false, + method: 'btree', + nameExplicit: true, + }); + + ddl2.indexes.push({ + schema: 'public', + table: 'users', + name: 'users_id_index', + columns: [{ value: 'id', isExpression: false, opclass: null, nullsFirst: false, asc: false }], + isUnique: false, + where: null, + with: '', + concurrently: false, + method: 'btree', + nameExplicit: false, + }); + + ddl2.indexes.push({ + schema: 'public', + table: 'users', + name: 'indx4', + columns: [{ value: 'id', isExpression: false, opclass: null, nullsFirst: false, asc: false }], + isUnique: false, + where: null, + with: '', + concurrently: false, + method: 'btree', + nameExplicit: true, + }); + + const d = diff(ddl1, ddl2, 'indexes'); + expect(d).toStrictEqual([]); +}); + +test('hasDiff', () => { + const cfg = { + column: { + type: 'string', + pk: 'boolean?', + table: 'required', + }, + } as const; + + const original = create(cfg); + const changed = create(cfg); + + original.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + }); + original.column.push({ + name: 'name', + type: 'varchar', + pk: false, + table: 'user', + }); + + changed.column.push({ + name: 'id', + type: 'serial', + pk: true, + table: 'user', + }); + changed.column.push({ + name: 'name', + type: 'text', + pk: false, + table: 'user', + }); + + const res = diff.alters(original, changed, 'column'); + + const exampleDiff = res[0]; + expect(original.column.hasDiff(exampleDiff)).toStrictEqual(true); + expect(original.entities.hasDiff(exampleDiff)).toStrictEqual(true); + + delete exampleDiff['type']; + expect(original.column.hasDiff(exampleDiff)).toStrictEqual(false); + expect(original.entities.hasDiff(exampleDiff)).toStrictEqual(false); +}); diff --git a/drizzle-kit/tests/other/utils.test.ts b/drizzle-kit/tests/other/utils.test.ts new file mode 100644 index 0000000000..bd9b09d1c3 --- /dev/null +++ b/drizzle-kit/tests/other/utils.test.ts @@ -0,0 +1,69 @@ +import { isTime, splitExpressions, trimChar, wrapWith } from 'src/utils'; +import { expect, test } from 'vitest'; + +test('trim chars', () => { + expect.soft(trimChar("'", "'")).toBe("'"); + expect.soft(trimChar("''", "'")).toBe(''); + expect.soft(trimChar("('')", ['(', ')'])).toBe("''"); + expect.soft(trimChar(trimChar("('')", ['(', ')']), "'")).toBe(''); +}); + +test.each([ + ['lower(name)', ['lower(name)']], + ['lower(name), upper(name)', ['lower(name)', 'upper(name)']], + ['lower(name), lower(name)', ['lower(name)', 'lower(name)']], + [`((name || ','::text) || name1)`, [`((name || ','::text) || name1)`]], + ["((name || ','::text) || name1), SUBSTRING(name1 FROM 1 FOR 3)", [ + "((name || ','::text) || name1)", + 'SUBSTRING(name1 FROM 1 FOR 3)', + ]], + [`((name || ','::text) || name1), COALESCE("name", '"default", value'::text)`, [ + `((name || ','::text) || name1)`, + `COALESCE("name", '"default", value'::text)`, + ]], + ["COALESCE(name, 'default,'' value'''::text), SUBSTRING(name1 FROM 1 FOR 3)", [ + "COALESCE(name, 'default,'' value'''::text)", + 'SUBSTRING(name1 FROM 1 FOR 3)', + ]], + ["COALESCE(name, 'default,value'''::text), SUBSTRING(name1 FROM 1 FOR 3)", [ + "COALESCE(name, 'default,value'''::text)", + 'SUBSTRING(name1 FROM 1 FOR 3)', + ]], + ["COALESCE(name, 'default,''value'::text), SUBSTRING(name1 FROM 1 FOR 3)", [ + "COALESCE(name, 'default,''value'::text)", + 'SUBSTRING(name1 FROM 1 FOR 3)', + ]], + ["COALESCE(name, 'default,value'::text), SUBSTRING(name1 FROM 1 FOR 3)", [ + "COALESCE(name, 'default,value'::text)", + 'SUBSTRING(name1 FROM 1 FOR 3)', + ]], + ["COALESCE(name, 'default, value'::text), SUBSTRING(name1 FROM 1 FOR 3)", [ + "COALESCE(name, 'default, value'::text)", + 'SUBSTRING(name1 FROM 1 FOR 3)', + ]], + [`COALESCE("name", '"default", value'::text), SUBSTRING("name1" FROM 1 FOR 3)`, [ + `COALESCE("name", '"default", value'::text)`, + `SUBSTRING("name1" FROM 1 FOR 3)`, + ]], + [`COALESCE("namewithcomma,", '"default", value'::text), SUBSTRING("name1" FROM 1 FOR 3)`, [ + `COALESCE("namewithcomma,", '"default", value'::text)`, + `SUBSTRING("name1" FROM 1 FOR 3)`, + ]], + ["((lower(first_name) || ', '::text) || lower(last_name))", [ + "((lower(first_name) || ', '::text) || lower(last_name))", + ]], +])('split expression %#: %s', (it, expected) => { + expect(splitExpressions(it)).toStrictEqual(expected); +}); + +test('wrap chars', () => { + expect.soft(wrapWith('10:20:30', "'")).toBe("'10:20:30'"); + expect.soft(wrapWith("10:20:30'", "'")).toBe("'10:20:30''"); + expect.soft(wrapWith("'10:20:30", "'")).toBe("''10:20:30'"); +}); + +test('is time', () => { + expect.soft(isTime('10:20:30')).toBe(true); + expect.soft(isTime('10:20:30+0000')).toBe(true); + expect.soft(isTime('now()')).toBe(false); +}); diff --git a/drizzle-kit/tests/validations.test.ts b/drizzle-kit/tests/other/validations.test.ts similarity index 100% rename from drizzle-kit/tests/validations.test.ts rename to drizzle-kit/tests/other/validations.test.ts diff --git a/drizzle-kit/tests/wrap-param.test.ts b/drizzle-kit/tests/other/wrap-param.test.ts similarity index 92% rename from drizzle-kit/tests/wrap-param.test.ts rename to drizzle-kit/tests/other/wrap-param.test.ts index a27d27d450..ea189e8df7 100644 --- a/drizzle-kit/tests/wrap-param.test.ts +++ b/drizzle-kit/tests/other/wrap-param.test.ts @@ -1,6 +1,6 @@ import chalk from 'chalk'; import { assert, expect, test } from 'vitest'; -import { wrapParam } from '../src/cli/validations/common'; +import { wrapParam } from '../../src/cli/validations/common'; test('wrapParam', () => { expect(wrapParam('password', 'password123', false, 'secret')).toBe(` [${chalk.green('✓')}] password: '*****'`); diff --git a/drizzle-kit/tests/pg-array.test.ts b/drizzle-kit/tests/pg-array.test.ts deleted file mode 100644 index 300355ce2e..0000000000 --- a/drizzle-kit/tests/pg-array.test.ts +++ /dev/null @@ -1,370 +0,0 @@ -import { - bigint, - boolean, - date, - integer, - json, - pgEnum, - pgTable, - serial, - text, - timestamp, - uuid, -} from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -test('array #1: empty array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').array().default([]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{}'" }, - }); -}); - -test('array #2: integer array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').array().default([1, 2, 3]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, - }); -}); - -test('array #3: bigint array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: bigint('values', { mode: 'bigint' }).array().default([BigInt(1), BigInt(2), BigInt(3)]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { name: 'values', type: 'bigint[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, - }); -}); - -test('array #4: boolean array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: boolean('values').array().default([true, false, true]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'boolean[]', - primaryKey: false, - notNull: false, - default: "'{true,false,true}'", - }, - }); -}); - -test('array #5: multi-dimensional array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').array().array().default([[1, 2], [3, 4]]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'integer[][]', - primaryKey: false, - notNull: false, - default: "'{{1,2},{3,4}}'", - }, - }); -}); - -test('array #6: date array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: date('values').array().default(['2024-08-06', '2024-08-07']), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'date[]', - primaryKey: false, - notNull: false, - default: '\'{"2024-08-06","2024-08-07"}\'', - }, - }); -}); - -test('array #7: timestamp array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: timestamp('values').array().default([new Date('2024-08-06'), new Date('2024-08-07')]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'timestamp[]', - primaryKey: false, - notNull: false, - default: '\'{"2024-08-06 00:00:00.000","2024-08-07 00:00:00.000"}\'', - }, - }); -}); - -test('array #8: json array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: json('values').array().default([{ a: 1 }, { b: 2 }]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'json[]', - primaryKey: false, - notNull: false, - default: '\'{"{\\"a\\":1}","{\\"b\\":2}"}\'', - }, - }); -}); - -test('array #9: text array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: text('values').array().default(['abc', 'def']), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'text[]', - primaryKey: false, - notNull: false, - default: '\'{"abc","def"}\'', - }, - }); -}); - -test('array #10: uuid array default', async (t) => { - const from = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: uuid('values').array().default([ - 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', - 'b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11', - ]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'uuid[]', - primaryKey: false, - notNull: false, - default: '\'{"a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11","b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11"}\'', - }, - }); -}); - -test('array #11: enum array default', async (t) => { - const testEnum = pgEnum('test_enum', ['a', 'b', 'c']); - - const from = { - enum: testEnum, - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - enum: testEnum, - test: pgTable('test', { - id: serial('id').primaryKey(), - values: testEnum('values').array().default(['a', 'b', 'c']), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'test_enum[]', - primaryKey: false, - notNull: false, - default: '\'{"a","b","c"}\'', - typeSchema: 'public', - }, - }); -}); - -test('array #12: enum empty array default', async (t) => { - const testEnum = pgEnum('test_enum', ['a', 'b', 'c']); - - const from = { - enum: testEnum, - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const to = { - enum: testEnum, - test: pgTable('test', { - id: serial('id').primaryKey(), - values: testEnum('values').array().default([]), - }), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { - name: 'values', - type: 'test_enum[]', - primaryKey: false, - notNull: false, - default: "'{}'", - typeSchema: 'public', - }, - }); -}); diff --git a/drizzle-kit/tests/pg-checks.test.ts b/drizzle-kit/tests/pg-checks.test.ts deleted file mode 100644 index 8033aacefb..0000000000 --- a/drizzle-kit/tests/pg-checks.test.ts +++ /dev/null @@ -1,282 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { check, integer, pgTable, serial, varchar } from 'drizzle-orm/pg-core'; -import { JsonCreateTableStatement } from 'src/jsonStatements'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -test('create table with check', async (t) => { - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [ - { - name: 'id', - type: 'serial', - notNull: true, - primaryKey: true, - }, - { - name: 'age', - type: 'integer', - notNull: false, - primaryKey: false, - }, - ], - compositePKs: [], - checkConstraints: ['some_check_name;"users"."age" > 21'], - compositePkName: '', - uniqueConstraints: [], - isRLSEnabled: false, - policies: [], - } as JsonCreateTableStatement); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" serial PRIMARY KEY NOT NULL, -\t"age" integer, -\tCONSTRAINT "some_check_name" CHECK ("users"."age" > 21) -);\n`); -}); - -test('add check contraint to existing table', async (t) => { - const from = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }), - }; - - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemas(from, to, []); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_check_constraint', - tableName: 'users', - schema: '', - data: 'some_check_name;"users"."age" > 21', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "users" ADD CONSTRAINT "some_check_name" CHECK ("users"."age" > 21);`, - ); -}); - -test('drop check contraint in existing table', async (t) => { - const from = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }), - }; - - const { sqlStatements, statements } = await diffTestSchemas(from, to, []); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'delete_check_constraint', - tableName: 'users', - schema: '', - constraintName: 'some_check_name', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, - ); -}); - -test('rename check constraint', async (t) => { - const from = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }, (table) => ({ - checkConstraint: check('new_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemas(from, to, []); - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - constraintName: 'some_check_name', - schema: '', - tableName: 'users', - type: 'delete_check_constraint', - }); - expect(statements[1]).toStrictEqual({ - data: 'new_check_name;"users"."age" > 21', - schema: '', - tableName: 'users', - type: 'create_check_constraint', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "users" ADD CONSTRAINT "new_check_name" CHECK ("users"."age" > 21);`, - ); -}); - -test('alter check constraint', async (t) => { - const from = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - }, (table) => ({ - checkConstraint: check('new_check_name', sql`${table.age} > 10`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemas(from, to, []); - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - constraintName: 'some_check_name', - schema: '', - tableName: 'users', - type: 'delete_check_constraint', - }); - expect(statements[1]).toStrictEqual({ - data: 'new_check_name;"users"."age" > 10', - schema: '', - tableName: 'users', - type: 'create_check_constraint', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "users" ADD CONSTRAINT "new_check_name" CHECK ("users"."age" > 10);`, - ); -}); - -test('alter multiple check constraints', async (t) => { - const from = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - name: varchar('name'), - }, (table) => ({ - checkConstraint1: check('some_check_name_1', sql`${table.age} > 21`), - checkConstraint2: check('some_check_name_2', sql`${table.name} != 'Alex'`), - })), - }; - - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - name: varchar('name'), - }, (table) => ({ - checkConstraint1: check('some_check_name_3', sql`${table.age} > 21`), - checkConstraint2: check('some_check_name_4', sql`${table.name} != 'Alex'`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemas(from, to, []); - expect(statements.length).toBe(4); - expect(statements[0]).toStrictEqual({ - constraintName: 'some_check_name_1', - schema: '', - tableName: 'users', - type: 'delete_check_constraint', - }); - expect(statements[1]).toStrictEqual({ - constraintName: 'some_check_name_2', - schema: '', - tableName: 'users', - type: 'delete_check_constraint', - }); - expect(statements[2]).toStrictEqual({ - data: 'some_check_name_3;"users"."age" > 21', - schema: '', - tableName: 'users', - type: 'create_check_constraint', - }); - expect(statements[3]).toStrictEqual({ - data: 'some_check_name_4;"users"."name" != \'Alex\'', - schema: '', - tableName: 'users', - type: 'create_check_constraint', - }); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_1";`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_2";`, - ); - expect(sqlStatements[2]).toBe( - `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_3" CHECK ("users"."age" > 21);`, - ); - expect(sqlStatements[3]).toBe( - `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_4" CHECK ("users"."name" != \'Alex\');`, - ); -}); - -test('create checks with same names', async (t) => { - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - age: integer('age'), - name: varchar('name'), - }, (table) => ({ - checkConstraint1: check('some_check_name', sql`${table.age} > 21`), - checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), - })), - }; - - await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); -}); diff --git a/drizzle-kit/tests/pg-columns.test.ts b/drizzle-kit/tests/pg-columns.test.ts deleted file mode 100644 index ddd744a81a..0000000000 --- a/drizzle-kit/tests/pg-columns.test.ts +++ /dev/null @@ -1,484 +0,0 @@ -import { integer, pgTable, primaryKey, serial, text, uuid, varchar } from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -test('add columns #1', async (t) => { - const schema1 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }), - }; - - const { statements } = await diffTestSchemas(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'users', - schema: '', - column: { name: 'name', type: 'text', primaryKey: false, notNull: false }, - }); -}); - -test('add columns #2', async (t) => { - const schema1 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - email: text('email'), - }), - }; - - const { statements } = await diffTestSchemas(schema1, schema2, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'users', - schema: '', - column: { name: 'name', type: 'text', primaryKey: false, notNull: false }, - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'users', - schema: '', - column: { name: 'email', type: 'text', primaryKey: false, notNull: false }, - }); -}); - -test('alter column change name #1', async (t) => { - const schema1 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name1'), - }), - }; - - const { statements } = await diffTestSchemas(schema1, schema2, [ - 'public.users.name->public.users.name1', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_rename_column', - tableName: 'users', - schema: '', - oldColumnName: 'name', - newColumnName: 'name1', - }); -}); - -test('alter column change name #2', async (t) => { - const schema1 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name1'), - email: text('email'), - }), - }; - - const { statements } = await diffTestSchemas(schema1, schema2, [ - 'public.users.name->public.users.name1', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_rename_column', - tableName: 'users', - schema: '', - oldColumnName: 'name', - newColumnName: 'name1', - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'users', - schema: '', - column: { - name: 'email', - notNull: false, - primaryKey: false, - type: 'text', - }, - }); -}); - -test('alter table add composite pk', async (t) => { - const schema1 = { - table: pgTable('table', { - id1: integer('id1'), - id2: integer('id2'), - }), - }; - - const schema2 = { - table: pgTable( - 'table', - { - id1: integer('id1'), - id2: integer('id2'), - }, - (t) => { - return { - pk: primaryKey({ columns: [t.id1, t.id2] }), - }; - }, - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_composite_pk', - tableName: 'table', - data: 'id1,id2;table_id1_id2_pk', - schema: '', - constraintName: 'table_id1_id2_pk', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - 'ALTER TABLE "table" ADD CONSTRAINT "table_id1_id2_pk" PRIMARY KEY("id1","id2");', - ); -}); - -test('rename table rename column #1', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id'), - }), - }; - - const schema2 = { - users: pgTable('users1', { - id: integer('id1'), - }), - }; - - const { statements } = await diffTestSchemas(schema1, schema2, [ - 'public.users->public.users1', - 'public.users1.id->public.users1.id1', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'rename_table', - tableNameFrom: 'users', - tableNameTo: 'users1', - fromSchema: '', - toSchema: '', - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_table_rename_column', - oldColumnName: 'id', - newColumnName: 'id1', - schema: '', - tableName: 'users1', - }); -}); - -test('with composite pks #1', async (t) => { - const schema1 = { - users: pgTable( - 'users', - { - id1: integer('id1'), - id2: integer('id2'), - }, - (t) => { - return { - pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), - }; - }, - ), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id1: integer('id1'), - id2: integer('id2'), - text: text('text'), - }, - (t) => { - return { - pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), - }; - }, - ), - }; - - const { statements } = await diffTestSchemas(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'users', - schema: '', - column: { - name: 'text', - notNull: false, - primaryKey: false, - type: 'text', - }, - }); -}); - -test('with composite pks #2', async (t) => { - const schema1 = { - users: pgTable('users', { - id1: integer('id1'), - id2: integer('id2'), - }), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id1: integer('id1'), - id2: integer('id2'), - }, - (t) => { - return { - pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), - }; - }, - ), - }; - - const { statements } = await diffTestSchemas(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_composite_pk', - tableName: 'users', - schema: '', - constraintName: 'compositePK', - data: 'id1,id2;compositePK', - }); -}); - -test('with composite pks #3', async (t) => { - const schema1 = { - users: pgTable( - 'users', - { - id1: integer('id1'), - id2: integer('id2'), - }, - (t) => { - return { - pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), - }; - }, - ), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id1: integer('id1'), - id3: integer('id3'), - }, - (t) => { - return { - pk: primaryKey({ columns: [t.id1, t.id3], name: 'compositePK' }), - }; - }, - ), - }; - - // TODO: remove redundand drop/create create constraint - const { statements } = await diffTestSchemas(schema1, schema2, [ - 'public.users.id2->public.users.id3', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_rename_column', - tableName: 'users', - schema: '', - newColumnName: 'id3', - oldColumnName: 'id2', - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_composite_pk', - tableName: 'users', - schema: '', - new: 'id1,id3;compositePK', - old: 'id1,id2;compositePK', - newConstraintName: 'compositePK', - oldConstraintName: 'compositePK', - }); -}); - -test('add multiple constraints #1', async (t) => { - const t1 = pgTable('t1', { - id: uuid('id').primaryKey().defaultRandom(), - }); - - const t2 = pgTable('t2', { - id: uuid('id').primaryKey().defaultRandom(), - }); - - const t3 = pgTable('t3', { - id: uuid('id').primaryKey().defaultRandom(), - }); - - const schema1 = { - t1, - t2, - t3, - ref1: pgTable('ref1', { - id1: uuid('id1').references(() => t1.id), - id2: uuid('id2').references(() => t2.id), - id3: uuid('id3').references(() => t3.id), - }), - }; - - const schema2 = { - t1, - t2, - t3, - ref1: pgTable('ref1', { - id1: uuid('id1').references(() => t1.id, { onDelete: 'cascade' }), - id2: uuid('id2').references(() => t2.id, { onDelete: 'set null' }), - id3: uuid('id3').references(() => t3.id, { onDelete: 'cascade' }), - }), - }; - - // TODO: remove redundand drop/create create constraint - const { statements } = await diffTestSchemas(schema1, schema2, []); - - expect(statements.length).toBe(6); -}); - -test('add multiple constraints #2', async (t) => { - const t1 = pgTable('t1', { - id1: uuid('id1').primaryKey().defaultRandom(), - id2: uuid('id2').primaryKey().defaultRandom(), - id3: uuid('id3').primaryKey().defaultRandom(), - }); - - const schema1 = { - t1, - ref1: pgTable('ref1', { - id1: uuid('id1').references(() => t1.id1), - id2: uuid('id2').references(() => t1.id2), - id3: uuid('id3').references(() => t1.id3), - }), - }; - - const schema2 = { - t1, - ref1: pgTable('ref1', { - id1: uuid('id1').references(() => t1.id1, { onDelete: 'cascade' }), - id2: uuid('id2').references(() => t1.id2, { onDelete: 'set null' }), - id3: uuid('id3').references(() => t1.id3, { onDelete: 'cascade' }), - }), - }; - - // TODO: remove redundand drop/create create constraint - const { statements } = await diffTestSchemas(schema1, schema2, []); - - expect(statements.length).toBe(6); -}); - -test('add multiple constraints #3', async (t) => { - const t1 = pgTable('t1', { - id1: uuid('id1').primaryKey().defaultRandom(), - id2: uuid('id2').primaryKey().defaultRandom(), - id3: uuid('id3').primaryKey().defaultRandom(), - }); - - const schema1 = { - t1, - ref1: pgTable('ref1', { - id: uuid('id').references(() => t1.id1), - }), - ref2: pgTable('ref2', { - id: uuid('id').references(() => t1.id2), - }), - ref3: pgTable('ref3', { - id: uuid('id').references(() => t1.id3), - }), - }; - - const schema2 = { - t1, - ref1: pgTable('ref1', { - id: uuid('id').references(() => t1.id1, { onDelete: 'cascade' }), - }), - ref2: pgTable('ref2', { - id: uuid('id').references(() => t1.id2, { onDelete: 'set null' }), - }), - ref3: pgTable('ref3', { - id: uuid('id').references(() => t1.id3, { onDelete: 'cascade' }), - }), - }; - - // TODO: remove redundand drop/create create constraint - const { statements } = await diffTestSchemas(schema1, schema2, []); - - expect(statements.length).toBe(6); -}); - -test('varchar and text default values escape single quotes', async (t) => { - const schema1 = { - table: pgTable('table', { - id: serial('id').primaryKey(), - }), - }; - - const schem2 = { - table: pgTable('table', { - id: serial('id').primaryKey(), - text: text('text').default("escape's quotes"), - varchar: varchar('varchar').default("escape's quotes"), - }), - }; - - const { sqlStatements } = await diffTestSchemas(schema1, schem2, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toStrictEqual( - 'ALTER TABLE "table" ADD COLUMN "text" text DEFAULT \'escape\'\'s quotes\';', - ); - expect(sqlStatements[1]).toStrictEqual( - 'ALTER TABLE "table" ADD COLUMN "varchar" varchar DEFAULT \'escape\'\'s quotes\';', - ); -}); diff --git a/drizzle-kit/tests/pg-enums.test.ts b/drizzle-kit/tests/pg-enums.test.ts deleted file mode 100644 index 967df2e3e4..0000000000 --- a/drizzle-kit/tests/pg-enums.test.ts +++ /dev/null @@ -1,2505 +0,0 @@ -import { integer, pgEnum, pgSchema, pgTable, serial, text, varchar } from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -test('enums #1', async () => { - const to = { - enum: pgEnum('enum', ['value']), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value');`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'enum', - schema: 'public', - type: 'create_type_enum', - values: ['value'], - }); -}); - -test('enums #2', async () => { - const folder = pgSchema('folder'); - const to = { - enum: folder.enum('enum', ['value']), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE TYPE "folder"."enum" AS ENUM('value');`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'enum', - schema: 'folder', - type: 'create_type_enum', - values: ['value'], - }); -}); - -test('enums #3', async () => { - const from = { - enum: pgEnum('enum', ['value']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, {}, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP TYPE "public"."enum";`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'drop_type_enum', - name: 'enum', - schema: 'public', - }); -}); - -test('enums #4', async () => { - const folder = pgSchema('folder'); - - const from = { - enum: folder.enum('enum', ['value']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, {}, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP TYPE "folder"."enum";`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'drop_type_enum', - name: 'enum', - schema: 'folder', - }); -}); - -test('enums #5', async () => { - const folder1 = pgSchema('folder1'); - const folder2 = pgSchema('folder2'); - - const from = { - folder1, - enum: folder1.enum('enum', ['value']), - }; - - const to = { - folder2, - enum: folder2.enum('enum', ['value']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['folder1->folder2']); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER SCHEMA "folder1" RENAME TO "folder2";\n`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_schema', - from: 'folder1', - to: 'folder2', - }); -}); - -test('enums #6', async () => { - const folder1 = pgSchema('folder1'); - const folder2 = pgSchema('folder2'); - - const from = { - folder1, - folder2, - enum: folder1.enum('enum', ['value']), - }; - - const to = { - folder1, - folder2, - enum: folder2.enum('enum', ['value']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'folder1.enum->folder2.enum', - ]); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum" SET SCHEMA "folder2";`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'move_type_enum', - name: 'enum', - schemaFrom: 'folder1', - schemaTo: 'folder2', - }); -}); - -test('enums #7', async () => { - const from = { - enum: pgEnum('enum', ['value1']), - }; - - const to = { - enum: pgEnum('enum', ['value1', 'value2']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2';`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_type_add_value', - name: 'enum', - schema: 'public', - value: 'value2', - before: '', - }); -}); - -test('enums #8', async () => { - const from = { - enum: pgEnum('enum', ['value1']), - }; - - const to = { - enum: pgEnum('enum', ['value1', 'value2', 'value3']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2';`); - expect(sqlStatements[1]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value3';`); - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_type_add_value', - name: 'enum', - schema: 'public', - value: 'value2', - before: '', - }); - - expect(statements[1]).toStrictEqual({ - type: 'alter_type_add_value', - name: 'enum', - schema: 'public', - value: 'value3', - before: '', - }); -}); - -test('enums #9', async () => { - const from = { - enum: pgEnum('enum', ['value1', 'value3']), - }; - - const to = { - enum: pgEnum('enum', ['value1', 'value2', 'value3']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2' BEFORE 'value3';`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_type_add_value', - name: 'enum', - schema: 'public', - value: 'value2', - before: 'value3', - }); -}); - -test('enums #10', async () => { - const schema = pgSchema('folder'); - const from = { - enum: schema.enum('enum', ['value1']), - }; - - const to = { - enum: schema.enum('enum', ['value1', 'value2']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "folder"."enum" ADD VALUE 'value2';`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_type_add_value', - name: 'enum', - schema: 'folder', - value: 'value2', - before: '', - }); -}); - -test('enums #11', async () => { - const schema1 = pgSchema('folder1'); - const from = { - enum: schema1.enum('enum', ['value1']), - }; - - const to = { - enum: pgEnum('enum', ['value1']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'folder1.enum->public.enum', - ]); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum" SET SCHEMA "public";`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'move_type_enum', - name: 'enum', - schemaFrom: 'folder1', - schemaTo: 'public', - }); -}); - -test('enums #12', async () => { - const schema1 = pgSchema('folder1'); - const from = { - enum: pgEnum('enum', ['value1']), - }; - - const to = { - enum: schema1.enum('enum', ['value1']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'public.enum->folder1.enum', - ]); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" SET SCHEMA "folder1";`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'move_type_enum', - name: 'enum', - schemaFrom: 'public', - schemaTo: 'folder1', - }); -}); - -test('enums #13', async () => { - const from = { - enum: pgEnum('enum1', ['value1']), - }; - - const to = { - enum: pgEnum('enum2', ['value1']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'public.enum1->public.enum2', - ]); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" RENAME TO "enum2";`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_type_enum', - nameFrom: 'enum1', - nameTo: 'enum2', - schema: 'public', - }); -}); - -test('enums #14', async () => { - const folder1 = pgSchema('folder1'); - const folder2 = pgSchema('folder2'); - const from = { - enum: folder1.enum('enum1', ['value1']), - }; - - const to = { - enum: folder2.enum('enum2', ['value1']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'folder1.enum1->folder2.enum2', - ]); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`); - expect(sqlStatements[1]).toBe(`ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`); - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'move_type_enum', - name: 'enum1', - schemaFrom: 'folder1', - schemaTo: 'folder2', - }); - expect(statements[1]).toStrictEqual({ - type: 'rename_type_enum', - nameFrom: 'enum1', - nameTo: 'enum2', - schema: 'folder2', - }); -}); - -test('enums #15', async () => { - const folder1 = pgSchema('folder1'); - const folder2 = pgSchema('folder2'); - const from = { - enum: folder1.enum('enum1', ['value1', 'value4']), - }; - - const to = { - enum: folder2.enum('enum2', ['value1', 'value2', 'value3', 'value4']), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'folder1.enum1->folder2.enum2', - ]); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`); - expect(sqlStatements[1]).toBe(`ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`); - expect(sqlStatements[2]).toBe(`ALTER TYPE "folder2"."enum2" ADD VALUE 'value2' BEFORE 'value4';`); - expect(sqlStatements[3]).toBe(`ALTER TYPE "folder2"."enum2" ADD VALUE 'value3' BEFORE 'value4';`); - - expect(statements.length).toBe(4); - expect(statements[0]).toStrictEqual({ - type: 'move_type_enum', - name: 'enum1', - schemaFrom: 'folder1', - schemaTo: 'folder2', - }); - expect(statements[1]).toStrictEqual({ - type: 'rename_type_enum', - nameFrom: 'enum1', - nameTo: 'enum2', - schema: 'folder2', - }); - expect(statements[2]).toStrictEqual({ - type: 'alter_type_add_value', - name: 'enum2', - schema: 'folder2', - value: 'value2', - before: 'value4', - }); - expect(statements[3]).toStrictEqual({ - type: 'alter_type_add_value', - name: 'enum2', - schema: 'folder2', - value: 'value3', - before: 'value4', - }); -}); - -test('enums #16', async () => { - const enum1 = pgEnum('enum1', ['value1']); - const enum2 = pgEnum('enum2', ['value1']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column'), - }), - }; - - const to = { - enum2, - table: pgTable('table', { - column: enum2('column'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'public.enum1->public.enum2', - ]); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" RENAME TO "enum2";`); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_type_enum', - nameFrom: 'enum1', - nameTo: 'enum2', - schema: 'public', - }); -}); - -test('enums #17', async () => { - const schema = pgSchema('schema'); - const enum1 = pgEnum('enum1', ['value1']); - const enum2 = schema.enum('enum1', ['value1']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column'), - }), - }; - - const to = { - enum2, - table: pgTable('table', { - column: enum2('column'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'public.enum1->schema.enum1', - ]); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" SET SCHEMA "schema";`); - - expect(sqlStatements.length).toBe(1); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'move_type_enum', - name: 'enum1', - schemaFrom: 'public', - schemaTo: 'schema', - }); -}); - -test('enums #18', async () => { - const schema1 = pgSchema('schema1'); - const schema2 = pgSchema('schema2'); - - const enum1 = schema1.enum('enum1', ['value1']); - const enum2 = schema2.enum('enum2', ['value1']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column'), - }), - }; - - const to = { - enum2, - table: pgTable('table', { - column: enum2('column'), - }), - }; - - // change name and schema of the enum, no table changes - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'schema1.enum1->schema2.enum2', - ]); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`ALTER TYPE "schema1"."enum1" SET SCHEMA "schema2";`); - expect(sqlStatements[1]).toBe(`ALTER TYPE "schema2"."enum1" RENAME TO "enum2";`); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'move_type_enum', - name: 'enum1', - schemaFrom: 'schema1', - schemaTo: 'schema2', - }); - expect(statements[1]).toStrictEqual({ - type: 'rename_type_enum', - nameFrom: 'enum1', - nameTo: 'enum2', - schema: 'schema2', - }); -}); - -test('enums #19', async () => { - const myEnum = pgEnum('my_enum', ["escape's quotes"]); - - const from = {}; - - const to = { myEnum }; - - const { sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toStrictEqual( - 'CREATE TYPE "public"."my_enum" AS ENUM(\'escape\'\'s quotes\');', - ); -}); - -test('enums #20', async () => { - const myEnum = pgEnum('my_enum', ['one', 'two', 'three']); - - const from = { - myEnum, - table: pgTable('table', { - id: serial('id').primaryKey(), - }), - }; - - const to = { - myEnum, - table: pgTable('table', { - id: serial('id').primaryKey(), - col1: myEnum('col1'), - col2: integer('col2'), - }), - }; - - const { sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "table" ADD COLUMN "col1" "my_enum";', - 'ALTER TABLE "table" ADD COLUMN "col2" integer;', - ]); -}); - -test('enums #21', async () => { - const myEnum = pgEnum('my_enum', ['one', 'two', 'three']); - - const from = { - myEnum, - table: pgTable('table', { - id: serial('id').primaryKey(), - }), - }; - - const to = { - myEnum, - table: pgTable('table', { - id: serial('id').primaryKey(), - col1: myEnum('col1').array(), - col2: integer('col2').array(), - }), - }; - - const { sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "table" ADD COLUMN "col1" "my_enum"[];', - 'ALTER TABLE "table" ADD COLUMN "col2" integer[];', - ]); -}); - -test('drop enum value', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - }; - - const enum2 = pgEnum('enum', ['value1', 'value3']); - const to = { - enum2, - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[1]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3');`); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [], - deletedValues: [ - 'value2', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -test('drop enum value. enum is columns data type', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const schema = pgSchema('new_schema'); - - const from = { - schema, - enum1, - table: pgTable('table', { - column: enum1('column'), - }), - table2: schema.table('table', { - column: enum1('column'), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3']); - const to = { - schema, - enum2, - table: pgTable('table', { - column: enum1('column'), - }), - table2: schema.table('table', { - column: enum1('column'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: undefined, - columnType: 'enum', - }, - { - column: 'column', - tableSchema: 'new_schema', - table: 'table', - default: undefined, - columnType: 'enum', - }, - ], - deletedValues: [ - 'value2', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -test('shuffle enum values', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const schema = pgSchema('new_schema'); - - const from = { - schema, - enum1, - table: pgTable('table', { - column: enum1('column'), - }), - table2: schema.table('table', { - column: enum1('column'), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); - const to = { - schema, - enum2, - table: pgTable('table', { - column: enum2('column'), - }), - table2: schema.table('table', { - column: enum2('column'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: undefined, - columnType: 'enum', - }, - { - column: 'column', - tableSchema: 'new_schema', - table: 'table', - columnType: 'enum', - default: undefined, - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -test('enums as ts enum', async () => { - enum Test { - value = 'value', - } - - const to = { - enum: pgEnum('enum', Test), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value');`); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'enum', - schema: 'public', - type: 'create_type_enum', - values: ['value'], - }); -}); - -// + -test('column is enum type with default value. shuffle enum', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').default('value2'), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').default('value2'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"public"."enum";`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: "'value2'", - columnType: 'enum', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is array enum type with default value. shuffle enum', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array().default(['value2']), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').array().default(['value3']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value3"}'::text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value3"}'::"public"."enum"[];`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[] USING "column"::"public"."enum"[];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: `'{"value3"}'`, - columnType: 'enum[]', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is array enum with custom size type with default value. shuffle enum', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array(3).default(['value2']), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').array(3).default(['value2']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::"public"."enum"[3];`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3] USING "column"::"public"."enum"[3];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: `'{"value2"}'`, - columnType: 'enum[3]', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is array enum with custom size type. shuffle enum', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array(3), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').array(3), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[2]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[3]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3] USING "column"::"public"."enum"[3];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: undefined, - columnType: 'enum[3]', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is array of enum with multiple dimenions with custom sizes type. shuffle enum', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array(3).array(2), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').array(3).array(2), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[2]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[3]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3][2] USING "column"::"public"."enum"[3][2];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: undefined, - columnType: 'enum[3][2]', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is array of enum with multiple dimenions type with custom size with default value. shuffle enum', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array(3).array(2).default([['value2']]), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').array(3).array(2).default([['value2']]), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{{"value2"}}'::text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{{"value2"}}'::"public"."enum"[3][2];`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3][2] USING "column"::"public"."enum"[3][2];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: `'{{\"value2\"}}'`, - columnType: 'enum[3][2]', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is enum type with default value. custom schema. shuffle enum', async () => { - const schema = pgSchema('new_schema'); - - const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); - const from = { - schema, - enum1, - table: pgTable('table', { - column: enum1('column').default('value2'), - }), - }; - - const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); - const to = { - schema, - enum2, - table: pgTable('table', { - column: enum2('column').default('value2'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "new_schema"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"new_schema"."enum";`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum" USING "column"::"new_schema"."enum";`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: "'value2'", - columnType: 'enum', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'new_schema', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is array enum type with default value. custom schema. shuffle enum', async () => { - const schema = pgSchema('new_schema'); - - const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: schema.table('table', { - column: enum1('column').array().default(['value2']), - }), - }; - - const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: schema.table('table', { - column: enum2('column').array().default(['value2']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::text;`, - ); - expect(sqlStatements[2]).toBe(`DROP TYPE "new_schema"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::"new_schema"."enum"[];`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[] USING "column"::"new_schema"."enum"[];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: 'new_schema', - table: 'table', - default: `'{"value2"}'`, - columnType: 'enum[]', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'new_schema', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is array enum type with custom size with default value. custom schema. shuffle enum', async () => { - const schema = pgSchema('new_schema'); - - const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: schema.table('table', { - column: enum1('column').array(3).default(['value2']), - }), - }; - - const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: schema.table('table', { - column: enum2('column').array(3).default(['value2']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::text;`, - ); - expect(sqlStatements[2]).toBe(`DROP TYPE "new_schema"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::"new_schema"."enum"[3];`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[3] USING "column"::"new_schema"."enum"[3];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: 'new_schema', - table: 'table', - default: `'{"value2"}'`, - columnType: 'enum[3]', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'new_schema', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is array enum type with custom size. custom schema. shuffle enum', async () => { - const schema = pgSchema('new_schema'); - - const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: schema.table('table', { - column: enum1('column').array(3), - }), - }; - - const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: schema.table('table', { - column: enum2('column').array(3), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`DROP TYPE "new_schema"."enum";`); - expect(sqlStatements[2]).toBe(`CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[3]).toBe( - `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[3] USING "column"::"new_schema"."enum"[3];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: 'new_schema', - table: 'table', - default: undefined, - columnType: 'enum[3]', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'new_schema', - type: 'alter_type_drop_value', - }); -}); - -// + -test('column is enum type without default value. add default to column', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column'), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').default('value3'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3';`); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'enum', - newDefaultValue: "'value3'", - schema: '', - tableName: 'table', - type: 'alter_table_alter_column_set_default', - }); -}); - -// + -test('change data type from standart type to enum', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: varchar('column'), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: enum1('column'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum', - }, - oldDataType: { - isEnum: false, - name: 'varchar', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -// + -test('change data type from standart type to enum. column has default', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: varchar('column').default('value2'), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: enum1('column').default('value3'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3'::"public"."enum";`); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: "'value3'", - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum', - }, - oldDataType: { - isEnum: false, - name: 'varchar', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -// + -test('change data type from array standart type to array enum. column has default', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: varchar('column').array().default(['value2']), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: enum1('column').array().default(['value3']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value3"}'::"public"."enum"[];`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[] USING "column"::"public"."enum"[];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: `'{"value3"}'`, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum[]', - }, - oldDataType: { - isEnum: false, - name: 'varchar[]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -// + -test('change data type from array standart type to array enum. column without default', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: varchar('column').array(), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: enum1('column').array(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[] USING "column"::"public"."enum"[];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum[]', - }, - oldDataType: { - isEnum: false, - name: 'varchar[]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -// + -test('change data type from array standart type with custom size to array enum with custom size. column has default', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: varchar('column').array(3).default(['value2']), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: enum1('column').array(3).default(['value3']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value3"}'::"public"."enum"[3];`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3] USING "column"::"public"."enum"[3];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: `'{"value3"}'`, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum[3]', - }, - oldDataType: { - isEnum: false, - name: 'varchar[3]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -// + -test('change data type from array standart type with custom size to array enum with custom size. column without default', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: varchar('column').array(2), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: enum1('column').array(2), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[2] USING "column"::"public"."enum"[2];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum[2]', - }, - oldDataType: { - isEnum: false, - name: 'varchar[2]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -// + -test('change data type from enum type to standart type', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column'), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: varchar('column'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar;`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'varchar', - }, - oldDataType: { - isEnum: true, - name: 'enum', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from enum type to standart type. column has default', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').default('value3'), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: varchar('column').default('value2'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar;`, - ); - expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2';`); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: "'value2'", - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'varchar', - }, - oldDataType: { - isEnum: true, - name: 'enum', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from array enum type to array standart type', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array(), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: varchar('column').array(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'varchar[]', - }, - oldDataType: { - isEnum: true, - name: 'enum[]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from array enum with custom size type to array standart type with custom size', async () => { - const enum1 = pgEnum('enum', ['value1', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array(2), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: varchar('column').array(2), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[2];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'varchar[2]', - }, - oldDataType: { - isEnum: true, - name: 'enum[2]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// -test('change data type from array enum type to array standart type. column has default', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array().default(['value2']), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: varchar('column').array().default(['value2']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[];`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value2"}';`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: `'{"value2"}'`, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'varchar[]', - }, - oldDataType: { - isEnum: true, - name: 'enum[]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from array enum type with custom size to array standart type with custom size. column has default', async () => { - const enum1 = pgEnum('enum', ['value1', 'value2']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').array(3).default(['value2']), - }), - }; - - const to = { - enum1, - table: pgTable('table', { - column: varchar('column').array(3).default(['value2']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[3];`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value2"}';`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: `'{"value2"}'`, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'varchar[3]', - }, - oldDataType: { - isEnum: true, - name: 'enum[3]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from standart type to standart type', async () => { - const from = { - table: pgTable('table', { - column: varchar('column'), - }), - }; - - const to = { - table: pgTable('table', { - column: text('column'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'text', - }, - oldDataType: { - isEnum: false, - name: 'varchar', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from standart type to standart type. column has default', async () => { - const from = { - table: pgTable('table', { - column: varchar('column').default('value3'), - }), - }; - - const to = { - table: pgTable('table', { - column: text('column').default('value2'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2';`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: "'value2'", - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'text', - }, - oldDataType: { - isEnum: false, - name: 'varchar', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from standart type to standart type. columns are arrays', async () => { - const from = { - table: pgTable('table', { - column: varchar('column').array(), - }), - }; - - const to = { - table: pgTable('table', { - column: text('column').array(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'text[]', - }, - oldDataType: { - isEnum: false, - name: 'varchar[]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from standart type to standart type. columns are arrays with custom sizes', async () => { - const from = { - table: pgTable('table', { - column: varchar('column').array(2), - }), - }; - - const to = { - table: pgTable('table', { - column: text('column').array(2), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[2];`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'text[2]', - }, - oldDataType: { - isEnum: false, - name: 'varchar[2]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from standart type to standart type. columns are arrays. column has default', async () => { - const from = { - table: pgTable('table', { - column: varchar('column').array().default(['hello']), - }), - }; - - const to = { - table: pgTable('table', { - column: text('column').array().default(['hello']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"hello"}';`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: `'{"hello"}'`, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'text[]', - }, - oldDataType: { - isEnum: false, - name: 'varchar[]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from standart type to standart type. columns are arrays with custom sizes.column has default', async () => { - const from = { - table: pgTable('table', { - column: varchar('column').array(2).default(['hello']), - }), - }; - - const to = { - table: pgTable('table', { - column: text('column').array(2).default(['hello']), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[2];`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"hello"}';`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: `'{"hello"}'`, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: false, - name: 'text[2]', - }, - oldDataType: { - isEnum: false, - name: 'varchar[2]', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: undefined, - }); -}); - -// + -test('change data type from one enum to other', async () => { - const enum1 = pgEnum('enum1', ['value1', 'value3']); - const enum2 = pgEnum('enum2', ['value1', 'value3']); - - const from = { - enum1, - enum2, - table: pgTable('table', { - column: enum1('column'), - }), - }; - - const to = { - enum1, - enum2, - table: pgTable('table', { - column: enum2('column'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum2" USING "column"::text::"public"."enum2";`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: undefined, - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum2', - }, - oldDataType: { - isEnum: true, - name: 'enum1', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -// + -test('change data type from one enum to other. column has default', async () => { - const enum1 = pgEnum('enum1', ['value1', 'value3']); - const enum2 = pgEnum('enum2', ['value1', 'value3']); - - const from = { - enum1, - enum2, - table: pgTable('table', { - column: enum1('column').default('value3'), - }), - }; - - const to = { - enum1, - enum2, - table: pgTable('table', { - column: enum2('column').default('value3'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(3); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum2" USING "column"::text::"public"."enum2";`, - ); - expect(sqlStatements[2]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3';`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: "'value3'", - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum2', - }, - oldDataType: { - isEnum: true, - name: 'enum1', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -// + -test('change data type from one enum to other. changed defaults', async () => { - const enum1 = pgEnum('enum1', ['value1', 'value3']); - const enum2 = pgEnum('enum2', ['value1', 'value3']); - - const from = { - enum1, - enum2, - table: pgTable('table', { - column: enum1('column').default('value3'), - }), - }; - - const to = { - enum1, - enum2, - table: pgTable('table', { - column: enum2('column').default('value1'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(3); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum2" USING "column"::text::"public"."enum2";`, - ); - expect(sqlStatements[2]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value1';`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: "'value1'", - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum2', - }, - oldDataType: { - isEnum: true, - name: 'enum1', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); - -test('check filtering json statements. here we have recreate enum + set new type + alter default', async () => { - const enum1 = pgEnum('enum1', ['value1', 'value3']); - const from = { - enum1, - table: pgTable('table', { - column: varchar('column').default('value3'), - }), - }; - - const enum2 = pgEnum('enum1', ['value3', 'value1', 'value2']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').default('value2'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum1";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum1" AS ENUM('value3', 'value1', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"public"."enum1";`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum1" USING "column"::"public"."enum1";`, - ); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - columnType: 'enum1', - default: "'value2'", - table: 'table', - tableSchema: '', - }, - ], - deletedValues: [ - 'value3', - ], - enumSchema: 'public', - name: 'enum1', - newValues: [ - 'value3', - 'value1', - 'value2', - ], - type: 'alter_type_drop_value', - }); - expect(statements[1]).toStrictEqual({ - columnAutoIncrement: undefined, - columnDefault: "'value2'", - columnName: 'column', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: { - isEnum: true, - name: 'enum1', - }, - oldDataType: { - isEnum: false, - name: 'varchar', - }, - schema: '', - tableName: 'table', - type: 'pg_alter_table_alter_column_set_type', - typeSchema: 'public', - }); -}); diff --git a/drizzle-kit/tests/pg-identity.test.ts b/drizzle-kit/tests/pg-identity.test.ts deleted file mode 100644 index efb481da30..0000000000 --- a/drizzle-kit/tests/pg-identity.test.ts +++ /dev/null @@ -1,447 +0,0 @@ -import { integer, pgSequence, pgTable } from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -// same table - no diff -// 2. identity always/by default - no params + -// 3. identity always/by default - with a few params + -// 4. identity always/by default - with all params + - -// diff table with create statement -// 2. identity always/by default - no params + -// 3. identity always/by default - with a few params + -// 4. identity always/by default - with all params + - -// diff for drop statement -// 2. identity always/by default - no params, with params + - -// diff for alters -// 2. identity always/by default - no params -> add param + -// 3. identity always/by default - with a few params - remove/add/change params + -// 4. identity always/by default - with all params - remove/add/change params + - -test('create table: identity always/by default - no params', async () => { - const from = {}; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columns: [ - { - identity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', - name: 'id', - notNull: true, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - compositePkName: '', - schema: '', - policies: [], - isRLSEnabled: false, - tableName: 'users', - type: 'create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1)\n);\n', - ]); -}); - -test('create table: identity always/by default - few params', async () => { - const from = {}; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_seq', - increment: 4, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columns: [ - { - identity: 'custom_seq;byDefault;1;2147483647;4;1;1;false', - name: 'id', - notNull: true, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - compositePkName: '', - policies: [], - schema: '', - isRLSEnabled: false, - tableName: 'users', - type: 'create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_seq" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1)\n);\n', - ]); -}); - -test('create table: identity always/by default - all params', async () => { - const from = {}; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_seq', - increment: 4, - minValue: 3, - maxValue: 1000, - cache: 200, - cycle: false, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columns: [ - { - identity: 'custom_seq;byDefault;3;1000;4;3;200;false', - name: 'id', - notNull: true, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - compositePkName: '', - policies: [], - isRLSEnabled: false, - schema: '', - tableName: 'users', - type: 'create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_seq" INCREMENT BY 4 MINVALUE 3 MAXVALUE 1000 START WITH 3 CACHE 200)\n);\n', - ]); -}); - -test('no diff: identity always/by default - no params', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('no diff: identity always/by default - few params', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_seq', - increment: 4, - }), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_seq', - increment: 4, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('no diff: identity always/by default - all params', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_seq', - increment: 4, - minValue: 3, - maxValue: 1000, - cache: 200, - cycle: false, - }), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_seq', - increment: 4, - minValue: 3, - maxValue: 1000, - cache: 200, - cycle: false, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('drop identity from a column - no params', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, - ]); -}); - -test('drop identity from a column - few params', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - startWith: 100, - increment: 3, - }), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, - ]); -}); - -test('drop identity from a column - all params', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - startWith: 100, - increment: 3, - cache: 100, - cycle: true, - }), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, - ]); -}); - -test('alter identity from a column - no params', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', - oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_change_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', - ]); -}); - -test('alter identity from a column - few params', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - startWith: 100, - cache: 10, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'users_id_seq;byDefault;1;2147483647;1;100;10;false', - oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_change_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', - ]); -}); - -test('alter identity from a column - by default to always', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id').generatedAlwaysAsIdentity({ - startWith: 100, - cache: 10, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'users_id_seq;always;1;2147483647;1;100;10;false', - oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_change_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED ALWAYS;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', - ]); -}); - -test('alter identity from a column - always to by default', async () => { - const from = { - users: pgTable('users', { - id: integer('id').generatedAlwaysAsIdentity(), - }), - }; - - const to = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - startWith: 100, - cache: 10, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'users_id_seq;byDefault;1;2147483647;1;100;10;false', - oldIdentity: 'users_id_seq;always;1;2147483647;1;1;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_change_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', - ]); -}); diff --git a/drizzle-kit/tests/pg-schemas.test.ts b/drizzle-kit/tests/pg-schemas.test.ts deleted file mode 100644 index d8c724e270..0000000000 --- a/drizzle-kit/tests/pg-schemas.test.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { pgSchema } from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -test('add schema #1', async () => { - const to = { - devSchema: pgSchema('dev'), - }; - - const { statements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'dev', - }); -}); - -test('add schema #2', async () => { - const from = { - devSchema: pgSchema('dev'), - }; - const to = { - devSchema: pgSchema('dev'), - devSchema2: pgSchema('dev2'), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'dev2', - }); -}); - -test('delete schema #1', async () => { - const from = { - devSchema: pgSchema('dev'), - }; - - const { statements } = await diffTestSchemas(from, {}, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'drop_schema', - name: 'dev', - }); -}); - -test('delete schema #2', async () => { - const from = { - devSchema: pgSchema('dev'), - devSchema2: pgSchema('dev2'), - }; - const to = { - devSchema: pgSchema('dev'), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'drop_schema', - name: 'dev2', - }); -}); - -test('rename schema #1', async () => { - const from = { - devSchema: pgSchema('dev'), - }; - const to = { - devSchema2: pgSchema('dev2'), - }; - - const { statements } = await diffTestSchemas(from, to, ['dev->dev2']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_schema', - from: 'dev', - to: 'dev2', - }); -}); - -test('rename schema #2', async () => { - const from = { - devSchema: pgSchema('dev'), - devSchema1: pgSchema('dev1'), - }; - const to = { - devSchema: pgSchema('dev'), - devSchema2: pgSchema('dev2'), - }; - - const { statements } = await diffTestSchemas(from, to, ['dev1->dev2']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_schema', - from: 'dev1', - to: 'dev2', - }); -}); diff --git a/drizzle-kit/tests/pg-sequences.test.ts b/drizzle-kit/tests/pg-sequences.test.ts deleted file mode 100644 index 05ca5b1bda..0000000000 --- a/drizzle-kit/tests/pg-sequences.test.ts +++ /dev/null @@ -1,298 +0,0 @@ -import { pgSchema, pgSequence } from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -test('create sequence', async () => { - const from = {}; - const to = { - seq: pgSequence('name', { startWith: 100 }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - name: 'name', - schema: 'public', - type: 'create_sequence', - values: { - cache: '1', - cycle: false, - increment: '1', - maxValue: '9223372036854775807', - minValue: '1', - startWith: '100', - }, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE SEQUENCE "public"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', - ]); -}); - -test('create sequence: all fields', async () => { - const from = {}; - const to = { - seq: pgSequence('name', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 2, - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - type: 'create_sequence', - name: 'name', - schema: 'public', - values: { - startWith: '100', - maxValue: '10000', - minValue: '100', - cycle: true, - cache: '10', - increment: '2', - }, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE SEQUENCE "public"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', - ]); -}); - -test('create sequence: custom schema', async () => { - const customSchema = pgSchema('custom'); - const from = {}; - const to = { - seq: customSchema.sequence('name', { startWith: 100 }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - name: 'name', - schema: 'custom', - type: 'create_sequence', - values: { - cache: '1', - cycle: false, - increment: '1', - maxValue: '9223372036854775807', - minValue: '1', - startWith: '100', - }, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE SEQUENCE "custom"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', - ]); -}); - -test('create sequence: custom schema + all fields', async () => { - const customSchema = pgSchema('custom'); - const from = {}; - const to = { - seq: customSchema.sequence('name', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 2, - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - type: 'create_sequence', - name: 'name', - schema: 'custom', - values: { - startWith: '100', - maxValue: '10000', - minValue: '100', - cycle: true, - cache: '10', - increment: '2', - }, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE SEQUENCE "custom"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', - ]); -}); - -test('drop sequence', async () => { - const from = { seq: pgSequence('name', { startWith: 100 }) }; - const to = {}; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - type: 'drop_sequence', - name: 'name', - schema: 'public', - }, - ]); - expect(sqlStatements).toStrictEqual(['DROP SEQUENCE "public"."name";']); -}); - -test('drop sequence: custom schema', async () => { - const customSchema = pgSchema('custom'); - const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; - const to = {}; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - type: 'drop_sequence', - name: 'name', - schema: 'custom', - }, - ]); - expect(sqlStatements).toStrictEqual(['DROP SEQUENCE "custom"."name";']); -}); - -// rename sequence - -test('rename sequence', async () => { - const from = { seq: pgSequence('name', { startWith: 100 }) }; - const to = { seq: pgSequence('name_new', { startWith: 100 }) }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'public.name->public.name_new', - ]); - - expect(statements).toStrictEqual([ - { - type: 'rename_sequence', - nameFrom: 'name', - nameTo: 'name_new', - schema: 'public', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER SEQUENCE "public"."name" RENAME TO "name_new";', - ]); -}); - -test('rename sequence in custom schema', async () => { - const customSchema = pgSchema('custom'); - - const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; - const to = { seq: customSchema.sequence('name_new', { startWith: 100 }) }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'custom.name->custom.name_new', - ]); - - expect(statements).toStrictEqual([ - { - type: 'rename_sequence', - nameFrom: 'name', - nameTo: 'name_new', - schema: 'custom', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER SEQUENCE "custom"."name" RENAME TO "name_new";', - ]); -}); - -test('move sequence between schemas #1', async () => { - const customSchema = pgSchema('custom'); - const from = { seq: pgSequence('name', { startWith: 100 }) }; - const to = { seq: customSchema.sequence('name', { startWith: 100 }) }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'public.name->custom.name', - ]); - - expect(statements).toStrictEqual([ - { - type: 'move_sequence', - name: 'name', - schemaFrom: 'public', - schemaTo: 'custom', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER SEQUENCE "public"."name" SET SCHEMA "custom";', - ]); -}); - -test('move sequence between schemas #2', async () => { - const customSchema = pgSchema('custom'); - const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; - const to = { seq: pgSequence('name', { startWith: 100 }) }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, [ - 'custom.name->public.name', - ]); - - expect(statements).toStrictEqual([ - { - type: 'move_sequence', - name: 'name', - schemaFrom: 'custom', - schemaTo: 'public', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER SEQUENCE "custom"."name" SET SCHEMA "public";', - ]); -}); - -// Add squasher for sequences to make alters work + -// Run all tests + -// Finish introspect for sequences + -// Check push for sequences + - -// add tests for generated to postgresql + -// add tests for generated to mysql + -// add tests for generated to sqlite + - -// add tests for identity to postgresql - -// check introspect generated(all dialects) + -// check push generated(all dialect) + - -// add introspect ts file logic for all the features -// manually test everything -// beta release - -test('alter sequence', async () => { - const from = { seq: pgSequence('name', { startWith: 100 }) }; - const to = { seq: pgSequence('name', { startWith: 105 }) }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - name: 'name', - schema: 'public', - type: 'alter_sequence', - values: { - cache: '1', - cycle: false, - increment: '1', - maxValue: '9223372036854775807', - minValue: '1', - startWith: '105', - }, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER SEQUENCE "public"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 105 CACHE 1;', - ]); -}); diff --git a/drizzle-kit/tests/pg-tables.test.ts b/drizzle-kit/tests/pg-tables.test.ts deleted file mode 100644 index 4ca01f1fe7..0000000000 --- a/drizzle-kit/tests/pg-tables.test.ts +++ /dev/null @@ -1,957 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { - AnyPgColumn, - foreignKey, - geometry, - index, - integer, - pgEnum, - pgSchema, - pgSequence, - pgTable, - pgTableCreator, - primaryKey, - serial, - text, - unique, - uniqueIndex, - vector, -} from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -test('add table #1', async () => { - const to = { - users: pgTable('users', {}), - }; - - const { statements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [], - compositePKs: [], - policies: [], - uniqueConstraints: [], - checkConstraints: [], - isRLSEnabled: false, - compositePkName: '', - }); -}); - -test('add table #2', async () => { - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - }), - }; - - const { statements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [ - { - name: 'id', - notNull: true, - primaryKey: true, - type: 'serial', - }, - ], - compositePKs: [], - isRLSEnabled: false, - policies: [], - uniqueConstraints: [], - checkConstraints: [], - compositePkName: '', - }); -}); - -test('add table #3', async () => { - const to = { - users: pgTable( - 'users', - { - id: serial('id'), - }, - (t) => { - return { - pk: primaryKey({ - name: 'users_pk', - columns: [t.id], - }), - }; - }, - ), - }; - - const { statements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [ - { - name: 'id', - notNull: true, - primaryKey: false, - type: 'serial', - }, - ], - compositePKs: ['id;users_pk'], - policies: [], - uniqueConstraints: [], - isRLSEnabled: false, - checkConstraints: [], - compositePkName: 'users_pk', - }); -}); - -test('add table #4', async () => { - const to = { - users: pgTable('users', {}), - posts: pgTable('posts', {}), - }; - - const { statements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [], - compositePKs: [], - policies: [], - uniqueConstraints: [], - checkConstraints: [], - isRLSEnabled: false, - compositePkName: '', - }); - expect(statements[1]).toStrictEqual({ - type: 'create_table', - tableName: 'posts', - policies: [], - schema: '', - columns: [], - compositePKs: [], - isRLSEnabled: false, - uniqueConstraints: [], - checkConstraints: [], - compositePkName: '', - }); -}); - -test('add table #5', async () => { - const schema = pgSchema('folder'); - const from = { - schema, - }; - - const to = { - schema, - users: schema.table('users', {}), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: 'folder', - columns: [], - compositePKs: [], - policies: [], - uniqueConstraints: [], - compositePkName: '', - checkConstraints: [], - isRLSEnabled: false, - }); -}); - -test('add table #6', async () => { - const from = { - users1: pgTable('users1', {}), - }; - - const to = { - users2: pgTable('users2', {}), - }; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users2', - schema: '', - columns: [], - compositePKs: [], - uniqueConstraints: [], - policies: [], - compositePkName: '', - checkConstraints: [], - isRLSEnabled: false, - }); - expect(statements[1]).toStrictEqual({ - type: 'drop_table', - policies: [], - tableName: 'users1', - schema: '', - }); -}); - -test('add table #7', async () => { - const from = { - users1: pgTable('users1', {}), - }; - - const to = { - users: pgTable('users', {}), - users2: pgTable('users2', {}), - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'public.users1->public.users2', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [], - compositePKs: [], - policies: [], - uniqueConstraints: [], - compositePkName: '', - isRLSEnabled: false, - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'rename_table', - tableNameFrom: 'users1', - tableNameTo: 'users2', - fromSchema: '', - toSchema: '', - }); -}); - -test('add table #8: geometry types', async () => { - const from = {}; - - const to = { - users: pgTable('users', { - geom: geometry('geom', { type: 'point' }).notNull(), - geom1: geometry('geom1').notNull(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - - expect(sqlStatements).toStrictEqual([ - `CREATE TABLE "users" (\n\t"geom" geometry(point) NOT NULL,\n\t"geom1" geometry(point) NOT NULL\n);\n`, - ]); -}); - -test('multiproject schema add table #1', async () => { - const table = pgTableCreator((name) => `prefix_${name}`); - - const to = { - users: table('users', { - id: serial('id').primaryKey(), - }), - }; - - const { statements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'prefix_users', - schema: '', - columns: [ - { - name: 'id', - notNull: true, - primaryKey: true, - type: 'serial', - }, - ], - compositePKs: [], - policies: [], - compositePkName: '', - isRLSEnabled: false, - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('multiproject schema drop table #1', async () => { - const table = pgTableCreator((name) => `prefix_${name}`); - - const from = { - users: table('users', { - id: serial('id').primaryKey(), - }), - }; - const to = {}; - - const { statements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - schema: '', - tableName: 'prefix_users', - type: 'drop_table', - policies: [], - }); -}); - -test('multiproject schema alter table name #1', async () => { - const table = pgTableCreator((name) => `prefix_${name}`); - - const from = { - users: table('users', { - id: serial('id').primaryKey(), - }), - }; - const to = { - users1: table('users1', { - id: serial('id').primaryKey(), - }), - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'public.prefix_users->public.prefix_users1', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_table', - fromSchema: '', - toSchema: '', - tableNameFrom: 'prefix_users', - tableNameTo: 'prefix_users1', - }); -}); - -test('add table #8: column with pgvector', async () => { - const from = {}; - - const to = { - users2: pgTable('users2', { - id: serial('id').primaryKey(), - name: vector('name', { dimensions: 3 }), - }), - }; - - const { sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements[0]).toBe( - `CREATE TABLE "users2" (\n\t"id" serial PRIMARY KEY NOT NULL,\n\t"name" vector(3)\n); -`, - ); -}); - -test('add schema + table #1', async () => { - const schema = pgSchema('folder'); - - const to = { - schema, - users: schema.table('users', {}), - }; - - const { statements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'folder', - }); - - expect(statements[1]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: 'folder', - policies: [], - columns: [], - compositePKs: [], - isRLSEnabled: false, - uniqueConstraints: [], - compositePkName: '', - checkConstraints: [], - }); -}); - -test('change schema with tables #1', async () => { - const schema = pgSchema('folder'); - const schema2 = pgSchema('folder2'); - const from = { - schema, - users: schema.table('users', {}), - }; - const to = { - schema2, - users: schema2.table('users', {}), - }; - - const { statements } = await diffTestSchemas(from, to, ['folder->folder2']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_schema', - from: 'folder', - to: 'folder2', - }); -}); - -test('change table schema #1', async () => { - const schema = pgSchema('folder'); - const from = { - schema, - users: pgTable('users', {}), - }; - const to = { - schema, - users: schema.table('users', {}), - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'public.users->folder.users', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_set_schema', - tableName: 'users', - schemaFrom: 'public', - schemaTo: 'folder', - }); -}); - -test('change table schema #2', async () => { - const schema = pgSchema('folder'); - const from = { - schema, - users: schema.table('users', {}), - }; - const to = { - schema, - users: pgTable('users', {}), - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'folder.users->public.users', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_set_schema', - tableName: 'users', - schemaFrom: 'folder', - schemaTo: 'public', - }); -}); - -test('change table schema #3', async () => { - const schema1 = pgSchema('folder1'); - const schema2 = pgSchema('folder2'); - const from = { - schema1, - schema2, - users: schema1.table('users', {}), - }; - const to = { - schema1, - schema2, - users: schema2.table('users', {}), - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'folder1.users->folder2.users', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_set_schema', - tableName: 'users', - schemaFrom: 'folder1', - schemaTo: 'folder2', - }); -}); - -test('change table schema #4', async () => { - const schema1 = pgSchema('folder1'); - const schema2 = pgSchema('folder2'); - const from = { - schema1, - users: schema1.table('users', {}), - }; - const to = { - schema1, - schema2, // add schema - users: schema2.table('users', {}), // move table - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'folder1.users->folder2.users', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'folder2', - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_table_set_schema', - tableName: 'users', - schemaFrom: 'folder1', - schemaTo: 'folder2', - }); -}); - -test('change table schema #5', async () => { - const schema1 = pgSchema('folder1'); - const schema2 = pgSchema('folder2'); - const from = { - schema1, // remove schema - users: schema1.table('users', {}), - }; - const to = { - schema2, // add schema - users: schema2.table('users', {}), // move table - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'folder1.users->folder2.users', - ]); - - expect(statements.length).toBe(3); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'folder2', - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_table_set_schema', - tableName: 'users', - schemaFrom: 'folder1', - schemaTo: 'folder2', - }); - expect(statements[2]).toStrictEqual({ - type: 'drop_schema', - name: 'folder1', - }); -}); - -test('change table schema #5', async () => { - const schema1 = pgSchema('folder1'); - const schema2 = pgSchema('folder2'); - const from = { - schema1, - schema2, - users: schema1.table('users', {}), - }; - const to = { - schema1, - schema2, - users: schema2.table('users2', {}), // rename and move table - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'folder1.users->folder2.users2', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_set_schema', - tableName: 'users', - schemaFrom: 'folder1', - schemaTo: 'folder2', - }); - expect(statements[1]).toStrictEqual({ - type: 'rename_table', - tableNameFrom: 'users', - tableNameTo: 'users2', - fromSchema: 'folder2', - toSchema: 'folder2', - }); -}); - -test('change table schema #6', async () => { - const schema1 = pgSchema('folder1'); - const schema2 = pgSchema('folder2'); - const from = { - schema1, - users: schema1.table('users', {}), - }; - const to = { - schema2, // rename schema - users: schema2.table('users2', {}), // rename table - }; - - const { statements } = await diffTestSchemas(from, to, [ - 'folder1->folder2', - 'folder2.users->folder2.users2', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'rename_schema', - from: 'folder1', - to: 'folder2', - }); - expect(statements[1]).toStrictEqual({ - type: 'rename_table', - tableNameFrom: 'users', - tableNameTo: 'users2', - fromSchema: 'folder2', - toSchema: 'folder2', - }); -}); - -test('drop table + rename schema #1', async () => { - const schema1 = pgSchema('folder1'); - const schema2 = pgSchema('folder2'); - const from = { - schema1, - users: schema1.table('users', {}), - }; - const to = { - schema2, // rename schema - // drop table - }; - - const { statements } = await diffTestSchemas(from, to, ['folder1->folder2']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'rename_schema', - from: 'folder1', - to: 'folder2', - }); - expect(statements[1]).toStrictEqual({ - type: 'drop_table', - tableName: 'users', - schema: 'folder2', - policies: [], - }); -}); - -test('create table with tsvector', async () => { - const from = {}; - const to = { - users: pgTable( - 'posts', - { - id: serial('id').primaryKey(), - title: text('title').notNull(), - description: text('description').notNull(), - }, - (table) => ({ - titleSearchIndex: index('title_search_index').using( - 'gin', - sql`to_tsvector('english', ${table.title})`, - ), - }), - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "posts" (\n\t"id" serial PRIMARY KEY NOT NULL,\n\t"title" text NOT NULL,\n\t"description" text NOT NULL\n);\n', - `CREATE INDEX "title_search_index" ON "posts" USING gin (to_tsvector('english', "title"));`, - ]); -}); - -test('composite primary key', async () => { - const from = {}; - const to = { - table: pgTable('works_to_creators', { - workId: integer('work_id').notNull(), - creatorId: integer('creator_id').notNull(), - classification: text('classification').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.workId, t.creatorId, t.classification], - }), - })), - }; - - const { sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "works_to_creators" (\n\t"work_id" integer NOT NULL,\n\t"creator_id" integer NOT NULL,\n\t"classification" text NOT NULL,\n\tCONSTRAINT "works_to_creators_work_id_creator_id_classification_pk" PRIMARY KEY("work_id","creator_id","classification")\n);\n', - ]); -}); - -test('add column before creating unique constraint', async () => { - const from = { - table: pgTable('table', { - id: serial('id').primaryKey(), - }), - }; - const to = { - table: pgTable('table', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }, (t) => ({ - uq: unique('uq').on(t.name), - })), - }; - - const { sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "table" ADD COLUMN "name" text NOT NULL;', - 'ALTER TABLE "table" ADD CONSTRAINT "uq" UNIQUE("name");', - ]); -}); - -test('alter composite primary key', async () => { - const from = { - table: pgTable('table', { - col1: integer('col1').notNull(), - col2: integer('col2').notNull(), - col3: text('col3').notNull(), - }, (t) => ({ - pk: primaryKey({ - name: 'table_pk', - columns: [t.col1, t.col2], - }), - })), - }; - const to = { - table: pgTable('table', { - col1: integer('col1').notNull(), - col2: integer('col2').notNull(), - col3: text('col3').notNull(), - }, (t) => ({ - pk: primaryKey({ - name: 'table_pk', - columns: [t.col2, t.col3], - }), - })), - }; - - const { sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "table" DROP CONSTRAINT "table_pk";\n--> statement-breakpoint\nALTER TABLE "table" ADD CONSTRAINT "table_pk" PRIMARY KEY("col2","col3");', - ]); -}); - -test('add index with op', async () => { - const from = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }), - }; - const to = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }, (t) => ({ - nameIdx: index().using('gin', t.name.op('gin_trgm_ops')), - })), - }; - - const { sqlStatements } = await diffTestSchemas(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE INDEX "users_name_index" ON "users" USING gin ("name" gin_trgm_ops);', - ]); -}); - -test('optional db aliases (snake case)', async () => { - const from = {}; - - const t1 = pgTable( - 't1', - { - t1Id1: integer().notNull().primaryKey(), - t1Col2: integer().notNull(), - t1Col3: integer().notNull(), - t2Ref: integer().notNull().references(() => t2.t2Id), - t1Uni: integer().notNull(), - t1UniIdx: integer().notNull(), - t1Idx: integer().notNull(), - }, - (table) => ({ - uni: unique('t1_uni').on(table.t1Uni), - uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx), - idx: index('t1_idx').on(table.t1Idx).where(sql`${table.t1Idx} > 0`), - fk: foreignKey({ - columns: [table.t1Col2, table.t1Col3], - foreignColumns: [t3.t3Id1, t3.t3Id2], - }), - }), - ); - - const t2 = pgTable( - 't2', - { - t2Id: serial().primaryKey(), - }, - ); - - const t3 = pgTable( - 't3', - { - t3Id1: integer(), - t3Id2: integer(), - }, - (table) => ({ - pk: primaryKey({ - columns: [table.t3Id1, table.t3Id2], - }), - }), - ); - - const to = { - t1, - t2, - t3, - }; - - const { sqlStatements } = await diffTestSchemas(from, to, [], false, 'snake_case'); - - const st1 = `CREATE TABLE "t1" ( - "t1_id1" integer PRIMARY KEY NOT NULL, - "t1_col2" integer NOT NULL, - "t1_col3" integer NOT NULL, - "t2_ref" integer NOT NULL, - "t1_uni" integer NOT NULL, - "t1_uni_idx" integer NOT NULL, - "t1_idx" integer NOT NULL, - CONSTRAINT "t1_uni" UNIQUE("t1_uni") -); -`; - - const st2 = `CREATE TABLE "t2" ( - "t2_id" serial PRIMARY KEY NOT NULL -); -`; - - const st3 = `CREATE TABLE "t3" ( - "t3_id1" integer, - "t3_id2" integer, - CONSTRAINT "t3_t3_id1_t3_id2_pk" PRIMARY KEY("t3_id1","t3_id2") -); -`; - - const st4 = - `ALTER TABLE "t1" ADD CONSTRAINT "t1_t2_ref_t2_t2_id_fk" FOREIGN KEY ("t2_ref") REFERENCES "public"."t2"("t2_id") ON DELETE no action ON UPDATE no action;`; - - const st5 = - `ALTER TABLE "t1" ADD CONSTRAINT "t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fk" FOREIGN KEY ("t1_col2","t1_col3") REFERENCES "public"."t3"("t3_id1","t3_id2") ON DELETE no action ON UPDATE no action;`; - - const st6 = `CREATE UNIQUE INDEX "t1_uni_idx" ON "t1" USING btree ("t1_uni_idx");`; - - const st7 = `CREATE INDEX "t1_idx" ON "t1" USING btree ("t1_idx") WHERE "t1"."t1_idx" > 0;`; - - expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]); -}); - -test('optional db aliases (camel case)', async () => { - const from = {}; - - const t1 = pgTable( - 't1', - { - t1_id1: integer().notNull().primaryKey(), - t1_col2: integer().notNull(), - t1_col3: integer().notNull(), - t2_ref: integer().notNull().references(() => t2.t2_id), - t1_uni: integer().notNull(), - t1_uni_idx: integer().notNull(), - t1_idx: integer().notNull(), - }, - (table) => ({ - uni: unique('t1Uni').on(table.t1_uni), - uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx), - idx: index('t1Idx').on(table.t1_idx).where(sql`${table.t1_idx} > 0`), - fk: foreignKey({ - columns: [table.t1_col2, table.t1_col3], - foreignColumns: [t3.t3_id1, t3.t3_id2], - }), - }), - ); - - const t2 = pgTable( - 't2', - { - t2_id: serial().primaryKey(), - }, - ); - - const t3 = pgTable( - 't3', - { - t3_id1: integer(), - t3_id2: integer(), - }, - (table) => ({ - pk: primaryKey({ - columns: [table.t3_id1, table.t3_id2], - }), - }), - ); - - const to = { - t1, - t2, - t3, - }; - - const { sqlStatements } = await diffTestSchemas(from, to, [], false, 'camelCase'); - - const st1 = `CREATE TABLE "t1" ( - "t1Id1" integer PRIMARY KEY NOT NULL, - "t1Col2" integer NOT NULL, - "t1Col3" integer NOT NULL, - "t2Ref" integer NOT NULL, - "t1Uni" integer NOT NULL, - "t1UniIdx" integer NOT NULL, - "t1Idx" integer NOT NULL, - CONSTRAINT "t1Uni" UNIQUE("t1Uni") -); -`; - - const st2 = `CREATE TABLE "t2" ( - "t2Id" serial PRIMARY KEY NOT NULL -); -`; - - const st3 = `CREATE TABLE "t3" ( - "t3Id1" integer, - "t3Id2" integer, - CONSTRAINT "t3_t3Id1_t3Id2_pk" PRIMARY KEY("t3Id1","t3Id2") -); -`; - - const st4 = - `ALTER TABLE "t1" ADD CONSTRAINT "t1_t2Ref_t2_t2Id_fk" FOREIGN KEY ("t2Ref") REFERENCES "public"."t2"("t2Id") ON DELETE no action ON UPDATE no action;`; - - const st5 = - `ALTER TABLE "t1" ADD CONSTRAINT "t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fk" FOREIGN KEY ("t1Col2","t1Col3") REFERENCES "public"."t3"("t3Id1","t3Id2") ON DELETE no action ON UPDATE no action;`; - - const st6 = `CREATE UNIQUE INDEX "t1UniIdx" ON "t1" USING btree ("t1UniIdx");`; - - const st7 = `CREATE INDEX "t1Idx" ON "t1" USING btree ("t1Idx") WHERE "t1"."t1Idx" > 0;`; - - expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]); -}); diff --git a/drizzle-kit/tests/pg-views.test.ts b/drizzle-kit/tests/pg-views.test.ts deleted file mode 100644 index 4f24cd7762..0000000000 --- a/drizzle-kit/tests/pg-views.test.ts +++ /dev/null @@ -1,1929 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { integer, pgMaterializedView, pgSchema, pgTable, pgView } from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; - -test('create table and view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view: pgView('some_view').as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [{ - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - uniqueConstraints: [], - isRLSEnabled: false, - compositePkName: '', - checkConstraints: [], - policies: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'create_view', - name: 'some_view', - definition: `select "id" from "users"`, - schema: 'public', - with: undefined, - materialized: false, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[1]).toBe(`CREATE VIEW "public"."some_view" AS (select "id" from "users");`); -}); - -test('create table and view #2', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [{ - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - uniqueConstraints: [], - isRLSEnabled: false, - compositePkName: '', - policies: [], - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'create_view', - name: 'some_view', - definition: `SELECT * FROM "users"`, - schema: 'public', - with: undefined, - materialized: false, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[1]).toBe(`CREATE VIEW "public"."some_view" AS (SELECT * FROM "users");`); -}); - -test('create table and view #3', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view1: pgView('some_view1', { id: integer('id') }).with({ - checkOption: 'local', - securityBarrier: false, - securityInvoker: true, - }).as(sql`SELECT * FROM ${users}`), - view2: pgView('some_view2').with({ - checkOption: 'cascaded', - securityBarrier: true, - securityInvoker: false, - }).as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(3); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [{ - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - uniqueConstraints: [], - compositePkName: '', - checkConstraints: [], - isRLSEnabled: false, - policies: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'create_view', - name: 'some_view1', - definition: `SELECT * FROM "users"`, - schema: 'public', - with: { - checkOption: 'local', - securityBarrier: false, - securityInvoker: true, - }, - materialized: false, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - expect(statements[2]).toStrictEqual({ - type: 'create_view', - name: 'some_view2', - definition: `select "id" from "users"`, - schema: 'public', - with: { - checkOption: 'cascaded', - securityBarrier: true, - securityInvoker: false, - }, - materialized: false, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - - expect(sqlStatements.length).toBe(3); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[1]).toBe( - `CREATE VIEW "public"."some_view1" WITH (check_option = local, security_barrier = false, security_invoker = true) AS (SELECT * FROM "users");`, - ); - expect(sqlStatements[2]).toBe( - `CREATE VIEW "public"."some_view2" WITH (check_option = cascaded, security_barrier = true, security_invoker = false) AS (select "id" from "users");`, - ); -}); - -test('create table and view #4', async () => { - const schema = pgSchema('new_schema'); - - const users = schema.table('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - schema, - users: users, - view1: schema.view('some_view1', { id: integer('id') }).with({ - checkOption: 'local', - securityBarrier: false, - securityInvoker: true, - }).as(sql`SELECT * FROM ${users}`), - view2: schema.view('some_view2').with({ - checkOption: 'cascaded', - securityBarrier: true, - securityInvoker: false, - }).as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(4); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'new_schema', - }); - expect(statements[1]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: 'new_schema', - columns: [{ - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - uniqueConstraints: [], - compositePkName: '', - isRLSEnabled: false, - policies: [], - checkConstraints: [], - }); - expect(statements[2]).toStrictEqual({ - type: 'create_view', - name: 'some_view1', - definition: `SELECT * FROM "new_schema"."users"`, - schema: 'new_schema', - with: { - checkOption: 'local', - securityBarrier: false, - securityInvoker: true, - }, - materialized: false, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - expect(statements[3]).toStrictEqual({ - type: 'create_view', - name: 'some_view2', - definition: `select "id" from "new_schema"."users"`, - schema: 'new_schema', - with: { - checkOption: 'cascaded', - securityBarrier: true, - securityInvoker: false, - }, - materialized: false, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); - expect(sqlStatements[1]).toBe(`CREATE TABLE "new_schema"."users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[2]).toBe( - `CREATE VIEW "new_schema"."some_view1" WITH (check_option = local, security_barrier = false, security_invoker = true) AS (SELECT * FROM "new_schema"."users");`, - ); - expect(sqlStatements[3]).toBe( - `CREATE VIEW "new_schema"."some_view2" WITH (check_option = cascaded, security_barrier = true, security_invoker = false) AS (select "id" from "new_schema"."users");`, - ); -}); - -test('create table and view #5', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view1: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - view2: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - }; - - await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); -}); - -test('create table and view #6', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view1: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - columns: [ - { - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - ], - compositePKs: [], - compositePkName: '', - schema: '', - tableName: 'users', - type: 'create_table', - uniqueConstraints: [], - checkConstraints: [], - isRLSEnabled: false, - policies: [], - }); - expect(statements[1]).toStrictEqual({ - definition: 'SELECT * FROM "users"', - name: 'some_view', - schema: 'public', - type: 'create_view', - with: { - checkOption: 'cascaded', - }, - materialized: false, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[1]).toBe( - `CREATE VIEW "public"."some_view" WITH (check_option = cascaded) AS (SELECT * FROM "users");`, - ); -}); - -test('create view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - }; - - const to = { - users: users, - view1: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('create table and materialized view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [{ - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - uniqueConstraints: [], - isRLSEnabled: false, - policies: [], - compositePkName: '', - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'create_view', - name: 'some_view', - definition: `select "id" from "users"`, - schema: 'public', - with: undefined, - materialized: true, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[1]).toBe(`CREATE MATERIALIZED VIEW "public"."some_view" AS (select "id" from "users");`); -}); - -test('create table and materialized view #2', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [{ - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - uniqueConstraints: [], - compositePkName: '', - isRLSEnabled: false, - policies: [], - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'create_view', - name: 'some_view', - definition: `SELECT * FROM "users"`, - schema: 'public', - with: undefined, - materialized: true, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[1]).toBe(`CREATE MATERIALIZED VIEW "public"."some_view" AS (SELECT * FROM "users");`); -}); - -test('create table and materialized view #3', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view1: pgMaterializedView('some_view1', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - view2: pgMaterializedView('some_view2').tablespace('some_tablespace').using('heap').withNoData().with({ - autovacuumEnabled: true, - autovacuumFreezeMaxAge: 1, - autovacuumFreezeMinAge: 1, - autovacuumFreezeTableAge: 1, - autovacuumMultixactFreezeMaxAge: 1, - autovacuumMultixactFreezeMinAge: 1, - autovacuumMultixactFreezeTableAge: 1, - autovacuumVacuumCostDelay: 1, - autovacuumVacuumCostLimit: 1, - autovacuumVacuumScaleFactor: 1, - autovacuumVacuumThreshold: 1, - fillfactor: 1, - logAutovacuumMinDuration: 1, - parallelWorkers: 1, - toastTupleTarget: 1, - userCatalogTable: true, - vacuumIndexCleanup: 'off', - vacuumTruncate: false, - }).as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(3); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: '', - columns: [{ - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }], - compositePKs: [], - uniqueConstraints: [], - isRLSEnabled: false, - compositePkName: '', - policies: [], - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'create_view', - name: 'some_view1', - definition: `SELECT * FROM "users"`, - schema: 'public', - with: undefined, - materialized: true, - withNoData: false, - using: undefined, - tablespace: undefined, - }); - expect(statements[2]).toStrictEqual({ - type: 'create_view', - name: 'some_view2', - definition: `select "id" from "users"`, - schema: 'public', - with: { - autovacuumEnabled: true, - autovacuumFreezeMaxAge: 1, - autovacuumFreezeMinAge: 1, - autovacuumFreezeTableAge: 1, - autovacuumMultixactFreezeMaxAge: 1, - autovacuumMultixactFreezeMinAge: 1, - autovacuumMultixactFreezeTableAge: 1, - autovacuumVacuumCostDelay: 1, - autovacuumVacuumCostLimit: 1, - autovacuumVacuumScaleFactor: 1, - autovacuumVacuumThreshold: 1, - fillfactor: 1, - logAutovacuumMinDuration: 1, - parallelWorkers: 1, - toastTupleTarget: 1, - userCatalogTable: true, - vacuumIndexCleanup: 'off', - vacuumTruncate: false, - }, - materialized: true, - tablespace: 'some_tablespace', - using: 'heap', - withNoData: true, - }); - - expect(sqlStatements.length).toBe(3); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[1]).toBe( - `CREATE MATERIALIZED VIEW "public"."some_view1" AS (SELECT * FROM "users");`, - ); - expect(sqlStatements[2]).toBe( - `CREATE MATERIALIZED VIEW "public"."some_view2" USING "heap" WITH (autovacuum_enabled = true, autovacuum_freeze_max_age = 1, autovacuum_freeze_min_age = 1, autovacuum_freeze_table_age = 1, autovacuum_multixact_freeze_max_age = 1, autovacuum_multixact_freeze_min_age = 1, autovacuum_multixact_freeze_table_age = 1, autovacuum_vacuum_cost_delay = 1, autovacuum_vacuum_cost_limit = 1, autovacuum_vacuum_scale_factor = 1, autovacuum_vacuum_threshold = 1, fillfactor = 1, log_autovacuum_min_duration = 1, parallel_workers = 1, toast_tuple_target = 1, user_catalog_table = true, vacuum_index_cleanup = off, vacuum_truncate = false) TABLESPACE some_tablespace AS (select "id" from "users") WITH NO DATA;`, - ); -}); - -test('create table and materialized view #4', async () => { - // same names - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view1: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - view2: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - }; - - await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); -}); - -test('create table and materialized view #5', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - const to = { - users: users, - view1: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumFreezeMinAge: 14 }).as( - sql`SELECT * FROM ${users}`, - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - columns: [ - { - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - ], - compositePKs: [], - compositePkName: '', - schema: '', - tableName: 'users', - type: 'create_table', - uniqueConstraints: [], - isRLSEnabled: false, - policies: [], - checkConstraints: [], - }); - expect(statements[1]).toEqual({ - definition: 'SELECT * FROM "users"', - name: 'some_view', - schema: 'public', - type: 'create_view', - with: { - autovacuumFreezeMinAge: 14, - }, - materialized: true, - tablespace: undefined, - using: undefined, - withNoData: false, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( -\t"id" integer PRIMARY KEY NOT NULL -);\n`); - expect(sqlStatements[1]).toBe( - `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_freeze_min_age = 14) AS (SELECT * FROM "users");`, - ); -}); - -test('create materialized view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - }; - - const to = { - users: users, - view1: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumEnabled: true }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - }; - - const to = { - users: users, - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'drop_view', - name: 'some_view', - schema: 'public', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP VIEW "public"."some_view";`); -}); - -test('drop view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view', { id: integer('id') }).existing(), - }; - - const to = { - users: users, - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop materialized view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), - }; - - const to = { - users: users, - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'drop_view', - name: 'some_view', - schema: 'public', - materialized: true, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."some_view";`); -}); - -test('drop materialized view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).existing(), - }; - - const to = { - users: users, - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('rename view #1', async () => { - const from = { - view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const to = { - view: pgView('new_some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_view', - nameFrom: 'some_view', - nameTo: 'new_some_view', - schema: 'public', - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" RENAME TO "new_some_view";`); -}); - -test('rename view with existing flag', async () => { - const from = { - view: pgView('some_view', { id: integer('id') }).existing(), - }; - - const to = { - view: pgView('new_some_view', { id: integer('id') }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('rename materialized view #1', async () => { - const from = { - view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const to = { - view: pgMaterializedView('new_some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'rename_view', - nameFrom: 'some_view', - nameTo: 'new_some_view', - schema: 'public', - materialized: true, - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`ALTER MATERIALIZED VIEW "public"."some_view" RENAME TO "new_some_view";`); -}); - -test('rename materialized view with existing flag', async () => { - const from = { - view: pgMaterializedView('some_view', { id: integer('id') }).existing(), - }; - - const to = { - view: pgMaterializedView('new_some_view', { id: integer('id') }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('view alter schema', async () => { - const schema = pgSchema('new_schema'); - - const from = { - view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const to = { - schema, - view: schema.view('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'new_schema', - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_view_alter_schema', - toSchema: 'new_schema', - fromSchema: 'public', - name: 'some_view', - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); - expect(sqlStatements[1]).toBe(`ALTER VIEW "public"."some_view" SET SCHEMA "new_schema";`); -}); - -test('view alter schema with existing flag', async () => { - const schema = pgSchema('new_schema'); - - const from = { - view: pgView('some_view', { id: integer('id') }).existing(), - }; - - const to = { - schema, - view: schema.view('some_view', { id: integer('id') }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'new_schema', - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); -}); - -test('view alter schema for materialized', async () => { - const schema = pgSchema('new_schema'); - - const from = { - view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const to = { - schema, - view: schema.materializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'new_schema', - }); - expect(statements[1]).toStrictEqual({ - type: 'alter_view_alter_schema', - toSchema: 'new_schema', - fromSchema: 'public', - name: 'some_view', - materialized: true, - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); - expect(sqlStatements[1]).toBe(`ALTER MATERIALIZED VIEW "public"."some_view" SET SCHEMA "new_schema";`); -}); - -test('view alter schema for materialized with existing flag', async () => { - const schema = pgSchema('new_schema'); - - const from = { - view: pgMaterializedView('some_view', { id: integer('id') }).existing(), - }; - - const to = { - schema, - view: schema.materializedView('some_view', { id: integer('id') }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_schema', - name: 'new_schema', - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); -}); - -test('add with option to view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view').as((qb) => qb.select().from(users)), - }; - - const to = { - users, - view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true }).as((qb) => - qb.select().from(users) - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'some_view', - schema: 'public', - type: 'alter_view_add_with_option', - with: { - checkOption: 'cascaded', - securityBarrier: true, - }, - materialized: false, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER VIEW "public"."some_view" SET (check_option = cascaded, security_barrier = true);`, - ); -}); - -test('add with option to view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view', {}).existing(), - }; - - const to = { - users, - view: pgView('some_view', {}).with({ checkOption: 'cascaded', securityBarrier: true }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('add with option to materialized view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), - }; - - const to = { - users, - view: pgMaterializedView('some_view').with({ autovacuumMultixactFreezeMaxAge: 3 }).as((qb) => - qb.select().from(users) - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'some_view', - schema: 'public', - type: 'alter_view_add_with_option', - with: { - autovacuumMultixactFreezeMaxAge: 3, - }, - materialized: true, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" SET (autovacuum_multixact_freeze_max_age = 3);`, - ); -}); - -test('add with option to materialized view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', {}).existing(), - }; - - const to = { - users, - view: pgMaterializedView('some_view', {}).with({ autovacuumMultixactFreezeMaxAge: 3 }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop with option from view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }).as((qb) => - qb.select().from(users) - ), - }; - - const to = { - users, - view: pgView('some_view').as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'some_view', - schema: 'public', - type: 'alter_view_drop_with_option', - materialized: false, - with: { - checkOption: 'cascaded', - securityBarrier: true, - securityInvoker: true, - }, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER VIEW "public"."some_view" RESET (check_option, security_barrier, security_invoker);`, - ); -}); - -test('drop with option from view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view', {}).with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }) - .existing(), - }; - - const to = { - users, - view: pgView('some_view', {}).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop with option from materialized view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 10 }).as((qb) => - qb.select().from(users) - ), - }; - - const to = { - users, - view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'some_view', - schema: 'public', - type: 'alter_view_drop_with_option', - materialized: true, - with: { - autovacuumEnabled: true, - autovacuumFreezeMaxAge: 10, - }, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" RESET (autovacuum_enabled, autovacuum_freeze_max_age);`, - ); -}); - -test('drop with option from materialized view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 10 }).existing(), - }; - - const to = { - users, - view: pgMaterializedView('some_view', {}).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('alter with option in view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view').with({ securityBarrier: true, securityInvoker: true }).as((qb) => - qb.select().from(users) - ), - }; - - const to = { - users, - view: pgView('some_view').with({ securityBarrier: true }).as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'some_view', - schema: 'public', - type: 'alter_view_drop_with_option', - with: { - securityInvoker: true, - }, - materialized: false, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER VIEW "public"."some_view" RESET (security_invoker);`, - ); -}); - -test('alter with option in view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view', {}).with({ securityBarrier: true, securityInvoker: true }).existing(), - }; - - const to = { - users, - view: pgView('some_view', {}).with({ securityBarrier: true }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('alter with option in materialized view #1', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, autovacuumVacuumScaleFactor: 1 }).as((qb) => - qb.select().from(users) - ), - }; - - const to = { - users, - view: pgMaterializedView('some_view').with({ autovacuumEnabled: true }).as((qb) => qb.select().from(users)), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'some_view', - schema: 'public', - type: 'alter_view_drop_with_option', - with: { - autovacuumVacuumScaleFactor: 1, - }, - materialized: true, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" RESET (autovacuum_vacuum_scale_factor);`, - ); -}); - -test('alter with option in materialized view with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true, autovacuumVacuumScaleFactor: 1 }) - .existing(), - }; - - const to = { - users, - view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('alter with option in view #2', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view').with({ checkOption: 'local', securityBarrier: true, securityInvoker: true }).as((qb) => - qb.selectDistinct().from(users) - ), - }; - - const to = { - users, - view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }).as((qb) => - qb.selectDistinct().from(users) - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_view_add_with_option', - name: 'some_view', - schema: 'public', - with: { - checkOption: 'cascaded', - }, - materialized: false, - }); - - expect(sqlStatements.length).toBe(1); - - expect(sqlStatements[0]).toBe( - `ALTER VIEW "public"."some_view" SET (check_option = cascaded);`, - ); -}); - -test('alter with option in materialized view #2', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, fillfactor: 1 }).as((qb) => - qb.select().from(users) - ), - }; - - const to = { - users, - view: pgMaterializedView('some_view').with({ autovacuumEnabled: false, fillfactor: 1 }).as((qb) => - qb.select().from(users) - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_view_add_with_option', - name: 'some_view', - schema: 'public', - with: { - autovacuumEnabled: false, - }, - materialized: true, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" SET (autovacuum_enabled = false);`, - ); -}); - -test('alter view ".as" value', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view', { id: integer('id') }).with({ - checkOption: 'local', - securityBarrier: true, - securityInvoker: true, - }).as(sql`SELECT '123'`), - }; - - const to = { - users, - view: pgView('some_view', { id: integer('id') }).with({ - checkOption: 'local', - securityBarrier: true, - securityInvoker: true, - }).as(sql`SELECT '1234'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual( - { - name: 'some_view', - schema: 'public', - type: 'drop_view', - }, - ); - expect(statements[1]).toStrictEqual( - { - definition: "SELECT '1234'", - name: 'some_view', - schema: 'public', - type: 'create_view', - materialized: false, - with: { - checkOption: 'local', - securityBarrier: true, - securityInvoker: true, - }, - withNoData: false, - tablespace: undefined, - using: undefined, - }, - ); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe('DROP VIEW "public"."some_view";'); - expect(sqlStatements[1]).toBe( - `CREATE VIEW "public"."some_view" WITH (check_option = local, security_barrier = true, security_invoker = true) AS (SELECT '1234');`, - ); -}); - -test('alter view ".as" value with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view', { id: integer('id') }).with({ - checkOption: 'local', - securityBarrier: true, - securityInvoker: true, - }).existing(), - }; - - const to = { - users, - view: pgView('some_view', { id: integer('id') }).with({ - checkOption: 'local', - securityBarrier: true, - securityInvoker: true, - }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('alter materialized view ".as" value', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT '123'`), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT '1234'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual( - { - name: 'some_view', - schema: 'public', - type: 'drop_view', - materialized: true, - }, - ); - expect(statements[1]).toStrictEqual( - { - definition: "SELECT '1234'", - name: 'some_view', - schema: 'public', - type: 'create_view', - with: { - autovacuumVacuumCostLimit: 1, - }, - materialized: true, - withNoData: false, - tablespace: undefined, - using: undefined, - }, - ); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe('DROP MATERIALIZED VIEW "public"."some_view";'); - expect(sqlStatements[1]).toBe( - `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_vacuum_cost_limit = 1) AS (SELECT '1234');`, - ); -}); - -test('alter materialized view ".as" value with existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).existing(), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop existing flag', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).existing(), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toEqual({ - type: 'drop_view', - name: 'some_view', - schema: 'public', - materialized: true, - }); - expect(statements[1]).toEqual({ - definition: "SELECT 'asd'", - materialized: true, - name: 'some_view', - schema: 'public', - tablespace: undefined, - type: 'create_view', - using: undefined, - with: { - autovacuumVacuumCostLimit: 1, - }, - withNoData: false, - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."some_view";`); - expect(sqlStatements[1]).toBe( - `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_vacuum_cost_limit = 1) AS (SELECT 'asd');`, - ); -}); - -test('alter tablespace - materialize', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toEqual({ - type: 'alter_view_alter_tablespace', - name: 'some_view', - schema: 'public', - materialized: true, - toTablespace: 'new_tablespace', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE new_tablespace;`, - ); -}); - -test('set tablespace - materialize', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toEqual({ - type: 'alter_view_alter_tablespace', - name: 'some_view', - schema: 'public', - materialized: true, - toTablespace: 'new_tablespace', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE new_tablespace;`, - ); -}); - -test('drop tablespace - materialize', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toEqual({ - type: 'alter_view_alter_tablespace', - name: 'some_view', - schema: 'public', - materialized: true, - toTablespace: 'pg_default', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE pg_default;`, - ); -}); - -test('set existing - materialized', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const to = { - users, - view: pgMaterializedView('new_some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - autovacuumFreezeMinAge: 1, - }).withNoData().existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); - - expect(statements.length).toBe(0); - - expect(sqlStatements.length).toBe(0); -}); - -test('drop existing - materialized', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ - autovacuumVacuumCostLimit: 1, - }).existing(), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - autovacuumFreezeMinAge: 1, - }).withNoData().as(sql`SELECT 'asd'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(2); - - expect(sqlStatements.length).toBe(2); -}); - -test('set existing', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgView('some_view', { id: integer('id') }).with({ - checkOption: 'cascaded', - }).as(sql`SELECT 'asd'`), - }; - - const to = { - users, - view: pgView('new_some_view', { id: integer('id') }).with({ - checkOption: 'cascaded', - securityBarrier: true, - }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); - - expect(statements.length).toBe(0); - - expect(sqlStatements.length).toBe(0); -}); - -test('alter using - materialize', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').using('some_using').with( - { - autovacuumVacuumCostLimit: 1, - }, - ).as(sql`SELECT 'asd'`), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').using('new_using').with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toEqual({ - type: 'alter_view_alter_using', - name: 'some_view', - schema: 'public', - materialized: true, - toUsing: 'new_using', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "new_using";`, - ); -}); - -test('set using - materialize', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).using('new_using').with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toEqual({ - type: 'alter_view_alter_using', - name: 'some_view', - schema: 'public', - materialized: true, - toUsing: 'new_using', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "new_using";`, - ); -}); - -test('drop using - materialize', async () => { - const users = pgTable('users', { - id: integer('id').primaryKey().notNull(), - }); - - const from = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).using('new_using').with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const to = { - users, - view: pgMaterializedView('some_view', { id: integer('id') }).with({ - autovacuumVacuumCostLimit: 1, - }).as(sql`SELECT 'asd'`), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toEqual({ - type: 'alter_view_alter_using', - name: 'some_view', - schema: 'public', - materialized: true, - toUsing: 'heap', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "heap";`, - ); -}); - -test('rename view and alter view', async () => { - const from = { - view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const to = { - view: pgView('new_some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as( - sql`SELECT * FROM "users"`, - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'rename_view', - nameFrom: 'some_view', - nameTo: 'new_some_view', - schema: 'public', - }); - expect(statements[1]).toStrictEqual({ - materialized: false, - name: 'new_some_view', - schema: 'public', - type: 'alter_view_add_with_option', - with: { - checkOption: 'cascaded', - }, - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" RENAME TO "new_some_view";`); - expect(sqlStatements[1]).toBe(`ALTER VIEW "public"."new_some_view" SET (check_option = cascaded);`); -}); - -test('moved schema and alter view', async () => { - const schema = pgSchema('my_schema'); - const from = { - schema, - view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), - }; - - const to = { - schema, - view: schema.view('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as( - sql`SELECT * FROM "users"`, - ), - }; - - const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->my_schema.some_view']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - fromSchema: 'public', - name: 'some_view', - toSchema: 'my_schema', - type: 'alter_view_alter_schema', - }); - expect(statements[1]).toStrictEqual({ - name: 'some_view', - schema: 'my_schema', - type: 'alter_view_add_with_option', - materialized: false, - with: { - checkOption: 'cascaded', - }, - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" SET SCHEMA "my_schema";`); - expect(sqlStatements[1]).toBe(`ALTER VIEW "my_schema"."some_view" SET (check_option = cascaded);`); -}); diff --git a/drizzle-kit/tests/postgres/commutativity.integration.test.ts b/drizzle-kit/tests/postgres/commutativity.integration.test.ts new file mode 100644 index 0000000000..425a941ec2 --- /dev/null +++ b/drizzle-kit/tests/postgres/commutativity.integration.test.ts @@ -0,0 +1,239 @@ +import { sql } from 'drizzle-orm'; +import { check, index, pgTable, primaryKey } from 'drizzle-orm/pg-core'; +import { createDDL } from 'src/dialects/postgres/ddl'; +import type { PostgresSnapshot } from 'src/dialects/postgres/snapshot'; +import { detectNonCommutative } from 'src/utils/commutativity'; +import { describe, expect, test } from 'vitest'; +import { conflictsFromSchema } from './mocks'; + +describe('conflict rule coverage (statement pairs)', () => { + test('column: create vs drop (same-resource-different-op)', async () => { + const parent = { + t: pgTable('t', (t) => ({ + c: t.varchar(), + })), + }; + + const child1 = { + t: pgTable('t', (t) => ({ + c: t.varchar(), + d: t.varchar(), + })), + }; + + const child2 = { + t: pgTable('t', (t) => ({})), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).toBeUndefined(); + }); + + test('column: alter vs alter (same-resource-same-op)', async () => { + const parent = { + t: pgTable('t', (t) => ({ + c: t.varchar(), + })), + }; + + const child1 = { + t: pgTable('t', (t) => ({ + c: t.varchar().notNull(), + })), + }; + + const child2 = { + t: pgTable('t', (t) => ({ + c: t.integer(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('table drop vs child index', async () => { + const parent = { + t: pgTable('t', (t) => ({ + c: t.varchar(), + })), + }; + + const child1 = {}; + + const child2 = { + t: pgTable('t', (t) => ({ + c: t.varchar(), + }), (table) => [index().on(table.c)]), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('pk: alter vs drop', async () => { + const parent = { + t: pgTable('t', (t) => ({ + id: t.integer().primaryKey(), + c: t.varchar(), + })), + }; + + const child1 = { + t: pgTable('t', (t) => ({ + id: t.integer(), + c: t.varchar(), + }), (table) => [primaryKey({ columns: [table.id, table.c] })]), + }; + + const child2 = { + t: pgTable('t', (t) => ({ + id: t.integer(), + c: t.varchar(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('unique: create vs drop', async () => { + const parent = { + t: pgTable('t', (t) => ({ + c: t.varchar().unique(), + })), + }; + + const child1 = { + t: pgTable('t', (t) => ({ + c: t.varchar().unique(), + d: t.varchar().unique(), + })), + }; + + const child2 = { + t: pgTable('t', (t) => ({ + c: t.varchar(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('fk: recreate vs drop', async () => { + const p = pgTable('p', (t) => ({ + id: t.integer().primaryKey(), + })); + + const parent = { + p, + t: pgTable('t', (t) => ({ + id: t.integer().primaryKey(), + pId: t.integer().references(() => p.id), + })), + }; + + const child1 = { + p, + t: pgTable('t', (t) => ({ + id: t.integer().primaryKey(), + pId: t.integer().references(() => p.id, { onDelete: 'cascade' }), + })), + }; + + const child2 = { + p, + t: pgTable('t', (t) => ({ + id: t.integer().primaryKey(), + pId: t.integer(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('check: alter vs drop', async () => { + const parent = { + t: pgTable('t', (t) => ({ + c: t.integer(), + }), (table) => [check('chk', sql`${table.c} > 0`)]), + }; + + const child1 = { + t: pgTable('t', (t) => ({ + c: t.integer(), + }), (table) => [check('chk', sql`${table.c} > 5`)]), + }; + + const child2 = { + t: pgTable('t', (t) => ({ + c: t.integer(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + }); + + test('explainConflicts returns reason for table drop vs column alter', async () => { + const parent = { + c: pgTable('t', (t) => ({ + c: t.varchar(), + })), + }; + + const child1 = {}; + const child2 = { + c: pgTable('t', (t) => ({ + c: t.varchar().notNull(), + })), + }; + + const conflicts = await conflictsFromSchema({ + parent: { id: '1', schema: parent }, + child1: { id: '2', prevId: '1', schema: child1 }, + child2: { id: '3', prevId: '1', schema: child2 }, + }); + + expect(conflicts).not.toBeUndefined(); + expect(conflicts?.leftStatement.type).toBe('alter_column'); + expect(conflicts?.rightStatement.type).toBe('drop_table'); + }); +}); diff --git a/drizzle-kit/tests/postgres/commutativity.test.ts b/drizzle-kit/tests/postgres/commutativity.test.ts new file mode 100644 index 0000000000..255dafc618 --- /dev/null +++ b/drizzle-kit/tests/postgres/commutativity.test.ts @@ -0,0 +1,1013 @@ +import { createDDL } from 'src/dialects/postgres/ddl'; +import type { PostgresSnapshot } from 'src/dialects/postgres/snapshot'; +import { detectNonCommutative } from 'src/utils/commutativity'; +import { describe, expect, test } from 'vitest'; + +const baseId = '00000000-0000-0000-0000-000000000000'; + +function makeSnapshot(id: string, prevIds: string[], ddlEntities: any[] = []): PostgresSnapshot { + return { + version: '8', + dialect: 'postgres', + id, + prevIds, + ddl: ddlEntities, + renames: [], + } as any; +} + +function writeTempSnapshot(dir: string, tag: string, snap: PostgresSnapshot) { + const fs = require('fs'); + const path = require('path'); + const folder = path.join(dir, tag); + fs.mkdirSync(folder, { recursive: true }); + fs.writeFileSync(path.join(folder, 'snapshot.json'), JSON.stringify(snap, null, 2)); + return path.join(folder, 'snapshot.json'); +} + +const ORIGIN = '00000000-0000-0000-0000-000000000000'; + +function mkTmp(): { tmp: string; fs: any; path: any; os: any } { + const fs = require('fs'); + const path = require('path'); + const os = require('os'); + const tmp = fs.mkdtempSync(path.join(os.tmpdir(), 'dk-comm-int-')); + return { tmp, fs, path, os } as any; +} + +describe('commutativity integration (postgres)', () => { + test('Parent not empty: detects conflict when first migration of branch A has a conflict with the last migration of branch B', async () => { + const parentDDL = createDDL(); + parentDDL.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + parentDDL.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const parent = makeSnapshot('p1', [baseId], parentDDL.entities.list()); + + const A = createDDL(); + A.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + A.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafA = makeSnapshot('a1', ['p1'], A.entities.list()); + + const A2 = createDDL(); + A2.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + A2.columns.push({ + schema: 'public', + table: 'users', + name: 'email2', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafA2 = makeSnapshot('a2', ['a1'], A2.entities.list()); + + const B = createDDL(); + B.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + B.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + B.tables.push({ schema: 'public', isRlsEnabled: false, name: 'posts' }); + B.columns.push({ + schema: 'public', + table: 'posts', + name: 'content', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafB = makeSnapshot('b1', ['p1'], B.entities.list()); + + const B2 = createDDL(); + B2.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + B2.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + B2.tables.push({ schema: 'public', isRlsEnabled: false, name: 'posts' }); + B2.columns.push({ + schema: 'public', + table: 'posts', + name: 'content', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafB2 = makeSnapshot('b2', ['b1'], B2.entities.list()); + + const B3 = createDDL(); + B3.tables.push({ schema: 'public', isRlsEnabled: false, name: 'posts' }); + B3.columns.push({ + schema: 'public', + table: 'posts', + name: 'content', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafB3 = makeSnapshot('b3', ['b2'], B3.entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const a2Path = writeTempSnapshot(tmp, '001_leafA2', leafA2); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + const b2Path = writeTempSnapshot(tmp, '002_leafB2', leafB2); + const b3Path = writeTempSnapshot(tmp, '002_leafB3', leafB3); + + const report = await detectNonCommutative([pPath, aPath, bPath, b2Path, b3Path, a2Path], 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + expect(report.conflicts[0].parentId).toBe('p1'); + }); + + test('Parent empty: detects conflict when last migration of branch A has a conflict with a first migration of branch B', async () => { + const parent = makeSnapshot('p1', [baseId], createDDL().entities.list()); + + const A = createDDL(); + A.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + A.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafA = makeSnapshot('a1', ['p1'], A.entities.list()); + + const A2 = createDDL(); + A2.tables.push({ schema: 'public', isRlsEnabled: false, name: 'posts' }); + A2.columns.push({ + schema: 'public', + table: 'posts', + name: 'description', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafA2 = makeSnapshot('a2', ['a1'], A2.entities.list()); + + const B = createDDL(); + B.tables.push({ schema: 'public', isRlsEnabled: false, name: 'posts' }); + B.columns.push({ + schema: 'public', + table: 'users', + name: 'content', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafB = makeSnapshot('b1', ['p1'], B.entities.list()); + + const B2 = createDDL(); + B2.tables.push({ schema: 'public', isRlsEnabled: false, name: 'posts' }); + B2.columns.push({ + schema: 'public', + table: 'users', + name: 'content', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafB2 = makeSnapshot('b2', ['b1'], B2.entities.list()); + + const B3 = createDDL(); + B3.tables.push({ schema: 'public', isRlsEnabled: false, name: 'posts' }); + B3.columns.push({ + schema: 'public', + table: 'users', + name: 'content', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + B3.tables.push({ schema: 'public', isRlsEnabled: false, name: 'media' }); + B3.columns.push({ + schema: 'public', + table: 'media', + name: 'url', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafB3 = makeSnapshot('b3', ['b2'], B3.entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const a2Path = writeTempSnapshot(tmp, '002_leafA2', leafA2); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + const b2Path = writeTempSnapshot(tmp, '003_leafB2', leafB2); + const b3Path = writeTempSnapshot(tmp, '004_leafB3', leafB3); + + const report = await detectNonCommutative([pPath, aPath, a2Path, bPath, b2Path, b3Path], 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + expect(report.conflicts[0].parentId).toBe('p1'); + }); + + test('detects conflict when drop table in one branch and add column in other', async () => { + const parentDDL = createDDL(); + parentDDL.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + parentDDL.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const parent = makeSnapshot('p1', [baseId], parentDDL.entities.list()); + + const A = createDDL(); + A.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + A.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafA = makeSnapshot('a1', ['p1'], A.entities.list()); + + const leafB = makeSnapshot('b1', ['p1'], createDDL().entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + + const report = await detectNonCommutative([pPath, aPath, bPath], 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + expect(report.conflicts[0].parentId).toBe('p1'); + }); + + test('detects conflict when both branches alter same column', async () => { + const parent = makeSnapshot('p1', [baseId], createDDL().entities.list()); + + const A = createDDL(); + A.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + A.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafA = makeSnapshot('a1', ['p1'], A.entities.list()); + + const B = createDDL(); + B.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + B.columns.push({ + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any); + const leafB = makeSnapshot('b1', ['p1'], B.entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + + const report = await detectNonCommutative([pPath, aPath, bPath], 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + expect(report.conflicts[0].parentId).toBe('p1'); + }); + + test('no conflict when branches touch different tables', async () => { + const parent = makeSnapshot('p2', [baseId], createDDL().entities.list()); + + const A = createDDL(); + A.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + const leafA = makeSnapshot('a2', ['p2'], A.entities.list()); + + const B = createDDL(); + B.tables.push({ schema: 'public', isRlsEnabled: false, name: 'posts' }); + const leafB = makeSnapshot('b2', ['p2'], B.entities.list()); + + const os = require('os'); + const tmp = require('fs').mkdtempSync(require('path').join(os.tmpdir(), 'dk-comm-')); + const pPath = writeTempSnapshot(tmp, '000_parent', parent); + const aPath = writeTempSnapshot(tmp, '001_leafA', leafA); + const bPath = writeTempSnapshot(tmp, '002_leafB', leafB); + + const report = await detectNonCommutative([pPath, aPath, bPath], 'postgresql'); + expect(report.conflicts.length).toBe(0); + }); + + test('column conflict: both branches change same column', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + const p = makeSnapshot('p_col', [ORIGIN], parent.entities.list()); + + const a = createDDL(); + a.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + a.columns.push( + { + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + const b = createDDL(); + b.tables.push({ schema: 'public', isRlsEnabled: false, name: 'users' }); + b.columns.push( + { + schema: 'public', + table: 'users', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + + files.push( + writeTempSnapshot(tmp, '000_p_col', p), + writeTempSnapshot(tmp, '001_a_col', makeSnapshot('a_col', ['p_col'], a.entities.list())), + writeTempSnapshot(tmp, '002_b_col', makeSnapshot('b_col', ['p_col'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('table drop vs child column alter', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ schema: 'public', isRlsEnabled: false, name: 't1' }); + parent.columns.push( + { + schema: 'public', + table: 't1', + name: 'c1', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + const p = makeSnapshot('p_drop', [ORIGIN], parent.entities.list()); + + const a = createDDL(); // dropping table in branch A (no t1) + const b = createDDL(); + b.tables.push({ schema: 'public', isRlsEnabled: false, name: 't1' }); + b.columns.push( + { + schema: 'public', + table: 't1', + name: 'c1', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + + files.push( + writeTempSnapshot(tmp, '010_p_drop', p), + writeTempSnapshot(tmp, '011_a_drop', makeSnapshot('a_drop', ['p_drop'], a.entities.list())), + writeTempSnapshot(tmp, '012_b_drop', makeSnapshot('b_drop', ['p_drop'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBe(1); + expect(report.conflicts[0].branchA.headId).toStrictEqual('a_drop'); + expect(report.conflicts[0].branchB.headId).toStrictEqual('b_drop'); + const con = report.conflicts[0]; + + // console.log( + // `The conflict in your migrations was detected. Starting from a ${con.parentId} we've detected 2 branches of migrations that are conflicting. A file with conflicted migration for a first branch in ${con.branchA.headId} and second branch is ${con.branchB.headId}.\n\n${con.branchA.statement.type} statement from first branch is conflicting with ${con.branchB.statement.type}`, + // ); + }); + + test('unique constraint same name on same table', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ schema: 'public', isRlsEnabled: false, name: 't2' }); + const p = makeSnapshot('p_uq', [ORIGIN], parent.entities.list()); + + const a = createDDL(); + a.tables.push({ schema: 'public', isRlsEnabled: false, name: 't2' }); + a.uniques.push( + { + schema: 'public', + table: 't2', + nameExplicit: true, + name: 't2_uq', + columns: ['c'], + nullsNotDistinct: false, + } as any, + ); + const b = createDDL(); + b.tables.push({ schema: 'public', isRlsEnabled: false, name: 't2' }); + b.uniques.push( + { + schema: 'public', + table: 't2', + nameExplicit: true, + name: 't2_uq', + columns: ['c'], + nullsNotDistinct: false, + } as any, + ); + + files.push( + writeTempSnapshot(tmp, '020_p_uq', p), + writeTempSnapshot(tmp, '021_a_uq', makeSnapshot('a_uq', ['p_uq'], a.entities.list())), + writeTempSnapshot(tmp, '022_b_uq', makeSnapshot('b_uq', ['p_uq'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('view: same name in both branches', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const p = makeSnapshot('p_view', [ORIGIN], createDDL().entities.list()); + const a = createDDL(); + a.views.push( + { + schema: 'public', + name: 'v1', + materialized: false, + definition: null, + with: null, + withNoData: null, + using: { name: 'sql', default: true }, + tablespace: null, + } as any, + ); + const b = createDDL(); + b.views.push( + { + schema: 'public', + name: 'v1', + materialized: false, + definition: null, + with: null, + withNoData: null, + using: { name: 'sql', default: true }, + tablespace: null, + } as any, + ); + + files.push( + writeTempSnapshot(tmp, '030_p_view', p), + writeTempSnapshot(tmp, '031_a_view', makeSnapshot('a_view', ['p_view'], a.entities.list())), + writeTempSnapshot(tmp, '032_b_view', makeSnapshot('b_view', ['p_view'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('enum: same name in both branches', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const p = makeSnapshot('p_enum', [ORIGIN], createDDL().entities.list()); + const a = createDDL(); + a.enums.push({ schema: 'public', name: 'e1', values: ['a'] } as any); + const b = createDDL(); + b.enums.push({ schema: 'public', name: 'e1', values: ['a'] } as any); + + files.push( + writeTempSnapshot(tmp, '040_p_enum', p), + writeTempSnapshot(tmp, '041_a_enum', makeSnapshot('a_enum', ['p_enum'], a.entities.list())), + writeTempSnapshot(tmp, '042_b_enum', makeSnapshot('b_enum', ['p_enum'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('sequence: same name in both branches', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const p = makeSnapshot('p_seq', [ORIGIN], createDDL().entities.list()); + const a = createDDL(); + a.sequences.push( + { + schema: 'public', + name: 's1', + incrementBy: null, + minValue: null, + maxValue: null, + startWith: null, + cacheSize: null, + cycle: null, + } as any, + ); + const b = createDDL(); + b.sequences.push( + { + schema: 'public', + name: 's1', + incrementBy: null, + minValue: null, + maxValue: null, + startWith: null, + cacheSize: null, + cycle: null, + } as any, + ); + + files.push( + writeTempSnapshot(tmp, '050_p_seq', p), + writeTempSnapshot(tmp, '051_a_seq', makeSnapshot('a_seq', ['p_seq'], a.entities.list())), + writeTempSnapshot(tmp, '052_b_seq', makeSnapshot('b_seq', ['p_seq'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('policy: same name on same table in both branches', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ schema: 'public', isRlsEnabled: false, name: 't3' }); + const p = makeSnapshot('p_pol', [ORIGIN], parent.entities.list()); + + const a = createDDL(); + a.tables.push({ schema: 'public', isRlsEnabled: false, name: 't3' }); + a.policies.push( + { + schema: 'public', + table: 't3', + name: 'pol', + as: 'PERMISSIVE', + for: 'SELECT', + roles: ['PUBLIC'], + using: null, + withCheck: null, + } as any, + ); + const b = createDDL(); + b.tables.push({ schema: 'public', isRlsEnabled: false, name: 't3' }); + b.policies.push( + { + schema: 'public', + table: 't3', + name: 'pol', + as: 'PERMISSIVE', + for: 'SELECT', + roles: ['PUBLIC'], + using: null, + withCheck: null, + } as any, + ); + + files.push( + writeTempSnapshot(tmp, '060_p_pol', p), + writeTempSnapshot(tmp, '061_a_pol', makeSnapshot('a_pol', ['p_pol'], a.entities.list())), + writeTempSnapshot(tmp, '062_b_pol', makeSnapshot('b_pol', ['p_pol'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('RLS toggle conflict for the same table', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ schema: 'public', isRlsEnabled: false, name: 't_rls' }); + const p = makeSnapshot('p_rls', [ORIGIN], parent.entities.list()); + + const a = createDDL(); + a.tables.push({ schema: 'public', isRlsEnabled: true, name: 't_rls' }); + a.policies.push( + { + schema: 'public', + table: 't_rls', + name: 'p_rls', + as: 'PERMISSIVE', + for: 'SELECT', + roles: ['PUBLIC'], + using: null, + withCheck: null, + } as any, + ); + + const b = createDDL(); // simulate drop by omitting table + + files.push( + writeTempSnapshot(tmp, '070_p_rls', p), + writeTempSnapshot(tmp, '071_a_rls', makeSnapshot('a_rls', ['p_rls'], a.entities.list())), + writeTempSnapshot(tmp, '072_b_rls', makeSnapshot('b_rls', ['p_rls'], b.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('three-way branch: A,B,C from same parent', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const parent = createDDL(); + parent.tables.push({ schema: 'public', isRlsEnabled: false, name: 't' }); + const p = makeSnapshot('p_three', [ORIGIN], parent.entities.list()); + + const a = createDDL(); + a.tables.push({ schema: 'public', isRlsEnabled: false, name: 't' }); + a.columns.push( + { + schema: 'public', + table: 't', + name: 'a', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + const b = createDDL(); + b.tables.push({ schema: 'public', isRlsEnabled: false, name: 't' }); + b.columns.push( + { + schema: 'public', + table: 't', + name: 'a', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + const c = createDDL(); + c.tables.push({ schema: 'public', isRlsEnabled: false, name: 't' }); + c.columns.push( + { + schema: 'public', + table: 't', + name: 'b', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + + files.push( + writeTempSnapshot(tmp, '100_p_three', p), + writeTempSnapshot(tmp, '101_a_three', makeSnapshot('a_three', ['p_three'], a.entities.list())), + writeTempSnapshot(tmp, '102_b_three', makeSnapshot('b_three', ['p_three'], b.entities.list())), + writeTempSnapshot(tmp, '103_c_three', makeSnapshot('c_three', ['p_three'], c.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + // At least A vs B should conflict; C may or may not depending on overlap + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('nested branching: parent -> A -> A1 and parent -> B', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const root = createDDL(); + root.tables.push({ schema: 'public', isRlsEnabled: false, name: 't' }); + const p = makeSnapshot('p_nested', [ORIGIN], root.entities.list()); + + const A = createDDL(); + A.tables.push({ schema: 'public', isRlsEnabled: false, name: 't' }); + A.columns.push( + { + schema: 'public', + table: 't', + name: 'c', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + const A1 = createDDL(); + A1.tables.push({ schema: 'public', isRlsEnabled: false, name: 't' }); + A1.columns.push( + { + schema: 'public', + table: 't', + name: 'c', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + const B = createDDL(); + B.tables.push({ schema: 'public', isRlsEnabled: false, name: 't' }); + B.columns.push( + { + schema: 'public', + table: 't', + name: 'd', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: false, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + + files.push( + writeTempSnapshot(tmp, '110_p_nested', p), + writeTempSnapshot(tmp, '111_A', makeSnapshot('A', ['p_nested'], A.entities.list())), + writeTempSnapshot(tmp, '112_A1', makeSnapshot('A1', ['A'], A1.entities.list())), + writeTempSnapshot(tmp, '113_B', makeSnapshot('B', ['p_nested'], B.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + // A1 vs B should be compared (different initial children: A vs B), and should conflict on column 'c' vs 'd'? Only if overlap; ensure conflict by changing B to touch 'c' + expect(report.conflicts.length).toBeGreaterThanOrEqual(0); + }); + + test('complex mixed: multiple tables, enums, views, and policies diverging', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const base = createDDL(); + base.tables.push({ schema: 'public', isRlsEnabled: false, name: 'u' }); + base.tables.push({ schema: 'public', isRlsEnabled: false, name: 'p' }); + const p = makeSnapshot('p_mix', [ORIGIN], base.entities.list()); + + // Branch X: alter u.email, create view v_users, enum e1 + const X = createDDL(); + X.tables.push({ schema: 'public', isRlsEnabled: false, name: 'u' }); + X.columns.push( + { + schema: 'public', + table: 'u', + name: 'email', + type: 'varchar', + options: null, + typeSchema: 'pg_catalog', + notNull: true, + dimensions: 0, + default: null, + generated: null, + identity: null, + } as any, + ); + X.views.push( + { + schema: 'public', + name: 'v_users', + materialized: false, + definition: null, + with: null, + withNoData: null, + using: { name: 'sql', default: true }, + tablespace: null, + } as any, + ); + X.enums.push({ schema: 'public', name: 'e1', values: ['a'] } as any); + + // Branch Y: drop table u (conflicts with X's column/view touching u), policy on p + const Y = createDDL(); + Y.tables.push({ schema: 'public', isRlsEnabled: false, name: 'p' }); + Y.policies.push( + { + schema: 'public', + table: 'p', + name: 'pol_p', + as: 'PERMISSIVE', + for: 'SELECT', + roles: ['PUBLIC'], + using: null, + withCheck: null, + } as any, + ); + // no table u -> implies drop vs X touching u + + files.push( + writeTempSnapshot(tmp, '120_p_mix', p), + writeTempSnapshot(tmp, '121_X', makeSnapshot('X', ['p_mix'], X.entities.list())), + writeTempSnapshot(tmp, '122_Y', makeSnapshot('Y', ['p_mix'], Y.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + expect(report.conflicts.length).toBeGreaterThan(0); + }); + + test('complex schema and moves: rename, move, drop schema/table conflicts', async () => { + const { tmp } = mkTmp(); + const files: string[] = []; + + const base = createDDL(); + base.schemas.push({ name: 's1' } as any); + base.tables.push({ schema: 's1', isRlsEnabled: false, name: 't1' } as any); + base.tables.push({ schema: 'public', isRlsEnabled: false, name: 'common_table' } as any); + const p = makeSnapshot('p_schema_move', [ORIGIN], base.entities.list()); + + // Branch A: rename schema s1 to s2, move t1 from s1 to s2.t1 + const A = createDDL(); + A.schemas.push({ name: 's2' } as any); + A.tables.push({ schema: 's2', isRlsEnabled: false, name: 't1' } as any); + A.tables.push({ schema: 'public', isRlsEnabled: false, name: 'common_table' } as any); + + // Branch B: drop schema s1, create table in public schema + const B = createDDL(); + B.tables.push({ schema: 'public', isRlsEnabled: false, name: 'new_table_in_public' } as any); + B.tables.push({ schema: 'public', isRlsEnabled: false, name: 'common_table' } as any); + // implicitly drops schema s1 and t1 within it + + // Branch C: alter common_table in public, create new schema s3 + const C = createDDL(); + C.schemas.push({ name: 's1' } as any); + C.schemas.push({ name: 's3' } as any); + C.tables.push({ schema: 's1', isRlsEnabled: false, name: 't1' } as any); + C.tables.push({ schema: 'public', isRlsEnabled: false, name: 'common_table' } as any); + C.columns.push({ schema: 'public', table: 'common_table', name: 'new_col', type: 'text' } as any); + + files.push( + writeTempSnapshot(tmp, '130_p_schema_move', p), + writeTempSnapshot(tmp, '131_A', makeSnapshot('A_schema_move', ['p_schema_move'], A.entities.list())), + writeTempSnapshot(tmp, '132_B', makeSnapshot('B_schema_move', ['p_schema_move'], B.entities.list())), + writeTempSnapshot(tmp, '133_C', makeSnapshot('C_schema_move', ['p_schema_move'], C.entities.list())), + ); + + const report = await detectNonCommutative(files, 'postgresql'); + // Expect conflicts between A and B (s1 rename vs drop) + // Expect conflicts between A and C (s1 operations) + // Expect conflicts between B and C (s1 drop vs s1 operations) + expect(report.conflicts.length).toBeGreaterThan(0); + }); +}); diff --git a/drizzle-kit/tests/postgres/entity-filter.test.ts b/drizzle-kit/tests/postgres/entity-filter.test.ts new file mode 100644 index 0000000000..9c4c0f3489 --- /dev/null +++ b/drizzle-kit/tests/postgres/entity-filter.test.ts @@ -0,0 +1,346 @@ +import { sql } from 'drizzle-orm'; +import { pgSchema, pgView, serial } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('push schema #1', async () => { + const to = { dev: pgSchema('dev') }; + const st0 = ['CREATE SCHEMA "dev";\n']; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual([]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } +}); + +test('push schema #2', async () => { + const to = { dev: pgSchema('dev'), dev2: pgSchema('dev2') }; + const st0 = ['CREATE SCHEMA "dev";\n', 'CREATE SCHEMA "dev2";\n']; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual(['CREATE SCHEMA "dev";\n']); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual(['CREATE SCHEMA "dev2";\n']); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } +}); + +test('push schema #3', async () => { + const to = { dev: pgSchema('dev').existing(), dev2: pgSchema('dev2') }; + const st0 = ['CREATE SCHEMA "dev2";\n']; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual([]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } +}); + +test('push schema #4', async () => { + const dev = pgSchema('dev'); + const table1 = dev.table('table1', { id: serial() }); + const table2 = dev.table('table2', { id: serial() }); + const to = { dev, table1, table2, dev2: pgSchema('dev2') }; + + const st0 = [ + 'CREATE SCHEMA "dev";\n', + 'CREATE SCHEMA "dev2";\n', + 'CREATE TABLE "dev"."table1" (\n\t"id" serial\n);\n', + 'CREATE TABLE "dev"."table2" (\n\t"id" serial\n);\n', + ]; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual([ + 'CREATE SCHEMA "dev";\n', + 'CREATE TABLE "dev"."table1" (\n\t"id" serial\n);\n', + 'CREATE TABLE "dev"."table2" (\n\t"id" serial\n);\n', + ]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual(['CREATE SCHEMA "dev2";\n']); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } +}); + +test('push schema #5', async () => { + const dev = pgSchema('dev').existing(); + const table1 = dev.table('table1', { id: serial() }); + const table2 = dev.table('table2', { id: serial() }); + const to = { dev, table1, table2, dev2: pgSchema('dev2') }; + const st0 = ['CREATE SCHEMA "dev2";\n']; + + { + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev'] }); + expect(pst).toStrictEqual([]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: [] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['public'] }); + expect(pst).toStrictEqual([]); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!public'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['!dev'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } + + { + const { sqlStatements: pst } = await push({ db, to, schemas: ['dev*'] }); + expect(pst).toStrictEqual(st0); + await _.clear(); + } +}); + +test('push schema #6', async () => { + await db.query('create schema dev'); + + const to = { dev: pgSchema('dev').existing() }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([]); +}); + +test('push schema #6', async () => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + + const to = { dev: pgSchema('dev').existing() }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([]); +}); + +test('push schema #7', async () => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + + const to = { dev: pgSchema('dev') }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual(['DROP TABLE "dev"."users";']); +}); + +test('push schema #8', async () => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + await db.query('create view v as (select * from dev.users);'); + + const to = { dev: pgSchema('dev') }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([ + 'DROP VIEW "v";', + 'DROP TABLE "dev"."users";', + ]); +}); + +test('push schema #9', async () => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + await db.query('create view dev.v as (select * from dev.users);'); + + const to = { dev: pgSchema('dev') }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([ + 'DROP VIEW "dev"."v";', + 'DROP TABLE "dev"."users";', + ]); +}); + +test('push schema #10', async () => { + await db.query('create schema dev;'); + await db.query('create table dev.users (id int);'); + await db.query('create view v as (select * from dev.users);'); + + const to = { dev: pgSchema('dev').existing(), v: pgView('v', {}).existing() }; + const { sqlStatements: pst } = await push({ db, to }); + expect(pst).toStrictEqual([]); +}); + +test('huge schema #1', async () => { + const schema = await import('./schemas/schema1'); + + await push({ db, to: schema }); + + const res1 = await push({ db, to: { ...schema, core: pgSchema('core').existing() } }); + expect(res1.sqlStatements).toStrictEqual([]); + + const res2 = await push({ db, to: schema }); + expect(res2.sqlStatements).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/postgres/ext.test.ts b/drizzle-kit/tests/postgres/ext.test.ts new file mode 100644 index 0000000000..27cabc96c8 --- /dev/null +++ b/drizzle-kit/tests/postgres/ext.test.ts @@ -0,0 +1,46 @@ +import { introspect } from 'src/cli/commands/pull-postgres'; +import { EmptyProgressView } from 'src/cli/views'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, beforeEach, test } from 'vitest'; +import { prepareTestDatabase, TestDatabase } from './mocks'; + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('ext:1', async () => { + await db.query( + /*sql*/ + `create table "t" ( + "predict" json default '{"key":"value"}', + "prediction" json generated always as (predict->'predictions') stored + );`, + ); + + const res = await introspect(db, () => true, new EmptyProgressView()); +}); + +test('ext:2', async () => { + await db.query( + /*sql*/ + `create table "t" ( + c1 int not null, + c2 int not null, + PRIMARY KEY (c1, c2) + );`, + ); + await db.query(`alter table "t" drop column c2;`); + await introspect(db, () => true, new EmptyProgressView()); +}); diff --git a/drizzle-kit/tests/postgres/grammar.test.ts b/drizzle-kit/tests/postgres/grammar.test.ts new file mode 100644 index 0000000000..4dd5aeae0a --- /dev/null +++ b/drizzle-kit/tests/postgres/grammar.test.ts @@ -0,0 +1,84 @@ +import { splitSqlType, trimDefaultValueSuffix } from 'src/dialects/postgres/grammar'; +import { expect, test } from 'vitest'; + +test.each([ + ["'a'::my_enum", "'a'"], + ["'abc'::text", "'abc'"], + ["'abc'::character varying", "'abc'"], + ["'abc'::bpchar", "'abc'"], + [`'{"attr":"value"}'::json`, `'{"attr":"value"}'`], + [`'{"attr": "value"}'::jsonb`, `'{"attr": "value"}'`], + [`'00:00:00'::time without time zone`, `'00:00:00'`], + [`'2025-04-24 08:30:45.08+00'::timestamp with time zone`, `'2025-04-24 08:30:45.08+00'`], + [`'2024-01-01'::date`, `'2024-01-01'`], + [`'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'::uuid`, `'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'`], + [`now()`, `now()`], + [`CURRENT_TIMESTAMP`, `CURRENT_TIMESTAMP`], + [`timezone('utc'::text, now())`, `timezone('utc'::text, now())`], + [`'{a,b}'::my_enum[]`, `'{a,b}'`], + [`'{10,20}'::smallint[]`, `'{10,20}'`], + [`'{10,20}'::integer[]`, `'{10,20}'`], + [`'{99.9,88.8}'::numeric[]`, `'{99.9,88.8}'`], + [`'{100,200}'::bigint[]`, `'{100,200}'`], + [`'{t,f}'::boolean[]`, `'{t,f}'`], + [`'{abc,def}'::text[]`, `'{abc,def}'`], + [`'{abc,def}'::character varying[]`, `'{abc,def}'`], + [`'{abc,def}'::bpchar[]`, `'{abc,def}'`], + [`'{100,200}'::double precision[]`, `'{100,200}'`], + [`'{100,200}'::real[]`, `'{100,200}'`], + ["'{}'::character(1)[]", "'{}'"], + [ + `'{"{\"attr\":\"value1\"}","{\"attr\":\"value2\"}"}'::json[]`, + `'{"{\"attr\":\"value1\"}","{\"attr\":\"value2\"}"}'`, + ], + [ + `'{"{\"attr\": \"value1\"}","{\"attr\": \"value2\"}"}'::jsonb[]`, + `'{"{\"attr\": \"value1\"}","{\"attr\": \"value2\"}"}'`, + ], + [`'{00:00:00,01:00:00}'::time without time zone[]`, `'{00:00:00,01:00:00}'`], + [ + `'{"2025-04-24 10:41:36.623+00","2025-04-24 10:41:36.623+00"}'::timestamp with time zone[]`, + `'{"2025-04-24 10:41:36.623+00","2025-04-24 10:41:36.623+00"}'`, + ], + [`'{2024-01-01,2024-01-02}'::date[]`, `'{2024-01-01,2024-01-02}'`], + [ + `'{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11,a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12}'::uuid[]`, + `'{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11,a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12}'`, + ], + [`'{127.0.0.1,127.0.0.2}'::inet[]`, `'{127.0.0.1,127.0.0.2}'`], + [`'{127.0.0.1/32,127.0.0.2/32}'::cidr[]`, `'{127.0.0.1/32,127.0.0.2/32}'`], + [`'{00:00:00:00:00:00,00:00:00:00:00:01}'::macaddr[]`, `'{00:00:00:00:00:00,00:00:00:00:00:01}'`], + [ + `'{00:00:00:ff:fe:00:00:00,00:00:00:ff:fe:00:00:01}'::macaddr8[]`, + `'{00:00:00:ff:fe:00:00:00,00:00:00:ff:fe:00:00:01}'`, + ], + [`'{"1 day 01:00:00","1 day 02:00:00"}'::interval[]`, `'{"1 day 01:00:00","1 day 02:00:00"}'`], + [`(predict -> 'predictions'::text)`, `(predict -> 'predictions'::text)`], +])('trim default suffix %#: %s', (it, expected) => { + expect(trimDefaultValueSuffix(it)).toBe(expected); +}); + +test('split sql type', () => { + expect.soft(splitSqlType('numeric')).toStrictEqual({ type: 'numeric', options: null }); + expect.soft(splitSqlType('numeric(10)')).toStrictEqual({ type: 'numeric', options: '10' }); + expect.soft(splitSqlType('numeric(10,0)')).toStrictEqual({ type: 'numeric', options: '10,0' }); + expect.soft(splitSqlType('numeric(10,2)')).toStrictEqual({ type: 'numeric', options: '10,2' }); + + expect.soft(splitSqlType('numeric[]')).toStrictEqual({ type: 'numeric', options: null }); + expect.soft(splitSqlType('numeric(10)[]')).toStrictEqual({ type: 'numeric', options: '10' }); + expect.soft(splitSqlType('numeric(10,0)[]')).toStrictEqual({ type: 'numeric', options: '10,0' }); + expect.soft(splitSqlType('numeric(10,2)[]')).toStrictEqual({ type: 'numeric', options: '10,2' }); + + expect.soft(splitSqlType('numeric[][]')).toStrictEqual({ type: 'numeric', options: null }); + expect.soft(splitSqlType('numeric(10)[][]')).toStrictEqual({ type: 'numeric', options: '10' }); + expect.soft(splitSqlType('numeric(10,0)[][]')).toStrictEqual({ type: 'numeric', options: '10,0' }); + expect.soft(splitSqlType('numeric(10,2)[][]')).toStrictEqual({ type: 'numeric', options: '10,2' }); +}); + +test('to default array', () => { + // TODO: wrong test? + // expect.soft(toDefaultArray([['one'], ['two']], 1, (it) => JSON.stringify(it))).toBe(`{["one"],["two"]}`); + // expect.soft(toDefaultArray([{ key: 'one' }, { key: 'two' }], 1, (it) => JSON.stringify(it))).toBe( + // `{{"key":"one"},{"key":"two"}}`, + // ); +}); diff --git a/drizzle-kit/tests/postgres/mocks.ts b/drizzle-kit/tests/postgres/mocks.ts new file mode 100644 index 0000000000..85387e46ce --- /dev/null +++ b/drizzle-kit/tests/postgres/mocks.ts @@ -0,0 +1,740 @@ +import { is } from 'drizzle-orm'; +import { + getViewConfig, + isPgEnum, + isPgMaterializedView, + isPgSequence, + isPgView, + PgColumnBuilder, + PgDialect, + PgEnum, + PgEnumObject, + PgMaterializedView, + PgPolicy, + PgRole, + PgSchema, + PgSequence, + PgTable, + pgTable, + PgView, + serial, +} from 'drizzle-orm/pg-core'; +import { + PgEnum as PgEnumOld, + PgEnumObject as PgEnumObjectOld, + PgMaterializedView as PgMaterializedViewOld, + PgPolicy as PgPolicyOld, + PgRole as PgRoleOld, + PgSchema as PgSchemaOld, + PgSequence as PgSequenceOld, + PgTable as PgTableOld, + PgView as PgViewOld, +} from 'orm044/pg-core'; +import { CasingType } from 'src/cli/validations/common'; +import { createDDL, fromEntities, interimToDDL, PostgresDDL, SchemaError } from 'src/dialects/postgres/ddl'; +import { ddlDiff, ddlDiffDry } from 'src/dialects/postgres/diff'; +import { + defaultFromColumn, + fromDrizzleSchema, + prepareFromSchemaFiles, + unwrapColumn, +} from 'src/dialects/postgres/drizzle'; +import { mockResolver } from 'src/utils/mocks'; +import '../../src/@types/utils'; +import { PGlite } from '@electric-sql/pglite'; +// @ts-ignore +import { pg_trgm } from '@electric-sql/pglite/contrib/pg_trgm'; +// @ts-ignore +import { vector } from '@electric-sql/pglite/vector'; +import { existsSync, mkdirSync, rmSync, writeFileSync } from 'fs'; +import pg from 'pg'; +import { introspect } from 'src/cli/commands/pull-postgres'; +import { suggestions } from 'src/cli/commands/push-postgres'; +import { EmptyProgressView, explain, psqlExplain } from 'src/cli/views'; +import { hash } from 'src/dialects/common'; +import { defaultToSQL, isSystemNamespace, isSystemRole } from 'src/dialects/postgres/grammar'; +import { fromDatabaseForDrizzle } from 'src/dialects/postgres/introspect'; +import { ddlToTypeScript } from 'src/dialects/postgres/typescript'; +import { DB } from 'src/utils'; +import 'zx/globals'; +import { EntitiesFilter, EntitiesFilterConfig } from 'src/cli/validations/cli'; +import { extractPostgresExisting } from 'src/dialects/drizzle'; +import { getReasonsFromStatements } from 'src/dialects/postgres/commutativity'; +import { PostgresSnapshot } from 'src/dialects/postgres/snapshot'; +import { upToV8 } from 'src/dialects/postgres/versions'; +import { prepareEntityFilter } from 'src/dialects/pull-utils'; +import { diff as legacyDiff } from 'src/legacy/postgres-v7/pgDiff'; +import { serializePg } from 'src/legacy/postgres-v7/serializer'; +import { tsc } from 'tests/utils'; +import { expect } from 'vitest'; + +mkdirSync(`tests/postgres/tmp/`, { recursive: true }); + +const { Client } = pg; + +export type PostgresSchema = Record< + string, + | PgTable + | PgEnum + | PgEnumObject + | PgSchema + | PgSequence + | PgView + | PgMaterializedView + | PgRole + | PgPolicy + | unknown +>; + +export type PostgresSchemaOld = Record< + string, + | PgTableOld + | PgEnumOld + | PgEnumObjectOld + | PgSchemaOld + | PgSequenceOld + | PgViewOld + | PgMaterializedViewOld + | PgRoleOld + | PgPolicyOld + | unknown +>; + +class MockError extends Error { + constructor(readonly errors: SchemaError[]) { + super(); + } +} + +export const drizzleToDDL = ( + schema: PostgresSchema, + casing?: CasingType | undefined, + filtersConfig: EntitiesFilterConfig = { + entities: undefined, + extensions: undefined, + schemas: undefined, + tables: undefined, + }, +) => { + const tables = Object.values(schema).filter((it) => is(it, PgTable)) as PgTable[]; + const schemas = Object.values(schema).filter((it) => is(it, PgSchema)) as PgSchema[]; + const enums = Object.values(schema).filter((it) => isPgEnum(it)) as PgEnum[]; + const sequences = Object.values(schema).filter((it) => isPgSequence(it)) as PgSequence[]; + const roles = Object.values(schema).filter((it) => is(it, PgRole)) as PgRole[]; + const policies = Object.values(schema).filter((it) => is(it, PgPolicy)) as PgPolicy[]; + const views = Object.values(schema).filter((it) => isPgView(it)) as PgView[]; + const materializedViews = Object.values(schema).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; + + const grouped = { schemas, tables, enums, sequences, roles, policies, views, matViews: materializedViews }; + + const existing = extractPostgresExisting(schemas, views, materializedViews); + const filter = prepareEntityFilter('postgresql', filtersConfig, existing); + + const { + schema: res, + errors, + warnings, + } = fromDrizzleSchema(grouped, casing, filter); + + if (errors.length > 0) { + throw new Error(); + } + + return { ...interimToDDL(res), existing }; +}; + +// 2 schemas -> 2 ddls -> diff +export const diff = async ( + left: PostgresSchema | PostgresDDL, + right: PostgresSchema | PostgresDDL, + renamesArr: string[], + casing?: CasingType | undefined, +) => { + const { ddl: ddl1, errors: err1 } = 'entities' in left && '_' in left + ? { ddl: left as PostgresDDL, errors: [] } + : drizzleToDDL(left, casing); + const { ddl: ddl2, errors: err2 } = 'entities' in right && '_' in right + ? { ddl: right as PostgresDDL, errors: [] } + : drizzleToDDL(right, casing); + + if (err1.length > 0 || err2.length > 0) { + throw new MockError([...err1, ...err2]); + } + + const renames = new Set(renamesArr); + + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'default', + ); + return { sqlStatements, statements, groupedStatements, next: ddl2 }; +}; + +// init schema flush to db -> introspect db to ddl -> compare ddl with destination schema +export const push = async (config: { + db: DB; + to: PostgresSchema | PostgresDDL; + renames?: string[]; + schemas?: string[]; + tables?: string[]; + casing?: CasingType; + log?: 'statements' | 'none'; + entities?: EntitiesFilter; + ignoreSubsequent?: boolean; + explain?: true; +}) => { + const { db, to } = config; + + const log = config.log ?? 'none'; + const casing = config.casing ?? 'camelCase'; + const schemas = config.schemas ?? []; + const tables = config.tables ?? []; + + const filterConfig = { + tables, + schemas, + entities: config.entities, + extensions: [], + }; + + const { ddl: ddl2, errors: err2, existing } = 'entities' in to && '_' in to + ? { ddl: to as PostgresDDL, errors: [], existing: [] } + : drizzleToDDL(to, casing, filterConfig); + + const filter = prepareEntityFilter('postgresql', filterConfig, existing); + const { schema } = await introspect(db, filter, new EmptyProgressView()); + + const { ddl: ddl1, errors: err3 } = interimToDDL(schema); + + if (err2.length > 0) { + for (const e of err2) { + console.error(`err2: ${JSON.stringify(e)}`); + } + throw new Error(); + } + + if (err3.length > 0) { + for (const e of err3) { + console.error(`err3: ${JSON.stringify(e)}`); + } + throw new Error(); + } + + const renames = new Set(config.renames ?? []); + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + + const hints = await suggestions(db, statements); + + if (config.explain) { + const explainMessage = explain('postgres', groupedStatements, false, []); + if (explainMessage) console.log(explainMessage); + return { sqlStatements, statements, hints }; + } + + for (const sql of sqlStatements) { + if (log === 'statements') console.log(sql); + await db.query(sql); + } + + // subsequent push + if (!config.ignoreSubsequent) { + { + const { schema } = await introspect( + db, + filter, + new EmptyProgressView(), + ); + const { ddl: ddl1, errors: err3 } = interimToDDL(schema); + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + if (sqlStatements.length > 0) { + console.error('---- subsequent push is not empty ----'); + expect(sqlStatements.join('\n')).toBe(''); + } + } + } + + return { sqlStatements, statements, hints }; +}; + +// init schema to db -> pull from db to file -> ddl from files -> compare ddl from db with ddl from file +export const diffIntrospect = async ( + db: DB, + initSchema: PostgresSchema, + testName: string, + schemas: string[] = ['public'], + entities?: EntitiesFilter, + casing?: CasingType | undefined, +) => { + const { ddl: initDDL } = drizzleToDDL(initSchema, casing); + const { sqlStatements: init } = await ddlDiffDry(createDDL(), initDDL, 'default'); + for (const st of init) await db.query(st); + + const filter = prepareEntityFilter('postgresql', { + tables: [], + schemas, + entities, + extensions: [], + }, []); + // introspect to schema + const schema = await fromDatabaseForDrizzle(db, filter); + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const filePath = `tests/postgres/tmp/${testName}.ts`; + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel', 'pg'); + writeFileSync(filePath, file.file); + + await tsc(file.file).catch((e) => { + throw new Error(`tsc error in file ${filePath}`, { cause: e }); + }); + + // generate snapshot from ts file + const response = await prepareFromSchemaFiles([ + filePath, + ]); + + const { + schema: schema2, + errors: e2, + warnings, + } = fromDrizzleSchema(response, casing, () => true); + const { ddl: ddl2, errors: e3 } = interimToDDL(schema2); + // TODO: handle errors + + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + groupedStatements, + } = await ddlDiffDry(ddl1, ddl2, 'push'); + + if (afterFileSqlStatements.length > 0) { + console.log(explain('mysql', groupedStatements, true, [])); + } + + rmSync(`tests/postgres/tmp/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + ddlAfterPull: ddl1, + }; +}; + +export const diffDefault = async ( + kit: TestDatabase, + builder: T, + expectedDefault: string, + pre: PostgresSchema | null = null, + override?: { + type?: string; + default?: string; + }, + tablesFilter?: string[], + schemasFilter?: string[], +) => { + await kit.clear(); + + const config = (builder as any).config; + const def = config['default']; + const column = pgTable('table', { column: builder }).column; + const { dimensions, typeSchema, sqlType: sqlt } = unwrapColumn(column); + + const type = override?.type ?? sqlt.replace(', ', ',').replaceAll('[]', ''); // real(6, 3)->real(6,3) + + const columnDefault = defaultFromColumn(column, column.default, dimensions, new PgDialect()); + + const defaultSql = defaultToSQL({ + default: columnDefault, + type, + dimensions, + typeSchema: typeSchema, + }); + + const res = [] as string[]; + if (defaultSql !== expectedDefault) { + res.push(`Unexpected sql: \n${defaultSql}\n${expectedDefault}`); + } + + const init = { + ...pre, + table: pgTable('table', { column: builder }), + }; + + const { db, clear } = kit; + if (pre) await push({ db, to: pre, ignoreSubsequent: true }); + const { sqlStatements: st1 } = await push({ + db, + to: init, + tables: tablesFilter, + schemas: schemasFilter, + ignoreSubsequent: true, + }); + const { sqlStatements: st2 } = await push({ + db, + to: init, + tables: tablesFilter, + schemas: schemasFilter, + ignoreSubsequent: true, + }); + const typeSchemaPrefix = typeSchema && typeSchema !== 'public' ? `"${typeSchema}".` : ''; + const typeValue = typeSchema ? `"${type}"` : type; + const sqlType = `${typeSchemaPrefix}${typeValue}${'[]'.repeat(dimensions)}`; + const defaultStatement = expectedDefault ? ` DEFAULT ${expectedDefault}` : ''; + const expectedInit = `CREATE TABLE "table" (\n\t"column" ${sqlType}${defaultStatement}\n);\n`; + if (st1.length !== 1 || st1[0] !== expectedInit) res.push(`Unexpected init:\n${st1}\n\n${expectedInit}`); + if (st2.length > 0) res.push(`Unexpected subsequent init:\n${st2}`); + + await db.query('INSERT INTO "table" ("column") VALUES (default);'); + + const filter = prepareEntityFilter('postgresql', { + tables: tablesFilter ?? [], + schemas: [], + entities: undefined, + extensions: [], + }, []); + + // introspect to schema + const schema = await fromDatabaseForDrizzle( + db, + filter ?? (() => true), + schemasFilter ? (it: string) => schemasFilter.some((x) => x === it) : ((_) => true), + ); + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel', 'pg'); + const path = `tests/postgres/tmp/temp-${hash(String(Math.random()))}.ts`; + + if (existsSync(path)) rmSync(path); + writeFileSync(path, file.file); + await tsc(file.file); + + const response = await prepareFromSchemaFiles([path]); + const { schema: sch } = fromDrizzleSchema(response, 'camelCase', () => true); + const { ddl: ddl2, errors: e3 } = interimToDDL(sch); + + const { sqlStatements: afterFileSqlStatements } = await ddlDiffDry(ddl1, ddl2, 'push'); + if (afterFileSqlStatements.length === 0) { + rmSync(path); + } else { + res.push(`Unexpected diff after reading ts`); + console.log(afterFileSqlStatements); + console.log(`./${path}`); + } + + await clear(); + + config.hasDefault = false; + config.default = undefined; + const schema1 = { + ...pre, + table: pgTable('table', { column: builder }), + }; + + config.hasDefault = true; + config.default = def; + const schema2 = { + ...pre, + table: pgTable('table', { column: builder }), + }; + + if (pre) await push({ db, to: pre, tables: tablesFilter, schemas: schemasFilter, ignoreSubsequent: true }); + await push({ db, to: schema1, tables: tablesFilter, schemas: schemasFilter, ignoreSubsequent: true }); + const { sqlStatements: st3 } = await push({ + db, + to: schema2, + tables: tablesFilter, + schemas: schemasFilter, + ignoreSubsequent: true, + }); + const expectedAlter = `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT ${expectedDefault};`; + if ((st3.length !== 1 || st3[0] !== expectedAlter) && expectedDefault) { + res.push(`Unexpected default alter:\n${st3}\n\n${expectedAlter}`); + } + + await clear(); + + const schema3 = { + ...pre, + table: pgTable('table', { id: serial() }), + }; + + const schema4 = { + ...pre, + table: pgTable('table', { id: serial(), column: builder }), + }; + + if (pre) await push({ db, to: pre, tables: tablesFilter, schemas: schemasFilter, ignoreSubsequent: true }); + await push({ db, to: schema3, tables: tablesFilter, schemas: schemasFilter, ignoreSubsequent: true }); + const { sqlStatements: st4 } = await push({ + db, + to: schema4, + tables: tablesFilter, + schemas: schemasFilter, + ignoreSubsequent: true, + }); + + const expectedAddColumn = `ALTER TABLE "table" ADD COLUMN "column" ${sqlType}${defaultStatement};`; + if (st4.length !== 1 || st4[0] !== expectedAddColumn) { + res.push(`Unexpected add column:\n${st4[0]}\n\n${expectedAddColumn}`); + } + + return res; +}; + +export const diffSnapshotV7 = async (db: DB, schema: PostgresSchema, schemaOld: PostgresSchemaOld) => { + const res = await serializePg(schemaOld, 'camelCase'); + const { sqlStatements } = await legacyDiff({ right: res }); + + for (const st of sqlStatements) { + await db.query(st); + } + + const { snapshot, hints } = upToV8(res); + const ddl = fromEntities(snapshot.ddl); + + const { sqlStatements: st, next } = await diff(ddl, schema, []); + const { sqlStatements: pst } = await push({ db, to: schema }); + const { sqlStatements: st1 } = await diff(next, schema, []); + const { sqlStatements: pst1 } = await push({ db, to: schema }); + + return { + step1: st, + step2: pst, + step3: st1, + step4: pst1, + all: [...st, ...pst, ...st1, ...pst1], + }; +}; + +export type TestDatabase = { + db: DB & { batch: (sql: string[]) => Promise }; + client: TClient; + close: () => Promise; + clear: () => Promise; +}; + +const client = new PGlite({ extensions: { vector, pg_trgm } }); + +export const prepareTestDatabase = async (tx: boolean = true): Promise> => { + await client.query(`CREATE ACCESS METHOD drizzle_heap TYPE TABLE HANDLER heap_tableam_handler;`); + await client.query(`CREATE EXTENSION vector;`); + await client.query(`CREATE EXTENSION pg_trgm;`); + if (tx) { + await client.query('BEGIN'); + await client.query('SAVEPOINT drizzle'); + } + + const clear = async () => { + if (tx) { + await client.query('ROLLBACK TO SAVEPOINT drizzle'); + await client.query('BEGIN'); + await client.query('SAVEPOINT drizzle'); + return; + } + + const namespaces = await client.query<{ name: string }>('select oid, nspname as name from pg_namespace').then(( + res, + ) => res.rows.filter((r) => !isSystemNamespace(r.name))); + + const roles = await client.query<{ rolname: string }>( + `SELECT rolname, rolinherit, rolcreatedb, rolcreaterole FROM pg_roles;`, + ).then((it) => it.rows.filter((it) => !isSystemRole(it.rolname))); + + for (const namespace of namespaces) { + await client.query(`DROP SCHEMA "${namespace.name}" cascade`); + } + + await client.query('CREATE SCHEMA public;'); + + for (const role of roles) { + await client.query(`DROP ROLE "${role.rolname}"`); + } + + await client.query(`CREATE EXTENSION vector;`); + await client.query(`CREATE EXTENSION pg_trgm;`); + }; + + const db: TestDatabase['db'] = { + query: async (sql, params) => { + return client.query(sql, params).then((it) => it.rows as any[]).catch((e: Error) => { + const error = new Error(`query error: ${sql}\n\n${e.message}`); + throw error; + }); + }, + batch: async (sqls) => { + for (const sql of sqls) { + await client.query(sql); + } + }, + }; + return { db, close: async () => {}, clear, client }; +}; + +export const preparePostgisTestDatabase = async (tx: boolean = true): Promise> => { + const envURL = process.env.POSTGIS_URL; + if (!envURL) { + throw new Error('POSTGIS_URL is not set, starting a new Postgis container for tests...'); + } + + const parsed = new URL(envURL); + parsed.pathname = '/postgres'; + + const adminUrl = parsed.toString(); + const admin = new Client({ connectionString: adminUrl }); + await admin.connect(); + await admin!.query(`DROP DATABASE IF EXISTS drizzle;`); + await admin!.query(`CREATE DATABASE drizzle;`); + admin.end(); + + const pgClient = new Client({ connectionString: envURL }); + await pgClient.connect(); + await pgClient!.query(`DROP ACCESS METHOD IF EXISTS drizzle_heap;`); + await pgClient!.query(`CREATE ACCESS METHOD drizzle_heap TYPE TABLE HANDLER heap_tableam_handler;`); + await pgClient!.query(`CREATE EXTENSION IF NOT EXISTS postgis;`); + if (tx) { + await pgClient!.query('BEGIN').catch(); + await pgClient!.query('SAVEPOINT drizzle'); + } + + const clear = async () => { + if (tx) { + await pgClient.query('ROLLBACK TO SAVEPOINT drizzle'); + await pgClient.query('BEGIN'); + await pgClient.query('SAVEPOINT drizzle'); + return; + } + + const namespaces = await pgClient.query<{ name: string }>('select oid, nspname as name from pg_namespace').then(( + res, + ) => res.rows.filter((r) => !isSystemNamespace(r.name))); + + const roles = await pgClient.query<{ rolname: string }>( + `SELECT rolname, rolinherit, rolcreatedb, rolcreaterole FROM pg_roles;`, + ).then((it) => it.rows.filter((it) => !isSystemRole(it.rolname))); + + for (const namespace of namespaces) { + await pgClient.query(`DROP SCHEMA "${namespace.name}" cascade`); + } + + await pgClient.query('CREATE SCHEMA public;'); + + for (const role of roles) { + await pgClient.query(`DROP ROLE "${role.rolname}"`); + } + + await pgClient.query(`CREATE EXTENSION IF NOT EXISTS postgis;`); + }; + + const close = async () => { + await pgClient.end().catch(console.error); + }; + + const db: TestDatabase['db'] = { + query: async (sql, params) => { + return pgClient.query(sql, params).then((it) => it.rows as any[]).catch((e: Error) => { + const error = new Error(`query error: ${sql}\n\n${e.message}`); + throw error; + }); + }, + batch: async (sqls) => { + for (const sql of sqls) { + await pgClient.query(sql); + } + }, + }; + return { db, close, clear, client }; +}; + +type SchemaShape = { + id: string; + prevId?: string; + schema: Record; +}; + +export async function conflictsFromSchema( + { parent, child1, child2 }: { + parent: SchemaShape; + child1: SchemaShape; + child2: SchemaShape; + }, +) { + const parentInterim = fromDrizzleSchema( + { + tables: Object.values(parent.schema), + schemas: [], + enums: [], + sequences: [], + roles: [], + policies: [], + views: [], + matViews: [], + }, + undefined, + () => true, + ); + + const parentSnapshot = { + version: '8', + dialect: 'postgres', + id: parent.id, + prevIds: parent.prevId ? [parent.prevId] : [], + ddl: interimToDDL(parentInterim.schema).ddl.entities.list(), + renames: [], + } satisfies PostgresSnapshot; + + const { statements: st1 } = await diff(parent.schema, child1.schema, []); + const { statements: st2 } = await diff(parent.schema, child2.schema, []); + + return await getReasonsFromStatements(st1, st2, parentSnapshot); +} diff --git a/drizzle-kit/tests/postgres/pg-array.test.ts b/drizzle-kit/tests/postgres/pg-array.test.ts new file mode 100644 index 0000000000..004c2fa7ee --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-array.test.ts @@ -0,0 +1,333 @@ +import { + bigint, + boolean, + date, + integer, + json, + pgEnum, + pgTable, + serial, + text, + timestamp, + uuid, +} from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('array #1: empty array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT '{}'::integer[];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #2: integer array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([1, 2, 3]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE \"test\" ADD COLUMN \"values\" integer[] DEFAULT '{1,2,3}'::integer[];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #3: bigint array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: bigint('values', { mode: 'bigint' }).array().default([BigInt(1), BigInt(2), BigInt(3)]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE \"test\" ADD COLUMN \"values\" bigint[] DEFAULT '{1,2,3}'::bigint[];`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #4: boolean array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: boolean('values').array().default([true, false, true]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE \"test\" ADD COLUMN \"values\" boolean[] DEFAULT '{t,f,t}'::boolean[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #5: multi-dimensional array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().array().default([[1, 2], [3, 4]]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "test" ADD COLUMN "values" integer[][] DEFAULT '{{1,2},{3,4}}'::integer[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #6: date array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: date('values').array().default(['2024-08-06', '2024-08-07']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "test" ADD COLUMN "values" date[] DEFAULT \'{2024-08-06,2024-08-07}\'::date[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #7: timestamp array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: timestamp('values').array().default([new Date('2024-08-06'), new Date('2024-08-07')]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from, ignoreSubsequent: true }); + const { sqlStatements: pst } = await push({ db, to, ignoreSubsequent: true }); + + const st0 = [ + 'ALTER TABLE "test" ADD COLUMN "values" timestamp[] DEFAULT \'{"2024-08-06 00:00:00.000","2024-08-07 00:00:00.000"}\'::timestamp[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #8: json array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: json('values').array().default([{ a: 1 }, { b: 2 }]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "test" ADD COLUMN "values" json[] DEFAULT '{"{\\"a\\":1}","{\\"b\\":2}"}'::json[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #9: text array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: text('values').array().default(['abc', 'def']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = ['ALTER TABLE "test" ADD COLUMN "values" text[] DEFAULT \'{abc,def}\'::text[];']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #10: uuid array default', async (t) => { + const from = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: uuid('values').array().default([ + 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', + 'b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11', + ]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "test" ADD COLUMN "values" uuid[] DEFAULT \'{a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11,b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11}\'::uuid[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #11: enum array default', async (t) => { + const testEnum = pgEnum('test_enum', ['a', 'b', 'c']); + + const from = { + enum: testEnum, + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + enum: testEnum, + test: pgTable('test', { + id: serial('id').primaryKey(), + values: testEnum('values').array().default(['a', 'b', 'c']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "test" ADD COLUMN "values" "test_enum"[] DEFAULT \'{a,b,c}\'::"test_enum"[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('array #12: enum empty array default', async (t) => { + const testEnum = pgEnum('test_enum', ['a', 'b', 'c']); + + const from = { + enum: testEnum, + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const to = { + enum: testEnum, + test: pgTable('test', { + id: serial('id').primaryKey(), + values: testEnum('values').array().default(['a', 'b']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = ['ALTER TABLE "test" ADD COLUMN "values" "test_enum"[] DEFAULT \'{a,b}\'::"test_enum"[];']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/postgres/pg-checks.test.ts b/drizzle-kit/tests/postgres/pg-checks.test.ts new file mode 100644 index 0000000000..2330059aa9 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-checks.test.ts @@ -0,0 +1,252 @@ +import { gte, sql } from 'drizzle-orm'; +import { check, integer, pgTable, serial, varchar } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create table with check #1', async (t) => { + const to = { + users: pgTable('users', { + age: integer('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"age" integer,\n\tCONSTRAINT "some_check_name" CHECK ("age" > 21)\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4661 +test('create table with check #2: sql``', async (t) => { + const to = { + users: pgTable('users', { + age: integer('age'), + }, (table) => [check('some_check_name', gte(table.age, 21))]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"age" integer,\n\tCONSTRAINT "some_check_name" CHECK ("age" >= 21)\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add check contraint to existing table', async (t) => { + const from = { + users: pgTable('users', { + age: integer('age'), + }), + }; + + const to = { + users: pgTable('users', { + age: integer('age'), + }, (table) => [ + check('some_check_name', sql`${table.age} > 21`), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE "users" ADD CONSTRAINT "some_check_name" CHECK ("age" > 21);`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop check contraint in existing table', async (t) => { + const from = { + users: pgTable('users', { + age: integer('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: pgTable('users', { + age: integer('age'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [`ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename check constraint', async (t) => { + const from = { + users: pgTable('users', { + age: integer('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: pgTable('users', { + age: integer('age'), + }, (table) => [check('new_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, + `ALTER TABLE "users" ADD CONSTRAINT "new_check_name" CHECK ("age" > 21);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter check constraint', async (t) => { + const from = { + users: pgTable('users', { + age: integer('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: pgTable('users', { + age: integer('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 10`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "users" DROP CONSTRAINT "some_check_name", ADD CONSTRAINT "some_check_name" CHECK ("age" > 10);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); +}); + +test('rename + alter multiple check constraints', async (t) => { + const from = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + age: integer('age'), + name: varchar('name'), + }, + ( + table, + ) => [ + check('some_check_name_1', sql`${table.age} > 21`), + check('some_check_name_2', sql`${table.name} != 'Alex'`), + ], + ), + }; + + const to = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + age: integer('age'), + name: varchar('name'), + }, + ( + table, + ) => [ + check('some_check_name_3', sql`${table.age} > 21`), + check('some_check_name_4', sql`${table.name} != 'Alex'`), + ], + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_1";`, + `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_2";`, + `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_3" CHECK ("age" > 21);`, + `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_4" CHECK ("name" != \'Alex\');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create checks with same names', async (t) => { + const to = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + age: integer('age'), + name: varchar('name'), + }, + ( + table, + ) => [check('some_check_name', sql`${table.age} > 21`), check('some_check_name', sql`${table.name} != 'Alex'`)], + ), + }; + + // 'constraint_name_duplicate' + await expect(diff({}, to, [])).rejects.toThrow(); + // adding only CONSTRAINT "some_check_name" CHECK ("users"."age" > 21), not throwing error + await expect(push({ db, to })).rejects.toThrow(); +}); + +test('alter check value', async () => { + const schema1 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').default(1), + }, (table) => [check('some_check', sql`${table.values} < 100`)]), + }; + const schema2 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').default(1), + }, (table) => [check('some_check', sql`${table.values} > 100`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "test" DROP CONSTRAINT "some_check", ADD CONSTRAINT "some_check" CHECK ("values" > 100);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/postgres/pg-columns.test.ts b/drizzle-kit/tests/postgres/pg-columns.test.ts new file mode 100644 index 0000000000..1a4c60fd3e --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-columns.test.ts @@ -0,0 +1,1167 @@ +import { SQL, sql } from 'drizzle-orm'; +import { + bigint, + bigserial, + boolean, + char, + customType, + date, + doublePrecision, + geometry, + index, + integer, + interval, + json, + jsonb, + numeric, + pgEnum, + pgSchema, + pgTable, + primaryKey, + real, + serial, + smallint, + text, + time, + timestamp, + uniqueIndex, + uuid, + varchar, +} from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, preparePostgisTestDatabase, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('add columns #1', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = ['ALTER TABLE "users" ADD COLUMN "name" text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #2', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + email: text('email'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + 'ALTER TABLE "users" ADD COLUMN "name" text;', + 'ALTER TABLE "users" ADD COLUMN "email" text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column change name #1', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'public.users.name->public.users.name1', + ], + }); + + const st0 = ['ALTER TABLE "users" RENAME COLUMN "name" TO "name1";']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column change name #2', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name1'), + email: text('email'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'public.users.name->public.users.name1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'public.users.name->public.users.name1', + ], + }); + + const st0 = [ + 'ALTER TABLE "users" RENAME COLUMN "name" TO "name1";', + 'ALTER TABLE "users" ADD COLUMN "email" text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// TODO: @AlexBlokh revise: you can't change varchar type to inet using +// ALTER TABLE "table1" ALTER COLUMN "column1" SET DATA TYPE inet; +// https://github.com/drizzle-team/drizzle-orm/issues/4806 +test('alter column type to custom type', async (t) => { + const schema1 = { + table1: pgTable('table1', { + column1: varchar({ length: 256 }), + }), + }; + + const citext = customType<{ data: string }>({ + dataType() { + return 'text'; + }, + }); + const schema2 = { + table1: pgTable('table1', { + column1: citext(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "table1" ALTER COLUMN "column1" SET DATA TYPE text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter table add composite pk', async (t) => { + const schema1 = { + table: pgTable('table', { + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const schema2 = { + table: pgTable('table', { + id1: integer('id1'), + id2: integer('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2] })]), + }; + + const { sqlStatements: st } = await diff( + schema1, + schema2, + [], + ); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = ['ALTER TABLE "table" ADD PRIMARY KEY ("id1","id2");']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table rename column #1', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id'), + }), + }; + + const schema2 = { + users: pgTable('users1', { + id: integer('id1'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, [ + 'public.users->public.users1', + 'public.users1.id->public.users1.id1', + ]); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames: [ + 'public.users->public.users1', + 'public.users1.id->public.users1.id1', + ], + }); + + const st0 = [ + 'ALTER TABLE "users" RENAME TO "users1";', + 'ALTER TABLE "users1" RENAME COLUMN "id" TO "id1";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('with composite pks #1', async (t) => { + const schema1 = { + users: pgTable('users', { + id1: integer('id1'), + id2: integer('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const schema2 = { + users: pgTable('users', { + id1: integer('id1'), + id2: integer('id2'), + text: text('text'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = ['ALTER TABLE "users" ADD COLUMN "text" text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('with composite pks #2', async (t) => { + const schema1 = { + users: pgTable('users', { + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const schema2 = { + users: pgTable('users', { + id1: integer('id1'), + id2: integer('id2'), + }, (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = ['ALTER TABLE "users" ADD CONSTRAINT "compositePK" PRIMARY KEY("id1","id2");']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('with composite pks #3', async (t) => { + const schema1 = { + users: pgTable( + 'users', + { + id1: integer('id1'), + id2: integer('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' })], + ), + }; + + const schema2 = { + users: pgTable('users', { + id1: integer('id1'), + id3: integer('id3'), + }, (t) => [primaryKey({ columns: [t.id1, t.id3], name: 'compositePK' })]), + }; + + const renames = ['public.users.id2->public.users.id3']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0 = ['ALTER TABLE "users" RENAME COLUMN "id2" TO "id3";']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create composite primary key', async () => { + const schema1 = {}; + + const schema2 = { + table: pgTable('table', { + col1: integer('col1').notNull(), + col2: integer('col2').notNull(), + }, (t) => [primaryKey({ + columns: [t.col1, t.col2], + })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'CREATE TABLE "table" (\n\t"col1" integer,\n\t"col2" integer,\n\tCONSTRAINT "table_pkey" PRIMARY KEY("col1","col2")\n);\n', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add multiple constraints #1', async (t) => { + const t1 = pgTable('t1', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const t2 = pgTable('t2', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const t3 = pgTable('t3', { + id: uuid('id').primaryKey().defaultRandom(), + }); + + const schema1 = { + t1, + t2, + t3, + ref1: pgTable('ref1', { + id1: uuid('id1').references(() => t1.id), + id2: uuid('id2').references(() => t2.id), + id3: uuid('id3').references(() => t3.id), + }), + }; + + const schema2 = { + t1, + t2, + t3, + ref1: pgTable('ref1', { + id1: uuid('id1').references(() => t1.id, { onDelete: 'cascade' }), + id2: uuid('id2').references(() => t2.id, { onDelete: 'set null' }), + id3: uuid('id3').references(() => t3.id, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundand drop/create create constraint + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id1_t1_id_fkey", ADD CONSTRAINT "ref1_id1_t1_id_fkey" FOREIGN KEY ("id1") REFERENCES "t1"("id") ON DELETE CASCADE;', + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id2_t2_id_fkey", ADD CONSTRAINT "ref1_id2_t2_id_fkey" FOREIGN KEY ("id2") REFERENCES "t2"("id") ON DELETE SET NULL;', + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id3_t3_id_fkey", ADD CONSTRAINT "ref1_id3_t3_id_fkey" FOREIGN KEY ("id3") REFERENCES "t3"("id") ON DELETE CASCADE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add multiple constraints #2', async (t) => { + const t1 = pgTable('t1', { + id1: uuid('id1').unique(), + id2: uuid('id2').unique(), + id3: uuid('id3').unique(), + }); + + const schema1 = { + t1, + ref1: pgTable('ref1', { + id1: uuid('id1').references(() => t1.id1), + id2: uuid('id2').references(() => t1.id2), + id3: uuid('id3').references(() => t1.id3), + }), + }; + + const schema2 = { + t1, + ref1: pgTable('ref1', { + id1: uuid('id1').references(() => t1.id1, { onDelete: 'cascade' }), + id2: uuid('id2').references(() => t1.id2, { onDelete: 'set null' }), + id3: uuid('id3').references(() => t1.id3, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundand drop/create create constraint + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id1_t1_id1_fkey", ADD CONSTRAINT "ref1_id1_t1_id1_fkey" FOREIGN KEY ("id1") REFERENCES "t1"("id1") ON DELETE CASCADE;', + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id2_t1_id2_fkey", ADD CONSTRAINT "ref1_id2_t1_id2_fkey" FOREIGN KEY ("id2") REFERENCES "t1"("id2") ON DELETE SET NULL;', + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id3_t1_id3_fkey", ADD CONSTRAINT "ref1_id3_t1_id3_fkey" FOREIGN KEY ("id3") REFERENCES "t1"("id3") ON DELETE CASCADE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add multiple constraints #3', async (t) => { + const t1 = pgTable('t1', { + id1: uuid('id1').unique(), + id2: uuid('id2').unique(), + id3: uuid('id3').unique(), + }); + + const schema1 = { + t1, + ref1: pgTable('ref1', { + id: uuid('id').references(() => t1.id1), + }), + ref2: pgTable('ref2', { + id: uuid('id').references(() => t1.id2), + }), + ref3: pgTable('ref3', { + id: uuid('id').references(() => t1.id3), + }), + }; + + const schema2 = { + t1, + ref1: pgTable('ref1', { + id: uuid('id').references(() => t1.id1, { onDelete: 'cascade' }), + }), + ref2: pgTable('ref2', { + id: uuid('id').references(() => t1.id2, { onDelete: 'set null' }), + }), + ref3: pgTable('ref3', { + id: uuid('id').references(() => t1.id3, { onDelete: 'cascade' }), + }), + }; + + // TODO: remove redundand drop/create create constraint + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "ref1" DROP CONSTRAINT "ref1_id_t1_id1_fkey", ADD CONSTRAINT "ref1_id_t1_id1_fkey" FOREIGN KEY ("id") REFERENCES "t1"("id1") ON DELETE CASCADE;', + 'ALTER TABLE "ref2" DROP CONSTRAINT "ref2_id_t1_id2_fkey", ADD CONSTRAINT "ref2_id_t1_id2_fkey" FOREIGN KEY ("id") REFERENCES "t1"("id2") ON DELETE SET NULL;', + 'ALTER TABLE "ref3" DROP CONSTRAINT "ref3_id_t1_id3_fkey", ADD CONSTRAINT "ref3_id_t1_id3_fkey" FOREIGN KEY ("id") REFERENCES "t1"("id3") ON DELETE CASCADE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('varchar and text default values escape single quotes', async () => { + const schema1 = { + table: pgTable('table', { + id: serial('id').primaryKey(), + }), + }; + + const schema2 = { + table: pgTable('table', { + id: serial('id').primaryKey(), + text: text('text').default("escape's quotes"), + varchar: varchar('varchar').default("escape's quotes"), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `ALTER TABLE "table" ADD COLUMN "text" text DEFAULT 'escape''s quotes';`, + `ALTER TABLE "table" ADD COLUMN "varchar" varchar DEFAULT 'escape''s quotes';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns with defaults', async () => { + const schema1 = { + table: pgTable('table', { + id: serial().primaryKey(), + }), + }; + + const schema2 = { + table: pgTable('table', { + id: serial().primaryKey(), + text1: text().default(''), + text2: text().default('text'), + int1: integer().default(10), + int2: integer().default(0), + int3: integer().default(-10), + bool1: boolean().default(true), + bool2: boolean().default(false), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "table" ADD COLUMN "text1" text DEFAULT \'\';', + 'ALTER TABLE "table" ADD COLUMN "text2" text DEFAULT \'text\';', + 'ALTER TABLE "table" ADD COLUMN "int1" integer DEFAULT 10;', + 'ALTER TABLE "table" ADD COLUMN "int2" integer DEFAULT 0;', + 'ALTER TABLE "table" ADD COLUMN "int3" integer DEFAULT -10;', + 'ALTER TABLE "table" ADD COLUMN "bool1" boolean DEFAULT true;', + 'ALTER TABLE "table" ADD COLUMN "bool2" boolean DEFAULT false;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + // TODO: check for created tables, etc +}); + +test('add array column - empty array default', async () => { + const schema1 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const schema2 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([]), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{}\'::integer[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add array column - default', async () => { + const schema1 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + }), + }; + const schema2 = { + test: pgTable('test', { + id: serial('id').primaryKey(), + values: integer('values').array().default([1, 2, 3]), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{1,2,3}\'::integer[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add not null to a column', async () => { + const schema1 = { + users: pgTable( + 'User', + { + id: text('id').primaryKey().notNull(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email'), + emailVerified: timestamp('emailVerified', { + precision: 3, + mode: 'date', + }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) + .default(sql`CURRENT_TIMESTAMP`) + .notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) + .notNull() + .$onUpdate(() => new Date()), + }, + (table) => [uniqueIndex('User_email_key').on(table.email)], + ), + }; + + const schema2 = { + users: pgTable( + 'User', + { + id: text('id').primaryKey().notNull(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email').notNull(), + emailVerified: timestamp('emailVerified', { + precision: 3, + mode: 'date', + }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) + .default(sql`CURRENT_TIMESTAMP`) + .notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) + .notNull() + .$onUpdate(() => new Date()), + }, + (table) => [uniqueIndex('User_email_key').on(table.email)], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints } = await push({ db, to: schema2 }); + + const st0: string[] = ['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + // TODO: revise should I use suggestion func? + // const { losses, hints } = await suggestions(db, statements); + + expect(hints).toStrictEqual([]); +}); + +test('add not null to a column with null data. Should rollback', async () => { + const schema1 = { + users: pgTable('User', { + id: text('id').primaryKey(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email'), + emailVerified: timestamp('emailVerified', { precision: 3, mode: 'date' }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }).default(sql`CURRENT_TIMESTAMP`).notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }).notNull().$onUpdate(() => new Date()), + }, (table) => [uniqueIndex('User_email_key').on(table.email)]), + }; + + const schema2 = { + users: pgTable('User', { + id: text('id').primaryKey(), + name: text('name'), + username: text('username'), + gh_username: text('gh_username'), + email: text('email').notNull(), + emailVerified: timestamp('emailVerified', { precision: 3, mode: 'date' }), + image: text('image'), + createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }).default(sql`CURRENT_TIMESTAMP`).notNull(), + updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }).notNull().$onUpdate(() => new Date()), + }, (table) => [uniqueIndex('User_email_key').on(table.email)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + db.query(`INSERT INTO "User" (id, email, "updatedAt") values ('str', 'email@gmail', '2025-04-29 09:20:39');`); + const { sqlStatements: pst, hints } = await push({ db, to: schema2 }); + + const st0: string[] = ['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(hints).toStrictEqual([]); +}); + +test('add generated column', async () => { + const schema1 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + }), + }; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name") STORED;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add generated constraint to an existing column', async () => { + const schema1 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name'), + }), + }; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name") STORED;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop generated constraint from a column', async () => { + const schema1 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), + }), + }; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" ALTER COLUMN "gen_name" DROP EXPRESSION;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('geometry point with srid', async () => { + const postgisDb = await preparePostgisTestDatabase(); + + try { + const schema1 = { + users: pgTable('users', { + id1: geometry('id1'), + id2: geometry('id2', { srid: 0 }), + id3: geometry('id3', { srid: 10 }), + id4: geometry('id4'), + }), + }; + const schema2 = { + users: pgTable('users', { + id1: geometry('id1', { srid: 0 }), + id2: geometry('id2'), + id3: geometry('id3', { srid: 12 }), + id4: geometry('id4'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ + db: postgisDb.db, + to: schema1, + tables: ['users'], + schemas: ['public'], + }); + const { sqlStatements: pst } = await push({ + db: postgisDb.db, + to: schema2, + tables: ['users'], + schemas: ['public'], + }); + + const st0: string[] = [ + 'ALTER TABLE "users" ALTER COLUMN "id3" SET DATA TYPE geometry(point,12);', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + } catch (error) { + await postgisDb.clear(); + await postgisDb.close(); + throw error; + } + + await postgisDb.clear(); + await postgisDb.close(); +}); + +test('defaults: timestamptz with precision', async () => { + const schema1 = { + users: pgTable('users', { + time: timestamp('time', { withTimezone: true, precision: 6, mode: 'string' }).default( + '2023-12-12 13:00:00.123456', + ), + time2: timestamp('time2', { withTimezone: true, precision: 6, mode: 'string' }).default( + '2023-12-12 13:00:00.123456', + ), + }), + }; + const schema2 = { + users: pgTable('users', { + time: timestamp('time', { withTimezone: true, precision: 6, mode: 'string' }).default( + '2023-12-12 13:00:00.123455', + ), + time2: timestamp('time2', { withTimezone: true, precision: 6, mode: 'string' }).default( + '2023-12-12 13:00:00.123456', + ), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ + db, + to: schema1, + tables: ['users'], + schemas: ['public'], + }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + tables: ['users'], + schemas: ['public'], + }); + + const st0: string[] = [ + `ALTER TABLE "users" ALTER COLUMN "time" SET DEFAULT '2023-12-12 13:00:00.123455+00';`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('no diffs for all database types', async () => { + const customSchema = pgSchema('schemass'); + + const transactionStatusEnum = customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']); + + const enumname = pgEnum('enumname', ['three', 'two', 'one']); + + const schema1 = { + test: pgEnum('test', ['ds']), + testHello: pgEnum('test_hello', ['ds']), + enumname: pgEnum('enumname', ['three', 'two', 'one']), + + customSchema: customSchema, + transactionStatusEnum: customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']), + + allSmallSerials: pgTable('schema_test', { + columnAll: uuid('column_all').defaultRandom(), + column: transactionStatusEnum('column').notNull(), + }), + + allSmallInts: customSchema.table( + 'schema_test2', + { + columnAll: smallint('column_all').default(124).notNull(), + column: smallint('columns').array(), + column1: smallint('column1').array().array(), + column2: smallint('column2').array().array(), + column3: smallint('column3').array(), + }, + (t: any) => [uniqueIndex('testdfds').on(t.column)], + ), + + allEnums: customSchema.table( + 'all_enums', + { + columnAll: enumname('column_all').default('three').notNull(), + column: enumname('columns'), + }, + (t: any) => [index('ds').on(t.column)], + ), + + allTimestamps: customSchema.table('all_timestamps', { + columnDateNow: timestamp('column_date_now', { + precision: 1, + withTimezone: true, + mode: 'string', + }).defaultNow(), + columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), + column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), + column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), + }), + + allUuids: customSchema.table('all_uuids', { + columnAll: uuid('column_all').defaultRandom().notNull(), + column: uuid('column'), + }), + + allDates: customSchema.table('all_dates', { + column_date_now: date('column_date_now').defaultNow(), + column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), + column: date('column'), + }), + + allReals: customSchema.table('all_reals', { + columnAll: real('column_all').default(32).notNull(), + column: real('column'), + columnPrimary: real('column_primary').primaryKey().notNull(), + }), + + allBigints: pgTable('all_bigints', { + columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), + column: bigint('column', { mode: 'number' }), + }), + + allBigserials: customSchema.table('all_bigserials', { + columnAll: bigserial('column_all', { mode: 'bigint' }).notNull(), + column: bigserial('column', { mode: 'bigint' }).notNull(), + }), + + allIntervals: customSchema.table('all_intervals', { + columnAllConstrains: interval('column_all_constrains', { + fields: 'month', + }) + .default('1 mon') + .notNull(), + columnMinToSec: interval('column_min_to_sec', { + fields: 'minute to second', + }), + columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), + column: interval('column'), + column5: interval('column5', { + fields: 'minute to second', + precision: 3, + }), + column6: interval('column6'), + }), + + allSerials: customSchema.table('all_serials', { + columnAll: serial('column_all').notNull(), + column: serial('column').notNull(), + }), + + allTexts: customSchema.table( + 'all_texts', + { + columnAll: text('column_all').default('text').notNull(), + column: text('columns').primaryKey(), + }, + (t: any) => [index('test').on(t.column)], + ), + + allBools: customSchema.table('all_bools', { + columnAll: boolean('column_all').default(true).notNull(), + column: boolean('column'), + }), + + allVarchars: customSchema.table('all_varchars', { + columnAll: varchar('column_all').default('text').notNull(), + column: varchar('column', { length: 200 }), + }), + + allTimes: customSchema.table('all_times', { + columnDateNow: time('column_date_now').defaultNow(), + columnAll: time('column_all').default('22:12:12').notNull(), + column: time('column'), + }), + + allChars: customSchema.table('all_chars', { + columnAll: char('column_all', { length: 1 }).default('text').notNull(), + column: char('column', { length: 1 }), + }), + + allDoublePrecision: customSchema.table('all_double_precision', { + columnAll: doublePrecision('column_all').default(33.2).notNull(), + column: doublePrecision('column'), + }), + + allJsonb: customSchema.table('all_jsonb', { + columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: jsonb('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + }), + column: jsonb('column'), + }), + + allJson: customSchema.table('all_json', { + columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: json('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + foo: 'bar', + fe: 23, + }), + column: json('column'), + }), + + allIntegers: customSchema.table('all_integers', { + columnAll: integer('column_all').primaryKey(), + column: integer('column'), + columnPrimary: integer('column_primary'), + }), + + allNumerics: customSchema.table('all_numerics', { + columnAll: numeric('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: numeric('column'), + columnPrimary: numeric('column_primary').primaryKey().notNull(), + }), + }; + + const schemas = ['public', 'schemass']; + const { sqlStatements: st } = await diff(schema1, schema1, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema1, schemas }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('column with not null was renamed and dropped not null', async () => { + const from = { + users: pgTable('users', { + id: serial().primaryKey(), + name: varchar('name').notNull(), + }), + }; + const to = { + users: pgTable('users', { + id: serial().primaryKey(), + name: varchar('name2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, ['public.users.name->public.users.name2']); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: to, renames: ['public.users.name->public.users.name2'] }); + const { sqlStatements: sbsqSt } = await push({ db, to: to }); + + const st0: string[] = [ + `ALTER TABLE "users" RENAME COLUMN "name" TO "name2";`, + `ALTER TABLE "users" ALTER COLUMN "name2" DROP NOT NULL;`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(sbsqSt).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/postgres/pg-constraints.test.ts b/drizzle-kit/tests/postgres/pg-constraints.test.ts new file mode 100644 index 0000000000..eaf71ba528 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-constraints.test.ts @@ -0,0 +1,2050 @@ +import { and, isNull, SQL } from 'drizzle-orm'; +import { + AnyPgColumn, + boolean, + foreignKey, + index, + integer, + pgTable, + primaryKey, + serial, + text, + timestamp, + unique, + uniqueIndex, + uuid, + varchar, +} from 'drizzle-orm/pg-core'; +import { interimToDDL } from 'src/dialects/postgres/ddl'; +import { fromDatabase } from 'src/ext/studio-postgres'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, drizzleToDDL, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('unique #1', async () => { + const from = { + users: pgTable('users', { + name: text(), + }), + }; + const to = { + users: pgTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD CONSTRAINT "users_name_key" UNIQUE("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #2', async () => { + const from = { + users: pgTable('users', { + name: text(), + }), + }; + const to = { + users: pgTable('users', { + name: text().unique('unique_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD CONSTRAINT "unique_name" UNIQUE("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #3', async () => { + const from = { + users: pgTable('users', { + name: text(), + }), + }; + const to = { + users: pgTable('users', { + name: text().unique('unique_name', { nulls: 'distinct' }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD CONSTRAINT "unique_name" UNIQUE("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #4', async () => { + const from = { + users: pgTable('users', { + name: text(), + }), + }; + const to = { + users: pgTable('users', { + name: text().unique('unique_name', { nulls: 'not distinct' }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD CONSTRAINT "unique_name" UNIQUE NULLS NOT DISTINCT("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #5', async () => { + const from = { + users: pgTable('users', { + name: text(), + }), + }; + const to = { + users: pgTable('users', { + name: text().unique('unique_name', { nulls: 'not distinct' }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD CONSTRAINT "unique_name" UNIQUE NULLS NOT DISTINCT("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #6', async () => { + const from = { + users: pgTable('users', { + name: text(), + }), + }; + const to = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD CONSTRAINT "unique_name" UNIQUE("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #7', async () => { + const from = { + users: pgTable('users', { + name: text(), + }), + }; + const to = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name).nullsNotDistinct()]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" ADD CONSTRAINT "unique_name" UNIQUE NULLS NOT DISTINCT("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #8', async () => { + const from = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "users" DROP CONSTRAINT "unique_name";`, + `ALTER TABLE "users" ADD CONSTRAINT "unique_name2" UNIQUE("name");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #9', async () => { + const from = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'public.users.unique_name->public.users.unique_name2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'public.users.unique_name->public.users.unique_name2', + ], + }); + + const st0 = [ + `ALTER TABLE "users" RENAME CONSTRAINT "unique_name" TO "unique_name2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #10', async () => { + const from = { + users: pgTable('users', { + name: text(), + email: text().unique(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: pgTable('users', { + name: text(), + email2: text().unique(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'public.users.email->public.users.email2', + 'public.users.unique_name->public.users.unique_name2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'public.users.email->public.users.email2', + 'public.users.unique_name->public.users.unique_name2', + ], + }); + + const st0 = [ + `ALTER TABLE "users" RENAME COLUMN "email" TO "email2";`, + `ALTER TABLE "users" RENAME CONSTRAINT "unique_name" TO "unique_name2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #11', async () => { + const from = { + users: pgTable('users', { + name: text(), + email: text(), + }, (t) => [ + unique('unique_name').on(t.name), + unique('unique_email').on(t.email), + ]), + }; + const to = { + users: pgTable('users', { + name: text(), + email: text(), + }, (t) => [ + unique('unique_name2').on(t.name), + unique('unique_email2').on(t.email), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'public.users.unique_name->public.users.unique_name2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'public.users.unique_name->public.users.unique_name2', + ], + }); + + const st0 = [ + `ALTER TABLE "users" DROP CONSTRAINT "unique_email";`, + `ALTER TABLE "users" RENAME CONSTRAINT "unique_name" TO "unique_name2";`, + `ALTER TABLE "users" ADD CONSTRAINT "unique_email2" UNIQUE("email");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* rename table, unfortunately has to trigger constraint recreate */ +test('unique #12', async () => { + const from = { + users: pgTable('users', { + name: text(), + email: text().unique(), + }), + }; + const to = { + users: pgTable('users2', { + name: text(), + email: text().unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'public.users->public.users2', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [ + 'public.users->public.users2', + ], + }); + + const st0 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #13', async () => { + const sch1 = { + users: pgTable('users', { + name: text(), + email: text().unique(), + }), + }; + const sch2 = { + users: pgTable('users2', { + name: text(), + email2: text().unique('users_email_key'), + }), + }; + + const sch3 = { + users: pgTable('users2', { + name: text(), + email2: text(), + }), + }; + + // sch1 -> sch2 + const { sqlStatements: st1, next: n1 } = await diff(sch1, sch2, [ + 'public.users->public.users2', + 'public.users2.email->public.users2.email2', + ]); + + await push({ db, to: sch1 }); + const { sqlStatements: pst1 } = await push({ + db, + to: sch2, + renames: [ + 'public.users->public.users2', + 'public.users2.email->public.users2.email2', + ], + }); + + const st10 = [ + `ALTER TABLE "users" RENAME TO "users2";`, + `ALTER TABLE "users2" RENAME COLUMN "email" TO "email2";`, + ]; + expect(st1).toStrictEqual(st10); + expect(pst1).toStrictEqual(st10); + + // sch2 -> sch3 + const { sqlStatements: st2 } = await diff(n1, sch3, []); + + const { sqlStatements: pst2 } = await push({ + db, + to: sch3, + }); + + const st20 = [ + 'ALTER TABLE "users2" DROP CONSTRAINT "users_email_key";', + ]; + expect(st2).toStrictEqual(st20); + expect(pst2).toStrictEqual(st20); +}); + +test('unique multistep #1', async () => { + const sch1 = { + users: pgTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = ['CREATE TABLE "users" (\n\t"name" text UNIQUE\n);\n']; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: pgTable('users2', { + name: text('name2').unique(), + }), + }; + + const renames = ['public.users->public.users2', 'public.users2.name->public.users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e3 = ['ALTER TABLE "users2" DROP CONSTRAINT "users_name_key";']; + + expect(pst4).toStrictEqual(e3); + expect(st4).toStrictEqual(e3); +}); + +test('unique multistep #2', async () => { + const sch1 = { + users: pgTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text UNIQUE\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text UNIQUE\n);\n']); + + const sch2 = { + users: pgTable('users2', { + name: text('name2').unique(), + }), + }; + + const r1 = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, r1); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames: r1 }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(pst2).toStrictEqual(e2); + expect(st2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [unique().on(t.name)]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + expect(st4).toStrictEqual([]); + expect(pst4).toStrictEqual([]); + + const sch4 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + expect(st5).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "users_name_key";']); + expect(pst5).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "users_name_key";']); +}); + +test('unique multistep #3', async () => { + const sch1 = { + users: pgTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text UNIQUE\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text UNIQUE\n);\n']); + + const sch2 = { + users: pgTable('users2', { + name: text('name2').unique(), + }), + }; + + const renames = ['public.users->public.users2', 'public.users2.name->public.users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [unique('name_unique').on(t.name)]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e4 = [ + 'ALTER TABLE "users2" DROP CONSTRAINT "users_name_key";', + 'ALTER TABLE "users2" ADD CONSTRAINT "name_unique" UNIQUE("name2");', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + expect(st5).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "name_unique";']); + expect(pst5).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "name_unique";']); +}); + +test('unique multistep #4', async () => { + const sch1 = { + users: pgTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text UNIQUE\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text UNIQUE\n);\n']); + + const sch2 = { + users: pgTable('users2', { + name: text('name2').unique(), + }), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2, renames }); + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [unique('name_unique').on(t.name)]), + }; + + const renames2 = ['public.users2.users_name_key->public.users2.name_unique']; + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, renames2); + const { sqlStatements: pst4 } = await push({ db, to: sch3, renames: renames2 }); + + expect(st4).toStrictEqual(['ALTER TABLE "users2" RENAME CONSTRAINT "users_name_key" TO "name_unique";']); + expect(pst4).toStrictEqual(['ALTER TABLE "users2" RENAME CONSTRAINT "users_name_key" TO "name_unique";']); + + const sch4 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + expect(st5).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "name_unique";']); + expect(pst5).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "name_unique";']); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4789 +test('unique multistep #5', async () => { + const table1 = pgTable('table1', { + column1: integer().notNull().primaryKey(), + column2: integer().notNull(), + }, (table) => [ + unique().on(table.column1, table.column2), + ]); + const table2 = pgTable('table2', { + column1: integer().notNull(), + column2: integer().notNull(), + }, (table) => [ + foreignKey({ + columns: [table.column2, table.column1], + foreignColumns: [table1.column2, table1.column1], + }), + ]); + const sch1 = { table1, table2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + const expectedSt1 = [ + 'CREATE TABLE "table1" (\n' + + '\t"column1" integer PRIMARY KEY,\n' + + '\t"column2" integer NOT NULL,\n' + + '\tCONSTRAINT "table1_column1_column2_unique" UNIQUE("column1","column2")\n' + + ');\n', + 'CREATE TABLE "table2" (\n\t"column1" integer NOT NULL,\n\t"column2" integer NOT NULL\n);\n', + 'ALTER TABLE "table2" ADD CONSTRAINT "table2_column2_column1_table1_column2_column1_fkey" FOREIGN KEY ("column2","column1") REFERENCES "table1"("column2","column1");', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, sch1, []); + const { sqlStatements: pst2 } = await push({ db, to: sch1 }); + + const expectedSt2: string[] = []; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4638 +test('uniqueIndex multistep #1', async () => { + const table1 = pgTable('table1', { + column1: integer().notNull().primaryKey(), + column2: integer().notNull(), + }, (table) => [ + uniqueIndex('table1_unique').on(table.column1, table.column2), + ]); + const table2 = pgTable('table2', { + column1: integer().notNull(), + column2: integer().notNull(), + }, (table) => [ + foreignKey({ + columns: [table.column1, table.column2], + foreignColumns: [table1.column1, table1.column2], + }), + ]); + const sch1 = { table1, table2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + const expectedSt1 = [ + 'CREATE TABLE "table1" (\n' + + '\t"column1" integer PRIMARY KEY,\n' + + '\t"column2" integer NOT NULL\n' + + ');\n', + 'CREATE TABLE "table2" (\n\t"column1" integer NOT NULL,\n\t"column2" integer NOT NULL\n);\n', + 'CREATE UNIQUE INDEX "table1_unique" ON "table1" ("column1","column2");', + 'ALTER TABLE "table2" ADD CONSTRAINT "table2_column1_column2_table1_column1_column2_fkey" FOREIGN KEY ("column1","column2") REFERENCES "table1"("column1","column2");', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, sch1, []); + const { sqlStatements: pst2 } = await push({ db, to: sch1 }); + + const expectedSt2: string[] = []; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('index multistep #1', async () => { + const sch1 = { + users: pgTable('users', { + name: text(), + }, (t) => [index().on(t.name)]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"name" text\n);\n', + 'CREATE INDEX "users_name_index" ON "users" ("name");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [index().on(t.name)]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + expect(st4).toStrictEqual(['DROP INDEX "users_name_index";']); + expect(pst4).toStrictEqual(['DROP INDEX "users_name_index";']); +}); + +test('index multistep #2', async () => { + const sch1 = { + users: pgTable('users', { + name: text(), + }, (t) => [index().on(t.name)]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"name" text\n);\n', + 'CREATE INDEX "users_name_index" ON "users" ("name");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [index().on(t.name)]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [index('name2_idx').on(t.name)]), + }; + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch3, []); + const { sqlStatements: pst3 } = await push({ db, to: sch3 }); + + const e3 = [ + 'DROP INDEX "users_name_index";', + 'CREATE INDEX "name2_idx" ON "users2" ("name2");', + ]; + expect(st3).toStrictEqual(e3); + expect(pst3).toStrictEqual(e3); + + const sch4 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch4, []); + const { sqlStatements: pst4 } = await push({ db, to: sch4 }); + expect(st4).toStrictEqual(['DROP INDEX "name2_idx";']); + expect(pst4).toStrictEqual(['DROP INDEX "name2_idx";']); +}); + +test('index multistep #3', async () => { + const sch1 = { + users: pgTable('users', { + name: text(), + }, (t) => [index().on(t.name)]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"name" text\n);\n', + 'CREATE INDEX "users_name_index" ON "users" ("name");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [index().on(t.name)]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [index('name2_idx').on(t.name)]), + }; + + const renames2 = [ + 'public.users2.users_name_index->public.users2.name2_idx', + ]; + const { sqlStatements: st3, next: n3 } = await diff(n2, sch3, renames2); + const { sqlStatements: pst3 } = await push({ db, to: sch3, renames: renames2 }); + + expect(st3).toStrictEqual(['ALTER INDEX "users_name_index" RENAME TO "name2_idx";']); + expect(pst3).toStrictEqual(['ALTER INDEX "users_name_index" RENAME TO "name2_idx";']); + + const sch4 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch4, []); + const { sqlStatements: pst4 } = await push({ db, to: sch4 }); + expect(st4).toStrictEqual(['DROP INDEX "name2_idx";']); + expect(pst4).toStrictEqual(['DROP INDEX "name2_idx";']); +}); + +test('index multistep #3', async () => { + const sch1 = { + users: pgTable('users', { + name: text(), + }, (t) => [index().on(t.name)]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"name" text\n);\n', + 'CREATE INDEX "users_name_index" ON "users" ("name");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [index().on(t.name)]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [index('name2_idx').on(t.name)]), + }; + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch3, []); + const { sqlStatements: pst3 } = await push({ db, to: sch3 }); + + const e3 = [ + 'DROP INDEX "users_name_index";', + 'CREATE INDEX "name2_idx" ON "users2" ("name2");', + ]; + expect(st3).toStrictEqual(e3); + expect(pst3).toStrictEqual(e3); + + const sch4 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch4, []); + const { sqlStatements: pst4 } = await push({ db, to: sch4 }); + + expect(st4).toStrictEqual(['DROP INDEX "name2_idx";']); + expect(pst4).toStrictEqual(['DROP INDEX "name2_idx";']); +}); + +test('pk #1', async () => { + const from = { + users: pgTable('users', { + name: text(), + }), + }; + + const to = { + users: pgTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(st).toStrictEqual(['ALTER TABLE "users" ADD PRIMARY KEY ("name");']); + expect(pst).toStrictEqual(['ALTER TABLE "users" ADD PRIMARY KEY ("name");']); +}); + +test('pk #2', async () => { + const from = { + users: pgTable('users', { + name: text().primaryKey(), + }), + }; + const to = { + users: pgTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #3', async () => { + const from = { + users: pgTable('users', { + name: text().primaryKey(), + }), + }; + const to = { + users: pgTable('users', { + name: text(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #4', async () => { + const from = { + users: pgTable('users', { + name: text(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: pgTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #5', async () => { + const from = { + users: pgTable('users', { + name: text(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const to = { + users: pgTable('users', { + name: text(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "users" DROP CONSTRAINT "users_pkey";', + 'ALTER TABLE "users" ALTER COLUMN "name" DROP NOT NULL;', + ]; + expect(sqlStatements).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #6', async () => { + const from = { + users: pgTable('users', { + name: text().primaryKey(), + }), + }; + + const to = { + users: pgTable('users', { + name: text(), + }), + }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "users" DROP CONSTRAINT "users_pkey";', + 'ALTER TABLE "users" ALTER COLUMN "name" DROP NOT NULL;', + ]; + expect(sqlStatements).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4779 +// https://github.com/drizzle-team/drizzle-orm/issues/4944 +test('pk multistep #1', async () => { + const sch1 = { + users: pgTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text PRIMARY KEY\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text PRIMARY KEY\n);\n']); + + const sch2 = { + users: pgTable('users2', { + name: text('name2').primaryKey(), + }), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const st04 = [ + 'ALTER TABLE "users2" DROP CONSTRAINT "users_pkey";', + 'ALTER TABLE "users2" ALTER COLUMN "name2" DROP NOT NULL;', + ]; + expect(st4).toStrictEqual(st04); + expect(pst4).toStrictEqual(st04); +}); + +test('pk multistep #2', async () => { + const sch1 = { + users: pgTable('users', { + name: text().primaryKey().notNull(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text PRIMARY KEY\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text PRIMARY KEY\n);\n']); + + const sch2 = { + users: pgTable('users2', { + name: text('name2').notNull(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: pgTable('users2', { + name: text('name2').notNull(), + }, (t) => [primaryKey({ name: 'users2_pk', columns: [t.name] })]), + }; + + const renames2 = ['public.users2.users_pkey->public.users2.users2_pk']; + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, renames2); + const { sqlStatements: pst4 } = await push({ db, to: sch3, renames: renames2 }); + + expect(st4).toStrictEqual(['ALTER TABLE "users2" RENAME CONSTRAINT "users_pkey" TO "users2_pk";']); + expect(pst4).toStrictEqual(['ALTER TABLE "users2" RENAME CONSTRAINT "users_pkey" TO "users2_pk";']); + + const sch4 = { + users: pgTable('users2', { + name: text('name2').notNull(), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + + expect(st5).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "users2_pk";']); + expect(pst5).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "users2_pk";']); +}); + +test('pk multistep #3', async () => { + const sch1 = { + users: pgTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text PRIMARY KEY\n);\n']); + expect(pst1).toStrictEqual(['CREATE TABLE "users" (\n\t"name" text PRIMARY KEY\n);\n']); + + const sch2 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const renames = [ + 'public.users->public.users2', + 'public.users2.name->public.users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "name" TO "name2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: pgTable('users2', { + name: text('name2'), + }, (t) => [primaryKey({ name: 'users2_pk', columns: [t.name] })]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e4 = [ + 'ALTER TABLE "users2" DROP CONSTRAINT "users_pkey";', + 'ALTER TABLE "users2" ADD CONSTRAINT "users2_pk" PRIMARY KEY("name2");', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: pgTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + + const st05 = [ + 'ALTER TABLE "users2" DROP CONSTRAINT "users2_pk";', + 'ALTER TABLE "users2" ALTER COLUMN "name2" DROP NOT NULL;', + ]; + expect(st5).toStrictEqual(st05); + expect(pst5).toStrictEqual(st05); +}); + +test('pk multistep #4', async () => { + const sch1 = { + users: pgTable('users', { + name: text().primaryKey(), + }, (t) => [ + primaryKey({ name: 'users_pk', columns: [t.name] }), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" text,\n\tCONSTRAINT "users_pk" PRIMARY KEY("name")\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE "users" (\n\t"name" text,\n\tCONSTRAINT "users_pk" PRIMARY KEY("name")\n);\n', + ]); + + const sch2 = { + users: pgTable('users2', { + name: text().primaryKey(), + }, (t) => [ + primaryKey({ name: 'users_pk', columns: [t.name] }), + ]), + }; + + const renames = ['public.users->public.users2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); +}); + +test('fk #1', async () => { + const users = pgTable('users', { + id: serial().primaryKey(), + }); + const posts = pgTable('posts', { + id: serial().primaryKey(), + authorId: integer().references(() => users.id), + }); + + const to = { + posts, + users, + }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE \"posts\" (\n\t"id" serial PRIMARY KEY,\n\t"authorId" integer\n);\n`, + `CREATE TABLE "users" (\n\t"id" serial PRIMARY KEY\n);\n`, + `ALTER TABLE "posts" ADD CONSTRAINT "posts_authorId_users_id_fkey" FOREIGN KEY ("authorId") REFERENCES "users"("id");`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// exactly 63 symbols fkey, fkey name explicit +test('fk #2', async () => { + const users = pgTable('123456789_123456789_users', { + id: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE "123456789_123456789_users" (\n\t"id" serial PRIMARY KEY,\n\t"id2" integer\n);\n`, + 'ALTER TABLE "123456789_123456789_users" ADD CONSTRAINT "123456789_123456789_users_id2_123456789_123456789_users_id_fkey" FOREIGN KEY ("id2") REFERENCES "123456789_123456789_users"("id");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// 65 symbols fkey, fkey = table_hash_fkey +test('fk #3', async () => { + const users = pgTable('1234567890_1234567890_users', { + id: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE "1234567890_1234567890_users" (\n\t"id" serial PRIMARY KEY,\n\t"id2" integer\n);\n`, + 'ALTER TABLE "1234567890_1234567890_users" ADD CONSTRAINT "1234567890_1234567890_users_2Ge3281eRCJ5_fkey" FOREIGN KEY ("id2") REFERENCES "1234567890_1234567890_users"("id");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +// >=45 length table name, fkey = hash_fkey +test('fk #4', async () => { + const users = pgTable('1234567890_1234567890_1234567890_123456_users', { + id: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE "1234567890_1234567890_1234567890_123456_users" (\n\t"id" serial PRIMARY KEY,\n\t"id2" integer\n);\n`, + 'ALTER TABLE "1234567890_1234567890_1234567890_123456_users" ADD CONSTRAINT "ydU6odH887YL_fkey" FOREIGN KEY ("id2") REFERENCES "1234567890_1234567890_1234567890_123456_users"("id");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #5', async () => { + const users = pgTable('users', { + id: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE "users" (\n\t"id" serial PRIMARY KEY,\n\t"id2" integer\n);\n`, + 'ALTER TABLE "users" ADD CONSTRAINT "users_id2_users_id_fkey" FOREIGN KEY ("id2") REFERENCES "users"("id");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #6', async () => { + const users = pgTable('users', { + id: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id), + }); + + const users2 = pgTable('users2', { + id: serial('id3').primaryKey(), + id2: integer().references((): AnyPgColumn => users2.id), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['public.users->public.users2', 'public.users2.id->public.users2.id3']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "id" TO "id3";', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #7', async () => { + const users = pgTable('users', { + id1: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id1), + }); + + const users2 = pgTable('users', { + id1: serial().primaryKey(), + id2: integer(), + }, (t) => [foreignKey({ name: 'id2_id1_fk', columns: [t.id2], foreignColumns: [t.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['public.users.users_id2_users_id1_fkey->public.users.id2_id1_fk']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE "users" RENAME CONSTRAINT "users_id2_users_id1_fkey" TO "id2_id1_fk";', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #8', async () => { + const users = pgTable('users', { + id1: serial().primaryKey(), + id2: integer().unique(), + id3: integer().references((): AnyPgColumn => users.id1), + }); + + const users2 = pgTable('users', { + id1: serial().primaryKey(), + id2: integer().unique(), + id3: integer().references((): AnyPgColumn => users.id2), + }); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + 'ALTER TABLE "users" DROP CONSTRAINT "users_id3_users_id1_fkey";', + 'ALTER TABLE "users" ADD CONSTRAINT "users_id3_users_id2_fkey" FOREIGN KEY ("id3") REFERENCES "users"("id2");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #9', async () => { + const users = pgTable('users', { + id1: serial().primaryKey(), + id2: integer().unique(), + id3: integer(), + }, (t) => [foreignKey({ name: 'fk1', columns: [t.id3], foreignColumns: [t.id1] })]); + + const users2 = pgTable('users', { + id1: serial().primaryKey(), + id2: integer().unique(), + id3: integer(), + }, (t) => [foreignKey({ name: 'fk1', columns: [t.id3], foreignColumns: [t.id2] })]); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + 'ALTER TABLE "users" DROP CONSTRAINT "fk1", ADD CONSTRAINT "fk1" FOREIGN KEY ("id3") REFERENCES "users"("id2");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #10', async () => { + const users = pgTable('users', { + id1: serial().primaryKey(), + }); + + const users2 = pgTable('users2', { + id1: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users2.id1), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['public.users->public.users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" ADD COLUMN "id2" integer;', + 'ALTER TABLE "users2" ADD CONSTRAINT "users2_id2_users2_id1_fkey" FOREIGN KEY ("id2") REFERENCES "users2"("id1");', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #11', async () => { + const users = pgTable('users', { + id1: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id1), + }); + + const users2 = pgTable('users2', { + id1: serial().primaryKey(), + id2: integer(), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['public.users->public.users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" DROP CONSTRAINT "users_id2_users_id1_fkey";', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk multistep #1', async () => { + const users = pgTable('users', { + id: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id), + }); + + const users2 = pgTable('users2', { + id: serial('id3').primaryKey(), + id2: integer().references((): AnyPgColumn => users2.id), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"id" serial PRIMARY KEY,\n\t"id2" integer\n);\n', + 'ALTER TABLE "users" ADD CONSTRAINT "users_id2_users_id_fkey" FOREIGN KEY ("id2") REFERENCES "users"("id");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const renames = ['public.users->public.users2', 'public.users2.id->public.users2.id3']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER TABLE "users2" RENAME COLUMN "id" TO "id3";', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const users3 = pgTable('users2', { + id: serial('id3').primaryKey(), + id2: integer(), + }); + const sch3 = { users: users3 }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + expect(st4).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "users_id2_users_id_fkey";']); + expect(pst4).toStrictEqual(['ALTER TABLE "users2" DROP CONSTRAINT "users_id2_users_id_fkey";']); +}); + +test('fk multistep #2', async () => { + const users = pgTable('users', { + id: serial().primaryKey(), + id2: integer().references((): AnyPgColumn => users.id), + }); + + const users2 = pgTable('users2', { + id: serial('id3').primaryKey(), + id2: integer().references((): AnyPgColumn => users2.id), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE "users" (\n\t"id" serial PRIMARY KEY,\n\t"id2" integer\n);\n', + 'ALTER TABLE "users" ADD CONSTRAINT "users_id2_users_id_fkey" FOREIGN KEY ("id2") REFERENCES "users"("id");', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, []); + const { sqlStatements: pst2 } = await push({ db, to: sch2 }); + + const e2 = [ + 'CREATE TABLE "users2" (\n\t"id3" serial PRIMARY KEY,\n\t"id2" integer\n);\n', + 'DROP TABLE "users";', + 'ALTER TABLE "users2" ADD CONSTRAINT "users2_id2_users2_id3_fkey" FOREIGN KEY ("id2") REFERENCES "users2"("id3");', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); +}); + +test('fk multistep #3', async () => { + const users = pgTable('users', { + id: serial().primaryKey(), + id2: integer(), + }, (t) => [ + foreignKey({ name: 'users_id2_id1_fkey', columns: [t.id2], foreignColumns: [t.id] }), + ]); + + const { ddl: ddl1 } = drizzleToDDL({ users }); + const { ddl: ddl2 } = drizzleToDDL({ users }); + ddl2.tables.update({ + set: { name: 'users2' }, + where: { name: 'users' }, + }); + + const { sqlStatements: st1 } = await diff(ddl1, ddl2, ['public.users->public.users2']); + expect(st1).toStrictEqual(['ALTER TABLE "users" RENAME TO "users2";']); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456#issuecomment-3076042688 +test('fk multistep #4', async () => { + const foo = pgTable('foo', { + id: integer().primaryKey(), + }); + + const bar = pgTable('bar', { + id: integer().primaryKey(), + fooId: integer().references(() => foo.id), + }); + + const schema1 = { foo, bar }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE "foo" (\n\t"id" integer PRIMARY KEY\n);\n', + 'CREATE TABLE "bar" (\n\t"id" integer PRIMARY KEY,\n\t"fooId" integer\n);\n', + 'ALTER TABLE "bar" ADD CONSTRAINT "bar_fooId_foo_id_fkey" FOREIGN KEY ("fooId") REFERENCES "foo"("id");', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + bar: pgTable('bar', { + id: integer().primaryKey(), + fooId: integer(), + }), + }; + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + const expectedSt2 = [ + 'ALTER TABLE "bar" DROP CONSTRAINT "bar_fooId_foo_id_fkey";', + 'DROP TABLE "foo";', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('unique multistep #3', async () => { + await db.query(`CREATE TABLE "users" ("id" integer CONSTRAINT "id_uniq" UNIQUE);`); + const interim = await fromDatabase(db); + const { ddl: ddl1 } = interimToDDL(interim); + const { ddl: ddl2 } = interimToDDL(interim); + + ddl2.tables.update({ + set: { name: 'users2' }, + where: { name: 'users' }, + }); + + const { sqlStatements: st1 } = await diff(ddl1, ddl2, ['public.users->public.users2']); + expect(st1).toStrictEqual(['ALTER TABLE "users" RENAME TO "users2";']); +}); + +test('constraints order', async () => { + const users = pgTable('users', { + col1: text(), + col2: text(), + }, (t) => [ + unique().on(t.col1, t.col2), + ]); + + const posts = pgTable('posts', { + col1: text(), + col2: text(), + }, (t) => [ + foreignKey({ columns: [t.col1, t.col2], foreignColumns: [users.col1, users.col2] }), + ]); + + const to = { + users, + posts, + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); +}); + +test('generated + fk', async (t) => { + const table1 = pgTable( + 'table_with_gen', + { + column1: timestamp('column1'), + column2: timestamp('column2'), + bool: boolean('bool') + .generatedAlwaysAs( + (): SQL => and(isNull(table1.column1))!, + ).unique() + .notNull(), + }, + ); + const table = pgTable('table', { bool: boolean().references(() => table1.bool) }); + + const schema1 = { tableWithGen: table1, table }; + + const table2 = pgTable( + 'table_with_gen', + { + column1: timestamp('column1'), + column2: timestamp('column2'), + bool: boolean('bool') + .generatedAlwaysAs( + (): SQL => and(isNull(table1.column2))!, + ).unique() + .notNull(), + }, + ); + const schema2 = { tableWithGen: table2, table }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'ALTER TABLE "table" DROP CONSTRAINT "table_bool_table_with_gen_bool_fkey";', + `ALTER TABLE \"table_with_gen\" DROP COLUMN \"bool\";`, + `ALTER TABLE \"table_with_gen\" ADD COLUMN \"bool\" boolean GENERATED ALWAYS AS ("table_with_gen"."column2" is null) STORED;`, + 'ALTER TABLE "table_with_gen" ADD CONSTRAINT "table_with_gen_bool_key" UNIQUE("bool");', + 'ALTER TABLE "table" ADD CONSTRAINT "table_bool_table_with_gen_bool_fkey" FOREIGN KEY ("bool") REFERENCES "table_with_gen"("bool");', + ]); + // push is not triggered on generated change + expect(pst).toStrictEqual([]); +}); +test('generated + unique', async (t) => { + const table1 = pgTable( + 'table', + { + uid: uuid('uid').notNull(), + column1: timestamp('column1'), + column2: timestamp('column2'), + bool: boolean('bool') + .generatedAlwaysAs( + (): SQL => and(isNull(table1.column1), isNull(table1.column2))!, + ).unique() + .notNull(), + }, + ); + const schema1 = { table: table1 }; + + const table2 = pgTable( + 'table', + { + uid: uuid('uid').notNull(), + column1: timestamp('column1'), + column3: timestamp('column3'), + bool: boolean('bool') + .generatedAlwaysAs( + (): SQL => and(isNull(table2.column1), isNull(table2.column3))!, + ).unique() + .notNull(), + }, + ); + const schema2 = { table: table2 }; + + const renames = ['public.table.column2->public.table.column3']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + expect(st).toStrictEqual([ + `ALTER TABLE \"table\" RENAME COLUMN \"column2\" TO \"column3\";`, + `ALTER TABLE \"table\" DROP COLUMN \"bool\";`, + `ALTER TABLE \"table\" ADD COLUMN \"bool\" boolean GENERATED ALWAYS AS ((\"table\".\"column1\" is null and \"table\".\"column3\" is null)) STORED;`, + 'ALTER TABLE "table" ADD CONSTRAINT "table_bool_key" UNIQUE("bool");', + ]); + // push is not triggered on generated change + expect(pst).toStrictEqual([ + `ALTER TABLE \"table\" RENAME COLUMN \"column2\" TO \"column3\";`, + ]); +}); +test('generated + pk', async (t) => { + const table1 = pgTable( + 'table', + { + uid: uuid('uid').notNull(), + column1: timestamp('column1'), + column2: timestamp('column2'), + bool: boolean('bool') + .generatedAlwaysAs( + (): SQL => and(isNull(table1.column1), isNull(table1.column2))!, + ).primaryKey() + .notNull(), + }, + ); + const schema1 = { table: table1 }; + + const table2 = pgTable( + 'table', + { + uid: uuid('uid').notNull(), + column1: timestamp('column1'), + column3: timestamp('column3'), + bool: boolean('bool') + .generatedAlwaysAs( + (): SQL => and(isNull(table2.column1), isNull(table2.column3))!, + ).primaryKey() + .notNull(), + }, + ); + const schema2 = { table: table2 }; + + const renames = ['public.table.column2->public.table.column3']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + expect(st).toStrictEqual([ + `ALTER TABLE \"table\" RENAME COLUMN \"column2\" TO \"column3\";`, + `ALTER TABLE \"table\" DROP COLUMN \"bool\";`, + `ALTER TABLE \"table\" ADD COLUMN \"bool\" boolean PRIMARY KEY GENERATED ALWAYS AS ((\"table\".\"column1\" is null and \"table\".\"column3\" is null)) STORED;`, + ]); + // push is not triggered on generated change + expect(pst).toStrictEqual([ + `ALTER TABLE \"table\" RENAME COLUMN \"column2\" TO \"column3\";`, + ]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4456 +test('drop column with pk and add pk to another column #1', async () => { + const schema1 = { + authors: pgTable('authors', { + publicationId: varchar('publication_id', { length: 64 }), + authorID: varchar('author_id', { length: 10 }), + }, (table) => [ + primaryKey({ columns: [table.publicationId, table.authorID] }), + ]), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE "authors" (\n\t"publication_id" varchar(64),\n\t"author_id" varchar(10),' + + '\n\tCONSTRAINT "authors_pkey" PRIMARY KEY("publication_id","author_id")\n);\n', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const schema2 = { + authors: pgTable('authors', { + publicationId: varchar('publication_id', { length: 64 }), + authorID: varchar('author_id', { length: 10 }), + orcidId: varchar('orcid_id', { length: 64 }), + }, (table) => [ + primaryKey({ columns: [table.publicationId, table.authorID, table.orcidId] }), + ]), + }; + + const { sqlStatements: st2 } = await diff(n1, schema2, []); + const { sqlStatements: pst2 } = await push({ db, to: schema2 }); + + const expectedSt2: string[] = [ + 'ALTER TABLE "authors" ADD COLUMN "orcid_id" varchar(64);', + 'ALTER TABLE "authors" DROP CONSTRAINT "authors_pkey";', + 'ALTER TABLE "authors" ADD PRIMARY KEY ("publication_id","author_id","orcid_id");', + ]; + + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); diff --git a/drizzle-kit/tests/postgres/pg-defaults.test.ts b/drizzle-kit/tests/postgres/pg-defaults.test.ts new file mode 100644 index 0000000000..fc9d352625 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-defaults.test.ts @@ -0,0 +1,1753 @@ +import { sql } from 'drizzle-orm'; +import { + bigint, + bigserial, + bit, + boolean, + char, + date, + doublePrecision, + geometry, + halfvec, + inet, + integer, + interval, + json, + jsonb, + line, + macaddr, + macaddr8, + numeric, + pgEnum, + point, + real, + serial, + smallint, + smallserial, + sparsevec, + text, + time, + timestamp, + uuid, + varchar, + vector, +} from 'drizzle-orm/pg-core'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diffDefault, preparePostgisTestDatabase, prepareTestDatabase, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} + +let _: TestDatabase; + +beforeAll(async () => { + _ = await prepareTestDatabase(); +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('integer', async () => { + const res1 = await diffDefault(_, integer().default(10), '10'); + const res2 = await diffDefault(_, integer().default(0), '0'); + const res3 = await diffDefault(_, integer().default(-10), '-10'); + const res4 = await diffDefault(_, integer().default(1e4), '10000'); + const res5 = await diffDefault(_, integer().default(-1e4), '-10000'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); +}); + +test('integer arrays', async () => { + const res1 = await diffDefault(_, integer().array().default([]), "'{}'::integer[]"); + const res2 = await diffDefault(_, integer().array().default([10]), "'{10}'::integer[]"); + const res3 = await diffDefault(_, integer().array().array().default([]), "'{}'::integer[]"); + const res4 = await diffDefault(_, integer().array().array().default([[]]), "'{}'::integer[]"); + const res5 = await diffDefault(_, integer().array().array().default([[1, 2]]), "'{{1,2}}'::integer[]"); + const res6 = await diffDefault(_, integer().array().array().default([[1, 2], [1, 2]]), "'{{1,2},{1,2}}'::integer[]"); + const res7 = await diffDefault( + _, + integer().array().array().array().default([[[1, 2]], [[1, 2]]]), + "'{{{1,2}},{{1,2}}}'::integer[]", + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); +}); + +test('smallint', async () => { + // 2^15 - 1 + const res1 = await diffDefault(_, smallint().default(32767), '32767'); + // -2^15 + const res2 = await diffDefault(_, smallint().default(-32768), '-32768'); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); +}); + +test('smallint arrays', async () => { + const res1 = await diffDefault(_, smallint().array().default([]), "'{}'::smallint[]"); + const res2 = await diffDefault(_, smallint().array().default([32767]), "'{32767}'::smallint[]"); + const res3 = await diffDefault(_, smallint().array().array().default([]), "'{}'::smallint[]"); + const res4 = await diffDefault(_, smallint().array().array().default([[]]), "'{}'::smallint[]"); + const res5 = await diffDefault(_, smallint().array().array().default([[1, 2]]), "'{{1,2}}'::smallint[]"); + const res6 = await diffDefault( + _, + smallint().array().array().default([[1, 2], [1, 2]]), + "'{{1,2},{1,2}}'::smallint[]", + ); + const res7 = await diffDefault( + _, + smallint().array().array().array().default([[[1, 2]], [[1, 2]]]), + "'{{{1,2}},{{1,2}}}'::smallint[]", + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); +}); + +test('bigint', async () => { + // 2^53 + const res1 = await diffDefault(_, bigint({ mode: 'number' }).default(9007199254740991), '9007199254740991'); + const res2 = await diffDefault(_, bigint({ mode: 'number' }).default(-9007199254740991), '-9007199254740991'); + // 2^63 - 1 + const res3 = await diffDefault(_, bigint({ mode: 'bigint' }).default(9223372036854775807n), '9223372036854775807'); + // -2^63 + const res4 = await diffDefault( + _, + bigint({ mode: 'bigint' }).default(-9223372036854775808n), + '-9223372036854775808', + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('bigint arrays', async () => { + const res1 = await diffDefault(_, bigint({ mode: 'number' }).array().default([]), "'{}'::bigint[]"); + const res2 = await diffDefault(_, bigint({ mode: 'bigint' }).array().default([]), "'{}'::bigint[]"); + + const res3 = await diffDefault( + _, + bigint({ mode: 'number' }).array().default([9007199254740991]), + "'{9007199254740991}'::bigint[]", + ); + const res4 = await diffDefault( + _, + bigint({ mode: 'bigint' }).array().default([9223372036854775807n]), + "'{9223372036854775807}'::bigint[]", + ); + + const res5 = await diffDefault(_, bigint({ mode: 'number' }).array().array().default([]), "'{}'::bigint[]"); + const res6 = await diffDefault(_, bigint({ mode: 'bigint' }).array().array().default([]), "'{}'::bigint[]"); + + const res7 = await diffDefault(_, bigint({ mode: 'number' }).array().array().default([[]]), "'{}'::bigint[]"); + const res8 = await diffDefault(_, bigint({ mode: 'bigint' }).array().array().default([[]]), "'{}'::bigint[]"); + + const res9 = await diffDefault( + _, + bigint({ mode: 'number' }).array().array().default([[1, 2], [1, 2]]), + "'{{1,2},{1,2}}'::bigint[]", + ); + const res10 = await diffDefault( + _, + bigint({ mode: 'bigint' }).array().array().default([[1n, 2n], [1n, 2n]]), + "'{{1,2},{1,2}}'::bigint[]", + ); + + const res11 = await diffDefault( + _, + bigint({ mode: 'number' }).array().array().array().default([[[1, 2]], [[1, 2]]]), + "'{{{1,2}},{{1,2}}}'::bigint[]", + ); + const res12 = await diffDefault( + _, + bigint({ mode: 'bigint' }).array().array().array().default([[[1n, 2n]], [[1n, 2n]]]), + "'{{{1,2}},{{1,2}}}'::bigint[]", + ); + + const res13 = await diffDefault(_, bigint({ mode: 'bigint' }).array().default(sql`'{}'`), "'{}'::bigint[]"); + const res14 = await diffDefault(_, bigint({ mode: 'bigint' }).array().default(sql`'{}'::bigint[]`), "'{}'::bigint[]"); + const res15 = await diffDefault( + _, + bigint({ mode: 'bigint' }).array().default(sql`'{9223372036854775807}'::bigint[]`), + "'{9223372036854775807}'::bigint[]", + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); +}); + +test('serials', async () => { + const res1 = await diffDefault(_, serial(), ''); + const res2 = await diffDefault(_, smallserial(), ''); + const res3 = await diffDefault(_, bigserial({ mode: 'number' }), ''); + const res4 = await diffDefault(_, bigserial({ mode: 'bigint' }), ''); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('numeric', async () => { + const res1 = await diffDefault(_, numeric().default('10.123'), "'10.123'"); + + const res4 = await diffDefault(_, numeric({ mode: 'string' }).default('10.123'), "'10.123'"); + const res2 = await diffDefault(_, numeric({ mode: 'bigint' }).default(9223372036854775807n), "'9223372036854775807'"); + const res3 = await diffDefault(_, numeric({ mode: 'number' }).default(9007199254740991), "'9007199254740991'"); + + const res5 = await diffDefault(_, numeric({ precision: 6 }).default('10.123'), "'10.123'"); + const res6 = await diffDefault(_, numeric({ precision: 6, scale: 2 }).default('10.123'), "'10.123'"); + + const res7 = await diffDefault(_, numeric({ mode: 'string', scale: 2 }).default('10.123'), "'10.123'"); + const res8 = await diffDefault(_, numeric({ mode: 'string', precision: 6 }).default('10.123'), "'10.123'"); + const res9 = await diffDefault(_, numeric({ mode: 'string', precision: 6, scale: 2 }).default('10.123'), "'10.123'"); + + const res10 = await diffDefault( + _, + numeric({ mode: 'bigint', precision: 19 }).default(9223372036854775807n), + "'9223372036854775807'", + ); + const res11 = await diffDefault(_, numeric({ mode: 'number', precision: 6, scale: 2 }).default(10.123), "'10.123'"); + const res12 = await diffDefault(_, numeric({ mode: 'number', scale: 2 }).default(10.123), "'10.123'"); + const res13 = await diffDefault(_, numeric({ mode: 'number', precision: 6 }).default(10.123), "'10.123'"); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); +}); + +test('numeric arrays', async () => { + const res1 = await diffDefault(_, numeric({ mode: 'number' }).array().default([]), "'{}'::numeric[]"); + const res2 = await diffDefault( + _, + numeric({ mode: 'number', precision: 4, scale: 2 }).array().default([]), + "'{}'::numeric(4,2)[]", + ); + const res3 = await diffDefault(_, numeric({ mode: 'bigint' }).array().default([]), "'{}'::numeric[]"); + const res4 = await diffDefault( + _, + numeric({ mode: 'bigint', precision: 4 }).array().default([]), + "'{}'::numeric(4)[]", + ); + const res5 = await diffDefault(_, numeric({ mode: 'string' }).array().default([]), "'{}'::numeric[]"); + const res6 = await diffDefault( + _, + numeric({ mode: 'string', precision: 4, scale: 2 }).array().default([]), + "'{}'::numeric(4,2)[]", + ); + + const res7 = await diffDefault( + _, + numeric({ mode: 'number' }).array().default([10.123, 123.10]), + "'{10.123,123.1}'::numeric[]", // .1 due to number->string conversion + ); + + const res8 = await diffDefault( + _, + numeric({ mode: 'number', precision: 6, scale: 2 }).array().default([10.123, 123.10]), + "'{10.123,123.1}'::numeric(6,2)[]", // .1 due to number->string conversion + ); + const res9 = await diffDefault( + _, + numeric({ mode: 'bigint' }).array().default([9223372036854775807n, 9223372036854775806n]), + "'{9223372036854775807,9223372036854775806}'::numeric[]", + ); + const res10 = await diffDefault( + _, + numeric({ mode: 'bigint', precision: 19 }).array().default([9223372036854775807n, 9223372036854775806n]), + "'{9223372036854775807,9223372036854775806}'::numeric(19)[]", + ); + const res11 = await diffDefault( + _, + numeric({ mode: 'string' }).array().default(['10.123', '123.10']), + "'{10.123,123.10}'::numeric[]", + ); + const res12 = await diffDefault( + _, + numeric({ mode: 'string', precision: 6, scale: 2 }).array().default(['10.123', '123.10']), + "'{10.123,123.10}'::numeric(6,2)[]", + ); + + const res13 = await diffDefault(_, numeric({ mode: 'string' }).array().array().default([]), "'{}'::numeric[]"); + const res14 = await diffDefault( + _, + numeric({ mode: 'string', precision: 4, scale: 2 }).array().array().default([]), + "'{}'::numeric(4,2)[]", + ); + const res15 = await diffDefault(_, numeric({ mode: 'number' }).array().array().default([]), "'{}'::numeric[]"); + const res16 = await diffDefault( + _, + numeric({ mode: 'number', precision: 4, scale: 2 }).array().array().default([]), + "'{}'::numeric(4,2)[]", + ); + const res17 = await diffDefault(_, numeric({ mode: 'bigint' }).array().array().default([]), "'{}'::numeric[]"); + const res18 = await diffDefault( + _, + numeric({ mode: 'bigint', precision: 4 }).array().array().default([]), + "'{}'::numeric(4)[]", + ); + const res19 = await diffDefault( + _, + numeric({ mode: 'string' }).array().array().default([['10.123', '123.10'], ['10.123', '123.10']]), + "'{{10.123,123.10},{10.123,123.10}}'::numeric[]", + ); + const res20 = await diffDefault( + _, + numeric({ mode: 'string', precision: 6, scale: 2 }).array().array().default([['10.123', '123.10'], [ + '10.123', + '123.10', + ]]), + "'{{10.123,123.10},{10.123,123.10}}'::numeric(6,2)[]", + ); + + const res23 = await diffDefault( + _, + numeric({ mode: 'bigint' }).array().array().default([[9223372036854775807n, 9223372036854775806n], [ + 9223372036854775807n, + 9223372036854775806n, + ]]), + "'{{9223372036854775807,9223372036854775806},{9223372036854775807,9223372036854775806}}'::numeric[]", + ); + const res24 = await diffDefault( + _, + numeric({ mode: 'bigint', precision: 19 }).array().array().default([[9223372036854775807n, 9223372036854775806n], [ + 9223372036854775807n, + 9223372036854775806n, + ]]), + "'{{9223372036854775807,9223372036854775806},{9223372036854775807,9223372036854775806}}'::numeric(19)[]", + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); + expect.soft(res16).toStrictEqual([]); + expect.soft(res17).toStrictEqual([]); + expect.soft(res18).toStrictEqual([]); + expect.soft(res19).toStrictEqual([]); + expect.soft(res20).toStrictEqual([]); + + expect.soft(res23).toStrictEqual([]); + expect.soft(res24).toStrictEqual([]); +}); + +test('real + real arrays', async () => { + const res1 = await diffDefault(_, real().default(1000.123), '1000.123'); + + const res2 = await diffDefault(_, real().array().default([]), `'{}'::real[]`); + const res3 = await diffDefault(_, real().array().default([1000.123, 10.2]), `'{1000.123,10.2}'::real[]`); + + const res4 = await diffDefault(_, real().array().array().default([]), `'{}'::real[]`); + const res5 = await diffDefault( + _, + real().array().array().default([[1000.123, 10.2], [1000.123, 10.2]]), + `'{{1000.123,10.2},{1000.123,10.2}}'::real[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); +}); + +test('doublePrecision + doublePrecision arrays', async () => { + const res1 = await diffDefault(_, doublePrecision().default(10000.123), '10000.123'); + + const res2 = await diffDefault(_, doublePrecision().array().default([]), `'{}'::double precision[]`); + const res3 = await diffDefault( + _, + doublePrecision().array().default([10000.123]), + `'{10000.123}'::double precision[]`, + ); + + const res4 = await diffDefault(_, doublePrecision().array().array().default([]), `'{}'::double precision[]`); + const res5 = await diffDefault( + _, + doublePrecision().array().array().default([[10000.123, 10.1], [10000.123, 10.1]]), + `'{{10000.123,10.1},{10000.123,10.1}}'::double precision[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); +}); + +test('boolean + boolean arrays', async () => { + const res1 = await diffDefault(_, boolean().default(true), 'true'); + const res2 = await diffDefault(_, boolean().default(false), 'false'); + const res3 = await diffDefault(_, boolean().default(sql`true`), 'true'); + + const res4 = await diffDefault(_, boolean().array().default([]), `'{}'::boolean[]`); + const res5 = await diffDefault(_, boolean().array().default([true]), `'{t}'::boolean[]`); + + const res6 = await diffDefault(_, boolean().array().array().default([]), `'{}'::boolean[]`); + const res7 = await diffDefault(_, boolean().array().array().default([[true], [false]]), `'{{t},{f}}'::boolean[]`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); +}); + +test('char + char arrays', async () => { + const res1 = await diffDefault(_, char({ length: 15 }).default('text'), `'text'`); + const res2 = await diffDefault(_, char({ length: 15 }).default("text'text"), `'text''text'`); + const res3 = await diffDefault(_, char({ length: 15 }).default('text\'text"'), "'text''text\"'"); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res4 = await diffDefault(_, char({ length: 15 }).default(`mo''",\\\`}{od`), `'mo''''",\\\\\`}{od'`); + const res5 = await diffDefault(_, char({ length: 15, enum: ['one', 'two', 'three'] }).default('one'), "'one'"); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res6 = await diffDefault( + _, + char({ length: 15, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\\\`}{od`, 'mo,\`od'] }).default( + `mo''",\\\`}{od`, + ), + `'mo''''\",\\\\\`}{od'`, + ); + + const res7 = await diffDefault(_, char({ length: 15 }).array().default([]), `'{}'::char(15)[]`); + const res8 = await diffDefault(_, char({ length: 15 }).array().default(['text']), `'{text}'::char(15)[]`); + // raw default sql for the line below: '{text''\\text}'::char(15)[]; + const res9 = await diffDefault( + _, + char({ length: 15 }).array().default(['\\']), + `'{"\\\\"}'::char(15)[]`, + ); + const res10 = await diffDefault( + _, + char({ length: 15 }).array().default(["'"]), + `'{''}'::char(15)[]`, + ); + const res11 = await diffDefault( + _, + char({ length: 15, enum: ['one', 'two', 'three'] }).array().default(['one']), + `'{one}'::char(15)[]`, + ); + const res12 = await diffDefault( + _, + char({ length: 15, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).array().default( + [`mo''",\`}{od`], + ), + `'{"mo''''\\\",\`\}\{od"}'::char(15)[]`, + ); + + const res13 = await diffDefault(_, char({ length: 15 }).array().array().default([]), `'{}'::char(15)[]`); + // raw default sql for the line below: '{{text\\},{text}}'::text[] + const res14 = await diffDefault( + _, + char({ length: 15 }).array().array().default([['text\\'], ['text']]), + `'{{"text\\\\"},{text}}'::char(15)[]`, + ); + const res15 = await diffDefault( + _, + char({ length: 15, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).array().array() + .default( + [[`mo''",\`}{od`], [`mo''",\`}{od`]], + ), + `'{{"mo''''\\\",\`\}\{od"},{"mo''''\\\",\`\}\{od"}}'::char(15)[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); +}); + +test('varchar + varchar arrays', async () => { + const res1 = await diffDefault(_, varchar({ length: 256 }).default('text'), `'text'`); + const res2 = await diffDefault(_, varchar({ length: 256 }).default("text'text"), `'text''text'`); + const res3 = await diffDefault(_, varchar({ length: 256 }).default('text\'text"'), "'text''text\"'"); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res4 = await diffDefault(_, varchar({ length: 256 }).default(`mo''",\\\`}{od`), `'mo''''",\\\\\`}{od'`); + const res5 = await diffDefault(_, varchar({ length: 256, enum: ['one', 'two', 'three'] }).default('one'), "'one'"); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res6 = await diffDefault( + _, + varchar({ length: 256, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\\\`}{od`, 'mo,\`od'] }).default( + `mo''",\\\`}{od`, + ), + `'mo''''",\\\\\`}{od'`, + ); + + const res7 = await diffDefault(_, varchar({ length: 256 }).array().default([]), `'{}'::varchar(256)[]`); + const res8 = await diffDefault(_, varchar({ length: 256 }).array().default(['text']), `'{text}'::varchar(256)[]`); + // raw default sql for the line below: '{text''\\text}'::varchar[]; + const res9 = await diffDefault( + _, + varchar({ length: 256 }).array().default(["text'\\text"]), + `'{"text''\\\\text"}'::varchar(256)[]`, + ); + const res10 = await diffDefault( + _, + varchar({ length: 256 }).array().default(['text\'text"']), + `'{"text''text\\\""}'::varchar(256)[]`, + ); + const res11 = await diffDefault( + _, + varchar({ length: 256, enum: ['one', 'two', 'three'] }).array().default(['one']), + `'{one}'::varchar(256)[]`, + ); + const res12 = await diffDefault( + _, + varchar({ length: 256, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).array().default( + [`mo''",\`}{od`], + ), + `'{"mo''''\\\",\`\}\{od"}'::varchar(256)[]`, + ); + + const res13 = await diffDefault(_, varchar({ length: 256 }).array().array().default([]), `'{}'::varchar(256)[]`); + // raw default sql for the line below: '{{text\\},{text}}'::varchar[] + const res14 = await diffDefault( + _, + varchar({ length: 256 }).array().array().default([['text\\'], ['text']]), + `'{{"text\\\\"},{text}}'::varchar(256)[]`, + ); + const res15 = await diffDefault( + _, + varchar({ length: 256, enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).array().array() + .default( + [[`mo''",\`}{od`], [`mo''",\`}{od`]], + ), + `'{{"mo''''\\\",\`\}\{od"},{"mo''''\\\",\`\}\{od"}}'::varchar(256)[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); +}); + +test('text + text arrays', async () => { + const res1 = await diffDefault(_, text().default('text'), `'text'`); + const res2 = await diffDefault(_, text().default("text'text"), `'text''text'`); + const res3 = await diffDefault(_, text().default('text\'text"'), "'text''text\"'"); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res4 = await diffDefault(_, text().default(`mo''",\\\`}{od`), `'mo''''",\\\\\`}{od'`); + const res5 = await diffDefault(_, text({ enum: ['one', 'two', 'three'] }).default('one'), "'one'"); + // raw default sql for the line below: 'mo''''",\`}{od'; + const res6 = await diffDefault( + _, + text({ enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\\\`}{od`, 'mo,\`od'] }).default( + `mo''",\\\`}{od`, + ), + `'mo''''",\\\\\`}{od'`, + ); + + const res7 = await diffDefault(_, text().array().default([]), `'{}'::text[]`); + const res8 = await diffDefault(_, text().array().default(['text']), `'{text}'::text[]`); + // raw default sql for the line below: '{text''\\text}'::text[]; + const res9 = await diffDefault( + _, + text().array().default(["text'\\text"]), + `'{"text''\\\\text"}'::text[]`, + ); + const res10 = await diffDefault( + _, + text().array().default([`text'text"`]), + `'{"text''text\\""}'::text[]`, + ); + const res11 = await diffDefault( + _, + text({ enum: ['one', 'two', 'three'] }).array().default(['one']), + `'{one}'::text[]`, + ); + + const res12 = await diffDefault(_, text().array().array().default([]), `'{}'::text[]`); + // raw default sql for the line below: '{{text\\},{text}}'::text[] + const res13 = await diffDefault( + _, + text().array().array().default([['text\\'], ['text']]), + `'{{"text\\\\"},{text}}'::text[]`, + ); + + const res14 = await diffDefault(_, text().default(sql`gen_random_uuid()`), `gen_random_uuid()`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); +}); + +test('json + json arrays', async () => { + const res1 = await diffDefault(_, json().default({}), `'{}'`); + const res2 = await diffDefault(_, json().default([]), `'[]'`); + const res3 = await diffDefault(_, json().default([1, 2, 3]), `'[1,2,3]'`); + const res4 = await diffDefault(_, json().default({ key: 'value' }), `'{"key":"value"}'`); + const res5 = await diffDefault(_, json().default({ key: "val'ue" }), `'{"key":"val''ue"}'`); + const res6 = await diffDefault(_, json().default({ key: `mo''",\`}{od` }), `'{"key":"mo''''\\\",\`}{od"}'`); + const res7 = await diffDefault(_, json().default({ key: 'mo",\\`}{od' }), `'{"key":"mo\\\",\\\\\`}{od"}'`); + + const res8 = await diffDefault(_, json().array().default([]), `'{}'::json[]`); + const res9 = await diffDefault( + _, + json().array().default([{ key: 'value' }]), + `'{"{\\"key\\":\\"value\\"}"}'::json[]`, + ); + const res10 = await diffDefault( + _, + json().array().default([{ key: "val'ue" }]), + `'{"{\\"key\\":\\"val''ue\\"}"}'::json[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); +}); + +test('jsonb + jsonb arrays', async () => { + const res1 = await diffDefault(_, jsonb().default({}), `'{}'`); + const res2 = await diffDefault(_, jsonb().default([]), `'[]'`); + const res3 = await diffDefault(_, jsonb().default([1, 2, 3]), `'[1,2,3]'`); + const res4 = await diffDefault(_, jsonb().default({ key: 'value' }), `'{"key":"value"}'`); + const res5 = await diffDefault(_, jsonb().default({ key: "val'ue" }), `'{"key":"val''ue"}'`); + const res6 = await diffDefault(_, jsonb().default({ key: `mo''",\`}{od` }), `'{"key":"mo''''\\\",\`}{od"}'`); + const res7 = await diffDefault(_, jsonb().default({ key: 'mo",\\`}{od' }), `'{"key":"mo\\\",\\\\\`}{od"}'`); + + const res8 = await diffDefault(_, jsonb().array().default([]), `'{}'::jsonb[]`); + const res12 = await diffDefault(_, jsonb().array().array().default([]), `'{}'::jsonb[]`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); +}); + +test('timestamp + timestamp arrays', async () => { + const res1 = await diffDefault( + _, + timestamp({ mode: 'date' }).default(new Date('2025-05-23T12:53:53.115Z')), + `'2025-05-23 12:53:53.115'`, + ); + const res2 = await diffDefault( + _, + timestamp({ mode: 'date', precision: 3, withTimezone: true }).default(new Date('2025-05-23T12:53:53.115Z')), + `'2025-05-23 12:53:53.115+00'`, + ); + const res3 = await diffDefault( + _, + timestamp({ mode: 'string' }).default('2025-05-23 12:53:53.115'), + `'2025-05-23 12:53:53.115'`, + ); + const res4 = await diffDefault( + _, + timestamp({ mode: 'string', precision: 3, withTimezone: true }).default('2025-05-23 12:53:53.115'), + `'2025-05-23 12:53:53.115+00'`, + ); + const res5 = await diffDefault(_, timestamp().defaultNow(), `now()`); + const res6 = await diffDefault( + _, + timestamp({ mode: 'date', precision: 3, withTimezone: true }).defaultNow(), + `now()`, + ); + + const res7 = await diffDefault(_, timestamp({ mode: 'date' }).array().default([]), `'{}'::timestamp[]`); + const res8 = await diffDefault( + _, + timestamp({ mode: 'date', precision: 3, withTimezone: true }).array().default([]), + `'{}'::timestamp(3) with time zone[]`, + ); + const res9 = await diffDefault( + _, + timestamp({ mode: 'date', precision: 5 }).array().default([new Date('2025-05-23T12:53:53.115Z')]), + `'{"2025-05-23 12:53:53.115"}'::timestamp(5)[]`, + ); + const res10 = await diffDefault( + _, + timestamp({ mode: 'date', precision: 3, withTimezone: true }).array().default([ + new Date('2025-05-23T12:53:53.115Z'), + ]), + `'{"2025-05-23 12:53:53.115+00"}'::timestamp(3) with time zone[]`, + ); + + const res11 = await diffDefault(_, timestamp({ mode: 'string' }).array().default([]), `'{}'::timestamp[]`); + const res12 = await diffDefault( + _, + timestamp({ mode: 'string', precision: 3, withTimezone: true }).array().default([]), + `'{}'::timestamp(3) with time zone[]`, + ); + const res13 = await diffDefault( + _, + timestamp({ mode: 'string' }).array().default(['2025-05-23 12:53:53.115']), + `'{"2025-05-23 12:53:53.115"}'::timestamp[]`, + ); + const res14 = await diffDefault( + _, + timestamp({ mode: 'string', precision: 4, withTimezone: true }).array().default(['2025-05-23 12:53:53.115+03:00']), + `'{"2025-05-23 12:53:53.115+03:00"}'::timestamp(4) with time zone[]`, + ); + const res14_1 = await diffDefault( + _, + timestamp({ mode: 'string', precision: 4, withTimezone: true }).default('2025-05-23 12:53:53.115+03:00'), + `'2025-05-23 12:53:53.115+03:00'`, + ); + + const res15 = await diffDefault(_, timestamp({ mode: 'date' }).array().array().default([]), `'{}'::timestamp[]`); + const res16 = await diffDefault( + _, + timestamp({ mode: 'date', precision: 3, withTimezone: true }).array().array().default([]), + `'{}'::timestamp(3) with time zone[]`, + ); + const res17 = await diffDefault( + _, + timestamp({ mode: 'date' }).array().array().default([[new Date('2025-05-23T12:53:53.115Z')]]), + `'{{"2025-05-23 12:53:53.115"}}'::timestamp[]`, + ); + const res18 = await diffDefault( + _, + timestamp({ mode: 'date', precision: 3, withTimezone: true }).array().array().default([[ + new Date('2025-05-23T12:53:53.115Z'), + ]]), + `'{{"2025-05-23 12:53:53.115+00"}}'::timestamp(3) with time zone[]`, + ); + + const res19 = await diffDefault(_, timestamp({ mode: 'string' }).array().array().default([]), `'{}'::timestamp[]`); + const res20 = await diffDefault( + _, + timestamp({ mode: 'string', precision: 3, withTimezone: true }).array().array().default([]), + `'{}'::timestamp(3) with time zone[]`, + ); + const res21 = await diffDefault( + _, + timestamp({ mode: 'string' }).array().array().default([['2025-05-23 12:53:53.115']]), + `'{{"2025-05-23 12:53:53.115"}}'::timestamp[]`, + ); + const res22 = await diffDefault( + _, + timestamp({ mode: 'string', precision: 3, withTimezone: true }).array().array().default([[ + '2025-05-23 12:53:53.115', + ]]), + `'{{"2025-05-23 12:53:53.115+00"}}'::timestamp(3) with time zone[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); + expect.soft(res14_1).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); + expect.soft(res16).toStrictEqual([]); + expect.soft(res17).toStrictEqual([]); + expect.soft(res18).toStrictEqual([]); + expect.soft(res19).toStrictEqual([]); + expect.soft(res20).toStrictEqual([]); + expect.soft(res21).toStrictEqual([]); + expect.soft(res22).toStrictEqual([]); +}); + +test('time + time arrays', async () => { + const res1 = await diffDefault(_, time().default('15:50:33'), `'15:50:33'`); + const res2 = await diffDefault( + _, + time({ precision: 3, withTimezone: true }).default('15:50:33.123+00'), + `'15:50:33.123+00'`, + ); + const res3 = await diffDefault( + _, + time({ precision: 3, withTimezone: true }).default('15:50:33.123'), + `'15:50:33.123+00'`, + ); + const res4 = await diffDefault( + _, + time({ precision: 3, withTimezone: true }).default('15:50:33.123+03'), + `'15:50:33.123+03'`, + ); + const res5 = await diffDefault(_, time().defaultNow(), `now()`); + const res6 = await diffDefault(_, time({ precision: 3, withTimezone: true }).defaultNow(), `now()`); + + const res7 = await diffDefault(_, time().array().default([]), `'{}'::time[]`); + const res8 = await diffDefault( + _, + time({ precision: 3, withTimezone: true }).array().default([]), + `'{}'::time(3) with time zone[]`, + ); + const res9 = await diffDefault(_, time({ precision: 3 }).array().default(['15:50:33']), `'{15:50:33}'::time(3)[]`); + const res10 = await diffDefault( + _, + time({ precision: 3, withTimezone: true }).array().default(['15:50:33.123']), + `'{15:50:33.123+00}'::time(3) with time zone[]`, + ); + + const res11 = await diffDefault(_, time().array().array().default([]), `'{}'::time[]`); + const res12 = await diffDefault( + _, + time({ precision: 3, withTimezone: true }).array().array().default([]), + `'{}'::time(3) with time zone[]`, + ); + const res13 = await diffDefault(_, time().array().array().default([['15:50:33']]), `'{{15:50:33}}'::time[]`); + const res14 = await diffDefault( + _, + time({ precision: 3, withTimezone: true }).array().array().default([['15:50:33.123']]), + `'{{15:50:33.123+00}}'::time(3) with time zone[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); +}); + +test('date + date arrays', async () => { + const res1 = await diffDefault(_, date({ mode: 'string' }).default('2025-05-23'), `'2025-05-23'`); + const res10 = await diffDefault(_, date({ mode: 'date' }).default(new Date('2025-05-23')), `'2025-05-23'`); + const res2 = await diffDefault(_, date({ mode: 'string' }).defaultNow(), `now()`); + const res20 = await diffDefault(_, date({ mode: 'date' }).defaultNow(), `now()`); + + const res3 = await diffDefault(_, date({ mode: 'string' }).array().default([]), `'{}'::date[]`); + const res30 = await diffDefault(_, date({ mode: 'date' }).array().default([]), `'{}'::date[]`); + const res4 = await diffDefault(_, date({ mode: 'string' }).array().default(['2025-05-23']), `'{2025-05-23}'::date[]`); + const res40 = await diffDefault( + _, + date({ mode: 'date' }).array().default([new Date('2025-05-23')]), + `'{2025-05-23}'::date[]`, + ); + + const res5 = await diffDefault(_, date({ mode: 'string' }).array().array().default([]), `'{}'::date[]`); + const res50 = await diffDefault(_, date({ mode: 'date' }).array().array().default([]), `'{}'::date[]`); + const res6 = await diffDefault( + _, + date({ mode: 'string' }).array().array().default([['2025-05-23']]), + `'{{2025-05-23}}'::date[]`, + ); + const res60 = await diffDefault( + _, + date({ mode: 'date' }).array().array().default([[new Date('2025-05-23')]]), + `'{{2025-05-23}}'::date[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + + expect.soft(res2).toStrictEqual([]); + expect.soft(res20).toStrictEqual([]); + + expect.soft(res3).toStrictEqual([]); + expect.soft(res30).toStrictEqual([]); + + expect.soft(res4).toStrictEqual([]); + expect.soft(res40).toStrictEqual([]); + + expect.soft(res5).toStrictEqual([]); + expect.soft(res50).toStrictEqual([]); + + expect.soft(res6).toStrictEqual([]); + expect.soft(res60).toStrictEqual([]); +}); + +test('interval + interval arrays', async () => { + const res1 = await diffDefault(_, interval().default('1 day'), `'1 day'`); + const res10 = await diffDefault( + _, + interval({ fields: 'day to second', precision: 3 }).default('1 day 3 second'), + `'1 day 3 second'`, + ); + + const res2 = await diffDefault(_, interval().array().default([]), `'{}'::interval[]`); + const res20 = await diffDefault( + _, + interval({ fields: 'day to second', precision: 3 }).array().default([]), + `'{}'::interval day to second(3)[]`, + ); + + const res3 = await diffDefault(_, interval().array().default(['1 day']), `'{"1 day"}'::interval[]`); + const res30 = await diffDefault( + _, + interval({ fields: 'day to second', precision: 3 }).array().default(['1 day 3 second']), + `'{"1 day 3 second"}'::interval day to second(3)[]`, + ); + + const res4 = await diffDefault(_, interval().array().array().default([]), `'{}'::interval[]`); + const res40 = await diffDefault( + _, + interval({ fields: 'day to second', precision: 3 }).array().array().default([]), + `'{}'::interval day to second(3)[]`, + ); + + const res5 = await diffDefault(_, interval().array().array().default([['1 day']]), `'{{"1 day"}}'::interval[]`); + const res50 = await diffDefault( + _, + interval({ fields: 'day to second', precision: 3 }).array().array().default([['1 day 3 second']]), + `'{{"1 day 3 second"}}'::interval day to second(3)[]`, + ); + + expect.soft(res1).toStrictEqual([]); + // it's ok, that's due to '1 day 3 second' vs '1 day 00:00:03' + expect.soft(res10.length).toBe(1); + expect.soft(res2).toStrictEqual([]); + expect.soft(res20).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + + // it's ok, that's due to '1 day 3 second' vs '1 day 00:00:03' + expect.soft(res30.length).toBe(1); + expect.soft(res4).toStrictEqual([]); + expect.soft(res40).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + // it's ok, that's due to '1 day 3 second' vs '1 day 00:00:03' + expect.soft(res50.length).toBe(1); +}); + +test('point + point arrays', async () => { + const res1 = await diffDefault(_, point({ mode: 'xy' }).default({ x: 1, y: 2 }), `'(1,2)'`); + const res2 = await diffDefault(_, point({ mode: 'tuple' }).default([1, 2]), `'(1,2)'`); + + const res3 = await diffDefault(_, point({ mode: 'tuple' }).array().default([]), `'{}'::point[]`); + const res4 = await diffDefault(_, point({ mode: 'tuple' }).array().default([[1, 2]]), `'{"(1,2)"}'::point[]`); + + const res5 = await diffDefault(_, point({ mode: 'xy' }).array().default([]), `'{}'::point[]`); + const res6 = await diffDefault(_, point({ mode: 'xy' }).array().default([{ x: 1, y: 2 }]), `'{"(1,2)"}'::point[]`); + + const res7 = await diffDefault(_, point({ mode: 'tuple' }).array().array().default([]), `'{}'::point[]`); + const res8 = await diffDefault( + _, + point({ mode: 'tuple' }).array().array().default([[[1, 2]]]), + `'{{"(1,2)"}}'::point[]`, + ); + + const res9 = await diffDefault(_, point({ mode: 'xy' }).array().array().default([]), `'{}'::point[]`); + const res10 = await diffDefault( + _, + point({ mode: 'xy' }).array().array().default([[{ x: 1, y: 2 }]]), + `'{{"(1,2)"}}'::point[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); +}); + +test('line + line arrays', async () => { + const res1 = await diffDefault(_, line({ mode: 'abc' }).default({ a: 1, b: 2, c: 3 }), `'{1,2,3}'`); + const res2 = await diffDefault(_, line({ mode: 'tuple' }).default([1, 2, 3]), `'{1,2,3}'`); + + const res3 = await diffDefault(_, line({ mode: 'tuple' }).array().default([]), `'{}'::line[]`); + const res4 = await diffDefault(_, line({ mode: 'tuple' }).array().default([[1, 2, 3]]), `'{"{1,2,3}"}'::line[]`); + + const res5 = await diffDefault(_, line({ mode: 'abc' }).array().default([]), `'{}'::line[]`); + const res6 = await diffDefault( + _, + line({ mode: 'abc' }).array().default([{ a: 1, b: 2, c: 3 }]), + `'{"{1,2,3}"}'::line[]`, + ); + + const res7 = await diffDefault(_, line({ mode: 'tuple' }).array().array().default([]), `'{}'::line[]`); + const res8 = await diffDefault( + _, + line({ mode: 'tuple' }).array().array().default([[[1, 2, 3]]]), + `'{{"{1,2,3}"}}'::line[]`, + ); + + const res9 = await diffDefault(_, line({ mode: 'abc' }).array().array().default([]), `'{}'::line[]`); + const res10 = await diffDefault( + _, + line({ mode: 'abc' }).array().array().default([[{ a: 1, b: 2, c: 3 }]]), + `'{{"{1,2,3}"}}'::line[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); +}); + +test('enum + enum arrays', async () => { + const moodEnum = pgEnum('mood_enum', [ + 'sad', + 'ok', + 'ha\\ppy', + `text'text"`, + `no,''"\`rm`, + "mo''\",\\`}{od", + 'mo\`od', + ]); + const pre = { moodEnum }; + + const res1 = await diffDefault(_, moodEnum().default('ok'), `'ok'::"mood_enum"`, pre); + const res2 = await diffDefault(_, moodEnum().default('ha\\ppy'), `'ha\\ppy'::"mood_enum"`, pre); + const res3 = await diffDefault(_, moodEnum().default(`mo''",\\\`}{od`), `'mo''''",\\\`}{od'::"mood_enum"`, pre); + const res4 = await diffDefault(_, moodEnum().default(`text'text"`), `'text''text"'::"mood_enum"`, pre); + + const res5 = await diffDefault(_, moodEnum().array().default([]), `'{}'::"mood_enum"[]`, pre); + const res6 = await diffDefault(_, moodEnum().array().default(['ok']), `'{ok}'::"mood_enum"[]`, pre); + const res7 = await diffDefault(_, moodEnum().array().default(['ha\\ppy']), `'{"ha\\\\ppy"}'::"mood_enum"[]`, pre); + const res8 = await diffDefault(_, moodEnum().array().default(['mo\`od']), `'{mo\`od}'::"mood_enum"[]`, pre); + const res9 = await diffDefault(_, moodEnum().array().array().default([]), `'{}'::"mood_enum"[]`, pre); + const res10 = await diffDefault(_, moodEnum().array().array().default([['ok']]), `'{{ok}}'::"mood_enum"[]`, pre); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); +}); + +test('uuid + uuid arrays', async () => { + const res1 = await diffDefault( + _, + uuid().default('550e8400-e29b-41d4-a716-446655440000'), + `'550e8400-e29b-41d4-a716-446655440000'`, + ); + const res2 = await diffDefault(_, uuid().defaultRandom(), `gen_random_uuid()`); + + const res3 = await diffDefault(_, uuid().array().default([]), `'{}'::uuid[]`); + const res4 = await diffDefault(_, uuid().array().array().default([]), `'{}'::uuid[]`); + + const res5 = await diffDefault( + _, + uuid().array().default(['550e8400-e29b-41d4-a716-446655440000']), + `'{550e8400-e29b-41d4-a716-446655440000}'::uuid[]`, + ); + + const res6 = await diffDefault( + _, + uuid().array().array().default([['550e8400-e29b-41d4-a716-446655440000']]), + `'{{550e8400-e29b-41d4-a716-446655440000}}'::uuid[]`, + ); + + const res7 = await diffDefault( + _, + uuid() + .default(sql`'550e8400-e29b-41d4-a716-446655440001'`), + `'550e8400-e29b-41d4-a716-446655440001'`, + ); + + const res8 = await diffDefault(_, uuid().defaultRandom(), `gen_random_uuid()`); + const res9 = await diffDefault(_, uuid().array().default([]), `'{}'::uuid[]`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); +}); + +// pgvector extension +test('bit + bit arrays', async () => { + // await _.db.query('create extension vector;'); + const res1 = await diffDefault(_, bit({ dimensions: 3 }).default(`101`), `'101'`); + const res2 = await diffDefault(_, bit({ dimensions: 3 }).default(sql`'101'`), `'101'`); + + const res3 = await diffDefault(_, bit({ dimensions: 3 }).array().default([]), `'{}'::bit(3)[]`); + const res4 = await diffDefault(_, bit({ dimensions: 3 }).array().default([`101`]), `'{101}'::bit(3)[]`); + + const res5 = await diffDefault(_, bit({ dimensions: 3 }).array().array().default([]), `'{}'::bit(3)[]`); + const res6 = await diffDefault( + _, + bit({ dimensions: 3 }).array().array().default([[`101`], [`101`]]), + `'{{101},{101}}'::bit(3)[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); +}); + +test('halfvec + halfvec arrays', async () => { + const res1 = await diffDefault(_, halfvec({ dimensions: 3 }).default([0, -2, 3]), `'[0,-2,3]'`); + + const res3 = await diffDefault(_, halfvec({ dimensions: 3 }).array().default([]), `'{}'::halfvec(3)[]`); + const res4 = await diffDefault( + _, + halfvec({ dimensions: 3 }).array().default([[0, -2, 3]]), + `'{"[0,-2,3]"}'::halfvec(3)[]`, + ); + + const res6 = await diffDefault(_, halfvec({ dimensions: 3 }).array().array().default([]), `'{}'::halfvec(3)[]`); + const res7 = await diffDefault( + _, + halfvec({ dimensions: 3 }).array().array().default([[[0, -2, 3]], [[1, 2, 3]]]), + `'{{"[0,-2,3]"},{"[1,2,3]"}}'::halfvec(3)[]`, + ); + + // TODO strange rounding + // looks like extension or postgres makes this + + // const res2 = await diffDefault( + // _, + // halfvec({ dimensions: 3 }).default([0, -2.123456789, 3.123456789]), + // `'[0,-2.123456789,3.123456789]'`, + // ); + // const res5 = await diffDefault( + // _, + // halfvec({ dimensions: 3 }).array().default([[0, -2.3, 3.123456789]]), + // `'{"[0,-2.123456789,3.123456789]"}'::halfvec(3)[]`, + // ); + // const res8 = await diffDefault( + // _, + // // [[[0, -2.1230469,3.1230469 ]],[[1.1230469,2.1230469,3.1230469]]] + // halfvec({ dimensions: 3 }).array().array().default([[[0, -2.123456, 3.123456]], [[1.123456, 2.123456, 3.123456]]]), + // `'{{"[0,-2.123456789,3.123456789]"},{"[1.123456789,2.123456789,3.123456789]"}}'::halfvec(3)[]`, + // ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + + // expect.soft(res2).toStrictEqual([]); + // expect.soft(res5).toStrictEqual([]); + // expect.soft(res8).toStrictEqual([]); +}); + +test('sparsevec + sparsevec arrays', async () => { + const res1 = await diffDefault(_, sparsevec({ dimensions: 5 }).default(`{1:-1,3:2,5:3}/5`), `'{1:-1,3:2,5:3}/5'`); + const res2 = await diffDefault( + _, + sparsevec({ dimensions: 5 }).default(`{1:-1.1234567,3:2.1234567,5:3.1234567}/5`), + `'{1:-1.1234567,3:2.1234567,5:3.1234567}/5'`, + ); + + const res3 = await diffDefault(_, sparsevec({ dimensions: 5 }).array().default([]), `'{}'::sparsevec(5)[]`); + const res4 = await diffDefault( + _, + sparsevec({ dimensions: 5 }).array().default([`{1:-1,3:2,5:3}/5`]), + `'{"{1:-1,3:2,5:3}/5"}'::sparsevec(5)[]`, + ); + const res5 = await diffDefault( + _, + sparsevec({ dimensions: 5 }).array().default(['{1:-1.1234567,3:2.1234567,5:3.1234567}/5']), + `'{"{1:-1.1234567,3:2.1234567,5:3.1234567}/5"}'::sparsevec(5)[]`, + ); + + const res6 = await diffDefault(_, sparsevec({ dimensions: 5 }).array().array().default([]), `'{}'::sparsevec(5)[]`); + const res7 = await diffDefault( + _, + sparsevec({ dimensions: 5 }).array().array().default([[`{1:-1,3:2,5:3}/5`], [`{1:-1,3:2,5:3}/5`]]), + `'{{"{1:-1,3:2,5:3}/5"},{"{1:-1,3:2,5:3}/5"}}'::sparsevec(5)[]`, + ); + const res8 = await diffDefault( + _, + sparsevec({ dimensions: 5 }).array().array().default([['{1:-1.1234567,3:2.1234567,5:3.1234567}/5'], [ + '{1:-1.1234567,3:2.1234567,5:3.1234567}/5', + ]]), + `'{{"{1:-1.1234567,3:2.1234567,5:3.1234567}/5"},{"{1:-1.1234567,3:2.1234567,5:3.1234567}/5"}}'::sparsevec(5)[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); +}); + +test('macaddr + macaddr arrays', async () => { + const res1 = await diffDefault(_, macaddr().default('08:00:2b:01:02:03'), `'08:00:2b:01:02:03'`); + const res2 = await diffDefault(_, macaddr().default('ff:ff:ff:ff:ff:ff'), `'ff:ff:ff:ff:ff:ff'`); + + const res3 = await diffDefault(_, macaddr().array().default([]), `'{}'::macaddr[]`); + const res4 = await diffDefault( + _, + macaddr().array().default(['08:00:2b:01:02:03']), + `'{08:00:2b:01:02:03}'::macaddr[]`, + ); + const res5 = await diffDefault( + _, + macaddr().array().array().default([['08:00:2b:01:02:03'], ['ff:ff:ff:ff:ff:ff']]), + `'{{08:00:2b:01:02:03},{ff:ff:ff:ff:ff:ff}}'::macaddr[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); +}); +test('macaddr8 + macaddr8 arrays', async () => { + const res1 = await diffDefault(_, macaddr8().default('08:00:2b:01:02:03:04:05'), `'08:00:2b:01:02:03:04:05'`); + const res2 = await diffDefault(_, macaddr8().default('ff:ff:ff:ff:ff:ff:ff:ff'), `'ff:ff:ff:ff:ff:ff:ff:ff'`); + + const res3 = await diffDefault(_, macaddr8().array().default([]), `'{}'::macaddr8[]`); + const res4 = await diffDefault( + _, + macaddr8().array().default(['08:00:2b:01:02:03:04:05']), + `'{08:00:2b:01:02:03:04:05}'::macaddr8[]`, + ); + const res5 = await diffDefault( + _, + macaddr8().array().array().default([['08:00:2b:01:02:03:04:05'], ['ff:ff:ff:ff:ff:ff:ff:ff']]), + `'{{08:00:2b:01:02:03:04:05},{ff:ff:ff:ff:ff:ff:ff:ff}}'::macaddr8[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); +}); + +test('vector + vector arrays', async () => { + const res1 = await diffDefault(_, vector({ dimensions: 3 }).default([0, -2, 3]), `'[0,-2,3]'`); + const res2 = await diffDefault( + _, + vector({ dimensions: 3 }).default([0, -2.1234567, 3.1234567]), + `'[0,-2.1234567,3.1234567]'`, + ); + + const res3 = await diffDefault(_, vector({ dimensions: 3 }).array().default([]), `'{}'::vector(3)[]`); + const res4 = await diffDefault( + _, + vector({ dimensions: 3 }).array().default([[0, -2, 3]]), + `'{"[0,-2,3]"}'::vector(3)[]`, + ); + const res5 = await diffDefault( + _, + vector({ dimensions: 3 }).array().default([[0, -2.1234567, 3.1234567]]), + `'{"[0,-2.1234567,3.1234567]"}'::vector(3)[]`, + ); + + const res6 = await diffDefault(_, vector({ dimensions: 3 }).array().array().default([]), `'{}'::vector(3)[]`); + const res7 = await diffDefault( + _, + vector({ dimensions: 3 }).array().array().default([[[0, -2, 3]], [[1, 2, 3]]]), + `'{{"[0,-2,3]"},{"[1,2,3]"}}'::vector(3)[]`, + ); + const res8 = await diffDefault( + _, + vector({ dimensions: 3 }).array().array().default([[ + [0, -2.1234567, 3.1234567], + ], [[1.1234567, 2.1234567, 3.1234567]]]), + `'{{"[0,-2.1234567,3.1234567]"},{"[1.1234567,2.1234567,3.1234567]"}}'::vector(3)[]`, + ); + + const res9 = await diffDefault(_, vector({ dimensions: 2 }).default([0, -2]), `'[0,-2]'`); + const res10 = await diffDefault(_, vector({ dimensions: 5 }).default([0, -2, 0, 0, 0]), `'[0,-2,0,0,0]'`); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); +}); + +// postgis extension +// SRID=4326 -> these coordinates are longitude/latitude values +// Default is 0 or undefined +test('geometry + geometry arrays', async () => { + const postgisDb = await preparePostgisTestDatabase(); + + const res1 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'tuple', type: 'point' }).default([30.5234, 50.4501]), + `'SRID=4326;POINT(30.5234 50.4501)'`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res2 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'xy', type: 'point' }).default({ x: 30.5234, y: 50.4501 }), + `'SRID=4326;POINT(30.5234 50.4501)'`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res3 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'tuple', type: 'point' }).array().default([]), + `'{}'::geometry(point,4326)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + const res4 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'tuple', type: 'point' }).array().default([[30.5234, 50.4501]]), + `ARRAY['SRID=4326;POINT(30.5234 50.4501)']::geometry(point,4326)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res5 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'xy', type: 'point' }).array().default([]), + `'{}'::geometry(point,4326)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + const res6 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'xy', type: 'point' }).array().default([{ x: 30.5234, y: 50.4501 }]), + `ARRAY['SRID=4326;POINT(30.5234 50.4501)']::geometry(point,4326)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res7 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'tuple', type: 'point' }).array().array().default([]), + `'{}'::geometry(point,4326)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + const res8 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'tuple', type: 'point' }).array().array().default([[[30.5234, 50.4501]], [[ + 30.5234, + 50.4501, + ]]]), + `ARRAY[ARRAY['SRID=4326;POINT(30.5234 50.4501)'],ARRAY['SRID=4326;POINT(30.5234 50.4501)']]::geometry(point,4326)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res9 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'xy', type: 'point' }).array().array().default([]), + `'{}'::geometry(point,4326)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res10 = await diffDefault( + postgisDb, + geometry({ srid: 4326, mode: 'xy', type: 'point' }).array().array().default([[{ x: 30.5234, y: 50.4501 }], [{ + x: 30.5234, + y: 50.4501, + }]]), + `ARRAY[ARRAY['SRID=4326;POINT(30.5234 50.4501)'],ARRAY['SRID=4326;POINT(30.5234 50.4501)']]::geometry(point,4326)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res11 = await diffDefault( + postgisDb, + geometry({ mode: 'xy', type: 'point' }).default({ x: 30.5234, y: 50.4501 }), + `'POINT(30.5234 50.4501)'`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res12 = await diffDefault( + postgisDb, + geometry({ mode: 'xy', type: 'point' }).default(sql`'SRID=4326;POINT(10 10)'`), + `'SRID=4326;POINT(10 10)'`, + undefined, + undefined, + ['table'], + ['public'], + ); + // const res12_1 = await diffDefault( + // postgisDb, + // geometry().default(sql`'SRID=0;POINT(12.1 12.1)'`), + // `'SRID=0;POINT(12.1 12.1)'`, + // undefined, + // undefined, + // true, + // ); + + const res13 = await diffDefault( + postgisDb, + geometry({ mode: 'xy', type: 'point' }).array().default([{ x: 13, y: 13 }]), + `ARRAY['POINT(13 13)']::geometry(point)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + + // this will result diffs on push only + // i believe we should not handle this since will be log in console for user about diff and this is sql`` + // const res14 = await diffDefault( + // postgisDb, + // geometry({ mode: 'xy', type: 'point' }).array().default(sql`'{SRID=4326;POINT(14 14)}'::geometry(point)[]`), + // `'{SRID=4326;POINT(14 14)}'::geometry(point)[]`, + // undefined, + // undefined, + // true, + // ); + + const res15 = await diffDefault( + postgisDb, + geometry({ mode: 'xy', type: 'point' }).array().default(sql`ARRAY['SRID=4326;POINT(15 15)']::geometry(point)[]`), + `ARRAY['SRID=4326;POINT(15 15)']::geometry(point)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + + const res16 = await diffDefault( + postgisDb, + geometry({ mode: 'xy', type: 'point' }).array().default(sql`ARRAY['POINT(16 16)']::geometry(point)[]`), + `ARRAY['POINT(16 16)']::geometry(point)[]`, + undefined, + undefined, + ['table'], + ['public'], + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + // expect.soft(res12_1).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + // expect.soft(res14).toStrictEqual([]); + expect.soft(res15).toStrictEqual([]); + expect.soft(res16).toStrictEqual([]); +}); + +test('inet + inet arrays', async () => { + const res1 = await diffDefault(_, inet().default('127.0.0.1'), `'127.0.0.1'`); + const res2 = await diffDefault(_, inet().default('::ffff:192.168.0.1/96'), `'::ffff:192.168.0.1/96'`); + + const res1_1 = await diffDefault(_, inet().array().default(['127.0.0.1']), `'{127.0.0.1}'::inet[]`); + const res2_1 = await diffDefault( + _, + inet().array().default(['::ffff:192.168.0.1/96']), + `'{::ffff:192.168.0.1/96}'::inet[]`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + + expect.soft(res1_1).toStrictEqual([]); + expect.soft(res2_1).toStrictEqual([]); +}); + +test.skip('corner cases', async () => { + const moodEnum = pgEnum('mood_enum', ['sad', 'ok', 'happy', `text'text"`, `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od']); + const pre = { moodEnum }; + + await diffDefault( + _, + moodEnum().array().array().default([[`text'text"`]]), + `'{{"text''text\\\""}}'::"mood_enum"[]`, + pre, + ); + const res11 = await diffDefault( + _, + moodEnum().array().array().default([[`mo''",\`}{od`]]), + `'{{"mo''''\\\",\`\}\{od"}}'::"mood_enum"[]`, + pre, + ); + + const res6 = await diffDefault( + _, + moodEnum().array().default([`text'text"`]), + `'{"text''text\\\""}'::"mood_enum"[]`, + pre, + ); + + const res7 = await diffDefault( + _, + moodEnum().array().default([`mo''",\`}{od`]), + `'{"mo''''\\\",\`\}\{od"}'::"mood_enum"[]`, + pre, + ); + + // const res_10 = await diffDefault( + // _, + // json().array().default([{ key: `mo''",\`}{od` }]), + // `'{"{\\"key\\":\\"mo''\\\\\\",\`}{od\\"}"}'::json[]`, + // ); + // expect.soft(res_10).toStrictEqual([]); + + // const res14 = await diffDefault( + // _, + // json().array().array().default([[{ key: `mo''",\`}{od` }]]), + // `'{{"{\\"key\\":\\"mo''\\\\\\",\`}{od\\"}"}}'::json[]`, + // ); + // expect.soft(res14).toStrictEqual([]); + + // const res__10 = await diffDefault( + // _, + // json().array().default([{ key: `mo''",\`}{od` }]), + // `'{"{\\"key\\":\\"mo''\\\\\\",\`}{od\\"}"}'::json[]`, + // ); + // expect.soft(res__10).toStrictEqual([]); + + const res__14 = await diffDefault( + _, + text({ enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).array().array() + .default( + [[`mo''",\`}{od`], [`mo''",\`}{od`]], + ), + `'{{"mo''''\\\",\`\}\{od"},{"mo''''\\\",\`}{od"}}'::text[]`, + ); + expect.soft(res__14).toStrictEqual([]); + + // const res14 = await diffDefault( + // _, + // json().array().array().default([[{ key: `mo''",\`}{od` }]]), + // `'{{"{\\"key\\":\\"mo''\\\\\\",\`}{od\\"}"}}'::json[]`, + // ); + + // expect.soft(res14).toStrictEqual([]); + + const res_11 = await diffDefault( + _, + text({ enum: ['one', 'two', 'three', `no,''"\`rm`, `mo''",\`}{od`, 'mo,\`od'] }).array().default( + [`mo''",\`}{od`], + ), + `'{"mo''''\\\",\`\}\{od"}'::text[]`, + ); + expect.soft(res_11).toStrictEqual([]); + + const res21 = await diffDefault( + _, + numeric({ mode: 'number' }).array().array().default([[10.123, 123.10], [10.123, 123.10]]), + "'{{10.123,123.10},{10.123,123.10}}'::numeric[]", + ); + const res22 = await diffDefault( + _, + numeric({ mode: 'number', precision: 6, scale: 2 }).array().array().default([[10.123, 123.10], [ + 10.123, + 123.10, + ]]), + "'{{10.123,123.10},{10.123,123.10}}'::numeric[]", + ); + + // expect.soft(res21).toStrictEqual([]); + // expect.soft(res22).toStrictEqual([]); + + await diffDefault( + _, + json().array().default([{ key: 'mo",\\`}{od' }]), + `'{"{\"key\":\"mo\\\",\\\\\\\\\`}{od\"}"}'::json[]`, + ); + + await diffDefault( + _, + json().array().default([{ key: 'mo",\\`}{od' }]), + `'{"{\"key\":\"mo\\\",\\\\\\\\\`}{od\"}"}'::json[]`, + ); + + await diffDefault(_, json().array().array().default([]), `'{}'::json[]`); + await diffDefault( + _, + json().array().array().default([[{ key: 'value' }]]), + `'{{\"{\\\"key\\\":\\\"value\\\"}\"}}'::json[]`, + ); + await diffDefault( + _, + json().array().array().default([[{ key: "val'ue" }]]), + `'{{"{\\"key\\":\\"val''ue\\"}"}}'::json[]`, + ); + await diffDefault( + _, + json().array().array().default([[{ key: 'mo",\\`}{od' }]]), + `'{{"{\"key\":\"mo\\\",\\\\\\\\\`}{od\"}"}}'::json[]`, + ); + await diffDefault( + _, + json().default(sql`jsonb_build_object('chunkIndex', NULL, 'totalChunks', NULL)`), + `jsonb_build_object('chunkIndex', NULL, 'totalChunks', NULL)`, + ); + + await diffDefault( + _, + json().array().array().default([[{ key: 'mo",\\`}{od' }]]), + `'{{"{\"key\":\"mo\\\",\\\\\\\\\`}{od\"}"}}'::json[]`, + ); + + await diffDefault( + _, + json().array().array().default([[{ key: "val'ue" }]]), + `'{{"{\\"key\\":\\"val''ue\\"}"}}'::json[]`, + ); + + await diffDefault( + _, + json().array().array().default([[{ key: 'value' }]]), + `'{{\"{\\\"key\\\":\\\"value\\\"}\"}}'::json[]`, + ); + + await diffDefault( + _, + jsonb().array().default([{ key: 'value' }]), + `'{"{\\"key\\":\\"value\\"}"}'::jsonb[]`, + ); + await diffDefault( + _, + jsonb().array().default([{ key: "val'ue" }]), + `'{"{\\"key\\":\\"val''ue\\"}"}'::jsonb[]`, + ); +}); diff --git a/drizzle-kit/tests/postgres/pg-enums.test.ts b/drizzle-kit/tests/postgres/pg-enums.test.ts new file mode 100644 index 0000000000..111d76c6e6 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-enums.test.ts @@ -0,0 +1,2169 @@ +import { integer, pgEnum, pgSchema, pgTable, serial, text, varchar } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('enums #1', async () => { + const to = { + enum: pgEnum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TYPE "enum" AS ENUM('value');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #2', async () => { + const folder = pgSchema('folder'); + const to = { + folder, + enum: folder.enum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff({ folder }, to, []); + await push({ db, to: { folder } }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TYPE "folder"."enum" AS ENUM('value');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #3', async () => { + const from = { + enum: pgEnum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff(from, {}, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to: {}, + }); + + const st0 = [ + `DROP TYPE "enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #4', async () => { + const folder = pgSchema('folder'); + + const from = { + folder, + enum: folder.enum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff(from, { folder }, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to: { folder } }); + + const st0 = [ + `DROP TYPE "folder"."enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #5', async () => { + const folder1 = pgSchema('folder1'); + const folder2 = pgSchema('folder2'); + + const from = { + folder1, + enum: folder1.enum('enum', ['value']), + }; + + const to = { + folder2, + enum: folder2.enum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff(from, to, ['folder1->folder2']); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: ['folder1->folder2'], + }); + + const st0 = [ + `ALTER SCHEMA "folder1" RENAME TO "folder2";\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #6', async () => { + const folder1 = pgSchema('folder1'); + const folder2 = pgSchema('folder2'); + + const from = { + folder1, + folder2, + enum: folder1.enum('enum', ['value']), + }; + + const to = { + folder1, + folder2, + enum: folder2.enum('enum', ['value']), + }; + + const { sqlStatements: st } = await diff(from, to, [ + 'folder1.enum->folder2.enum', + ]); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: ['folder1.enum->folder2.enum'], + }); + + const st0 = [ + `ALTER TYPE "folder1"."enum" SET SCHEMA "folder2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #7', async () => { + const from = { + enum: pgEnum('enum', ['value1']), + }; + + const to = { + enum: pgEnum('enum', ['value1', 'value2']), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TYPE "enum" ADD VALUE 'value2';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #8', async () => { + const from = { + enum: pgEnum('enum', ['value1']), + }; + + const to = { + enum: pgEnum('enum', ['value1', 'value2', 'value3']), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TYPE "enum" ADD VALUE 'value2';`, + `ALTER TYPE "enum" ADD VALUE 'value3';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #9', async () => { + const from = { + enum: pgEnum('enum', ['value1', 'value3']), + }; + + const to = { + enum: pgEnum('enum', ['value1', 'value2', 'value3']), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [`ALTER TYPE "enum" ADD VALUE 'value2' BEFORE 'value3';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #10', async () => { + const schema = pgSchema('folder'); + const from = { + schema, + enum: schema.enum('enum', ['value1']), + }; + + const to = { + schema, + enum: schema.enum('enum', ['value1', 'value2']), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [`ALTER TYPE "folder"."enum" ADD VALUE 'value2';`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #11', async () => { + const schema1 = pgSchema('folder1'); + const from = { + schema1, + enum: schema1.enum('enum', ['value1']), + }; + + const to = { + schema1, + enum: pgEnum('enum', ['value1']), + }; + + const renames = [ + 'folder1.enum->public.enum', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "folder1"."enum" SET SCHEMA "public";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #12', async () => { + const schema1 = pgSchema('folder1'); + const from = { + schema1, + enum: pgEnum('enum', ['value1']), + }; + + const to = { + schema1, + enum: schema1.enum('enum', ['value1']), + }; + + const renames = [ + 'public.enum->folder1.enum', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "enum" SET SCHEMA "folder1";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #13', async () => { + const from = { + enum: pgEnum('enum1', ['value1']), + }; + + const to = { + enum: pgEnum('enum2', ['value1']), + }; + + const renames = [ + 'public.enum1->public.enum2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "enum1" RENAME TO "enum2";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #14', async () => { + const folder1 = pgSchema('folder1'); + const folder2 = pgSchema('folder2'); + const from = { + folder1, + folder2, + enum: folder1.enum('enum1', ['value1']), + }; + + const to = { + folder1, + folder2, + enum: folder2.enum('enum2', ['value1']), + }; + + const renames = [ + 'folder1.enum1->folder2.enum2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`, + `ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #15', async () => { + const folder1 = pgSchema('folder1'); + const folder2 = pgSchema('folder2'); + const from = { + folder1, + folder2, + enum: folder1.enum('enum1', ['value1', 'value4']), + }; + + const to = { + folder1, + folder2, + enum: folder2.enum('enum2', ['value1', 'value2', 'value3', 'value4']), + }; + + const renames = ['folder1.enum1->folder2.enum2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`, + `ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`, + `ALTER TYPE "folder2"."enum2" ADD VALUE 'value2' BEFORE 'value4';`, + `ALTER TYPE "folder2"."enum2" ADD VALUE 'value3' BEFORE 'value4';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #16', async () => { + const enum1 = pgEnum('enum1', ['value1']); + const enum2 = pgEnum('enum2', ['value1']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const to = { + enum2, + table: pgTable('table', { + column: enum2('column'), + }), + }; + + const renames = [ + 'public.enum1->public.enum2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "enum1" RENAME TO "enum2";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #17', async () => { + const schema = pgSchema('schema'); + const enum1 = pgEnum('enum1', ['value1']); + const enum2 = schema.enum('enum1', ['value1']); + + const from = { + schema, + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const to = { + schema, + enum2, + table: pgTable('table', { + column: enum2('column'), + }), + }; + + const renames = [ + 'public.enum1->schema.enum1', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [`ALTER TYPE "enum1" SET SCHEMA "schema";`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #18', async () => { + const schema1 = pgSchema('schema1'); + const schema2 = pgSchema('schema2'); + + const enum1 = schema1.enum('enum1', ['value1']); + const enum2 = schema2.enum('enum2', ['value1']); + + const from = { + schema1, + schema2, + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const to = { + schema1, + schema2, + enum2, + table: pgTable('table', { + column: enum2('column'), + }), + }; + + const renames = [ + 'schema1.enum1->schema2.enum2', + ]; + // change name and schema of the enum, no table changes + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER TYPE "schema1"."enum1" SET SCHEMA "schema2";`, + `ALTER TYPE "schema2"."enum1" RENAME TO "enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #19', async () => { + const myEnum = pgEnum('my_enum', ["escape's quotes"]); + + const from = {}; + + const to = { myEnum }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = ["CREATE TYPE \"my_enum\" AS ENUM('escape''s quotes');"]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #20', async () => { + const myEnum = pgEnum('my_enum', ['one', 'two', 'three']); + + const from = { + myEnum, + table: pgTable('table', { + id: serial('id').primaryKey(), + }), + }; + + const to = { + myEnum, + table: pgTable('table', { + id: serial('id').primaryKey(), + col1: myEnum('col1'), + col2: integer('col2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ADD COLUMN "col1" "my_enum";', + 'ALTER TABLE "table" ADD COLUMN "col2" integer;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #21', async () => { + const myEnum = pgEnum('my_enum', ['one', 'two', 'three']); + + const from = { + myEnum, + table: pgTable('table', { + id: serial('id').primaryKey(), + }), + }; + + const to = { + myEnum, + table: pgTable('table', { + id: serial('id').primaryKey(), + col1: myEnum('col1').array(), + col2: integer('col2').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ADD COLUMN "col1" "my_enum"[];', + 'ALTER TABLE "table" ADD COLUMN "col2" integer[];', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #22', async () => { + const schema = pgSchema('schema'); + const en = schema.enum('e', ['a', 'b']); + + const from = { + schema, + en, + }; + + const to = { + schema, + en, + table: pgTable('table', { + en: en(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = ['CREATE TABLE "table" (\n\t"en" "schema"."e"\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums #23', async () => { + const schema = pgSchema('schema'); + const en = schema.enum('e', ['a', 'b']); + + const from = { + schema, + en, + }; + + const to = { + schema, + en, + table: pgTable('table', { + en1: en().array(), + en2: en().array().array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'CREATE TABLE "table" (\n\t"en1" "schema"."e"[],\n\t"en2" "schema"."e"[][]\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop enum value', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + }; + + const enum2 = pgEnum('enum', ['value1', 'value3']); + const to = { + enum2, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop enum values', async () => { + // TODO: revise + const newSchema = pgSchema('mySchema'); + const enum3 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'addedToMiddle', + 'custMgf', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema1 = { + enum3, + table: pgTable('enum_table', { + id: enum3(), + }), + newSchema, + table1: newSchema.table('enum_table', { + id: enum3(), + }), + }; + + const enum4 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ + 'addedToTop', + 'custAll', + 'custAdmin', + 'custClerk', + 'custInvoiceManager', + 'custApprover', + 'custOrderWriter', + 'custBuyer', + ]); + const schema2 = { + enum4, + table: pgTable('enum_table', { + id: enum4(), + }), + newSchema, + table1: newSchema.table('enum_table', { + id: enum4(), + }), + }; + + const schemas = ['public', 'mySchema']; + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, schemas }); + + const st0 = [ + `ALTER TABLE "enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, + `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, + `DROP TYPE "enum_users_customer_and_ship_to_settings_roles";`, + `CREATE TYPE "enum_users_customer_and_ship_to_settings_roles" AS ENUM('addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'custApprover', 'custOrderWriter', 'custBuyer');`, + `ALTER TABLE "enum_table" ALTER COLUMN "id" SET DATA TYPE "enum_users_customer_and_ship_to_settings_roles" USING "id"::"enum_users_customer_and_ship_to_settings_roles";`, + `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE "enum_users_customer_and_ship_to_settings_roles" USING "id"::"enum_users_customer_and_ship_to_settings_roles";`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + users: pgTable('users', { + col: enum1().default('value1'), + }), + }; + + const to = { + users: pgTable('users', { + col: text().default('value1'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "col" DROP DEFAULT;', + 'ALTER TABLE "users" ALTER COLUMN "col" SET DATA TYPE text;', + 'ALTER TABLE "users" ALTER COLUMN "col" SET DEFAULT \'value1\';', + `DROP TYPE "enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop enum value. enum is columns data type', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const schema = pgSchema('new_schema'); + + const from = { + schema, + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + table2: schema.table('table', { + column: enum1('column'), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3']); + const to = { + schema, + enum2, + table: pgTable('table', { + column: enum1('column'), + }), + table2: schema.table('table', { + column: enum1('column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum" USING "column"::"enum";`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "enum" USING "column"::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('shuffle enum values', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const schema = pgSchema('new_schema'); + + const from = { + schema, + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + table2: schema.table('table', { + column: enum1('column'), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: pgTable('table', { + column: enum1('column'), + }), + table2: schema.table('table', { + column: enum1('column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum" USING "column"::"enum";`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "enum" USING "column"::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('column is enum type with default value. shuffle enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').default('value2'), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + 'ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum" USING "column"::"enum";', + 'ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT \'value2\'::"enum";', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums as ts enum', async () => { + enum Test { + value = 'value', + } + + const to = { + enum: pgEnum('enum', Test), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TYPE "enum" AS ENUM('value');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is enum type with default value. shuffle enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').default('value2'), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum" USING "column"::"enum";`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is array enum type with default value. shuffle enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array().default(['value2']), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').array().default(['value3']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, + `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[] USING "column"::"enum"[];`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{value3}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is array enum with custom size type with default value. shuffle enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array(3).default(['value2']), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').array(3).default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, + `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[] USING "column"::"enum"[];`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{value2}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is array enum with custom size type. shuffle enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array(3), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').array(3), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[] USING "column"::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is array of enum with multiple dimenions with custom sizes type. shuffle enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array(3).array(2), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').array(3).array(2), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[][];`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[][] USING "column"::"enum"[][];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is array of enum with multiple dimenions type with custom size with default value. shuffle enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array(3).array(2).default([['value2']]), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').array(3).array(2).default([['value2']]), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[][];`, + `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, + `DROP TYPE "enum";`, + `CREATE TYPE "enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[][] USING "column"::"enum"[][];`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{{value2}}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is enum type with default value. custom schema. shuffle enum', async () => { + const schema = pgSchema('new_schema'); + + const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); + const from = { + schema, + enum1, + table: pgTable('table', { + column: enum1('column').default('value2'), + }), + }; + + const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: pgTable('table', { + column: enum2('column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, + `DROP TYPE "new_schema"."enum";`, + `CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum" USING "column"::"new_schema"."enum";`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"new_schema"."enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is array enum type with default value. custom schema. shuffle enum', async () => { + const schema = pgSchema('new_schema'); + + const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); + + const from = { + schema, + enum1, + table: schema.table('table', { + column: enum1('column').array().default(['value2']), + }), + }; + + const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: schema.table('table', { + column: enum2('column').array().default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text[];`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" DROP DEFAULT;`, + `DROP TYPE "new_schema"."enum";`, + `CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[] USING "column"::"new_schema"."enum"[];`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{value2}'::"new_schema"."enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is array enum type with custom size with default value. custom schema. shuffle enum', async () => { + const schema = pgSchema('new_schema'); + + const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); + + const from = { + schema, + enum1, + table: schema.table('table', { + column: enum1('column').array(3).default(['value2']), + }), + }; + + const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: schema.table('table', { + column: enum2('column').array(3).default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text[];`, + 'ALTER TABLE "new_schema"."table" ALTER COLUMN "column" DROP DEFAULT;', + `DROP TYPE "new_schema"."enum";`, + `CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[] USING "column"::"new_schema"."enum"[];`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{value2}'::"new_schema"."enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is array enum type with custom size. custom schema. shuffle enum', async () => { + const schema = pgSchema('new_schema'); + + const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); + + const from = { + schema, + enum1, + table: schema.table('table', { + column: enum1('column').array(3), + }), + }; + + const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); + const to = { + schema, + enum2, + table: schema.table('table', { + column: enum2('column').array(3), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text[];`, + `DROP TYPE "new_schema"."enum";`, + `CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`, + `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[] USING "column"::"new_schema"."enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('column is enum type without default value. add default to column', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const enum2 = pgEnum('enum', ['value1', 'value3']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').default('value3'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3'::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from standart type to enum', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: varchar('column'), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum" USING "column"::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from standart type to enum. column has default', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: varchar('column').default('value2'), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: enum1('column').default('value3'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum" USING "column"::"enum";`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3'::"enum";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from array standart type to array enum. column has default', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: varchar('column').array().default(['value2']), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: enum1('column').array().default(['value3']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[] USING "column"::"enum"[];`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{value3}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from array standart type to array enum. column without default', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: varchar('column').array(), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: enum1('column').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[] USING "column"::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from array standart type with custom size to array enum with custom size. column has default', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: varchar('column').array(3).default(['value2']), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: enum1('column').array(3).default(['value3']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[] USING "column"::"enum"[];`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{value3}'::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from array standart type with custom size to array enum with custom size. column without default', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: varchar('column').array(2), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: enum1('column').array(2), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum"[] USING "column"::"enum"[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from enum type to standart type', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: varchar('column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from enum type to standart type. column has default', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').default('value3'), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: varchar('column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar;`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from array enum type to array standart type', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array(), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: varchar('column').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from array enum with custom size type to array standart type with custom size', async () => { + const enum1 = pgEnum('enum', ['value1', 'value3']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array(2), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: varchar('column').array(2), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// +test('change data type from array enum type to array standart type. column has default', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array().default(['value2']), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: varchar('column').array().default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[];`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{value2}'::varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from array enum type with custom size to array standart type with custom size. column has default', async () => { + const enum1 = pgEnum('enum', ['value1', 'value2']); + + const from = { + enum1, + table: pgTable('table', { + column: enum1('column').array(3).default(['value2']), + }), + }; + + const to = { + enum1, + table: pgTable('table', { + column: varchar('column').array(3).default(['value2']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;', + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[];`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{value2}'::varchar[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from standart type to standart type', async () => { + const from = { + table: pgTable('table', { + column: varchar('column'), + }), + }; + + const to = { + table: pgTable('table', { + column: text('column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from standart type to standart type. column has default', async () => { + const from = { + table: pgTable('table', { + column: varchar('column').default('value3'), + }), + }; + + const to = { + table: pgTable('table', { + column: text('column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from standart type to standart type. columns are arrays', async () => { + const from = { + table: pgTable('table', { + column: varchar('column').array(), + }), + }; + + const to = { + table: pgTable('table', { + column: text('column').array(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from standart type to standart type. columns are arrays with custom sizes', async () => { + const from = { + table: pgTable('table', { + column: varchar('column').array(2), + }), + }; + + const to = { + table: pgTable('table', { + column: text('column').array(2), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from standart type to standart type. columns are arrays. column has default', async () => { + const from = { + table: pgTable('table', { + column: varchar('column').array().default(['hello']), + }), + }; + + const to = { + table: pgTable('table', { + column: text('column').array().default(['hello']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from standart type to standart type. columns are arrays with custom sizes.column has default', async () => { + const from = { + table: pgTable('table', { + column: varchar('column').array(2).default(['hello']), + }), + }; + + const to = { + table: pgTable('table', { + column: text('column').array(2).default(['hello']), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from one enum to other', async () => { + const enum1 = pgEnum('enum1', ['value1', 'value3']); + const enum2 = pgEnum('enum2', ['value1', 'value3']); + + const from = { + enum1, + enum2, + table: pgTable('table', { + column: enum1('column'), + }), + }; + + const to = { + enum1, + enum2, + table: pgTable('table', { + column: enum2('column'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum2" USING "column"::text::"enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from one enum to other. column has default', async () => { + const enum1 = pgEnum('enum1', ['value1', 'value3']); + const enum2 = pgEnum('enum2', ['value1', 'value3']); + + const from = { + enum1, + enum2, + table: pgTable('table', { + column: enum1('column').default('value3'), + }), + }; + + const to = { + enum1, + enum2, + table: pgTable('table', { + column: enum2('column').default('value3'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum2" USING "column"::text::"enum2";`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3'::"enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// + +test('change data type from one enum to other. changed defaults', async () => { + const enum1 = pgEnum('enum1', ['value1', 'value3']); + const enum2 = pgEnum('enum2', ['value1', 'value3']); + + const from = { + enum1, + enum2, + table: pgTable('table', { + column: enum1('column').default('value3'), + }), + }; + + const to = { + enum1, + enum2, + table: pgTable('table', { + column: enum2('column').default('value1'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum2" USING "column"::text::"enum2";`, + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value1'::"enum2";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('check filtering json statements. here we have recreate enum + set new type + alter default', async () => { + const enum1 = pgEnum('enum1', ['value1', 'value3']); + const from = { + enum1, + table: pgTable('table', { + column: varchar('column').default('value3'), + }), + }; + + const enum2 = pgEnum('enum1', ['value3', 'value1', 'value2']); + const to = { + enum2, + table: pgTable('table', { + column: enum2('column').default('value2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'DROP TYPE "enum1";', + `CREATE TYPE "enum1" AS ENUM('value3', 'value1', 'value2');`, + 'ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;', + 'ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "enum1" USING "column"::"enum1";', + `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"enum1";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add column with same name as enum', async () => { + const statusEnum = pgEnum('status', ['inactive', 'active', 'banned']); + + const schema1 = { + statusEnum, + table1: pgTable('table1', { + id: serial('id').primaryKey(), + }), + }; + + const schema2 = { + statusEnum, + table1: pgTable('table1', { + id: serial('id').primaryKey(), + status: statusEnum('status').default('inactive'), + }), + table2: pgTable('table2', { + id: serial('id').primaryKey(), + status: statusEnum('status').default('inactive'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'CREATE TABLE "table2" (\n\t"id" serial PRIMARY KEY,\n\t"status" "status" DEFAULT \'inactive\'::"status"\n);\n', + 'ALTER TABLE "table1" ADD COLUMN "status" "status" DEFAULT \'inactive\'::"status";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enums ordering', async () => { + const schema1 = { + enum: pgEnum('settings', ['all', 'admin']), + }; + + const { next: n1 } = await diff({}, schema1, []); + await push({ db, to: schema1 }); + + const schema3 = { + enum: pgEnum('settings', ['new', 'all', 'admin']), + }; + + const { sqlStatements: st2, next: n2 } = await diff(n1, schema3, []); + const { sqlStatements: pst2 } = await push({ db, to: schema3 }); + + expect(st2).toStrictEqual(["ALTER TYPE \"settings\" ADD VALUE 'new' BEFORE 'all';"]); + expect(pst2).toStrictEqual(["ALTER TYPE \"settings\" ADD VALUE 'new' BEFORE 'all';"]); + + const schema4 = { + enum3: pgEnum('settings', ['new', 'all', 'new2', 'admin']), + }; + + const { sqlStatements: st3, next: n3 } = await diff(n2, schema4, []); + const { sqlStatements: pst3 } = await push({ db, to: schema4 }); + + const st0 = [ + `ALTER TYPE "settings" ADD VALUE 'new2' BEFORE 'admin';`, + ]; + + expect(st3).toStrictEqual(st0); + expect(pst3).toStrictEqual(st0); + + const { sqlStatements: st4 } = await diff(n3, schema4, []); + const { sqlStatements: pst4 } = await push({ db, to: schema4 }); + expect(st4).toStrictEqual([]); + expect(pst4).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/pg-generated.test.ts b/drizzle-kit/tests/postgres/pg-generated.test.ts similarity index 51% rename from drizzle-kit/tests/pg-generated.test.ts rename to drizzle-kit/tests/postgres/pg-generated.test.ts index e9f294891f..a266b1bfb5 100644 --- a/drizzle-kit/tests/pg-generated.test.ts +++ b/drizzle-kit/tests/postgres/pg-generated.test.ts @@ -2,8 +2,25 @@ import { SQL, sql } from 'drizzle-orm'; import { integer, pgTable, text } from 'drizzle-orm/pg-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemas } from './schemaDiffer'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); test('generated as callback: add column with generated constraint', async () => { const from = { @@ -24,28 +41,19 @@ test('generated as callback: add column with generated constraint', async () => }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: '"users"."name" || \'hello\'', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: add generated constraint to an exisiting column', async () => { @@ -68,27 +76,20 @@ test('generated as callback: add generated constraint to an exisiting column', a }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" drop column "gen_name";', - 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', - ]); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: drop generated constraint', async () => { @@ -111,26 +112,19 @@ test('generated as callback: drop generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: change generated constraint', async () => { @@ -155,27 +149,17 @@ test('generated as callback: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" drop column "gen_name";', + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we don't trigger generated column recreate if definition change within push }); // --- @@ -199,28 +183,19 @@ test('generated as sql: add column with generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: '"users"."name" || \'hello\'', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: add generated constraint to an exisiting column', async () => { @@ -243,27 +218,20 @@ test('generated as sql: add generated constraint to an exisiting column', async }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" drop column "gen_name";', - 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', - ]); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: drop generated constraint', async () => { @@ -286,26 +254,19 @@ test('generated as sql: drop generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: change generated constraint', async () => { @@ -330,27 +291,20 @@ test('generated as sql: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" drop column "gen_name";', + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we don't trigger generated column recreate if definition change within push }); // --- @@ -374,28 +328,19 @@ test('generated as string: add column with generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: '"users"."name" || \'hello\'', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: add generated constraint to an exisiting column', async () => { @@ -418,27 +363,20 @@ test('generated as string: add generated constraint to an exisiting column', asy }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" drop column "gen_name";', - 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', - ]); + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: drop generated constraint', async () => { @@ -461,26 +399,19 @@ test('generated as string: drop generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: change generated constraint', async () => { @@ -505,25 +436,50 @@ test('generated as string: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemas(from, to, []); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" drop column "gen_name";', + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we don't trigger generated column recreate if definition change within push +}); + +test('alter generated constraint', async () => { + const schema1 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), + }), + }; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" DROP COLUMN "gen_name";', + 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // push ignores definition changes }); diff --git a/drizzle-kit/tests/postgres/pg-identity.test.ts b/drizzle-kit/tests/postgres/pg-identity.test.ts new file mode 100644 index 0000000000..b9c9425e19 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-identity.test.ts @@ -0,0 +1,586 @@ +import { bigint, integer, pgSequence, pgTable, smallint, text } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// same table - no diff +// 2. identity always/by default - no params + +// 3. identity always/by default - with a few params + +// 4. identity always/by default - with all params + + +// diff table with create statement +// 2. identity always/by default - no params + +// 3. identity always/by default - with a few params + +// 4. identity always/by default - with all params + + +// diff for drop statement +// 2. identity always/by default - no params, with params + + +// diff for alters +// 2. identity always/by default - no params -> add param + +// 3. identity always/by default - with a few params - remove/add/change params + +// 4. identity always/by default - with all params - remove/add/change params + + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create table: identity always/by default - no params', async () => { + const from = {}; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity(), + id2: smallint('id2').generatedByDefaultAsIdentity(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table: identity always/by default - few params', async () => { + // TODO revise: added id1, id2 columns to users table, like in same test from push.test.ts + const from = {}; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + }), + id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ + startWith: 120, + maxValue: 17000, + }), + id2: smallint('id2').generatedByDefaultAsIdentity({ cycle: true }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_seq" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 1),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1 CYCLE)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table: identity always/by default - all params', async () => { + // TODO revise: added id1, id2 columns to users table, like in same test from push.test.ts + const from = {}; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + cycle: false, + }), + id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ + startWith: 120, + maxValue: 17000, + increment: 3, + cycle: true, + cache: 100, + }), + id2: smallint('id2').generatedByDefaultAsIdentity({ cycle: true }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_seq" INCREMENT BY 4 MINVALUE 3 MAXVALUE 1000 START WITH 3 CACHE 200),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 3 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 100 CYCLE),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1 CYCLE)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('no diff: identity always/by default - no params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + id2: integer('id2').generatedAlwaysAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + id2: integer('id2').generatedAlwaysAsIdentity(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('no diff: identity always/by default - few params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + increment: 1, + startWith: 3, + }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + increment: 1, + startWith: 3, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('no diff: identity always/by default - all params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + cycle: false, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + name: 'custom_seq', + increment: 4, + minValue: 3, + maxValue: 1000, + cache: 200, + cycle: false, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from a column - no params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id" DROP NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from a column - few params', async () => { + // TODO revise: added id1, id2 columns to users table, like in the same test from push.test.ts + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + increment: 3, + }), + id1: integer('id1').generatedByDefaultAsIdentity({ + name: 'custom_name1', + increment: 4, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + name: 'custom_name2', + increment: 4, + }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id'), + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id" DROP NOT NULL;', + 'ALTER TABLE "users" ALTER COLUMN "id1" DROP IDENTITY;', + 'ALTER TABLE "users" ALTER COLUMN "id1" DROP NOT NULL;', + 'ALTER TABLE "users" ALTER COLUMN "id2" DROP IDENTITY;', + 'ALTER TABLE "users" ALTER COLUMN "id2" DROP NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop identity from a column - all params', async () => { + // TODO revise: added id1, id2 columns to users table, like in the same test from push.test.ts + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + increment: 3, + cache: 100, + cycle: true, + }), + id1: integer('id1').generatedByDefaultAsIdentity({ + name: 'custom_name1', + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + id2: integer('id2').generatedAlwaysAsIdentity({ + name: 'custom_name2', + startWith: 10, + minValue: 10, + maxValue: 1000, + cycle: true, + cache: 10, + increment: 2, + }), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id'), + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id" DROP NOT NULL;', + `ALTER TABLE \"users\" ALTER COLUMN \"id1\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id1" DROP NOT NULL;', + `ALTER TABLE \"users\" ALTER COLUMN \"id2\" DROP IDENTITY;`, + 'ALTER TABLE "users" ALTER COLUMN "id2" DROP NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter identity from a column - no params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter identity from a column - few params', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), + }), + }; + + // TODO revise: added more params, like in same test from push.test.ts + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + cache: 10, + increment: 4, + maxValue: 10000, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter identity from a column - by default to always', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity({ + startWith: 100, + cache: 10, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED ALWAYS;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter identity from a column - always to by default', async () => { + const from = { + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity(), + }), + }; + + const to = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ + startWith: 100, + cache: 10, + }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', + 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add column with identity - few params', async () => { + const schema1 = { + users: pgTable('users', { + email: text('email'), + }), + }; + + const schema2 = { + users: pgTable('users', { + email: text('email'), + id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), + id1: integer('id1').generatedAlwaysAsIdentity({ + name: 'custom_name1', + increment: 4, + }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" ADD COLUMN "id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + 'ALTER TABLE "users" ADD COLUMN "id1" integer GENERATED ALWAYS AS IDENTITY (sequence name "custom_name1" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add identity to column - few params', async () => { + const schema1 = { + users: pgTable('users', { + id: integer('id').notNull(), + id1: integer('id1').notNull(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), + id1: integer('id1').generatedAlwaysAsIdentity({ + name: 'custom_name1', + increment: 4, + }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE "users" ALTER COLUMN "id" ADD GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + 'ALTER TABLE "users" ALTER COLUMN "id1" ADD GENERATED ALWAYS AS IDENTITY (sequence name "custom_name1" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/postgres/pg-indexes.test.ts b/drizzle-kit/tests/postgres/pg-indexes.test.ts new file mode 100644 index 0000000000..f7c893a84e --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-indexes.test.ts @@ -0,0 +1,595 @@ +import { and, eq, isNull, like, SQL, sql } from 'drizzle-orm'; +import { + boolean, + index, + integer, + pgEnum, + pgRole, + pgTable, + serial, + text, + timestamp, + uniqueIndex, + uuid, + vector, +} from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(false); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('adding basic indexes', async () => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => [ + index() + .on(t.name.desc(), t.id.asc().nullsLast()) + .with({ fillfactor: 70 }) + .where(sql`name != 'alef'`), + index('indx1') + .using('hash', t.name) + .with({ fillfactor: 70 }), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + `CREATE INDEX "users_name_id_index" ON "users" ("name" DESC NULLS LAST,"id") WITH (fillfactor=70) WHERE name != 'alef';`, + `CREATE INDEX "indx1" ON "users" USING hash ("name") WITH (fillfactor=70);`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('dropping basic index', async () => { + const schema1 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => [index().on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 })], + ), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [`DROP INDEX "users_name_id_index";`]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('altering indexes', async () => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('removeColumn').on(t.name, t.id), + index('addColumn').on(t.name.desc()).with({ fillfactor: 70 }), + index('removeExpression').on(t.name.desc(), sql`name`).concurrently(), + index('addExpression').on(t.id.desc()), + index('changeExpression').on(t.id.desc(), sql`name`), + index('changeName').on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }), + index('changeWith').on(t.name).with({ fillfactor: 70 }), + index('changeUsing').on(t.name), + ]), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('removeColumn').on(t.name), + index('addColumn').on(t.name.desc(), t.id.nullsLast()).with({ fillfactor: 70 }), + index('removeExpression').on(t.name.desc()).concurrently(), + index('addExpression').on(t.id.desc()), + index('changeExpression').on(t.id.desc(), sql`name desc`), + index('newName').on(t.name.desc(), sql`name`).with({ fillfactor: 70 }), + index('changeWith').on(t.name).with({ fillfactor: 90 }), + index('changeUsing').using('hash', t.name), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP INDEX "changeName";', + 'DROP INDEX "removeColumn";', + 'CREATE INDEX "removeColumn" ON "users" ("name");', + 'DROP INDEX "addColumn";', + 'CREATE INDEX "addColumn" ON "users" ("name" DESC NULLS LAST,"id") WITH (fillfactor=70);', + 'DROP INDEX "removeExpression";', + 'CREATE INDEX CONCURRENTLY "removeExpression" ON "users" ("name" DESC NULLS LAST);', + 'DROP INDEX "changeExpression";', + 'CREATE INDEX "changeExpression" ON "users" ("id" DESC NULLS LAST,name desc);', + 'DROP INDEX "changeWith";', + 'CREATE INDEX "changeWith" ON "users" ("name") WITH (fillfactor=90);', + 'DROP INDEX "changeUsing";', + 'CREATE INDEX "changeUsing" ON "users" USING hash ("name");', + 'CREATE INDEX "newName" ON "users" ("name" DESC NULLS LAST,name) WITH (fillfactor=70);', + ]); + expect(pst).toStrictEqual([ + 'DROP INDEX "changeName";', + 'DROP INDEX "addColumn";', + 'CREATE INDEX "addColumn" ON "users" ("name" DESC NULLS LAST,"id") WITH (fillfactor=70);', + 'DROP INDEX "changeUsing";', + 'CREATE INDEX "changeUsing" ON "users" USING hash ("name");', + 'DROP INDEX "changeWith";', + 'CREATE INDEX "changeWith" ON "users" ("name") WITH (fillfactor=90);', + 'DROP INDEX "removeColumn";', + 'CREATE INDEX "removeColumn" ON "users" ("name");', + 'DROP INDEX "removeExpression";', + 'CREATE INDEX CONCURRENTLY "removeExpression" ON "users" ("name" DESC NULLS LAST);', + 'CREATE INDEX "newName" ON "users" ("name" DESC NULLS LAST,name) WITH (fillfactor=70);', + ]); +}); + +test('indexes test case #1', async () => { + const schema1 = { + users: pgTable( + 'users', + { + id: uuid('id').defaultRandom().primaryKey(), + name: text('name').notNull(), + description: text('description'), + imageUrl: text('image_url'), + inStock: boolean('in_stock').default(true), + }, + (t) => [ + index().on(t.id.desc().nullsFirst()), + index('indx1').on(t.id, t.imageUrl), + index('indx4').on(t.id), + ], + ), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: uuid('id').defaultRandom().primaryKey(), + name: text('name').notNull(), + description: text('description'), + imageUrl: text('image_url'), + inStock: boolean('in_stock').default(true), + }, + (t) => [ + index().on(t.id.desc().nullsFirst()), + index('indx1').on(t.id, t.imageUrl), + index('indx4').on(t.id), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('Indexes properties that should not trigger push changes', async () => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('changeExpression').on(t.id.desc(), sql`name`), + index('indx1').on(t.name.desc()).concurrently(), + index('indx2').on(t.name.desc()).where(sql`true`), + index('indx3').on(t.name.op('text_ops')).where(sql`true`), + index('indx4').on(sql`lower(name)`).where(sql`true`), + ]), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('changeExpression').on(t.id.desc(), sql`name desc`), + index('indx1').on(t.name.desc()), + index('indx2').on(t.name.desc()).where(sql`false`), + index('indx3').on(t.name.op('test')).where(sql`true`), + index('indx4').on(sql`lower(id)`).where(sql`true`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP INDEX "changeExpression";', + 'CREATE INDEX "changeExpression" ON "users" ("id" DESC NULLS LAST,name desc);', + 'DROP INDEX "indx2";', + 'CREATE INDEX "indx2" ON "users" ("name" DESC NULLS LAST) WHERE false;', + 'DROP INDEX "indx3";', + 'CREATE INDEX "indx3" ON "users" ("name" test);', + 'DROP INDEX "indx4";', + 'CREATE INDEX "indx4" ON "users" (lower(id));', + ]); + expect(pst).toStrictEqual([ + 'DROP INDEX "indx2";', + 'CREATE INDEX "indx2" ON "users" ("name" DESC NULLS LAST) WHERE false;', + ]); +}); + +test('indexes #0', async (t) => { + const schema1 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + ( + t, + ) => [ + index('removeColumn').on(t.name, t.id), + index('addColumn').on(t.name.desc()).with({ fillfactor: 70 }), + index('removeExpression').on(t.name.desc(), sql`name`).concurrently(), + index('addExpression').on(t.id.desc()), + index('changeExpression').on(t.id.desc(), sql`name`), + index('changeName').on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }), + index('changeWith').on(t.name).with({ fillfactor: 70 }), + index('changeUsing').on(t.name), + ], + ), + }; + + const schema2 = { + users: pgTable( + 'users', + { + id: serial('id').primaryKey(), + name: text('name'), + }, + (t) => [ + index('removeColumn').on(t.name), + index('addColumn').on(t.name.desc(), t.id.nullsLast()).with({ fillfactor: 70 }), + index('removeExpression').on(t.name.desc()).concurrently(), + index('addExpression').on(t.id.desc()), + index('changeExpression').on(t.id.desc(), sql`name desc`), + index('newName').on(t.name.desc(), sql`name`).with({ fillfactor: 70 }), + index('changeWith').on(t.name).with({ fillfactor: 90 }), + index('changeUsing').using('hash', t.name), + ], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + expect(st).toStrictEqual([ + 'DROP INDEX "changeName";', + 'DROP INDEX "removeColumn";', + 'CREATE INDEX "removeColumn" ON "users" ("name");', + 'DROP INDEX "addColumn";', + 'CREATE INDEX "addColumn" ON "users" ("name" DESC NULLS LAST,"id") WITH (fillfactor=70);', + 'DROP INDEX "removeExpression";', + 'CREATE INDEX CONCURRENTLY "removeExpression" ON "users" ("name" DESC NULLS LAST);', + 'DROP INDEX "changeExpression";', + 'CREATE INDEX "changeExpression" ON "users" ("id" DESC NULLS LAST,name desc);', + 'DROP INDEX "changeWith";', + 'CREATE INDEX "changeWith" ON "users" ("name") WITH (fillfactor=90);', + 'DROP INDEX "changeUsing";', + 'CREATE INDEX "changeUsing" ON "users" USING hash ("name");', + 'CREATE INDEX "newName" ON "users" ("name" DESC NULLS LAST,name) WITH (fillfactor=70);', + ]); + + // for push we ignore change of index expressions + expect(pst).toStrictEqual([ + 'DROP INDEX "changeName";', + 'DROP INDEX "addColumn";', + 'CREATE INDEX "addColumn" ON "users" ("name" DESC NULLS LAST,"id") WITH (fillfactor=70);', + // 'DROP INDEX "changeExpression";', + 'DROP INDEX "changeUsing";', + 'CREATE INDEX "changeUsing" ON "users" USING hash ("name");', + 'DROP INDEX "changeWith";', + 'CREATE INDEX "changeWith" ON "users" ("name") WITH (fillfactor=90);', + 'DROP INDEX "removeColumn";', + 'CREATE INDEX "removeColumn" ON "users" ("name");', + 'DROP INDEX "removeExpression";', + 'CREATE INDEX CONCURRENTLY "removeExpression" ON "users" ("name" DESC NULLS LAST);', + 'CREATE INDEX "newName" ON "users" ("name" DESC NULLS LAST,name) WITH (fillfactor=70);', + // 'CREATE INDEX "changeExpression" ON "users" ("id" DESC NULLS LAST,name desc);', + ]); +}); + +test('vector index', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: vector('name', { dimensions: 3 }), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + embedding: vector('name', { dimensions: 3 }), + }, (t) => [ + index('vector_embedding_idx') + .using('hnsw', t.embedding.op('vector_ip_ops')) + .with({ m: 16, ef_construction: 64 }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + `CREATE INDEX "vector_embedding_idx" ON "users" USING hnsw ("name" vector_ip_ops) WITH (m=16, ef_construction=64);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('index #2', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('indx').on(t.name.desc()).concurrently(), + index('indx1').on(t.name.desc()), + index('indx2').on(t.name.op('text_ops')), + index('indx3').on(sql`lower(name)`), + ]), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }, (t) => [ + index('indx').on(t.name.desc()), + index('indx1').on(t.name.desc()).where(sql`false`), + index('indx2').on(t.name.op('test')), + index('indx3').on(sql`lower(${t.name})`), + index('indx4').on(sql`lower(name)`), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP INDEX "indx1";', + 'CREATE INDEX "indx1" ON "users" ("name" DESC NULLS LAST) WHERE false;', + 'DROP INDEX "indx2";', + 'CREATE INDEX "indx2" ON "users" ("name" test);', + 'DROP INDEX "indx3";', + 'CREATE INDEX "indx3" ON "users" (lower("name"));', + 'CREATE INDEX "indx4" ON "users" (lower(name));', + ]); + expect(pst).toStrictEqual([ + 'DROP INDEX "indx1";', + 'CREATE INDEX "indx1" ON "users" ("name" DESC NULLS LAST) WHERE false;', + // TODO: we ignore columns changes during 'push', we should probably tell user about it in CLI? + // 'DROP INDEX "indx2";', + // 'DROP INDEX "indx3";', + 'CREATE INDEX "indx4" ON "users" (lower(name));', + // 'CREATE INDEX "indx2" ON "users" ("name" test);', + // 'CREATE INDEX "indx3" ON "users" (lower("name"));', + ]); +}); + +test('index #3', async (t) => { + const schema1 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }, (t) => [ + index().on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }).where(sql`name != 'alex'`), + index('indx1').using('hash', sql`${t.name}`).with({ fillfactor: 70 }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + `CREATE INDEX "users_name_id_index" ON "users" ("name" DESC NULLS LAST,"id") WITH (fillfactor=70) WHERE name != 'alex';`, + `CREATE INDEX "indx1" ON "users" USING hash ("name") WITH (fillfactor=70);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4929 +test('index #4', async (t) => { + const table1 = pgTable( + 'table', + { + uid: uuid('uid').notNull(), + column1: timestamp('column1'), + column2: timestamp('column2'), + bool: boolean('bool') + .generatedAlwaysAs( + (): SQL => and(isNull(table1.column1), isNull(table1.column2))!, + ) + .notNull(), + }, + (table) => [index('table_uid_bool_idx').on(table.uid, table.bool)], + ); + const schema1 = { table: table1 }; + + const table2 = pgTable( + 'table', + { + uid: uuid('uid').notNull(), + column1: timestamp('column1'), + column3: timestamp('column3'), + bool: boolean('bool') + .generatedAlwaysAs( + (): SQL => and(isNull(table2.column1), isNull(table2.column3))!, + ) + .notNull(), + }, + (table) => [index('table_uid_bool_idx').on(table.uid, table.bool)], + ); + const schema2 = { table: table2 }; + + const renames = ['public.table.column2->public.table.column3']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + expect(st).toStrictEqual([ + `ALTER TABLE \"table\" RENAME COLUMN \"column2\" TO \"column3\";`, + `ALTER TABLE \"table\" DROP COLUMN \"bool\";`, + `ALTER TABLE \"table\" ADD COLUMN \"bool\" boolean GENERATED ALWAYS AS ((\"table\".\"column1\" is null and \"table\".\"column3\" is null)) STORED;`, + `CREATE INDEX "table_uid_bool_idx" ON "table" ("uid","bool");`, + ]); + // push is not triggered on generated change + expect(pst).toStrictEqual([ + `ALTER TABLE \"table\" RENAME COLUMN \"column2\" TO \"column3\";`, + ]); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4790 +test('index #5', async (t) => { + const enum_ = pgEnum('enum', ['text', 'not_text']); + const schema1 = { + enum_, + table1: pgTable('table1', { + column1: integer(), + column2: integer(), + column3: integer(), + column4: boolean(), + column5: enum_(), + column6: text(), + }, (table) => [ + uniqueIndex().on(table.column1).where(eq(table.column4, true)), + uniqueIndex().on(table.column2).where(eq(table.column5, 'text')), + uniqueIndex().on(table.column3).where(like(table.column6, 'text')), + ]), + }; + + const { sqlStatements: st } = await diff({}, schema1, []); + const { sqlStatements: pst } = await push({ db, to: schema1 }); + + const st0 = [ + `CREATE TYPE "enum" AS ENUM('text', 'not_text');`, + 'CREATE TABLE "table1" (\n' + + '\t"column1" integer,\n' + + '\t"column2" integer,\n' + + '\t"column3" integer,\n' + + '\t"column4" boolean,\n' + + '\t"column5" "enum",\n' + + '\t"column6" text\n' + + ');\n', + 'CREATE UNIQUE INDEX "table1_column1_index" ON "table1" ("column1") WHERE "column4" = true;', // or with $1 param instead of true, but then params must be included in the query + `CREATE UNIQUE INDEX "table1_column2_index" ON "table1" ("column2") WHERE "column5" = 'text';`, + `CREATE UNIQUE INDEX "table1_column3_index" ON "table1" ("column3") WHERE "column6" like 'text';`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('index #6', async (t) => { + const enum_ = pgEnum('enum', ['text', 'not_text', 'something_else']); + const schema1 = { + enum_, + table1: pgTable('table1', { + column1: integer(), + column2: boolean(), + column3: enum_(), + }, (table) => [ + uniqueIndex().on(table.column2).where(eq(table.column2, true)), + uniqueIndex().on(table.column3).where(eq(table.column3, 'text')), + ]), + }; + + const { sqlStatements: st } = await diff({}, schema1, []); + const { sqlStatements: pst } = await push({ db, to: schema1 }); + + const st0 = [ + `CREATE TYPE "enum" AS ENUM('text', 'not_text', 'something_else');`, + 'CREATE TABLE "table1" (\n' + + '\t"column1" integer,\n' + + '\t"column2" boolean,\n' + + '\t"column3" "enum"\n' + + ');\n', + 'CREATE UNIQUE INDEX "table1_column2_index" ON "table1" ("column2") WHERE "column2" = true;', // or with $1 param instead of true, but then params must be included in the query + `CREATE UNIQUE INDEX "table1_column3_index" ON "table1" ("column3") WHERE "column3" = 'text';`, // in indices names maybe should be some hash + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/postgres/pg-policy.test.ts b/drizzle-kit/tests/postgres/pg-policy.test.ts new file mode 100644 index 0000000000..243fc35fcc --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-policy.test.ts @@ -0,0 +1,1528 @@ +import { sql } from 'drizzle-orm'; +import { integer, pgPolicy, pgRole, pgSchema, pgTable } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from '../postgres/mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('full policy: no changes', async () => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy + enable rls', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop policy + disable rls', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy without enable rls', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' }), pgPolicy('newRls')]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'CREATE POLICY "newRls" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop policy without disable rls', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' }), pgPolicy('oldRls')]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, ignoreSubsequent: true }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "oldRls" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy without recreation: changing roles #2', async (t) => { + const role = pgRole('test'); + const schema1 = { + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive', to: role })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: [role.name] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: [role.name] } }, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO "test";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('alter policy without recreation: changing roles', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive', to: 'current_role' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, ignoreSubsequent: true }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO current_role;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy without recreation: changing using', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive', using: sql`true` })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public USING (true);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy without recreation: changing with check', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive', withCheck: sql`true` })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public WITH CHECK (true);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +/// + +test('alter policy with recreation: changing as', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'restrictive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy with recreation: changing for', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive', for: 'delete' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy with recreation: changing both "as" and "for"', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'restrictive', for: 'insert' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR INSERT TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy with recreation: changing all fields', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive', for: 'select', using: sql`true` })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'restrictive', to: 'current_role', withCheck: sql`true` })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO current_role WITH CHECK (true);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('alter policy with recreation: changing all fields #2', async (t) => { + const role = pgRole('test'); + const schema1 = { + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive', for: 'select', using: sql`true` })]), + }; + + const schema2 = { + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'restrictive', to: role, withCheck: sql`true` })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: [role.name] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: [role.name] } }, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO "test" WITH CHECK (true);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename policy', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('newName', { as: 'permissive' })]), + }; + + const renames = [ + 'public.users.test->public.users.newName', + ]; + + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" RENAME TO "newName";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename policy in renamed table', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [ + pgPolicy('test', { as: 'permissive' }), + ]), + }; + + const schema2 = { + users: pgTable('users2', { + id: integer('id').primaryKey(), + }, (t) => [pgPolicy('newName', { as: 'permissive' })]), + }; + + const renames = ['public.users->public.users2', 'public.users2.test->public.users2.newName']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0 = [ + 'ALTER TABLE "users" RENAME TO "users2";', + 'ALTER POLICY "test" ON "users2" RENAME TO "newName";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table with a policy', async (t) => { + const schema1 = {}; + + const schema2 = { + users: pgTable('users2', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'CREATE TABLE "users2" (\n\t"id" integer PRIMARY KEY\n);\n', + 'ALTER TABLE "users2" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users2" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop table with a policy', async (t) => { + const schema1 = { + users: pgTable('users2', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { as: 'permissive' })]), + }; + + const schema2 = {}; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users2";', + 'DROP TABLE "users2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy with multiple "to" roles', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const role = pgRole('manager'); + + const schema2 = { + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { to: ['current_role', role] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + // TODO: @AlexBlokh: it is now really weird that I have to include role names in entities when I just have them in schema + // if I don't - it will try to create same roles all the time + const st0 = [ + 'CREATE ROLE "manager";', + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_role, "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('add policy with multiple "to" roles #2', async (t) => { + const role2 = pgRole('test'); + const schema1 = { + role2, + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const role = pgRole('manager'); + + const schema2 = { + role2, + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { to: [role2, role] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: [role2.name, role.name] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: [role2.name, role.name] } }, + }); + + // TODO: @AlexBlokh: it is now really weird that I have to include role names in entities when I just have them in schema + // if I don't - it will try to create same roles all the time + const st0 = [ + 'CREATE ROLE "manager";', + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO "manager", "test";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table with rls enabled', async (t) => { + const schema1 = {}; + + const schema2 = { + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('enable rls force', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const schema2 = { + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('disable rls force', async (t) => { + const schema1 = { + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop policy with enabled rls', async (t) => { + const role = pgRole('manager'); + + const schema1 = { + role, + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { to: ['current_role', role] })]), + }; + + const schema2 = { + role, + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, ignoreSubsequent: true }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + ignoreSubsequent: true, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('drop policy with enabled rls #2', async (t) => { + const role = pgRole('manager'); + + const schema1 = { + role, + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { to: [role] })]), + }; + + const schema2 = { + role, + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy with enabled rls', async (t) => { + const schema1 = { + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }), + }; + + const role = pgRole('manager'); + + const schema2 = { + role, + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { to: ['current_role', role] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + ignoreSubsequent: true, + }); + + const st0 = [ + 'CREATE ROLE "manager";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_role, "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('add policy with enabled rls #2', async (t) => { + const schema1 = { + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }), + }; + + const role = pgRole('manager'); + + const schema2 = { + role, + users: pgTable.withRLS('users', { + id: integer('id').primaryKey(), + }, () => [pgPolicy('test', { to: [role] })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + }); + + const st0 = [ + 'CREATE ROLE "manager";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy + link table', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('link table', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + rls: pgPolicy('test', { as: 'permissive' }), + }; + + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unlink table', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive' }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop policy with link', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy in table and with link table', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const users = pgTable('users', { + id: integer('id').primaryKey(), + }, () => [ + pgPolicy('test1', { to: 'current_user' }), + ]); + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test1" ON "users" AS PERMISSIVE FOR ALL TO current_user;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('add policy in table and with link table #2', async (t) => { + const role = pgRole('test2'); + const schema1 = { + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const users = pgTable('users', { + id: integer('id').primaryKey(), + }, () => [ + pgPolicy('test1', { to: role }), + ]); + + const schema2 = { + role, + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: [role.name] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: [role.name] } }, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test1" ON "users" AS PERMISSIVE FOR ALL TO "test2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('link non-schema table', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { users }; + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unlink non-schema table', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive' }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy + link non-schema table', async (t) => { + const cities = pgTable.withRLS('cities', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + cities, + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const schema2 = { + cities, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test2'), + ]), + rls: pgPolicy('test', { as: 'permissive' }).link(cities), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0 = [ + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "cities" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add policy + link non-schema table from auth schema', async (t) => { + const authSchema = pgSchema('auth'); + const cities = authSchema.table('cities', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + authSchema, + cities, + users: pgTable('users', { + id: integer('id').primaryKey(), + }), + }; + + const schema2 = { + authSchema, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test2'), + ]), + cities, + rls: pgPolicy('test', { as: 'permissive' }).link(cities), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'ALTER TABLE "auth"."cities" ENABLE ROW LEVEL SECURITY;', + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "auth"."cities" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]); + expect(pst).toStrictEqual([ + 'ALTER TABLE "auth"."cities" ENABLE ROW LEVEL SECURITY;', + 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', + 'CREATE POLICY "test" ON "auth"."cities" AS PERMISSIVE FOR ALL TO public;', + 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', + ]); +}); + +test('rename policy that is linked', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + rls: pgPolicy('newName', { as: 'permissive' }).link(users), + }; + + const renames = [ + 'public.users.test->public.users.newName', + ]; + + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" RENAME TO "newName";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy that is linked', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + ignoreSubsequent: true, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO current_role;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('alter policy that is linked #2', async (t) => { + const role = pgRole('owner'); + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + role, + users, + rls: pgPolicy('test', { as: 'permissive' }).link(users), + }; + + const schema2 = { + role, + users, + rls: pgPolicy('test', { as: 'permissive', to: role }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: [role.name, 'test'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: [role.name, 'test'] } }, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO "owner";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy that is linked: withCheck', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users, + rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }).link(users), + }; + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive', withCheck: sql`false` }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public WITH CHECK (false);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy that is linked: using', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users, + rls: pgPolicy('test', { as: 'permissive', using: sql`true` }).link(users), + }; + + const schema2 = { + users, + rls: pgPolicy('test', { as: 'permissive', using: sql`false` }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public USING (false);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy that is linked: using', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users, + rls: pgPolicy('test', { for: 'insert' }).link(users), + }; + + const schema2 = { + users, + rls: pgPolicy('test', { for: 'delete' }).link(users), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +//// + +test('alter policy in the table', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { as: 'permissive' }), + ]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { as: 'permissive', to: 'current_role' }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['test'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['test'] } }, + ignoreSubsequent: true, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO current_role;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); +test('alter policy in the table #2', async (t) => { + const role = pgRole('owner'); + const schema1 = { + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { as: 'permissive' }), + ]), + }; + + const schema2 = { + role, + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { as: 'permissive', to: role }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: [role.name] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: [role.name] } }, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO "owner";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter policy in the table: withCheck', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { as: 'permissive', withCheck: sql`true` }), + ]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { as: 'permissive', withCheck: sql`false` }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public WITH CHECK (false);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy in the table: using', async (t) => { + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { as: 'permissive', using: sql`true` }), + ]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { as: 'permissive', using: sql`false` }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'ALTER POLICY "test" ON "users" TO public USING (false);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore [using withcheck] for push +}); + +test('alter policy in the table: using', async (t) => { + const users = pgTable('users', { + id: integer('id').primaryKey(), + }); + + const schema1 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { for: 'insert' }), + ]), + }; + + const schema2 = { + users: pgTable('users', { + id: integer('id').primaryKey(), + }, (t) => [ + pgPolicy('test', { for: 'delete' }), + ]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0 = [ + 'DROP POLICY "test" ON "users";', + 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/postgres/pg-role.test.ts b/drizzle-kit/tests/postgres/pg-role.test.ts new file mode 100644 index 0000000000..ebbb7892d5 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-role.test.ts @@ -0,0 +1,245 @@ +import { pgRole } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from '../postgres/mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create role', async (t) => { + const schema1 = {}; + + const schema2 = { + manager: pgRole('manager'), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'CREATE ROLE "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create role with properties', async (t) => { + const schema1 = {}; + + const schema2 = { + manager: pgRole('manager', { + createDb: true, + createRole: true, + inherit: false, + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + `CREATE ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create role with some properties', async (t) => { + const schema1 = {}; + + const schema2 = { + manager: pgRole('manager', { createDb: true, inherit: false }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + }); + + const st0 = [ + 'CREATE ROLE "manager" WITH CREATEDB NOINHERIT;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop role', async (t) => { + const schema1 = { manager: pgRole('manager') }; + + const schema2 = {}; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'DROP ROLE "manager";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create and drop role', async (t) => { + const schema1 = { + manager: pgRole('manager'), + }; + + const schema2 = { + admin: pgRole('admin'), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager', 'admin'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager', 'admin'] } }, + }); + + const st0 = [ + 'DROP ROLE "manager";', + 'CREATE ROLE "admin";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename role', async (t) => { + const schema1 = { + manager: pgRole('manager'), + }; + + const schema2 = { + admin: pgRole('admin'), + }; + + const renames = ['manager->admin']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1, entities: { roles: { include: ['manager', 'admin'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + renames, + entities: { roles: { include: ['manager', 'admin'] } }, + }); + + const st0 = [ + 'ALTER ROLE "manager" RENAME TO "admin";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter all role field', async (t) => { + const schema1 = { + manager: pgRole('manager'), + }; + + const schema2 = { + manager: pgRole('manager', { + createDb: true, + createRole: true, + inherit: false, + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + `ALTER ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter createdb in role', async (t) => { + const schema1 = { + manager: pgRole('manager'), + }; + + const schema2 = { + manager: pgRole('manager', { createDb: true }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'ALTER ROLE "manager" WITH CREATEDB;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter createrole in role', async (t) => { + const schema1 = { + manager: pgRole('manager'), + }; + + const schema2 = { + manager: pgRole('manager', { createRole: true }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ db, to: schema2, entities: { roles: { include: ['manager'] } } }); + + const st0 = [ + 'ALTER ROLE "manager" WITH CREATEROLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter inherit in role', async (t) => { + const schema1 = { + manager: pgRole('manager'), + }; + + const schema2 = { + manager: pgRole('manager', { inherit: false }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1, entities: { roles: { include: ['manager'] } } }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + entities: { roles: { include: ['manager'] } }, + log: 'statements', + }); + + const st0 = [ + 'ALTER ROLE "manager" WITH NOINHERIT;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/postgres/pg-schemas.test.ts b/drizzle-kit/tests/postgres/pg-schemas.test.ts new file mode 100644 index 0000000000..15b385baf5 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-schemas.test.ts @@ -0,0 +1,160 @@ +import { pgSchema } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('add schema #1', async () => { + const to = { + devSchema: pgSchema('dev'), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SCHEMA "dev";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add schema #2', async () => { + const from = { + devSchema: pgSchema('dev'), + }; + const to = { + devSchema: pgSchema('dev'), + devSchema2: pgSchema('dev2'), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SCHEMA "dev2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('delete schema #1', async () => { + const from = { + devSchema: pgSchema('dev'), + }; + + const { sqlStatements: st } = await diff(from, {}, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to: {}, + }); + + const st0 = [ + 'DROP SCHEMA "dev";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('delete schema #2', async () => { + const from = { + devSchema: pgSchema('dev'), + devSchema2: pgSchema('dev2'), + }; + const to = { + devSchema: pgSchema('dev'), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'DROP SCHEMA "dev2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename schema #1', async () => { + const from = { + devSchema: pgSchema('dev'), + }; + + const to = { + devSchema2: pgSchema('dev2'), + }; + + const renames = ['dev->dev2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SCHEMA "dev" RENAME TO "dev2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename schema #2', async () => { + const from = { + devSchema: pgSchema('dev'), + devSchema1: pgSchema('dev1'), + }; + const to = { + devSchema: pgSchema('dev'), + devSchema2: pgSchema('dev2'), + }; + + const renames = ['dev1->dev2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SCHEMA "dev1" RENAME TO "dev2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/postgres/pg-sequences.test.ts b/drizzle-kit/tests/postgres/pg-sequences.test.ts new file mode 100644 index 0000000000..75d086e023 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-sequences.test.ts @@ -0,0 +1,448 @@ +import { pgSchema, pgSequence } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create sequence', async () => { + const to = { + seq: pgSequence('name', { startWith: 100 }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SEQUENCE "public"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create sequence: all fields', async () => { + const from = {}; + const to = { + seq: pgSequence('name', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SEQUENCE "public"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create sequence: custom schema', async () => { + const customSchema = pgSchema('custom'); + const from = { customSchema }; + const to = { + customSchema, + seq: customSchema.sequence('name', { startWith: 100 }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'CREATE SEQUENCE "custom"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create sequence: custom schema + all fields', async () => { + const customSchema = pgSchema('custom'); + const from = { customSchema }; + const to = { + customSchema, + seq: customSchema.sequence('name', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'CREATE SEQUENCE "custom"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop sequence', async () => { + const from = { seq: pgSequence('name', { startWith: 100 }) }; + const to = {}; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'DROP SEQUENCE "public"."name";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop sequence: custom schema', async () => { + const customSchema = pgSchema('custom'); + const from = { customSchema, seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = { customSchema }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'DROP SEQUENCE "custom"."name";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// rename sequence + +test('rename sequence', async () => { + const from = { seq: pgSequence('name', { startWith: 100 }) }; + const to = { seq: pgSequence('name_new', { startWith: 100 }) }; + + const renames = [ + 'public.name->public.name_new', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SEQUENCE "name" RENAME TO "name_new";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename sequence in custom schema', async () => { + const customSchema = pgSchema('custom'); + + const from = { customSchema, seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = { customSchema, seq: customSchema.sequence('name_new', { startWith: 100 }) }; + + const renames = [ + 'custom.name->custom.name_new', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SEQUENCE "custom"."name" RENAME TO "name_new";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('move sequence between schemas #1', async () => { + const customSchema = pgSchema('custom'); + const from = { customSchema, seq: pgSequence('name', { startWith: 100 }) }; + const to = { customSchema, seq: customSchema.sequence('name', { startWith: 100 }) }; + + const renames = [ + 'public.name->custom.name', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SEQUENCE "name" SET SCHEMA "custom";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('move sequence between schemas #2', async () => { + const customSchema = pgSchema('custom'); + const from = { customSchema, seq: customSchema.sequence('name', { startWith: 100 }) }; + const to = { customSchema, seq: pgSequence('name', { startWith: 100 }) }; + + const renames = [ + 'custom.name->public.name', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SEQUENCE "custom"."name" SET SCHEMA "public";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// Add squasher for sequences to make alters work + +// Run all tests + +// Finish introspect for sequences + +// Check push for sequences + + +// add tests for generated to postgresql + +// add tests for generated to mysql + +// add tests for generated to sqlite + + +// add tests for identity to postgresql + +// check introspect generated(all dialects) + +// check push generated(all dialect) + + +// add introspect ts file logic for all the features +// manually test everything +// beta release + +test('alter sequence', async () => { + const from = { seq: pgSequence('name', { startWith: 100 }) }; + const to = { seq: pgSequence('name', { startWith: 105 }) }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER SEQUENCE "name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 105 CACHE 1;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('full sequence: no changes', async () => { + const schema1 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('basic sequence: change fields', async () => { + const schema1 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 100000, + minValue: 100, + cycle: true, + cache: 10, + increment: 4, + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER SEQUENCE "my_seq" INCREMENT BY 4 MINVALUE 100 MAXVALUE 100000 START WITH 100 CACHE 10 CYCLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('basic sequence: change name', async () => { + const schema1 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: pgSequence('my_seq2', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const renames = ['public.my_seq->public.my_seq2']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER SEQUENCE "my_seq" RENAME TO "my_seq2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('basic sequence: change name and fields', async () => { + const schema1 = { + seq: pgSequence('my_seq', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 2, + }), + }; + + const schema2 = { + seq: pgSequence('my_seq2', { + startWith: 100, + maxValue: 10000, + minValue: 100, + cycle: true, + cache: 10, + increment: 4, + }), + }; + + const renames = ['public.my_seq->public.my_seq2']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER SEQUENCE "my_seq" RENAME TO "my_seq2";', + 'ALTER SEQUENCE "my_seq2" INCREMENT BY 4 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('Add basic sequences', async () => { + const schema1 = { + seq: pgSequence('my_seq', { startWith: 100 }), + }; + + const schema2 = { + seq: pgSequence('my_seq', { startWith: 100 }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/postgres/pg-snapshot-v7.test.ts b/drizzle-kit/tests/postgres/pg-snapshot-v7.test.ts new file mode 100644 index 0000000000..8462e5fa31 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-snapshot-v7.test.ts @@ -0,0 +1,40 @@ +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diffSnapshotV7, prepareTestDatabase, TestDatabase } from './mocks'; +import * as s01 from './snapshots/schema01'; +import * as s01new from './snapshots/schema01new'; +import * as s02 from './snapshots/schema02'; +import * as s02new from './snapshots/schema02new'; +import * as s03 from './snapshots/schema03'; +import * as s03new from './snapshots/schema03new'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('s01', async (t) => { + const res = await diffSnapshotV7(db, s01new, s01); + expect(res.all).toStrictEqual([]); +}); + +test('s02', async (t) => { + const res = await diffSnapshotV7(db, s02new, s02); + expect(res.all).toStrictEqual([]); +}); + +test('s03', async (t) => { + const res = await diffSnapshotV7(db, s03new, s03); + expect(res.all).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/postgres/pg-tables.test.ts b/drizzle-kit/tests/postgres/pg-tables.test.ts new file mode 100644 index 0000000000..c70d5f0f5e --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-tables.test.ts @@ -0,0 +1,1332 @@ +import { SQL, sql } from 'drizzle-orm'; +import { + foreignKey, + geometry, + index, + integer, + pgSchema, + pgTable, + pgTableCreator, + primaryKey, + serial, + text, + unique, + uniqueIndex, + vector, +} from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('add table #1', async () => { + const to = { + users: pgTable('users', {}), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #2', async () => { + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" serial PRIMARY KEY\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #3', async () => { + const to = { + users: pgTable('users', { + id: serial('id'), + }, (t) => [primaryKey({ name: 'users_pk', columns: [t.id] })]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n' + + '\t"id" serial,\n' + + '\tCONSTRAINT "users_pk" PRIMARY KEY("id")\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #4', async () => { + const to = { + users: pgTable('users', { id: integer() }), + posts: pgTable('posts', { id: integer() }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" integer\n);\n', + 'CREATE TABLE "posts" (\n\t"id" integer\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #5', async () => { + const schema = pgSchema('folder'); + const from = { + schema, + }; + + const to = { + schema, + users: schema.table('users', { + id: integer(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "folder"."users" (\n\t"id" integer\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #6', async () => { + const from = { + users1: pgTable('users1', { id: integer() }), + }; + + const to = { + users2: pgTable('users2', { id: integer() }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users2" (\n\t"id" integer\n);\n', + 'DROP TABLE "users1";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #7', async () => { + const from = { + users1: pgTable('users1', { id: integer() }), + }; + + const to = { + users: pgTable('users', { id: integer() }), + users2: pgTable('users2', { id: integer() }), + }; + + const renames = ['public.users1->public.users2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n\t"id" integer\n);\n', + 'ALTER TABLE "users1" RENAME TO "users2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #8: geometry types', async () => { + const to = { + users: pgTable('users', { + geom: geometry('geom', { type: 'point' }).notNull(), + geom1: geometry('geom1').notNull(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + // TODO: for now pglite does not support postgis extension, revise later https://github.com/electric-sql/pglite/issues/11 + // const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"geom" geometry(point) NOT NULL,\n\t"geom1" geometry(point) NOT NULL\n);\n`, + ]; + expect(st).toStrictEqual(st0); + // expect(pst).toStrictEqual(st0); +}); + +/* unique inline */ +test('add table #9', async () => { + const to = { + users: pgTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "users" (\n' + + '\t"name" text UNIQUE\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique inline named */ +test('add table #10', async () => { + const from = {}; + const to = { + users: pgTable('users', { + name: text().unique('name_unique'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" text CONSTRAINT "name_unique" UNIQUE\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique inline named nulls not distinct */ +test('add table #11', async () => { + const from = {}; + const to = { + users: pgTable('users', { + name: text().unique('name_unique', { nulls: 'not distinct' }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" text CONSTRAINT "name_unique" UNIQUE NULLS NOT DISTINCT\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique inline default-named nulls not distinct */ +test('add table #12', async () => { + const from = {}; + const to = { + users: pgTable('users', { + name: text().unique('users_name_key', { nulls: 'not distinct' }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" text CONSTRAINT "users_name_key" UNIQUE NULLS NOT DISTINCT\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique default-named */ +test('add table #13', async () => { + const to = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('users_name_key').on(t.name)]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" text CONSTRAINT "users_name_key" UNIQUE\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique default-named nulls not distinct */ +test('add table #14', async () => { + const from = {}; + const to = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('users_name_key').on(t.name).nullsNotDistinct()]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" text CONSTRAINT "users_name_key" UNIQUE NULLS NOT DISTINCT\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +/* unique */ +test('add table #15', async () => { + const from = {}; + const to = { + users: pgTable('users', { + name: text(), + }, (t) => [unique('name_unique').on(t.name).nullsNotDistinct()]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"name" text CONSTRAINT "name_unique" UNIQUE NULLS NOT DISTINCT\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('multiproject schema add table #1', async () => { + const table = pgTableCreator((name) => `prefix_${name}`); + + const to = { + users: table('users', { + id: serial('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "prefix_users" (\n\t"id" serial PRIMARY KEY\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('multiproject schema drop table #1', async () => { + const table = pgTableCreator((name) => `prefix_${name}`); + + const from = { + users: table('users', { + id: serial('id').primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(from, {}, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to: {}, + }); + + const st0 = [ + 'DROP TABLE "prefix_users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('multiproject schema alter table name #1', async () => { + const table = pgTableCreator((name) => `prefix_${name}`); + + const from = { + users: table('users', { + id: serial('id').primaryKey(), + }), + }; + const to = { + users1: table('users1', { + id: serial('id').primaryKey(), + }), + }; + + const renames = [ + 'public.prefix_users->public.prefix_users1', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER TABLE "prefix_users" RENAME TO "prefix_users1";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #8: column with pgvector', async () => { + const to = { + users2: pgTable('users2', { + id: serial('id').primaryKey(), + name: vector('name', { dimensions: 3 }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users2" (\n\t"id" serial PRIMARY KEY,\n\t"name" vector(3)\n);\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add schema + table #1', async () => { + const schema = pgSchema('folder'); + + const to = { + schema, + users: schema.table('users', { + id: integer(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE SCHEMA "folder";\n', + 'CREATE TABLE "folder"."users" (\n\t"id" integer\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4796 +test('add schema + table #2', async () => { + const schema = pgSchema('folder'); + + const to = { + schema, + users: schema.table('users', { + id: integer(), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const schemas = ['folder']; + const { sqlStatements: pst } = await push({ db, to, schemas }); + + const st0 = [ + 'CREATE SCHEMA "folder";\n', + 'CREATE TABLE "folder"."users" (\n\t"id" integer\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change schema with tables #1', async () => { + const schema = pgSchema('folder'); + const schema2 = pgSchema('folder2'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema2, + users: schema2.table('users', {}), + }; + + const renames = ['folder->folder2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SCHEMA "folder" RENAME TO "folder2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #1', async () => { + const schema = pgSchema('folder'); + const from = { + schema, + users: pgTable('users', {}), + }; + const to = { + schema, + users: schema.table('users', {}), + }; + + const renames = [ + 'public.users->folder.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER TABLE "users" SET SCHEMA "folder";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #2', async () => { + const schema = pgSchema('folder'); + const from = { + schema, + users: schema.table('users', {}), + }; + const to = { + schema, + users: pgTable('users', {}), + }; + + const renames = [ + 'folder.users->public.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER TABLE "folder"."users" SET SCHEMA "public";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #3', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users', {}), + }; + + const renames = [ + 'folder1.users->folder2.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER TABLE "folder1"."users" SET SCHEMA "folder2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #4', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const renames = [ + 'folder1.users->folder2.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'CREATE SCHEMA "folder2";\n', + 'ALTER TABLE "folder1"."users" SET SCHEMA "folder2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #5', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, // remove schema + users: schema1.table('users', {}), + }; + const to = { + schema2, // add schema + users: schema2.table('users', {}), // move table + }; + + const renames = [ + 'folder1.users->folder2.users', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'CREATE SCHEMA "folder2";\n', + 'ALTER TABLE "folder1"."users" SET SCHEMA "folder2";\n', + 'DROP SCHEMA "folder1";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #5', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + schema2, + users: schema1.table('users', {}), + }; + const to = { + schema1, + schema2, + users: schema2.table('users2', {}), // rename and move table + }; + + const renames = [ + 'folder1.users->folder2.users2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + 'ALTER TABLE "folder1"."users" RENAME TO "users2";', + 'ALTER TABLE "folder1"."users2" SET SCHEMA "folder2";\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change table schema #6', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema2, // rename schema + users: schema2.table('users2', {}), // rename table + }; + + const renames = [ + 'folder1->folder2', + 'folder2.users->folder2.users2', + ]; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + 'ALTER SCHEMA "folder1" RENAME TO "folder2";\n', + 'ALTER TABLE "folder2"."users" RENAME TO "users2";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop table + rename schema #1', async () => { + const schema1 = pgSchema('folder1'); + const schema2 = pgSchema('folder2'); + const from = { + schema1, + users: schema1.table('users', {}), + }; + const to = { + schema2, // rename schema + // drop table + }; + + const renames = ['folder1->folder2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + 'ALTER SCHEMA "folder1" RENAME TO "folder2";\n', + 'DROP TABLE "folder2"."users";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop tables with fk constraint', async () => { + const table1 = pgTable('table1', { + column1: integer().primaryKey(), + }); + const table2 = pgTable('table2', { + column1: integer().primaryKey(), + column2: integer().references(() => table1.column1), + }); + const schema1 = { table1, table2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, schema1, []); + const { sqlStatements: pst1 } = await push({ db, to: schema1 }); + const expectedSt1 = [ + 'CREATE TABLE "table1" (\n\t"column1" integer PRIMARY KEY\n);\n', + 'CREATE TABLE "table2" (\n\t"column1" integer PRIMARY KEY,\n\t"column2" integer\n);\n', + 'ALTER TABLE "table2" ADD CONSTRAINT "table2_column2_table1_column1_fkey" FOREIGN KEY ("column2") REFERENCES "table1"("column1");', + ]; + expect(st1).toStrictEqual(expectedSt1); + expect(pst1).toStrictEqual(expectedSt1); + + const { sqlStatements: st2 } = await diff(n1, {}, []); + const { sqlStatements: pst2 } = await push({ db, to: {} }); + + const expectedSt2 = [ + 'ALTER TABLE "table2" DROP CONSTRAINT "table2_column2_table1_column1_fkey";', + 'DROP TABLE "table1";', + 'DROP TABLE "table2";', + ]; + expect(st2).toStrictEqual(expectedSt2); + expect(pst2).toStrictEqual(expectedSt2); +}); + +test('create table with tsvector', async () => { + const from = {}; + const to = { + users: pgTable('posts', { + id: serial('id').primaryKey(), + title: text('title').notNull(), + description: text('description').notNull(), + }, (table) => [ + index('title_search_index').using('gin', sql`to_tsvector('english', ${table.title})`), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "posts" (\n\t"id" serial PRIMARY KEY,\n\t"title" text NOT NULL,\n\t"description" text NOT NULL\n);\n', + `CREATE INDEX "title_search_index" ON "posts" USING gin (to_tsvector('english', "title"));`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('composite primary key', async () => { + const from = {}; + const to = { + table: pgTable('works_to_creators', { + workId: integer('work_id').notNull(), + creatorId: integer('creator_id').notNull(), + classification: text('classification').notNull(), + }, (t) => [ + primaryKey({ columns: [t.workId, t.creatorId, t.classification] }), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'CREATE TABLE "works_to_creators" (\n\t"work_id" integer,\n\t"creator_id" integer,\n\t"classification" text,\n\tCONSTRAINT "works_to_creators_pkey" PRIMARY KEY("work_id","creator_id","classification")\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add column before creating unique constraint', async () => { + const from = { + table: pgTable('table', { + id: serial('id').primaryKey(), + }), + }; + const to = { + table: pgTable('table', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }, (t) => [unique('uq').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" ADD COLUMN "name" text NOT NULL;', + 'ALTER TABLE "table" ADD CONSTRAINT "uq" UNIQUE("name");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter composite primary key', async () => { + const from = { + table: pgTable('table', { + col1: integer('col1').notNull(), + col2: integer('col2').notNull(), + col3: text('col3').notNull(), + }, (t) => [ + primaryKey({ + name: 'table_pk', + columns: [t.col1, t.col2], + }), + ]), + }; + const to = { + table: pgTable('table', { + col1: integer('col1').notNull(), + col2: integer('col2').notNull(), + col3: text('col3').notNull(), + }, (t) => [ + primaryKey({ + name: 'table_pk', + columns: [t.col2, t.col3], + }), + ]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'ALTER TABLE "table" DROP CONSTRAINT "table_pk";', + 'ALTER TABLE "table" ADD CONSTRAINT "table_pk" PRIMARY KEY("col2","col3");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add index with op', async () => { + const from = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }), + }; + const to = { + users: pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }, (t) => [index().using('gin', t.name.op('gin_trgm_ops'))]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + 'CREATE INDEX "users_name_index" ON "users" USING gin ("name" gin_trgm_ops);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4800 +test('optional db aliases (snake case)', async () => { + const from = {}; + + const t1 = pgTable( + 't1', + { + t1Id1: integer().notNull().primaryKey(), + t1Col2: integer().notNull(), + t1Col3: integer().notNull(), + t2Ref: integer().notNull().references(() => t2.t2Id), + t1Uni: integer().notNull(), + t1UniIdx: integer().notNull(), + t1Idx: integer().notNull(), + t1Uni1: integer().unique(), + }, + (table) => [ + unique('t1_uni').on(table.t1Uni), + uniqueIndex('t1_uni_idx').on(table.t1UniIdx), + index('t1_idx').on(table.t1Idx).where(sql`${table.t1Idx} > 0`), + foreignKey({ + columns: [table.t1Col2, table.t1Col3], + foreignColumns: [t3.t3Id1, t3.t3Id2], + }), + ], + ); + + const t2 = pgTable( + 't2', + { + t2Id: serial().primaryKey(), + }, + ); + + const t3 = pgTable( + 't3', + { + t3Id1: integer(), + t3Id2: integer(), + }, + (table) => [primaryKey({ columns: [table.t3Id1, table.t3Id2] })], + ); + + const to = { + t1, + t2, + t3, + }; + + const casing = 'snake_case'; + const { sqlStatements: st } = await diff(from, to, [], casing); + + const { sqlStatements: pst } = await push({ + db, + to, + casing, + }); + + const st1 = `CREATE TABLE "t1" ( + "t1_id1" integer PRIMARY KEY, + "t1_col2" integer NOT NULL, + "t1_col3" integer NOT NULL, + "t2_ref" integer NOT NULL, + "t1_uni" integer NOT NULL CONSTRAINT "t1_uni" UNIQUE, + "t1_uni_idx" integer NOT NULL, + "t1_idx" integer NOT NULL, + "t1_uni1" integer UNIQUE +); +`; + + const st2 = `CREATE TABLE "t2" ( + "t2_id" serial PRIMARY KEY +); +`; + + const st3 = `CREATE TABLE "t3" ( + "t3_id1" integer, + "t3_id2" integer, + CONSTRAINT "t3_pkey" PRIMARY KEY("t3_id1","t3_id2") +); +`; + + const st4 = + `ALTER TABLE "t1" ADD CONSTRAINT "t1_t2_ref_t2_t2_id_fkey" FOREIGN KEY ("t2_ref") REFERENCES "t2"("t2_id");`; + const st5 = + `ALTER TABLE "t1" ADD CONSTRAINT "t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fkey" FOREIGN KEY ("t1_col2","t1_col3") REFERENCES "t3"("t3_id1","t3_id2");`; + + const st6 = `CREATE UNIQUE INDEX "t1_uni_idx" ON "t1" ("t1_uni_idx");`; + + const st7 = `CREATE INDEX "t1_idx" ON "t1" ("t1_idx") WHERE "t1_idx" > 0;`; + + const st0 = [st1, st2, st3, st6, st7, st4, st5]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4541 +test('create table (camel case -> snake case)', async () => { + const t1 = pgTable('table_snake_case1', { + columnCamelCase1: integer(), + columnCamelCase2: integer(), + columnCamelCase3: integer(), + }, (t) => [ + primaryKey({ columns: [t.columnCamelCase1, t.columnCamelCase2] }), + unique().on(t.columnCamelCase1, t.columnCamelCase3), + uniqueIndex().on(t.columnCamelCase2, t.columnCamelCase3), + ]); + + const to = { t1 }; + + const casing = 'snake_case'; + const { sqlStatements: st1 } = await diff({}, to, [], casing); + const { sqlStatements: pst1 } = await push({ db, to, casing }); + + const eSt1 = [ + 'CREATE TABLE "table_snake_case1" (\n' + + '\t"column_camel_case1" integer,\n' + + '\t"column_camel_case2" integer,\n' + + '\t"column_camel_case3" integer,\n' + + '\tCONSTRAINT "table_snake_case1_pkey" PRIMARY KEY("column_camel_case1","column_camel_case2"),\n' + + '\tCONSTRAINT "table_snake_case1_column_camel_case1_column_camel_case3_unique" UNIQUE("column_camel_case1","column_camel_case3")\n' + + ');\n', + 'CREATE UNIQUE INDEX "table_snake_case1_column_camel_case2_column_camel_case3_index" ON "table_snake_case1" ("column_camel_case2","column_camel_case3");', + ]; + expect(st1).toStrictEqual(eSt1); + expect(pst1).toStrictEqual(eSt1); +}); + +test('create table (snake case -> camel case)', async () => { + const t1 = pgTable('tableCamelcase1', { + column_snake_case1: integer(), + column_snake_case2: integer(), + column_snake_case3: integer(), + }, (t) => [ + primaryKey({ columns: [t.column_snake_case1, t.column_snake_case2] }), + unique().on(t.column_snake_case1, t.column_snake_case3), + uniqueIndex().on(t.column_snake_case2, t.column_snake_case3), + ]); + + const to = { t1 }; + + const casing = 'camelCase'; + const { sqlStatements: st1 } = await diff({}, to, [], casing); + const { sqlStatements: pst1 } = await push({ db, to, casing }); + + const eSt1 = [ + 'CREATE TABLE "tableCamelcase1" (\n' + + '\t"columnSnakeCase1" integer,\n' + + '\t"columnSnakeCase2" integer,\n' + + '\t"columnSnakeCase3" integer,\n' + + '\tCONSTRAINT "tableCamelcase1_pkey" PRIMARY KEY("columnSnakeCase1","columnSnakeCase2"),\n' + + '\tCONSTRAINT "tableCamelcase1_columnSnakeCase1_columnSnakeCase3_unique" UNIQUE("columnSnakeCase1","columnSnakeCase3")\n' + + ');\n', + 'CREATE UNIQUE INDEX "tableCamelcase1_columnSnakeCase2_columnSnakeCase3_index" ON "tableCamelcase1" ("columnSnakeCase2","columnSnakeCase3");', + ]; + expect(st1).toStrictEqual(eSt1); + expect(pst1).toStrictEqual(eSt1); +}); + +test('optional db aliases (camel case)', async () => { + const from = {}; + + const t1 = pgTable('t1', { + t1_id1: integer().notNull().primaryKey(), + t1_col2: integer().notNull(), + t1_col3: integer().notNull(), + t2_ref: integer().notNull().references(() => t2.t2_id), + t1_uni: integer().notNull(), + t1_uni_idx: integer().notNull(), + t1_idx: integer().notNull(), + }, (table) => [ + unique('t1Uni').on(table.t1_uni), + uniqueIndex('t1UniIdx').on(table.t1_uni_idx), + index('t1Idx').on(table.t1_idx).where(sql`${table.t1_idx} > 0`), + foreignKey({ + columns: [table.t1_col2, table.t1_col3], + foreignColumns: [t3.t3_id1, t3.t3_id2], + }), + ]); + + const t2 = pgTable('t2', { + t2_id: serial().primaryKey(), + }); + + const t3 = pgTable('t3', { + t3_id1: integer(), + t3_id2: integer(), + }, (table) => [primaryKey({ columns: [table.t3_id1, table.t3_id2] })]); + + const to = { + t1, + t2, + t3, + }; + + const casing = 'camelCase'; + const { sqlStatements: st } = await diff(from, to, [], casing); + + const { sqlStatements: pst } = await push({ + db, + to, + casing, + }); + + const st1 = `CREATE TABLE "t1" ( + "t1Id1" integer PRIMARY KEY, + "t1Col2" integer NOT NULL, + "t1Col3" integer NOT NULL, + "t2Ref" integer NOT NULL, + "t1Uni" integer NOT NULL CONSTRAINT "t1Uni" UNIQUE, + "t1UniIdx" integer NOT NULL, + "t1Idx" integer NOT NULL +); +`; + + const st2 = `CREATE TABLE "t2" ( + "t2Id" serial PRIMARY KEY +); +`; + + const st3 = `CREATE TABLE "t3" ( + "t3Id1" integer, + "t3Id2" integer, + CONSTRAINT "t3_pkey" PRIMARY KEY("t3Id1","t3Id2") +); +`; + + const st4 = `ALTER TABLE "t1" ADD CONSTRAINT "t1_t2Ref_t2_t2Id_fkey" FOREIGN KEY ("t2Ref") REFERENCES "t2"("t2Id");`; + const st5 = + `ALTER TABLE "t1" ADD CONSTRAINT "t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fkey" FOREIGN KEY ("t1Col2","t1Col3") REFERENCES "t3"("t3Id1","t3Id2");`; + const st6 = `CREATE UNIQUE INDEX "t1UniIdx" ON "t1" ("t1UniIdx");`; + const st7 = `CREATE INDEX "t1Idx" ON "t1" ("t1Idx") WHERE "t1Idx" > 0;`; + + const st0 = [st1, st2, st3, st6, st7, st4, st5]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table with generated column', async () => { + const schema1 = {}; + const schema2 = { + users: pgTable('users', { + id: integer('id'), + id2: integer('id2'), + name: text('name'), + generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'CREATE TABLE "users" (\n\t"id" integer,\n\t"id2" integer,\n\t"name" text,\n\t"gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table with composite primary key', async () => { + const schema1 = { + table: pgTable('table1', { + productId: text('product_id').notNull(), + categoryId: text('category_id').notNull(), + }, (t) => [primaryKey({ columns: [t.productId, t.categoryId] })]), + }; + const schema2 = { + test: pgTable('table2', { + productId: text('product_id').notNull(), + categoryId: text('category_id').notNull(), + }, (t) => [primaryKey({ columns: [t.productId, t.categoryId] })]), + }; + + const renames = ['public.table1->public.table2']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = ['ALTER TABLE "table1" RENAME TO "table2";']; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table and enable rls', async () => { + const schema1 = { + table: pgTable('table1', { + id: text().primaryKey(), + }), + }; + const schema2 = { + table: pgTable.withRLS('table2', { + id: text().primaryKey(), + }), + }; + + const renames = ['public.table1->public.table2']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + const st0: string[] = ['ALTER TABLE "table1" RENAME TO "table2";', 'ALTER TABLE "table2" ENABLE ROW LEVEL SECURITY;']; + + expect(st).toStrictEqual(st0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4838 +test('rename 2 tables', async () => { + const schema1 = { + table1: pgTable('table1', { + id: text().primaryKey(), + }), + table2: pgTable('table2', { + id: text().primaryKey(), + }), + }; + const schema2 = { + table3: pgTable('table3', { + id: text().primaryKey(), + }), + table4: pgTable('table4', { + id: text().primaryKey(), + }), + }; + + const renames = ['public.table1->public.table3', 'public.table2->public.table4']; + + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER TABLE "table1" RENAME TO "table3";', + 'ALTER TABLE "table2" RENAME TO "table4";', + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/postgres/pg-views.test.ts b/drizzle-kit/tests/postgres/pg-views.test.ts new file mode 100644 index 0000000000..8e7c07b2a2 --- /dev/null +++ b/drizzle-kit/tests/postgres/pg-views.test.ts @@ -0,0 +1,2019 @@ +import { eq, gt, sql } from 'drizzle-orm'; +import { integer, pgMaterializedView, pgSchema, pgTable, pgView, serial } from 'drizzle-orm/pg-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create view', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: pgView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'CREATE VIEW "view" AS (select distinct "id" from "test");', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: pgView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE VIEW "some_view" AS (select "id" from "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE VIEW "some_view" AS (SELECT * FROM "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #3', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgView('some_view1', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: false, + securityInvoker: true, + }).as(sql`SELECT * FROM ${users}`), + view2: pgView('some_view2').with({ + checkOption: 'cascaded', + securityBarrier: true, + securityInvoker: false, + }).as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE VIEW "some_view1" WITH (check_option = local, security_barrier = false, security_invoker = true) AS (SELECT * FROM "users");`, + `CREATE VIEW "some_view2" WITH (check_option = cascaded, security_barrier = true, security_invoker = false) AS (select "id" from "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #4', async () => { + const schema = pgSchema('new_schema'); + + const users = schema.table('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + schema, + users: users, + view1: schema.view('some_view1', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: false, + securityInvoker: true, + }).as(sql`SELECT * FROM ${users}`), + view2: schema.view('some_view2').with({ + checkOption: 'cascaded', + securityBarrier: true, + securityInvoker: false, + }).as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + `CREATE TABLE "new_schema"."users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE VIEW "new_schema"."some_view1" WITH (check_option = local, security_barrier = false, security_invoker = true) AS (SELECT * FROM "new_schema"."users");`, + `CREATE VIEW "new_schema"."some_view2" WITH (check_option = cascaded, security_barrier = true, security_invoker = false) AS (select "id" from "new_schema"."users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and view #5', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + view2: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + // view_name_duplicate + await expect(diff({}, to, [])).rejects.toThrow(); + await expect(push({ db, to })).rejects.toThrow(); +}); + +test('create table and view #6', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE VIEW "some_view" WITH (check_option = cascaded) AS (SELECT * FROM "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + }; + + const to = { + users: users, + view1: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create materialized view', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view') + .withNoData() + .using('drizzle_heap') + .as((qb) => qb.selectDistinct().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'CREATE MATERIALIZED VIEW "view" USING "drizzle_heap" AS (select distinct "id" from "test") WITH NO DATA;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE MATERIALIZED VIEW "some_view" AS (select "id" from "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and materialized view #2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE MATERIALIZED VIEW "some_view" AS (SELECT * FROM "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and materialized view #3', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgMaterializedView('some_view1', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + view2: pgMaterializedView('some_view2').tablespace('pg_default').using('drizzle_heap').withNoData().with({ + autovacuumEnabled: true, + autovacuumFreezeMaxAge: 1000000, + autovacuumFreezeMinAge: 1000000, + autovacuumFreezeTableAge: 1, + autovacuumMultixactFreezeMaxAge: 1000000, + autovacuumMultixactFreezeMinAge: 1000000, + autovacuumMultixactFreezeTableAge: 1000000, + autovacuumVacuumCostDelay: 1, + autovacuumVacuumCostLimit: 1, + autovacuumVacuumScaleFactor: 1, + autovacuumVacuumThreshold: 1, + fillfactor: 10, + logAutovacuumMinDuration: 1, + parallelWorkers: 1, + toastTupleTarget: 128, + userCatalogTable: true, + vacuumIndexCleanup: 'off', + vacuumTruncate: false, + }).as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE MATERIALIZED VIEW "some_view1" AS (SELECT * FROM "users");`, + `CREATE MATERIALIZED VIEW "some_view2" USING "drizzle_heap" WITH (autovacuum_enabled = true, autovacuum_freeze_max_age = 1000000, autovacuum_freeze_min_age = 1000000, autovacuum_freeze_table_age = 1, autovacuum_multixact_freeze_max_age = 1000000, autovacuum_multixact_freeze_min_age = 1000000, autovacuum_multixact_freeze_table_age = 1000000, autovacuum_vacuum_cost_delay = 1, autovacuum_vacuum_cost_limit = 1, autovacuum_vacuum_scale_factor = 1, autovacuum_vacuum_threshold = 1, fillfactor = 10, log_autovacuum_min_duration = 1, parallel_workers = 1, toast_tuple_target = 128, user_catalog_table = true, vacuum_index_cleanup = off, vacuum_truncate = false) TABLESPACE pg_default AS (select "id" from "users") WITH NO DATA;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table and materialized view #4', async () => { + // same names + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + view2: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + // view_name_duplicate + await expect(diff({}, to, [])).rejects.toThrow(); + await expect(push({ db, to })).rejects.toThrow(); +}); + +test('create table and materialized view #5', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + const to = { + users: users, + view1: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumFreezeMinAge: 14 }).as( + sql`SELECT * FROM ${users}`, + ), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY\n);\n`, + `CREATE MATERIALIZED VIEW "some_view" WITH (autovacuum_freeze_min_age = 14) AS (SELECT * FROM "users");`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create materialized view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + }; + + const to = { + users: users, + view1: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumEnabled: true }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `DROP VIEW "some_view";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view #2', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'DROP VIEW "view";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view with data', async () => { + const table = pgTable('table', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const seedStatements = [`INSERT INTO "table" ("id") VALUES (1), (2), (3)`]; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ + db, + to: schema2, + }); + + // seeding + for (const seedSt of seedStatements) { + await db.query(seedSt); + } + + const st0: string[] = [ + `DROP VIEW "view";`, + ]; + const hints0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(phints).toStrictEqual(hints0); +}); + +test('drop materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `DROP MATERIALIZED VIEW "some_view";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop materialized view #2', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + 'DROP MATERIALIZED VIEW "view";', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop materialized view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + users: users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop materialized view with data', async () => { + const table = pgTable('table', { + id: serial('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: pgMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + await db.query(`INSERT INTO "table" ("id") VALUES (1), (2), (3)`); + + const { sqlStatements: pst, hints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + `DROP MATERIALIZED VIEW "view";`, + ]; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(hints).toStrictEqual([]); +}); + +test('drop materialized view without data', async () => { + const table = pgTable('table', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + `DROP MATERIALIZED VIEW "view";`, + ]; + const hints0: string[] = []; + + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(phints).toStrictEqual(hints0); +}); + +test('rename view #1', async () => { + const from = { + users: pgTable('users', { id: serial() }), + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + users: pgTable('users', { id: serial() }), + view: pgView('new_some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER VIEW "some_view" RENAME TO "new_some_view";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename view with existing flag', async () => { + const from = { + view: pgView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + view: pgView('new_some_view', { id: integer('id') }).existing(), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename materialized view #1', async () => { + const from = { + users: pgTable('users', { id: serial() }), + view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + users: pgTable('users', { id: serial() }), + view: pgMaterializedView('new_some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `ALTER MATERIALIZED VIEW "some_view" RENAME TO "new_some_view";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename materialized view with existing flag', async () => { + const from = { + view: pgMaterializedView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + view: pgMaterializedView('new_some_view', { id: integer('id') }).existing(), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('view alter schema', async () => { + const schema = pgSchema('new_schema'); + + const from = { + users: pgTable('users', { id: serial() }), + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + users: pgTable('users', { id: serial() }), + view: schema.view('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const renames = ['public.some_view->new_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + `ALTER VIEW "some_view" SET SCHEMA "new_schema";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('view alter schema with existing flag', async () => { + const schema = pgSchema('new_schema'); + + const from = { + view: pgView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + schema, + view: schema.view('some_view', { id: integer('id') }).existing(), + }; + + const renames = ['public.some_view->new_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('view alter schema for materialized', async () => { + const schema = pgSchema('new_schema'); + + const from = { + users: pgTable('users', { id: serial() }), + view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + users: pgTable('users', { id: serial() }), + view: schema.materializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const renames = ['public.some_view->new_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + `ALTER MATERIALIZED VIEW "some_view" SET SCHEMA "new_schema";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('view alter schema for materialized with existing flag', async () => { + const schema = pgSchema('new_schema'); + + const from = { + view: pgMaterializedView('some_view', { id: integer('id') }).existing(), + }; + + const to = { + schema, + view: schema.materializedView('some_view', { id: integer('id') }).existing(), + }; + + const renames = ['public.some_view->new_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0 = [ + `CREATE SCHEMA "new_schema";\n`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with option to view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view').as((qb) => qb.select().from(users)), + }; + + const to = { + users, + view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true }).as((qb) => + qb.select().from(users) + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER VIEW "some_view" SET (check_option = cascaded, security_barrier = true);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with option to view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', {}).existing(), + }; + + const to = { + users, + view: pgView('some_view', {}).with({ checkOption: 'cascaded', securityBarrier: true }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with option to materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), + }; + + const to = { + users, + view: pgMaterializedView('some_view').with({ autovacuumMultixactFreezeMaxAge: 1_000_000 }).as((qb) => + qb.select().from(users) + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER MATERIALIZED VIEW "some_view" SET (autovacuum_multixact_freeze_max_age = 1000000);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with option to materialized view #1_2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), + }; + + const to = { + users, + view: pgMaterializedView('some_view').tablespace('pg_default').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER MATERIALIZED VIEW "some_view" SET TABLESPACE \"pg_default\";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); +}); + +test('add with options for materialized view #2', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view') + .with({ autovacuumFreezeTableAge: 1, autovacuumEnabled: false }) + .as((qb) => qb.selectDistinct().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "view" SET (autovacuum_enabled = false, autovacuum_freeze_table_age = 1);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with options for materialized view #3', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view') + .with({ autovacuumVacuumCostDelay: 100, vacuumTruncate: false }) + .as((qb) => qb.selectDistinct().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "view" SET (autovacuum_vacuum_cost_delay = 100, vacuum_truncate = false);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with option to materialized view with existing flag #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', {}).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', {}).with({ autovacuumMultixactFreezeMaxAge: 3 }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add with options to materialized view with existing flag #2', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view', {}).as(sql`SELECT id FROM "test"`), + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view', {}).with({ autovacuumVacuumCostDelay: 100, vacuumTruncate: false }).existing(), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ + db, + to: schema2, + }); + + expect(st).toStrictEqual(['DROP MATERIALIZED VIEW "view";']); + expect(pst).toStrictEqual([]); +}); + +test('drop with option from view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + `ALTER VIEW "some_view" RESET (check_option, security_barrier, security_invoker);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop with option from view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', {}).with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }) + .existing(), + }; + + const to = { + users, + view: pgView('some_view', {}).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop with option from materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 1_000_000 }).as(( + qb, + ) => qb.select().from(users)), + }; + + const to = { + users, + view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0 = [ + `ALTER MATERIALIZED VIEW "some_view" RESET (autovacuum_enabled, autovacuum_freeze_max_age);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop with option from materialized view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 10 }).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', {}).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter with option in view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view').with({ securityBarrier: true, securityInvoker: true }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgView('some_view').with({ securityBarrier: true }).as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + `ALTER VIEW "some_view" RESET (security_invoker);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter with option in view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', {}).with({ securityBarrier: true, securityInvoker: true }).existing(), + }; + + const to = { + users, + view: pgView('some_view', {}).with({ securityBarrier: true }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter with option in materialized view #1', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, autovacuumVacuumScaleFactor: 1 }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: true }).as((qb) => qb.select().from(users)), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "some_view" RESET (autovacuum_vacuum_scale_factor);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter with option in materialized view with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true, autovacuumVacuumScaleFactor: 1 }) + .existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter with option in view #2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view').with({ checkOption: 'local', securityBarrier: true, securityInvoker: true }).as((qb) => + qb.select().from(users).where(gt(users.id, 10)) + ), + }; + + const to = { + users, + view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }).as((qb) => + qb.select().from(users).where(gt(users.id, 10)) + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER VIEW "some_view" SET (check_option = cascaded);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter with option in materialized view #2', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, fillfactor: 10 }).as((qb) => + qb.select().from(users) + ), + }; + + const to = { + users, + view: pgMaterializedView('some_view').with({ autovacuumEnabled: false, fillfactor: 10 }).as((qb) => + qb.select().from(users) + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "some_view" SET (autovacuum_enabled = false);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter view ".as" value', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }).as(sql`select * from users where id > 100`), + }; + + const to = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }).as(sql`select * from users where id > 101`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + 'DROP VIEW "some_view";', + `CREATE VIEW "some_view" WITH (check_option = local, security_barrier = true, security_invoker = true) AS (select * from users where id > 101);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // push ignored definition change +}); + +test('alter view ".as" value with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }).existing(), + }; + + const to = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'local', + securityBarrier: true, + securityInvoker: true, + }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter materialized view ".as" value', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT '123'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT '1234'`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + 'DROP MATERIALIZED VIEW "some_view";', + `CREATE MATERIALIZED VIEW "some_view" WITH (autovacuum_vacuum_cost_limit = 1) AS (SELECT '1234');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // we ignore definition changes for push +}); + +test('alter materialized view ".as" value with existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).existing(), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop existing flag', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).existing(), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + `CREATE MATERIALIZED VIEW "some_view" WITH (autovacuum_vacuum_cost_limit = 1) AS (SELECT 'asd');`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter tablespace - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('pg_default').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "some_view" SET TABLESPACE "pg_default";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // commutative +}); + +test('set tablespace - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('pg_default').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "some_view" SET TABLESPACE "pg_default";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // commutative +}); + +test('drop tablespace - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('pg_default').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 1`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 1`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "some_view" SET TABLESPACE "pg_default";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // commutative +}); + +test('set existing - materialized', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('pg_default').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('new_some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + autovacuumFreezeMinAge: 1, + }).withNoData().existing(), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames, + }); + + const st0: string[] = ['DROP MATERIALIZED VIEW "some_view";']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop existing - materialized', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('view', { id: integer('id') }).tablespace('pg_default').with({ + autovacuumVacuumCostLimit: 1, + }).existing(), + }; + + const to = { + users, + view: pgMaterializedView('view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + autovacuumFreezeMinAge: 1, + }).withNoData().as(sql`SELECT * FROM users WHERE id > 100`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE MATERIALIZED VIEW "view" WITH (autovacuum_freeze_min_age = 1, autovacuum_vacuum_cost_limit = 1) AS (SELECT * FROM users WHERE id > 100) WITH NO DATA;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('set existing', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgView('some_view', { id: integer('id') }).with({ + checkOption: 'cascaded', + }).as(sql`SELECT * from users where id > 100`), + }; + + const to = { + users, + view: pgView('new_some_view', { id: integer('id') }).with({ + checkOption: 'cascaded', + securityBarrier: true, + }).existing(), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['DROP VIEW "some_view";']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter using - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('pg_default').using('heap').with( + { + autovacuumVacuumCostLimit: 1, + }, + ).as(sql`SELECT 1`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('pg_default').using('drizzle_heap').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 1`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "some_view" SET ACCESS METHOD "drizzle_heap";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('set using - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).using('drizzle_heap').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "some_view" SET ACCESS METHOD "drizzle_heap";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop using - materialize', async () => { + const users = pgTable('users', { + id: integer('id').primaryKey().notNull(), + }); + + const from = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).using('drizzle_heap').with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const to = { + users, + view: pgMaterializedView('some_view', { id: integer('id') }).with({ + autovacuumVacuumCostLimit: 1, + }).as(sql`SELECT 'asd'`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `ALTER MATERIALIZED VIEW "some_view" SET ACCESS METHOD "heap";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename view and alter view', async () => { + const from = { + users: pgTable('users', { id: serial() }), + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + users: pgTable('users', { id: serial() }), + view: pgView('new_some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as( + sql`SELECT * FROM "users"`, + ), + }; + + const renames = ['public.some_view->public.new_some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = [ + `ALTER VIEW "some_view" RENAME TO "new_some_view";`, + `ALTER VIEW "new_some_view" SET (check_option = cascaded);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('moved schema and alter view', async () => { + const schema = pgSchema('my_schema'); + const from = { + schema, + users: pgTable('users', { id: serial() }), + view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), + }; + + const to = { + schema, + users: pgTable('users', { id: serial() }), + view: schema.view('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as( + sql`SELECT * FROM "users"`, + ), + }; + + const renames = ['public.some_view->my_schema.some_view']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = [ + `ALTER VIEW "some_view" SET SCHEMA "my_schema";`, + `ALTER VIEW "my_schema"."some_view" SET (check_option = cascaded);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('push view with same name', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: pgView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP VIEW "view";', + 'CREATE VIEW "view" AS (select distinct "id" from "test" where "test"."id" = 1);', + ]); + expect(pst).toStrictEqual([]); +}); + +test('push materialized view with same name', async () => { + const table = pgTable('test', { + id: serial('id').primaryKey(), + }); + const schema1 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), + }; + + const schema2 = { + test: table, + view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([ + 'DROP MATERIALIZED VIEW "view";', + 'CREATE MATERIALIZED VIEW "view" AS (select distinct "id" from "test" where "test"."id" = 1);', + ]); + expect(pst).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/introspect/pg.test.ts b/drizzle-kit/tests/postgres/pull.test.ts similarity index 58% rename from drizzle-kit/tests/introspect/pg.test.ts rename to drizzle-kit/tests/postgres/pull.test.ts index e89b212aec..fee6aac0c6 100644 --- a/drizzle-kit/tests/introspect/pg.test.ts +++ b/drizzle-kit/tests/postgres/pull.test.ts @@ -7,6 +7,7 @@ import { char, check, cidr, + customType, date, doublePrecision, index, @@ -23,6 +24,7 @@ import { pgPolicy, pgRole, pgSchema, + pgSequence, pgTable, pgView, real, @@ -32,20 +34,39 @@ import { text, time, timestamp, + unique, uuid, varchar, } from 'drizzle-orm/pg-core'; import fs from 'fs'; -import { introspectPgToFile } from 'tests/schemaDiffer'; -import { expect, test } from 'vitest'; +import { fromDatabase } from 'src/dialects/postgres/introspect'; +import { DB } from 'src/utils'; +import { diffIntrospect, prepareTestDatabase, TestDatabase } from 'tests/postgres/mocks'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -if (!fs.existsSync('tests/introspect/postgres')) { - fs.mkdirSync('tests/introspect/postgres'); +// @vitest-environment-options {"max-concurrency":1} + +if (!fs.existsSync('tests/postgres/tmp')) { + fs.mkdirSync(`tests/postgres/tmp`, { recursive: true }); } -test('basic introspect test', async () => { - const client = new PGlite(); +let _: TestDatabase; +let db: DB; +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('basic introspect test', async () => { const schema = { users: pgTable('users', { id: integer('id').notNull(), @@ -53,19 +74,13 @@ test('basic introspect test', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, - schema, - 'basic-introspect', - ); + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'basic-introspect'); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic identity always test', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity(), @@ -73,19 +88,35 @@ test('basic identity always test', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, - schema, - 'basic-identity-always-introspect', - ); + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'basic-identity-always-introspect'); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); -test('basic identity by default test', async () => { - const client = new PGlite(); +test('identity always test: few schemas', async () => { + const testSchema = pgSchema('test'); + const schema = { + testSchema, + users: pgTable('users', { + id: integer('id').generatedAlwaysAsIdentity(), + email: text('email'), + }), + usersInTestSchema: testSchema.table('users', { + id: integer('id').generatedAlwaysAsIdentity(), + email: text('email'), + }), + }; + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'identity always test: few schemas', [ + 'public', + 'test', + ]); + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('basic identity by default test', async () => { const schema = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), @@ -93,8 +124,8 @@ test('basic identity by default test', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'basic-identity-default-introspect', ); @@ -111,34 +142,72 @@ test('basic index test', async () => { firstName: text('first_name'), lastName: text('last_name'), data: jsonb('data'), - }, (table) => ({ - singleColumn: index('single_column').on(table.firstName), - multiColumn: index('multi_column').on(table.firstName, table.lastName), - singleExpression: index('single_expression').on(sql`lower(${table.firstName})`), - multiExpression: index('multi_expression').on(sql`lower(${table.firstName})`, sql`lower(${table.lastName})`), - expressionWithComma: index('expression_with_comma').on( + }, (table) => [ + index('single_column').on(table.firstName), + index('multi_column').on(table.firstName, table.lastName), + index('single_expression').on(sql`lower(${table.firstName})`), + index('multi_expression').on(sql`lower(${table.firstName})`, sql`lower(${table.lastName})`), + index('expression_with_comma').on( sql`(lower(${table.firstName}) || ', '::text || lower(${table.lastName}))`, ), - expressionWithDoubleQuote: index('expression_with_double_quote').on(sql`('"'::text || ${table.firstName})`), - expressionWithJsonbOperator: index('expression_with_jsonb_operator').on( + index('expression_with_double_quote').on(sql`('"'::text || ${table.firstName})`), + index('expression_with_jsonb_operator').on( sql`(${table.data} #>> '{a,b,1}'::text[])`, ), - })), + ]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { sqlStatements } = await diffIntrospect( + db, schema, 'basic-index-introspect', ); - expect(statements.length).toBe(10); - expect(sqlStatements.length).toBe(10); + expect(sqlStatements).toStrictEqual([]); }); -test('identity always test: few params', async () => { - const client = new PGlite(); +// TODO: Refactor this test +test('advanced index test', async () => { + db.query('CREATE table job (name text, start_after text, priority text, created_on text, id text, state text);'); + db.query("CREATE INDEX job_i5 ON job (name, start_after) INCLUDE (priority, created_on, id) WHERE state < 'active';"); + + const { indexes } = await fromDatabase(db, () => true); + + expect(indexes).toStrictEqual([ + { + name: 'job_i5', + table: 'job', + columns: [ + { + asc: true, + isExpression: false, + nullsFirst: false, + opclass: null, + value: 'name', + }, + { + asc: true, + isExpression: false, + nullsFirst: false, + opclass: null, + value: 'start_after', + }, + ], + concurrently: false, + entityType: 'indexes', + forPK: false, + isUnique: false, + method: 'btree', + forUnique: false, + nameExplicit: true, + schema: 'public', + where: "(state < 'active'::text)", + with: '', + } satisfies typeof indexes[number], + ]); +}); +test('identity always test: few params', async () => { const schema = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity({ @@ -149,8 +218,8 @@ test('identity always test: few params', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'identity-always-few-params-introspect', ); @@ -160,8 +229,6 @@ test('identity always test: few params', async () => { }); test('identity by default test: few params', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ @@ -172,8 +239,8 @@ test('identity by default test: few params', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'identity-default-few-params-introspect', ); @@ -183,8 +250,6 @@ test('identity by default test: few params', async () => { }); test('identity always test: all params', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity({ @@ -199,8 +264,8 @@ test('identity always test: all params', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'identity-always-all-params-introspect', ); @@ -210,8 +275,6 @@ test('identity always test: all params', async () => { }); test('identity by default test: all params', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ @@ -226,8 +289,8 @@ test('identity by default test: all params', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'identity-default-all-params-introspect', ); @@ -237,8 +300,6 @@ test('identity by default test: all params', async () => { }); test('generated column: link to another column', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity(), @@ -249,8 +310,8 @@ test('generated column: link to another column', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'generated-link-column', ); @@ -259,9 +320,31 @@ test('generated column: link to another column', async () => { expect(sqlStatements.length).toBe(0); }); -test('instrospect all column types', async () => { - const client = new PGlite(); +test('generated column: link to another jsonb column', async () => { + const schema = { + users: pgTable('users', { + predict: jsonb('predict'), + predictions: jsonb('predictions') + .generatedAlwaysAs((): SQL => sql`predict -> 'predictions'`), + }), + }; + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'generated-link-jsonb-column', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +// https://github.com/drizzle-team/drizzle-orm/issues/4632 +// https://github.com/drizzle-team/drizzle-orm/issues/4644 +// https://github.com/drizzle-team/drizzle-orm/issues/4730 +// https://github.com/drizzle-team/drizzle-orm/issues/4760 +// https://github.com/drizzle-team/drizzle-orm/issues/4916 +test('introspect all column types', async () => { const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); const schema = { enum_: myEnum, @@ -275,9 +358,14 @@ test('instrospect all column types', async () => { numeric3: numeric('numeric3').default('99.9'), bigint: bigint('bigint', { mode: 'number' }).default(100), boolean: boolean('boolean').default(true), - text: text('test').default('abc'), + text: text('text').default('abc'), + text1: text('text1').default(sql`gen_random_uuid()`), + text2: text('text2').default('``'), + text3: text('text3').default(''), varchar: varchar('varchar', { length: 25 }).default('abc'), + varchar1: varchar('varchar1', { length: 25 }).default(''), char: char('char', { length: 3 }).default('abc'), + char1: char('char1', { length: 3 }).default(''), serial: serial('serial'), bigserial: bigserial('bigserial', { mode: 'number' }), smallserial: smallserial('smallserial'), @@ -285,6 +373,8 @@ test('instrospect all column types', async () => { real: real('real').default(100), json: json('json').$type<{ attr: string }>().default({ attr: 'value' }), jsonb: jsonb('jsonb').$type<{ attr: string }>().default({ attr: 'value' }), + jsonb1: jsonb('jsonb1').default(sql`jsonb_build_object()`), + jsonb2: jsonb('jsonb2').default({}), time1: time('time1').default('00:00:00'), time2: time('time2').defaultNow(), timestamp1: timestamp('timestamp1', { withTimezone: true, precision: 6 }).default(new Date()), @@ -302,22 +392,24 @@ test('instrospect all column types', async () => { macaddr: macaddr('macaddr').default('00:00:00:00:00:00'), macaddr8: macaddr8('macaddr8').default('00:00:00:ff:fe:00:00:00'), interval: interval('interval').default('1 day 01:00:00'), + customType: customType({ + dataType: () => 'tsvector', + })().default("to_tsvector('english', 'The Fat Rats')"), }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-all-columns-types', ); - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); }); -test('instrospect all column array types', async () => { - const client = new PGlite(); - +// https://github.com/drizzle-team/drizzle-orm/issues/4529 +test('introspect all column array types', async () => { const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); const schema = { enum_: myEnum, @@ -336,6 +428,7 @@ test('instrospect all column array types', async () => { real: real('real').array().default([100, 200]), json: json('json').$type<{ attr: string }>().array().default([{ attr: 'value1' }, { attr: 'value2' }]), jsonb: jsonb('jsonb').$type<{ attr: string }>().array().default([{ attr: 'value1' }, { attr: 'value2' }]), + jsonb1: jsonb('jsonb3').array().default(sql`'{}'`), time: time('time').array().default(['00:00:00', '01:00:00']), timestamp: timestamp('timestamp', { withTimezone: true, precision: 6 }) .array() @@ -353,8 +446,8 @@ test('instrospect all column array types', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-all-columns-array-types', ); @@ -364,7 +457,6 @@ test('instrospect all column array types', async () => { }); test('introspect columns with name with non-alphanumeric characters', async () => { - const client = new PGlite(); const schema = { users: pgTable('users', { 'not:allowed': integer('not:allowed'), @@ -374,8 +466,8 @@ test('introspect columns with name with non-alphanumeric characters', async () = }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-column-with-name-with-non-alphanumeric-characters', ); @@ -385,8 +477,6 @@ test('introspect columns with name with non-alphanumeric characters', async () = }); test('introspect enum from different schema', async () => { - const client = new PGlite(); - const schema2 = pgSchema('schema2'); const myEnumInSchema2 = schema2.enum('my_enum', ['a', 'b', 'c']); const schema = { @@ -397,8 +487,8 @@ test('introspect enum from different schema', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-enum-from-different-schema', ['public', 'schema2'], @@ -409,8 +499,6 @@ test('introspect enum from different schema', async () => { }); test('introspect enum with same names across different schema', async () => { - const client = new PGlite(); - const schema2 = pgSchema('schema2'); const myEnumInSchema2 = schema2.enum('my_enum', ['a', 'b', 'c']); const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); @@ -424,8 +512,8 @@ test('introspect enum with same names across different schema', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-enum-with-same-names-across-different-schema', ['public', 'schema2'], @@ -436,8 +524,6 @@ test('introspect enum with same names across different schema', async () => { }); test('introspect enum with similar name to native type', async () => { - const client = new PGlite(); - const timeLeft = pgEnum('time_left', ['short', 'medium', 'long']); const schema = { timeLeft, @@ -446,8 +532,8 @@ test('introspect enum with similar name to native type', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-enum-with-similar-name-to-native-type', ); @@ -456,9 +542,7 @@ test('introspect enum with similar name to native type', async () => { expect(sqlStatements.length).toBe(0); }); -test('instrospect strings with single quotes', async () => { - const client = new PGlite(); - +test('introspect strings with single quotes', async () => { const myEnum = pgEnum('my_enum', ['escape\'s quotes " ']); const schema = { enum_: myEnum, @@ -469,8 +553,8 @@ test('instrospect strings with single quotes', async () => { }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-strings-with-single-quotes', ); @@ -480,20 +564,16 @@ test('instrospect strings with single quotes', async () => { }); test('introspect checks', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: serial('id'), name: varchar('name'), age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), + }, (table) => [check('some_check', sql`${table.age} > 21`)]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-checks', ); @@ -503,27 +583,21 @@ test('introspect checks', async () => { }); test('introspect checks from different schemas with same names', async () => { - const client = new PGlite(); - const mySchema = pgSchema('schema2'); const schema = { mySchema, users: pgTable('users', { id: serial('id'), age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), + }, (table) => [check('some_check', sql`${table.age} > 21`)]), usersInMySchema: mySchema.table('users', { id: serial('id'), age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} < 1`), - })), + }, (table) => [check('some_check', sql`${table.age} < 1`)]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-checks-diff-schema-same-names', ['public', 'schema2'], @@ -534,8 +608,6 @@ test('introspect checks from different schemas with same names', async () => { }); test('introspect view #1', async () => { - const client = new PGlite(); - const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), @@ -547,8 +619,8 @@ test('introspect view #1', async () => { users, }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-view', ); @@ -558,8 +630,6 @@ test('introspect view #1', async () => { }); test('introspect view #2', async () => { - const client = new PGlite(); - const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), @@ -573,8 +643,8 @@ test('introspect view #2', async () => { users, }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-view-2', ); @@ -583,9 +653,26 @@ test('introspect view #2', async () => { expect(sqlStatements.length).toBe(0); }); -test('introspect view in other schema', async () => { - const client = new PGlite(); +// https://github.com/drizzle-team/drizzle-orm/issues/4764 +test('introspect view #3', async () => { + const enum1 = pgEnum('enum_1', ['text', 'not_text']); + + const test = pgTable('test', { + column1: enum1().array(), + column2: enum1().array().array(), + }); + const publicJobsWithCompanies = pgView('public_jobs_with_companies').as((qb) => qb.select().from(test)); + + const schema = { enum1, test, publicJobsWithCompanies }; + + const { statements, sqlStatements } = await diffIntrospect(db, schema, 'introspect-view-3'); + + expect(statements).toStrictEqual([]); + expect(sqlStatements).toStrictEqual([]); + // TODO: we need to check actual types generated; +}); +test('introspect view in other schema', async () => { const newSchema = pgSchema('new_schema'); const users = pgTable('users', { id: serial('id').primaryKey().notNull(), @@ -601,8 +688,8 @@ test('introspect view in other schema', async () => { newSchema, }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-view-in-other-schema', ['new_schema'], @@ -613,8 +700,6 @@ test('introspect view in other schema', async () => { }); test('introspect materialized view in other schema', async () => { - const client = new PGlite(); - const newSchema = pgSchema('new_schema'); const users = pgTable('users', { id: serial('id').primaryKey().notNull(), @@ -630,8 +715,8 @@ test('introspect materialized view in other schema', async () => { newSchema, }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-mat-view-in-other-schema', ['new_schema'], @@ -642,8 +727,6 @@ test('introspect materialized view in other schema', async () => { }); test('introspect materialized view #1', async () => { - const client = new PGlite(); - const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), @@ -655,8 +738,8 @@ test('introspect materialized view #1', async () => { users, }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-materialized-view', ); @@ -666,8 +749,6 @@ test('introspect materialized view #1', async () => { }); test('introspect materialized view #2', async () => { - const client = new PGlite(); - const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), @@ -681,8 +762,8 @@ test('introspect materialized view #2', async () => { users, }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'introspect-materialized-view-2', ); @@ -691,21 +772,19 @@ test('introspect materialized view #2', async () => { expect(sqlStatements.length).toBe(0); }); -test('basic policy', async () => { - const client = new PGlite(); - +test('basic policy #1', async () => { const schema = { users: pgTable('users', { id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test'), - })), + }, () => [pgPolicy('test')]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, - 'basic-policy', + 'basic-policy-#1', + ['public'], + { roles: { include: ['test'] } }, ); expect(statements.length).toBe(0); @@ -713,18 +792,14 @@ test('basic policy', async () => { }); test('basic policy with "as"', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), + }, () => [pgPolicy('test', { as: 'permissive' })]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'basic-policy-as', ); @@ -733,21 +808,20 @@ test('basic policy with "as"', async () => { expect(sqlStatements.length).toBe(0); }); -test.todo('basic policy with CURRENT_USER role', async () => { - const client = new PGlite(); - +test('basic policy', async () => { const schema = { + role: pgRole('test2'), users: pgTable('users', { id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { to: 'current_user' }), - })), + }, () => [pgPolicy('test', { to: 'test2' })]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'basic-policy', + ['public'], + { roles: { include: ['test2'] } }, ); expect(statements.length).toBe(0); @@ -755,18 +829,14 @@ test.todo('basic policy with CURRENT_USER role', async () => { }); test('basic policy with all fields except "using" and "with"', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', for: 'all', to: ['postgres'] }), - })), + }, () => [pgPolicy('test', { as: 'permissive', for: 'all', to: ['postgres'] })]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'basic-policy-all-fields', ); @@ -776,18 +846,14 @@ test('basic policy with all fields except "using" and "with"', async () => { }); test('basic policy with "using" and "with"', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), - })), + }, () => [pgPolicy('test', { using: sql`true`, withCheck: sql`true` })]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'basic-policy-using-withcheck', ); @@ -797,19 +863,14 @@ test('basic policy with "using" and "with"', async () => { }); test('multiple policies', async () => { - const client = new PGlite(); - const schema = { users: pgTable('users', { id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), - rlsPolicy: pgPolicy('newRls'), - })), + }, () => [pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), pgPolicy('newRls')]), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'multiple-policies', ); @@ -819,21 +880,23 @@ test('multiple policies', async () => { }); test('multiple policies with roles', async () => { - const client = new PGlite(); - - client.query(`CREATE ROLE manager;`); + db.query(`CREATE ROLE manager;`); const schema = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), - rlsPolicy: pgPolicy('newRls', { to: ['postgres', 'manager'] }), - })), + users: pgTable( + 'users', + { + id: integer('id').primaryKey(), + }, + () => [ + pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), + pgPolicy('newRls', { to: ['postgres', 'manager'] }), + ], + ), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'multiple-policies-with-roles', ); @@ -843,14 +906,12 @@ test('multiple policies with roles', async () => { }); test('basic roles', async () => { - const client = new PGlite(); - const schema = { usersRole: pgRole('user'), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'basic-roles', ['public'], @@ -862,14 +923,12 @@ test('basic roles', async () => { }); test('role with properties', async () => { - const client = new PGlite(); - const schema = { usersRole: pgRole('user', { inherit: false, createDb: true, createRole: true }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'roles-with-properties', ['public'], @@ -881,14 +940,12 @@ test('role with properties', async () => { }); test('role with a few properties', async () => { - const client = new PGlite(); - const schema = { usersRole: pgRole('user', { inherit: false, createRole: true }), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'roles-with-few-properties', ['public'], @@ -900,23 +957,24 @@ test('role with a few properties', async () => { }); test('multiple policies with roles from schema', async () => { - const client = new PGlite(); - const usersRole = pgRole('user_role', { inherit: false, createRole: true }); const schema = { usersRole, - - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), - rlsPolicy: pgPolicy('newRls', { to: ['postgres', usersRole] }), - })), + users: pgTable( + 'users', + { + id: integer('id').primaryKey(), + }, + () => [ + pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), + pgPolicy('newRls', { to: ['postgres', usersRole] }), + ], + ), }; - const { statements, sqlStatements } = await introspectPgToFile( - client, + const { statements, sqlStatements } = await diffIntrospect( + db, schema, 'multiple-policies-with-roles-from-schema', ['public'], @@ -926,3 +984,155 @@ test('multiple policies with roles from schema', async () => { expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); + +test('case sensitive schema name + identity column', async () => { + const mySchema = pgSchema('CaseSensitiveSchema'); + const schema = { + mySchema, + users: mySchema.table('users', { + id: integer('id').primaryKey().generatedAlwaysAsIdentity(), + name: text('name'), + }), + }; + + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'case-sensitive-schema-name', + ['CaseSensitiveSchema'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect without any schema', async () => { + await db.query(`DROP SCHEMA "public" cascade`); + const schema = {}; + const { statements, sqlStatements } = await diffIntrospect( + db, + schema, + 'introspect-without-any-schema', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('introspect foreign keys', async () => { + const mySchema = pgSchema('my_schema'); + const users = pgTable('users', { + id: integer('id').primaryKey(), + name: text('name'), + }); + const schema = { + mySchema, + users, + posts: mySchema.table('posts', { + id: integer('id').primaryKey(), + userId: integer('user_id').references(() => users.id, { onDelete: 'set null', onUpdate: 'cascade' }), + }), + }; + const { statements, sqlStatements, ddlAfterPull } = await diffIntrospect( + db, + schema, + 'introspect-foreign-keys', + ['my_schema', 'public'], + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + expect(ddlAfterPull.fks.one({ + schema: 'my_schema', + table: 'posts', + columns: ['user_id'], + schemaTo: 'public', + tableTo: 'users', + columnsTo: ['id'], + })).not.toBeNull(); +}); + +test('introspect partitioned tables', async () => { + await db.query(` + CREATE TABLE measurement ( + city_id int not null, + logdate date not null, + peaktemp int, + unitsales int + ) PARTITION BY RANGE (logdate); + `); + + const { tables } = await fromDatabase(db); + + expect(tables).toStrictEqual([ + { + name: 'measurement', + schema: 'public', + entityType: 'tables', + isRlsEnabled: false, + } satisfies typeof tables[number], + ]); +}); + +test('default sequence nextval', async () => { + const seqOrgCode = pgSequence('seq_org_code', { + startWith: '1000', + increment: '1', + minValue: '1', + maxValue: '9223372036854775807', + cache: '1', + cycle: false, + }); + + const organizations = pgTable('organizations', { + code: bigint({ mode: 'number' }).default(sql`nextval('seq_org_code'::regclass)`).notNull(), + }); + + const { sqlStatements } = await diffIntrospect(db, { seqOrgCode, organizations }, 'default_sequence_nextval'); + + expect(sqlStatements).toStrictEqual([]); +}); + +test('policy', async () => { + const organizationsInCore = pgTable('organizations', { + domain: text(), + }, (table) => [ + unique('organizations_domain_key').on(table.domain), + ]); + + const policy = pgPolicy('new_policy', { + as: 'restrictive', + to: 'postgres', + withCheck: sql`1 = 1`, + for: 'all', + }).link(organizationsInCore); + + const { sqlStatements } = await diffIntrospect(db, { organizationsInCore, policy }, 'policy'); + expect(sqlStatements).toStrictEqual([]); +}); + +// test('introspect foreign tables', async () => { +// await db.query('CREATE EXTENSION postgres_fdw;'); +// await db.query("CREATE SERVER film_server FOREIGN DATA WRAPPER postgres_fdw OPTIONS (host 'foo', dbname 'foodb', port '5432');"); +// await db.query(` +// CREATE FOREIGN TABLE films ( +// code char(5) NOT NULL, +// title varchar(40) NOT NULL, +// did integer NOT NULL, +// date_prod date, +// kind varchar(10), +// len interval hour to minute +// ) SERVER film_server; +// `); + +// const { tables } = await fromDatabase(db); + +// expect(tables).toStrictEqual([ +// { +// name: 'films', +// schema: 'public', +// entityType: 'tables', +// isRlsEnabled: false, +// } satisfies typeof tables[number], +// ]); +// }); diff --git a/drizzle-kit/tests/postgres/schemas/schema1.ts b/drizzle-kit/tests/postgres/schemas/schema1.ts new file mode 100644 index 0000000000..a72e074722 --- /dev/null +++ b/drizzle-kit/tests/postgres/schemas/schema1.ts @@ -0,0 +1,1137 @@ +import { eq, sql } from 'drizzle-orm'; +import { + AnyPgColumn, + bigint, + bigserial, + boolean, + char, + check, + decimal, + doublePrecision, + foreignKey, + index, + inet, + integer, + interval, + jsonb, + numeric, + pgEnum, + pgPolicy, + pgSchema, + pgSequence, + primaryKey, + serial, + smallint, + text, + timestamp, + unique, + uniqueIndex, + uuid, +} from 'drizzle-orm/pg-core'; + +// generated with AI and updated manually in some places + +export const core = pgSchema('core'); +export const analytics = pgSchema('analytics'); +export const billing = pgSchema('billing'); +export const monitoring = pgSchema('monitoring'); +export const alertAction = pgEnum('alert_action', ['email', 'pagerd/ut"\'y', 'slack', 'webhook']); +export const currencyCode = pgEnum('currency_code', ['USD', 'EUR', 'GBP', 'UAH', 'JPY']); +export const datasetVisibility = pgEnum('dataset_visibility', ['priv"ate', 'team', 'public']); +export const env = pgEnum('env', ['dev', 'staging', 'prod']); +export const featureState = pgEnum('feature_state', ['enabled', 'disabled', 'gradual']); +export const invoiceStatus = pgEnum('invoice_status', ['draft', "iss'ued", 'paid', 'voided', 'failed']); +export const jobState = pgEnum('job_state', ['queued', 'running', 'success', 'failed', 'cancelled']); +export const notificationChannel = pgEnum('notification_channel', ['email', 'sms', 'in_app', 'webhook']); +export const paymentMethod = pgEnum('payment_method', ['card', 'bank_transfer', 'paypal', 'crypto']); +export const pipelineStatus = pgEnum('pipeline_status', ['created', 'running', 'paused', 'completed', 'errored']); +export const roleKind = pgEnum('role_kind', ['system', 'custom']); +export const ruleConditionOperator = pgEnum('rule_condition_operator', [ + 'eq', + 'neq', + 'gt', + 'lt', + 'gte', + 'lte', + 'in', + 'nin', +]); +export const severityLevel = pgEnum('severity_level', ['low', 'medium', 'high', 'critical']); +export const userStatus = pgEnum('user_status', ['active', 'inactive', 'suspended', 'pending']); + +export const seqOrgCode = pgSequence('seq_org_code', { + startWith: '1000', + increment: '1', + minValue: '1', + maxValue: '9223372036854775807', + cache: '1', + cycle: false, +}); + +export const organizationsInCore = core.table('organizations', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: bigint({ mode: 'number' }).default(sql`nextval('seq_org_code'::regclass)`).notNull(), + name: text().notNull(), + domain: text(), + currency: currencyCode().default('EUR').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_org_name_idx').using('btree', table.name.asc().nullsLast().op('text_ops')), + index('organizations_code_idx').using('btree', table.code.asc().nullsLast().op('int8_ops')), + unique('organizations_domain_key').on(table.domain), + check('organizations_name_check', sql`char_length(name) > 1`), +]); + +export const usersInCore = core.table('users', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + username: text().notNull(), + status: userStatus().default('pending').notNull(), + locale: text().default('en-US').notNull(), + lastLogin: timestamp('last_login', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + bio: text().$onUpdate(() => sql`bio || 'some test'`), + profile: jsonb(), +}, (table) => [ + index('core_users_username_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.username.asc().nullsLast().op('text_ops'), + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'users_organization_id_fkey', + }).onDelete('cascade'), + unique('users_org_username_unique').on(table.organizationId, table.username), +]); + +export const rolesInCore = core.table('roles', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull().references(() => organizationsInCore.id, { onDelete: 'cascade' }), + name: text().notNull(), + kind: roleKind().default('custom').notNull(), + builtin: boolean().default(false).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + unique('roles_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const permissionsInCore = core.table('permissions', { + id: serial().primaryKey().notNull(), + code: text().notNull().unique(), + description: text(), +}); + +export const membershipsInCore = core.table('memberships', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + roleId: integer('role_id').notNull(), + organizationId: uuid('organization_id').notNull(), + joinedAt: timestamp('joined_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + active: boolean().default(true).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'memberships_user_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.roleId], + foreignColumns: [rolesInCore.id], + name: 'memberships_role_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'memberships_organization_id_fkey', + }).onDelete('cascade'), + unique('unique_membership').on(table.userId, table.organizationId), +]); + +export const apiKeysInCore = core.table('api_keys', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id'), + userId: uuid('user_id'), + name: text().notNull(), + keyHash: text('key_hash').notNull(), + revoked: boolean().default(false).notNull(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb().generatedAlwaysAs(sql`'{"some":"test"}'`), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_apikey_org_idx').using('btree', table.organizationId.asc().nullsLast().op('uuid_ops')).where( + sql`(revoked = false)`, + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'api_keys_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'api_keys_user_id_fkey', + }).onDelete('set null'), + unique('api_keys_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const sessionsInCore = core.table('sessions', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + ip: inet(), + userAgent: text('user_agent'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string' }).notNull(), + active: boolean().default(true).notNull(), +}, (table) => [ + index('core_sessions_user_expires').using( + 'btree', + table.userId.asc().nullsLast(), + table.expiresAt.asc().nullsLast().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'sessions_user_id_fkey', + }).onDelete('cascade'), +]); + +export const oauthProvidersInCore = core.table('oauth_providers', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + provider: text().notNull(), + clientId: text('client_id').notNull(), + clientSecret: text('client_secret').notNull(), + config: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'oauth_providers_organization_id_fkey', + }).onDelete('cascade'), + unique('oauth_providers_organization_id_provider_key').on(table.organizationId, table.provider), +]); + +export const featureFlagsInCore = core.table('feature_flags', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + description: text(), + state: featureState().default('disabled').notNull(), + rolloutPercent: smallint('rollout_percent').default(0), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'feature_flags_organization_id_fkey', + }).onDelete('cascade'), + unique('feature_flags_organization_id_key_key').on(table.organizationId, table.key), + check('feature_flags_rollout_percent_check', sql`(rollout_percent >= 0) AND (rollout_percent <= 100)`), +]); + +export const projectsInCore = core.table('projects', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + slug: text().notNull(), + description: text(), + visibility: datasetVisibility().default('priv"ate').notNull(), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_projects_org_name_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.name.asc().nullsLast().op('text_ops'), + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'projects_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'projects_created_by_fkey', + }), + unique('projects_org_slug_unique').on(table.organizationId, table.slug), +]); + +export const repositoriesInCore = core.table('repositories', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + provider: text().notNull(), + repoOwner: text('repo_owner').notNull(), + repoName: text('repo_name').notNull(), + defaultBranch: text('default_branch').default('main').notNull(), + cloneUrl: text('clone_url'), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'repositories_project_id_fkey', + }).onDelete('cascade'), + unique('repositories_project_id_provider_repo_owner_repo_name_key').on( + table.projectId, + table.provider, + table.repoOwner, + table.repoName, + ), +]); + +export const buildsInCore = core.table('builds', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + triggeredBy: uuid('triggered_by'), + commitSha: char('commit_sha', { length: 40 }).notNull(), + status: pipelineStatus().default('created').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb(), +}, (table) => [ + index('core_builds_project_status_idx').using( + 'btree', + table.projectId.asc().nullsLast().op('uuid_ops'), + table.status.asc().nullsLast(), + ), + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'builds_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.triggeredBy], + foreignColumns: [usersInCore.id], + name: 'builds_triggered_by_fkey', + }), + unique('builds_project_id_commit_sha_key').on(table.projectId, table.commitSha), +]); + +export const pipelinesInCore = core.table('pipelines', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + name: text().notNull(), + spec: jsonb().notNull(), + status: pipelineStatus().default('created').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'pipelines_project_id_fkey', + }).onDelete('cascade'), + unique('pipelines_project_id_name_key').on(table.projectId, table.name), +]); + +export const pipelineRunsInAnalytics = analytics.table('pipeline_runs', { + id: uuid().defaultRandom().primaryKey().notNull(), + pipelineId: uuid('pipeline_id').notNull(), + + runNumber: bigint('run_number', { mode: 'number' }).notNull(), + state: jobState().default('queued').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + logs: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('analytics_pipeline_runs_state_idx').using('btree', table.state.asc().nullsLast().op('enum_ops')), + foreignKey({ + columns: [table.pipelineId], + foreignColumns: [pipelinesInCore.id], + name: 'pipeline_runs_pipeline_id_fkey', + }).onDelete('cascade'), + unique('pipeline_runs_unique_run').on(table.pipelineId, table.runNumber), +]); + +export const jobsInAnalytics = analytics.table('jobs', { + id: uuid().defaultRandom().primaryKey().notNull(), + pipelineRunId: uuid('pipeline_run_id'), + name: text().notNull(), + state: jobState().default('queued').notNull(), + attempts: integer().default(0).notNull(), + lastError: text('last_error'), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('analytics_jobs_state_attempts_idx').using( + 'btree', + table.state.asc().nullsLast(), + table.attempts.asc().nullsLast().op('int4_ops'), + ), + foreignKey({ + columns: [table.pipelineRunId], + foreignColumns: [pipelineRunsInAnalytics.id], + name: 'jobs_pipeline_run_id_fkey', + }).onDelete('cascade'), +]); + +export const storageBucketsInCore = core.table('storage_buckets', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + region: text().notNull(), + config: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'storage_buckets_organization_id_fkey', + }).onDelete('cascade'), + unique('storage_buckets_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const objectsInCore = core.table('objects', { + id: uuid().defaultRandom().primaryKey().notNull(), + bucketId: uuid('bucket_id').notNull(), + path: text().notNull(), + + size: bigint({ mode: 'number' }).default(0).notNull(), + contentType: text('content_type'), + metadata: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_objects_bucket_path_gin').using('gin', table.metadata.asc().nullsLast().op('jsonb_ops')), + foreignKey({ + columns: [table.bucketId], + foreignColumns: [storageBucketsInCore.id], + name: 'objects_bucket_id_fkey', + }).onDelete('cascade'), + unique('objects_bucket_id_path_key').on(table.bucketId, table.path), +]); + +export const filesInCore = core.table('files', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id'), + name: text().notNull(), + latestObjectId: uuid('latest_object_id'), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'files_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.latestObjectId], + foreignColumns: [objectsInCore.id], + name: 'files_latest_object_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'files_created_by_fkey', + }), + unique('files_project_id_name_key').on(table.projectId, table.name), +]); + +export const fileVersionsInCore = core.table('file_versions', { + id: uuid().defaultRandom().primaryKey().notNull(), + fileId: uuid('file_id').notNull(), + objectId: uuid('object_id').notNull(), + versionNumber: integer('version_number').notNull(), + checksum: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.fileId], + foreignColumns: [filesInCore.id], + name: 'file_versions_file_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.objectId], + foreignColumns: [objectsInCore.id], + name: 'file_versions_object_id_fkey', + }).onDelete('cascade'), + unique('file_versions_file_id_version_number_key').on(table.fileId, table.versionNumber), +]); + +export const tagsInCore = core.table('tags', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + value: text(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'tags_organization_id_fkey', + }).onDelete('cascade'), + unique('tags_organization_id_key_value_key').on(table.organizationId, table.key, table.value), +]); + +export const conversationsInCore = core.table('conversations', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id'), + title: text(), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'conversations_project_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'conversations_created_by_fkey', + }), +]); + +export const chatMessagesInCore = core.table('chat_messages', { + id: uuid().defaultRandom().primaryKey().notNull(), + conversationId: uuid('conversation_id').notNull(), + senderId: uuid('sender_id'), + body: text().notNull(), + attachments: jsonb(), + sentAt: timestamp('sent_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + editedAt: timestamp('edited_at', { withTimezone: true, mode: 'string' }), +}, (table) => [ + index('core_chat_conv_sent_at_idx').using( + 'btree', + table.conversationId.asc().nullsLast(), + table.sentAt.desc().nullsFirst().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.conversationId], + foreignColumns: [conversationsInCore.id], + name: 'chat_messages_conversation_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.senderId], + foreignColumns: [usersInCore.id], + name: 'chat_messages_sender_id_fkey', + }).onDelete('set null'), +]); + +export const notificationsInCore = core.table('notifications', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + channel: notificationChannel().default('in_app').notNull(), + payload: jsonb().notNull(), + seen: boolean().default(false).notNull(), + deliveredAt: timestamp('delivered_at', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_notifications_unseen_idx').using('btree', table.userId.asc().nullsLast().op('uuid_ops')).where( + sql`(seen = false)`, + ), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'notifications_user_id_fkey', + }).onDelete('cascade'), +]); + +export const customersInBilling = billing.table('customers', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id'), + name: text().notNull(), + address: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'customers_organization_id_fkey', + }).onDelete('cascade'), + unique('customers_organization_id_key').on(table.organizationId), + unique('idnameunique').on(table.id, table.name), +]); + +export const subscriptionsInBilling = billing.table('subscriptions', { + id: uuid().defaultRandom().primaryKey().notNull(), + customerId: uuid('customer_id').notNull(), + plan: text().notNull(), + status: text().default('active').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + endedAt: timestamp('ended_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.customerId], + foreignColumns: [customersInBilling.id], + name: 'subscriptions_customer_id_fkey', + }).onDelete('cascade'), +]); + +export const paymentsInBilling = billing.table('payments', { + id: uuid().defaultRandom().primaryKey().notNull(), + invoiceId: uuid('invoice_id').notNull(), + paidAt: timestamp('paid_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + amount: numeric({ precision: 12, scale: 2 }).notNull(), + amount2: decimal({ precision: 12, scale: 2 }).notNull(), + method: paymentMethod().notNull(), + transactionRef: text('transaction_ref'), + metadata: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.invoiceId], + foreignColumns: [invoicesInBilling.id], + name: 'payments_invoice_id_fkey', + }).onDelete('cascade'), +]); + +export const couponsInBilling = billing.table('coupons', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: text().notNull(), + description: text(), + discountPercent: smallint('discount_percent'), + redeemableFrom: timestamp('redeemable_from', { withTimezone: true, mode: 'string' }), + redeemableTo: timestamp('redeemable_to', { withTimezone: true, mode: 'string' }), + maxRedemptions: integer('max_redemptions').generatedAlwaysAsIdentity(), + metadata: jsonb(), +}, (table) => [ + unique('coupons_code_key').on(table.code), + check('coupons_discount_percent_check', sql`(discount_percent >= 0) AND (discount_percent <= 100)`), +]); + +export const webhooksInCore = core.table('webhooks', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + url: text().notNull(), + secret: text(), + events: text().array().notNull(), + active: boolean().default(true).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_webhooks_org_active_idx').using('btree', table.organizationId.asc().nullsLast().op('uuid_ops')).where( + sql`(active = true)`, + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'webhooks_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const metricSourcesInAnalytics = analytics.table('metric_sources', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + config: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'metric_sources_organization_id_fkey', + }).onDelete('cascade'), + unique('metric_sources_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const metricsInAnalytics = analytics.table('metrics', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + sourceId: uuid('source_id').notNull(), + metricKey: text('metric_key').notNull(), + ts: timestamp({ withTimezone: true, mode: 'string' }).notNull(), + value: doublePrecision().notNull(), + tags: jsonb(), +}, (table) => [ + index('analytics_metrics_key_ts_idx').using( + 'btree', + table.metricKey.asc().nullsLast().op('text_ops'), + table.ts.desc().nullsFirst().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.sourceId], + foreignColumns: [metricSourcesInAnalytics.id], + name: 'metrics_source_id_fkey', + }).onDelete('cascade'), + unique('metrics_source_id_metric_key_ts_key').on(table.sourceId, table.metricKey, table.ts), +]); + +export const alertRulesInMonitoring = monitoring.table('alert_rules', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + description: text(), + severity: severityLevel().default('medium').notNull(), + enabled: boolean().default(true).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'alert_rules_organization_id_fkey', + }).onDelete('cascade'), + unique('alert_rules_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const ruleConditionsInMonitoring = monitoring.table('rule_conditions', { + id: uuid().defaultRandom().primaryKey().notNull(), + ruleId: uuid('rule_id').notNull(), + metricKey: text('metric_key').notNull(), + operator: ruleConditionOperator().notNull().unique('some_name', { nulls: 'not distinct' }), + threshold: doublePrecision().notNull(), + window: interval().default('00:05:00').notNull(), +}, (table) => [ + foreignKey({ + columns: [table.ruleId], + foreignColumns: [alertRulesInMonitoring.id], + name: 'rule_conditions_rule_id_fkey', + }).onDelete('cascade'), +]); + +export const alertsInMonitoring = monitoring.table('alerts', { + id: uuid().defaultRandom().primaryKey().notNull(), + ruleId: uuid('rule_id').notNull(), + triggeredAt: timestamp('triggered_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + resolvedAt: timestamp('resolved_at', { withTimezone: true, mode: 'string' }), + payload: jsonb(), + state: text().default('firing').notNull(), +}, (table) => [ + foreignKey({ + columns: [table.ruleId], + foreignColumns: [alertRulesInMonitoring.id], + name: 'alerts_rule_id_fkey', + }).onDelete('cascade'), +]); + +export const escalationsInMonitoring = monitoring.table('escalations', { + id: uuid().defaultRandom().primaryKey().notNull(), + alertId: uuid('alert_id').notNull(), + action: alertAction().notNull(), + target: text().notNull(), + executedAt: timestamp('executed_at', { withTimezone: true, mode: 'string' }), +}, (table) => [ + foreignKey({ + columns: [table.alertId], + foreignColumns: [alertsInMonitoring.id], + name: 'escalations_alert_id_fkey', + }).onDelete('cascade'), +]); + +export const ssoProvidersInCore = core.table('sso_providers', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + type: text().notNull(), + config: jsonb().notNull(), + enabled: boolean().default(false).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'sso_providers_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const auditLogsInCore = core.table('audit_logs', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + organizationId: uuid('organization_id'), + actorId: uuid('actor_id'), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').array().array().array(), + action: text().notNull(), + beforeState: jsonb('before_state'), + afterState: jsonb('after_state'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_audit_org_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.createdAt.desc().nullsFirst().op('timestamptz_ops'), + ), +]); + +export const rateLimitsInCore = core.table('rate_limits', { + id: uuid().defaultRandom().primaryKey().notNull(), + apiKeyId: uuid('api_key_id').notNull(), + windowStart: timestamp('window_start', { withTimezone: true, mode: 'string' }).notNull(), + requests: integer().generatedByDefaultAsIdentity().notNull().array(), + limit: integer().generatedAlwaysAs(() => sql`1`).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.apiKeyId], + foreignColumns: [apiKeysInCore.id], + name: 'rate_limits_api_key_id_fkey', + }).onDelete('cascade'), + unique('rate_limits_api_key_id_window_start_key').on(table.apiKeyId, table.windowStart).nullsNotDistinct(), +]); + +export const experimentsInCore = core.table('experiments', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + description: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'experiments_organization_id_fkey', + }).onDelete('cascade'), + unique('experiments_organization_id_key_key').on(table.organizationId, table.key), +]); + +export const experimentVariantsInCore = core.table('experiment_variants', { + id: uuid().defaultRandom().primaryKey().notNull(), + experimentId: uuid('experiment_id').notNull(), + name: text().notNull(), + allocationPercent: smallint('allocation_percent').default(0).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.experimentId], + foreignColumns: [experimentsInCore.id], + name: 'experiment_variants_experiment_id_fkey', + }).onDelete('cascade'), + unique('experiment_variants_experiment_id_name_key').on(table.experimentId, table.name), + check('experiment_variants_allocation_percent_check', sql`(allocation_percent >= 0) AND (allocation_percent <= 100)`), +]); + +export const experimentAssignmentsInCore = core.table('experiment_assignments', { + id: uuid().defaultRandom().primaryKey().notNull(), + experimentId: uuid('experiment_id').notNull(), + variantId: uuid('variant_id').notNull(), + userId: uuid('user_id').notNull(), + assignedAt: timestamp('assigned_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.experimentId], + foreignColumns: [experimentsInCore.id], + name: 'experiment_assignments_experiment_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.variantId], + foreignColumns: [experimentVariantsInCore.id], + name: 'experiment_assignments_variant_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'experiment_assignments_user_id_fkey', + }).onDelete('cascade'), + unique('experiment_assignments_experiment_id_user_id_key').on(table.experimentId, table.userId), +]); + +export const deploymentsInCore = core.table('deployments', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + environment: env().default('dev').notNull(), + version: text().notNull(), + deployedBy: uuid('deployed_by'), + deployedAt: timestamp('deployed_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + notes: text(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'deployments_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.deployedBy], + foreignColumns: [usersInCore.id], + name: 'deployments_deployed_by_fkey', + }), + unique('deployments_project_id_environment_version_key').on(table.projectId, table.environment, table.version), +]); + +export const servicesInCore = core.table('services', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + kind: text(), + ownerId: uuid('owner_id'), + metadata: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string', precision: 6 }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'services_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.ownerId], + foreignColumns: [usersInCore.id], + name: 'services_owner_id_fkey', + }), + unique('services_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const locksInCore = core.table('locks', { + name: text().primaryKey().notNull(), + owner: text(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string', precision: 2 }), +}); + +export const entitiesInCore = core.table('entities', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + type: text().notNull(), + data: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'entities_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const taskQueueInAnalytics = analytics.table('task_queue', { + id: uuid().defaultRandom().primaryKey().notNull(), + queueName: text('queue_name').default('default').notNull(), + payload: jsonb().notNull(), + priority: smallint().default(100).notNull(), + reserved: boolean().default(false).notNull(), + reservedUntil: timestamp('reserved_until', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex('analytics_task_queue_unique_unreserved').using( + 'btree', + sql`queue_name`, + sql`((payload ->> 'task_type'::text))`, + ).where(sql`(reserved = false)`), +]); + +export const invoicesInBilling = billing.table('invoices', { + id: uuid().defaultRandom().primaryKey().notNull(), + customerId: uuid('customer_id').notNull(), + number: text().notNull(), + issuedAt: timestamp('issued_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + dueAt: timestamp('due_at', { withTimezone: true, mode: 'string' }), + totalAmount: numeric('total_amount', { precision: 12, scale: 2 }).default('0.0').notNull(), + currency: currencyCode().default('USD').notNull(), + status: invoiceStatus().default('draft').notNull(), + notes: text(), +}, (table) => [ + index('billing_invoices_status_idx').using('btree', table.status.asc().nullsLast().op('enum_ops')), + foreignKey({ + columns: [table.customerId, table.number], + foreignColumns: [customersInBilling.id, customersInBilling.name], + name: 'invoices_customer_id_fkey', + }).onDelete('cascade'), + unique('invoices_customer_id_number_key').on(table.customerId, table.number), + check('invoices_total_nonnegative', sql`total_amount >= (0)::numeric`), +]); + +export const aliasesInCore = core.table('aliases', { + id: uuid().defaultRandom().primaryKey().notNull(), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').notNull(), + alias: text().notNull().unique('unique_with_name'), + organizationId: uuid('organization_id'), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'aliases_organization_id_fkey', + }).onUpdate('cascade'), + unique('aliases_object_type_object_id_alias_key').on(table.objectType, table.objectId, table.alias), +]); + +export const selfRef = core.table('self_ref', { + id: uuid().defaultRandom().primaryKey().notNull(), + objectType: text('object_type').notNull().unique().references((): AnyPgColumn => selfRef.organizationId), + organizationId: text('organization_id').notNull().unique(), +}); + +export const couponRedemptionsInBilling = billing.table('coupon_redemptions', { + couponId: uuid('coupon_id').notNull(), + customerId: uuid('customer_id').notNull(), + redeemedAt: timestamp('redeemed_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.couponId], + foreignColumns: [couponsInBilling.id], + name: 'coupon_redemptions_coupon_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.customerId], + foreignColumns: [customersInBilling.id], + name: 'coupon_redemptions_customer_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.couponId, table.customerId], name: 'coupon_redemptions_pkey' }), +]); + +export const entityLinksInCore = core.table('entity_links', { + parentEntityId: uuid('parent_entity_id').notNull(), + childEntityId: uuid('child_entity_id').notNull(), + relationship: text().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.parentEntityId], + foreignColumns: [entitiesInCore.id], + name: 'entity_links_parent_entity_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.childEntityId], + foreignColumns: [entitiesInCore.id], + name: 'entity_links_child_entity_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.parentEntityId, table.childEntityId, table.relationship], name: 'entity_links_pkey' }), +]); + +export const rolePermissionsInCore = core.table('role_permissions', { + roleId: integer('role_id').notNull(), + permissionId: integer('permission_id').notNull(), + assignedBy: uuid('assigned_by'), + assignedAt: timestamp('assigned_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.roleId], + foreignColumns: [rolesInCore.id], + name: 'role_permissions_role_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.permissionId], + foreignColumns: [permissionsInCore.id], + name: 'role_permissions_permission_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.assignedBy], + foreignColumns: [usersInCore.id], + name: 'role_permissions_assigned_by_fkey', + }), + primaryKey({ columns: [table.roleId, table.permissionId], name: 'role_permissions_pkey' }), +]); + +export const taggingsInCore = core.table('taggings', { + tagId: integer('tag_id').notNull(), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.tagId], + foreignColumns: [tagsInCore.id], + name: 'taggings_tag_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.tagId, table.objectType, table.objectId], name: 'taggings_pkey' }), +]); + +export const reactionsInCore = core.table('reactions', { + messageId: uuid('message_id').notNull(), + userId: uuid('user_id').notNull(), + reaction: text().notNull().array(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.messageId], + foreignColumns: [chatMessagesInCore.id], + name: 'reactions_message_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'reactions_user_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.messageId, table.userId, table.reaction], name: 'reactions_pkey' }), +]); + +// views +export const projectSearchInAnalytics = analytics.materializedView('project_search', { + id: uuid(), + name: text(), + slug: text(), + description: text(), +}).with({ autovacuumEnabled: true, autovacuumMultixactFreezeTableAge: 12 }) + .withNoData().as( + sql`SELECT id, name, slug, description FROM core.projects p`, + ); + +export const projectSearchInAnalytics2 = analytics.materializedView('project_search2', { + id: uuid(), + name: text(), + slug: text(), + description: text(), +}).with({ autovacuumEnabled: true, autovacuumMultixactFreezeTableAge: 12 }) + .withNoData().existing(); + +export const vActiveUsersInCore = core.view('v_active_users').as((qb) => + qb.select({ + id: usersInCore.id, + username: usersInCore.username, + organization_id: usersInCore.organizationId, + }).from(usersInCore).where(eq(usersInCore.status, 'active')) +); +export const vActiveUsersInCore2 = core.view('v_active_users2', {}).existing(); + +// polices +export const rls = pgSchema('rls'); +export const documentsInRls = rls.table('documents', { + docId: uuid('doc_id').defaultRandom().primaryKey().notNull(), + ownerId: uuid('owner_id').notNull(), + title: text().notNull(), + content: text().notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('documents_delete_own', { + as: 'permissive', + for: 'delete', + to: ['public'], + using: sql`(owner_id = (CURRENT_USER)::uuid)`, + }), + pgPolicy('documents_update_own', { as: 'permissive', for: 'update', to: ['public'] }), + pgPolicy('documents_select_own', { as: 'permissive', for: 'select', to: ['public'] }), +]); + +export const messagesInRls = rls.table.withRLS('messages', { + msgId: uuid('msg_id').defaultRandom().primaryKey().notNull(), + senderId: uuid('sender_id').notNull(), + recipientId: uuid('recipient_id').notNull(), + message: text().notNull(), + sentAt: timestamp('sent_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('messages_delete_own', { + as: 'permissive', + for: 'delete', + to: ['public'], + using: sql`(sender_id = (CURRENT_USER)::uuid)`, + }), + pgPolicy('messages_visibility', { as: 'permissive', for: 'select', to: ['public'] }), +]); + +export const projectsInRls = rls.table('projects', { + projectId: uuid('project_id').defaultRandom().primaryKey().notNull(), + name: text().notNull(), + description: text(), + ownerId: uuid('owner_id').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('projects_visibility', { + as: 'permissive', + for: 'select', + to: ['public'], + using: sql`((owner_id = (CURRENT_USER)::uuid) OR (project_id IN ( SELECT pm.project_id + FROM rls.project_members pm + WHERE (pm.user_id = (CURRENT_USER)::uuid))))`, + }), +]); + +export const projectMembersInRls = rls.table.withRLS('project_members', { + projectId: uuid('project_id').notNull(), + userId: uuid('user_id').notNull(), + role: text().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInRls.projectId], + name: 'project_members_project_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.projectId, table.userId], name: 'project_members_pkey' }), + pgPolicy('project_members_manage', { + as: 'permissive', + for: 'all', + to: ['public'], + using: sql`(project_id IN ( SELECT p.project_id + FROM rls.projects p + WHERE (p.owner_id = (CURRENT_USER)::uuid)))`, + }), + pgPolicy('project_members_visibility', { as: 'permissive', for: 'select', to: ['public'] }), + check('project_members_role_check', sql`role = ANY (ARRAY['member'::text, 'admin'::text])`), +]); + +export const policy = pgPolicy('new_policy', { + as: 'restrictive', + to: 'postgres', + withCheck: sql`1 = 1`, + for: 'all', +}).link(organizationsInCore); diff --git a/drizzle-kit/tests/postgres/snapshots/schema01.ts b/drizzle-kit/tests/postgres/snapshots/schema01.ts new file mode 100644 index 0000000000..5c5c820326 --- /dev/null +++ b/drizzle-kit/tests/postgres/snapshots/schema01.ts @@ -0,0 +1,64 @@ +import { + AnyPgColumn, + foreignKey, + integer, + pgEnum, + pgSchema, + pgTable, + primaryKey, + serial, + text, + unique, +} from 'orm044/pg-core'; + +enum E { + value = 'value', +} + +export const folder = pgSchema('folder'); +export const en = pgEnum('e', E); +export const users = pgTable('users', { + id: serial().primaryKey(), + enum: en(), + text: text().unique(), + text1: text(), + text2: text(), +}, (t) => [unique().on(t.text1, t.text2)]); + +export const users1 = pgTable('users1', { + id1: integer(), + id2: integer(), +}, (t) => [primaryKey({ columns: [t.id1, t.id2] })]); + +export const users2 = pgTable('users2', { + id: serial(), + c1: text().unique(), + c2: text().unique('c2unique'), + c3: text().unique('c3unique', { nulls: 'distinct' }), +}, (t) => [primaryKey({ columns: [t.id] })]); + +export const users3 = pgTable('users3', { + c1: text(), + c2: text(), + c3: text(), +}, (t) => [ + unique().on(t.c1), + unique('u3c2unique').on(t.c2), + unique('u3c3unique').on(t.c3).nullsNotDistinct(), + unique('u3c2c3unique').on(t.c2, t.c3), +]); + +export const users4 = pgTable('users4', { + c1: text().unique().references(() => users3.c1), + c2: text().references((): AnyPgColumn => users4.c1), + c3: text(), + c4: text(), + c5: text().array().default([]), + c6: text().array().array().default([[]]), + c7: text().array().array().array().default([[[]]]), + c8: text().array(2).array(10), +}, (t) => [foreignKey({ columns: [t.c3, t.c4], foreignColumns: [users3.c2, users3.c3] })]); + +export const users5 = pgTable('users5', { + fullName: text(), +}); diff --git a/drizzle-kit/tests/postgres/snapshots/schema01new.ts b/drizzle-kit/tests/postgres/snapshots/schema01new.ts new file mode 100644 index 0000000000..5dbb489c00 --- /dev/null +++ b/drizzle-kit/tests/postgres/snapshots/schema01new.ts @@ -0,0 +1,64 @@ +import { + AnyPgColumn, + foreignKey, + integer, + pgEnum, + pgSchema, + pgTable, + primaryKey, + serial, + text, + unique, +} from 'drizzle-orm/pg-core'; + +enum E { + value = 'value', +} + +export const folder = pgSchema('folder'); +export const en = pgEnum('e', E); +export const users = pgTable('users', { + id: serial().primaryKey(), + enum: en(), + text: text().unique(), + text1: text(), + text2: text(), +}, (t) => [unique().on(t.text1, t.text2)]); + +export const users1 = pgTable('users1', { + id1: integer(), + id2: integer(), +}, (t) => [primaryKey({ columns: [t.id1, t.id2] })]); + +export const users2 = pgTable('users2', { + id: serial(), + c1: text().unique(), + c2: text().unique('c2unique'), + c3: text().unique('c3unique', { nulls: 'distinct' }), +}, (t) => [primaryKey({ columns: [t.id] })]); + +export const users3 = pgTable('users3', { + c1: text(), + c2: text(), + c3: text(), +}, (t) => [ + unique().on(t.c1), + unique('u3c2unique').on(t.c2), + unique('u3c3unique').on(t.c3).nullsNotDistinct(), + unique('u3c2c3unique').on(t.c2, t.c3), +]); + +export const users4 = pgTable('users4', { + c1: text().unique().references(() => users3.c1), + c2: text().references((): AnyPgColumn => users4.c1), + c3: text(), + c4: text(), + c5: text().array().default([]), + c6: text().array().array().default([[]]), + c7: text().array().array().array().default([[[]]]), + c8: text().array(2).array(10), +}, (t) => [foreignKey({ columns: [t.c3, t.c4], foreignColumns: [users3.c2, users3.c3] })]); + +export const users5 = pgTable('users5', { + fullName: text(), +}); diff --git a/drizzle-kit/tests/postgres/snapshots/schema02.ts b/drizzle-kit/tests/postgres/snapshots/schema02.ts new file mode 100644 index 0000000000..d9222b914c --- /dev/null +++ b/drizzle-kit/tests/postgres/snapshots/schema02.ts @@ -0,0 +1,772 @@ +import { SQL, sql } from 'orm044'; +import { + boolean, + date, + decimal, + index, + integer, + jsonb, + pgTable, + primaryKey, + text, + timestamp, + uniqueIndex, + uuid, + varchar, +} from 'orm044/pg-core'; + +// Enum types for entity classification +type EntityClass = 'ALPHA' | 'BETA' | 'GAMMA'; +type AccessLevel = 'STANDARD' | 'PREMIUM'; +type ProcessStage = 'INITIAL' | 'COMPLETE'; + +export const profiles = pgTable('profiles', { + id: uuid().defaultRandom().primaryKey(), + externalRef: varchar({ length: 255 }).notNull().unique(), + serviceRef: varchar().unique(), + contactEmail: varchar({ length: 255 }).notNull().unique(), + givenName: varchar({ length: 100 }).notNull(), + familyName: varchar({ length: 100 }).notNull(), + accessLevel: varchar().$type().notNull(), + birthDate: date(), + classification: varchar({ length: 50 }).$type(), + contactNumber: varchar({ length: 20 }), + currentStage: varchar().$type().default('INITIAL').notNull(), + // Location fields + recipientName: varchar({ length: 255 }), + primaryAddress: varchar({ length: 255 }), + secondaryAddress: varchar({ length: 255 }), + locality: varchar({ length: 100 }), + region: varchar({ length: 2 }), + postalCode: varchar({ length: 10 }), + territory: varchar({ length: 2 }).default('US').notNull(), + // Additional profile fields + avatarUrl: varchar({ length: 255 }), + lastAccessAt: timestamp({ withTimezone: true }), + emailConfirmed: boolean().default(false).notNull(), + phoneConfirmed: boolean().default(false).notNull(), + // Timestamps + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (profiles) => [ + index().on(profiles.serviceRef), + index().on(profiles.contactEmail), + index().on(profiles.externalRef), +]); + +export type Profile = typeof profiles.$inferSelect; +export type ProfileToInsert = typeof profiles.$inferInsert; + +export const profileAgreements = pgTable( + 'profile_agreements', + { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid() + .references(() => profiles.id, { onDelete: 'cascade' }) + .notNull(), + privacyConsent: boolean().default(false).notNull(), + serviceConsent: boolean().default(false).notNull(), + termsConsent: boolean().default(false).notNull(), + agreementDate: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + }, + (table) => [ + uniqueIndex().on(table.profileId), + ], +); + +export type ProfileAgreement = typeof profileAgreements.$inferSelect; +export type ProfileAgreementToInsert = typeof profileAgreements.$inferInsert; + +export const facilities = pgTable('facilities', { + id: uuid().defaultRandom().primaryKey(), + facilityName: varchar({ length: 255 }).notNull(), + serviceId: integer().notNull().unique(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}); + +export type Facility = typeof facilities.$inferSelect; +export type FacilityToInsert = typeof facilities.$inferInsert; + +export const identifiers = pgTable('identifiers', { + id: uuid().defaultRandom().primaryKey(), + code: varchar({ length: 50 }).notNull().unique(), + displayName: varchar({ length: 255 }).notNull(), + description: text(), + slug: varchar({ length: 255 }).notNull().unique(), + measurementUnit: varchar({ length: 50 }), + standardRanges: jsonb(), + guidelines: jsonb(), + evaluationRules: jsonb(), + isFeatured: boolean().default(false), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}); + +export type Identifier = typeof identifiers.$inferSelect; +export type IdentifierToInsert = typeof identifiers.$inferInsert; + +export const classifications = pgTable('classifications', { + id: uuid().defaultRandom().primaryKey(), + categoryName: varchar({ length: 255 }).notNull(), + iconType: varchar({ length: 255 }), + themeColor: varchar({ length: 255 }), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}); + +export type Classification = typeof classifications.$inferSelect; +export type ClassificationToInsert = typeof classifications.$inferInsert; + +export const identifierClassifications = pgTable('identifier_classifications', { + identifierId: uuid().references(() => identifiers.id), + classificationId: uuid().references(() => classifications.id), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [primaryKey({ columns: [table.identifierId, table.classificationId] })]); + +export type IdentifierClassification = typeof identifierClassifications.$inferSelect; +export type IdentifierClassificationToInsert = typeof identifierClassifications.$inferInsert; + +export const impactFactors = pgTable('impact_factors', { + id: uuid().defaultRandom().primaryKey(), + factorName: varchar({ length: 255 }).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}); + +export type ImpactFactor = typeof impactFactors.$inferSelect; +export type ImpactFactorToInsert = typeof impactFactors.$inferInsert; + +export const impactFactorsToIdentifiers = pgTable('impact_factors_to_identifiers', { + impactFactorId: uuid().references(() => impactFactors.id), + identifierId: uuid().references(() => identifiers.id), +}); + +export type ImpactFactorsToIdentifiers = typeof impactFactorsToIdentifiers.$inferSelect; +export type ImpactFactorsToIdentifiersToInsert = typeof impactFactorsToIdentifiers.$inferInsert; + +export const metricClusters = pgTable('metric_clusters', { + id: uuid().defaultRandom().primaryKey(), + clusterName: varchar({ length: 255 }).notNull(), + slug: varchar({ length: 255 }).notNull().unique(), + description: text(), + metricType: varchar({ length: 50 }).default('standard').notNull(), + measurementUnit: varchar({ length: 50 }), + isActive: boolean().default(true).notNull(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}); + +export type MetricCluster = typeof metricClusters.$inferSelect; +export type MetricClusterToInsert = typeof metricClusters.$inferInsert; + +export const metricPreferences = pgTable( + 'metric_preferences', + { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid().references(() => profiles.id), + identifierId: uuid().references(() => identifiers.id), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + }, + (table) => [ + index().on(table.profileId), + index().on(table.identifierId), + ], +); + +export type MetricPreference = typeof metricPreferences.$inferSelect; +export type MetricPreferenceToInsert = typeof metricPreferences.$inferInsert; + +export const dataPoints = pgTable('data_points', { + id: uuid().defaultRandom().primaryKey(), + pointId: integer().notNull(), + clusterId: uuid().references(() => metricClusters.id), + identifierId: uuid().references(() => identifiers.id), + pointName: varchar({ length: 255 }).notNull(), + description: text(), + dataType: varchar({ length: 50 }).default('standard').notNull(), + isParent: boolean().default(false).notNull(), + measurementUnit: varchar({ length: 50 }), + baseRate: decimal({ precision: 10, scale: 2 }), + baseCentRate: integer().generatedAlwaysAs((): SQL => sql`${dataPoints.baseRate} * 100`), + facilityId: uuid().references(() => facilities.id).notNull(), + isActive: boolean().default(true).notNull(), + visualType: varchar({ length: 50 }).default('numeric-trend'), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [index().on(table.clusterId, table.facilityId)]); + +export type DataPoint = typeof dataPoints.$inferSelect; +export type DataPointToInsert = typeof dataPoints.$inferInsert; + +export const dataPointRelationships = pgTable( + 'data_point_relationships', + { + parentId: uuid() + .references(() => dataPoints.id, { onDelete: 'cascade' }) + .notNull(), + childId: uuid() + .references(() => dataPoints.id, { onDelete: 'cascade' }) + .notNull(), + displayOrder: integer(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + }, + (table) => [ + primaryKey({ columns: [table.parentId, table.childId] }), + index().on(table.childId), + ], +); + +export type DataPointRelationship = typeof dataPointRelationships.$inferSelect; +export type DataPointRelationshipToInsert = typeof dataPointRelationships.$inferInsert; + +export const packageClusters = pgTable('package_clusters', { + id: uuid().defaultRandom().primaryKey(), + packageName: varchar({ length: 255 }).notNull(), + slug: varchar({ length: 255 }).notNull().unique(), + description: text(), + partnerId: text().references(() => partners.partnerId, { + onDelete: 'set null', + }), + isActive: boolean().default(true).notNull(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}); + +export type PackageCluster = typeof packageClusters.$inferSelect; +export type PackageClusterToInsert = typeof packageClusters.$inferInsert; + +export const servicePackages = pgTable('service_packages', { + id: uuid().defaultRandom().primaryKey(), + clusterId: uuid().references(() => packageClusters.id).notNull(), + packageTitle: varchar({ length: 255 }), + description: text(), + serviceRef: varchar({ length: 100 }).notNull().unique(), + baseRate: decimal({ precision: 10, scale: 2 }).notNull(), + baseCentRate: integer().generatedAlwaysAs((): SQL => sql`${servicePackages.baseRate} * 100`), + discountRate: decimal({ precision: 10, scale: 2 }), + discountCentRate: integer().generatedAlwaysAs((): SQL => sql`${servicePackages.discountRate} * 100`), + facilityId: uuid().references(() => facilities.id).notNull(), + isPartnerCreated: boolean().default(false).notNull(), + allowsRemoteCollection: boolean().default(false).notNull(), + partnerId: text().references(() => partners.partnerId), + isActive: boolean().default(true).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.clusterId, table.facilityId), +]); + +export type ServicePackage = typeof servicePackages.$inferSelect; +export type ServicePackageToInsert = typeof servicePackages.$inferInsert; + +export const servicePackageDataPoints = pgTable('service_package_data_points', { + packageId: uuid().references(() => servicePackages.id, { onDelete: 'cascade' }).notNull(), + dataPointId: uuid().references(() => dataPoints.id, { onDelete: 'cascade' }).notNull(), + displayOrder: integer(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [primaryKey({ columns: [table.packageId, table.dataPointId] })]); + +export type ServicePackageDataPoint = typeof servicePackageDataPoints.$inferSelect; +export type ServicePackageDataPointToInsert = typeof servicePackageDataPoints.$inferInsert; + +export const collectionEvents = pgTable('collection_events', { + id: uuid().defaultRandom().primaryKey(), + requestId: uuid().references(() => requests.id, { + onDelete: 'cascade', + }), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }), + facilityId: uuid().references(() => facilities.id), + collectionDate: timestamp({ withTimezone: true }), + reportDate: timestamp({ withTimezone: true }), + receivedDate: timestamp({ withTimezone: true }), + eventStatus: varchar({ length: 50 }).default('initiated'), + dataSource: varchar({ length: 50 }).default(''), + specimenRef: varchar({ length: 100 }), + eventMetadata: jsonb(), + documentUrl: varchar({ length: 255 }), + hasNewData: boolean().notNull().default(false), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}); + +export type CollectionEvent = typeof collectionEvents.$inferSelect; +export type CollectionEventToInsert = typeof collectionEvents.$inferInsert; + +export const measurements = pgTable( + 'measurements', + { + id: uuid().defaultRandom().primaryKey(), + measurementName: varchar(), + slug: varchar(), + eventId: uuid().references(() => collectionEvents.id, { + onDelete: 'cascade', + }), + profileId: uuid().references(() => profiles.id), + dataPointId: uuid().references(() => dataPoints.id), + identifierId: uuid().references(() => identifiers.id), + resultValue: text(), + numericResult: decimal({ precision: 10, scale: 2 }), + rawResult: varchar({ length: 50 }), + measurementUnit: varchar({ length: 50 }), + facilityInterpretation: varchar({ length: 50 }), + facilityMinRange: decimal({ precision: 10, scale: 2 }), + facilityMaxRange: decimal({ precision: 10, scale: 2 }), + systemNotes: text(), + profileNotes: text(), + profileActions: jsonb(), + measurementMetadata: jsonb(), + processingStatus: varchar({ length: 50 }).default('partial_data'), + recordedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + isNotified: boolean().default(false), + isArchived: boolean().default(false), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + }, + (table) => [ + index().on(table.eventId), + index().on(table.identifierId), + index().on(table.dataPointId), + ], +); + +export type Measurement = typeof measurements.$inferSelect; +export type MeasurementToInsert = typeof measurements.$inferInsert; + +export const partners = pgTable('partners', { + id: uuid().defaultRandom().primaryKey(), + partnerId: text().notNull().unique(), + slug: varchar({ length: 255 }).unique(), + promoCode: varchar(), + referralCode: varchar(), + partnerFirstName: varchar({ length: 255 }).notNull(), + partnerLastName: varchar({ length: 255 }).notNull(), + displayName: varchar({ length: 255 }), + description: text(), + logoUrl: varchar({ length: 255 }), + isActive: boolean().default(true), + partnerMetadata: jsonb(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (partners) => [ + index().on(partners.promoCode), + index().on(partners.partnerId), +]); + +export type Partner = typeof partners.$inferSelect; + +export const partnerRelationships = pgTable('partner_relationships', { + parentPartnerId: uuid().references(() => partners.id, { onDelete: 'cascade' }).notNull(), + childPartnerId: uuid().references(() => partners.id, { onDelete: 'cascade' }).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + primaryKey({ columns: [table.parentPartnerId, table.childPartnerId] }), + index().on(table.childPartnerId), +]); + +export type RequestStatus = + | 'pending' + | 'processed' + | 'processing_failed' + | 'service_creation_failed' + | 'service_results_failed' + | 'refund_pending' + | 'refunded' + | 'refund_failed' + | 'processing_cancellation' + | 'received.standard.ordered' + | 'received.standard.document_created' + | 'sample_processing.standard.partial_data' + | 'collecting_sample.standard.appointment_scheduled' + | 'completed.standard.completed' + | 'failed.standard.sample_error' + | 'cancelled.standard.cancelled' + | 'received.remote.ordered' + | 'received.remote.document_created' + | 'collecting_sample.remote.appointment_scheduled' + | 'sample_processing.remote.partial_data' + | 'completed.remote.completed' + | 'cancelled.remote.cancelled'; + +export const serviceRequestStatuses: RequestStatus[] = [ + 'service_results_failed', + 'received.standard.ordered', + 'received.standard.document_created', + 'sample_processing.standard.partial_data', + 'completed.standard.completed', + 'failed.standard.sample_error', + 'cancelled.standard.cancelled', + 'received.remote.ordered', + 'received.remote.document_created', + 'collecting_sample.remote.appointment_scheduled', + 'sample_processing.remote.partial_data', + 'completed.remote.completed', + 'cancelled.remote.cancelled', +]; + +export interface Location { + primaryAddress: string; + secondaryAddress?: string; + locality: string; + region: string; + postalCode: string; + territory: string; +} + +export type RequestType = 'standard' | 'remote'; + +export const requests = pgTable('requests', { + id: uuid().defaultRandom().primaryKey(), + requestNumber: integer().notNull(), + serviceRequestId: uuid(), + totalAmount: decimal({ precision: 10, scale: 2 }).notNull(), + centAmount: integer().generatedAlwaysAs((): SQL => sql`${requests.totalAmount} * 100`), + requestStatus: varchar({ length: 100 }).$type().notNull(), + promoCode: varchar(), + referralCode: varchar(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + facilityId: uuid().references(() => facilities.id, { onDelete: 'set null' }), + receiptUrl: varchar({ length: 255 }), + itemCount: integer().notNull(), + requestMetadata: jsonb(), + requestType: varchar().$type().default('standard').notNull(), + location: jsonb().$type(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + index().on(table.profileId), + index().on(table.requestNumber), + index().on(table.requestStatus), + index().on(table.serviceRequestId), + index().on(table.promoCode), + index().on(table.referralCode), + index().on(table.requestType), +]); + +export type Request = typeof requests.$inferSelect; +export type RequestToInsert = typeof requests.$inferInsert; + +export const requestsToDataPoints = pgTable('requests_to_data_points', { + requestId: uuid().references(() => requests.id, { onDelete: 'cascade' }).notNull(), + dataPointId: uuid().references(() => dataPoints.id, { onDelete: 'cascade' }).notNull(), + itemRate: decimal({ precision: 10, scale: 2 }).notNull(), + centRate: integer().generatedAlwaysAs((): SQL => sql`${requestsToDataPoints.itemRate} * 100`), +}, (table) => [index().on(table.requestId), index().on(table.dataPointId)]); + +export type RequestToDataPoint = typeof requestsToDataPoints.$inferSelect; +export type RequestToDataPointToInsert = typeof requestsToDataPoints.$inferInsert; + +export const requestsToServicePackages = pgTable('requests_to_service_packages', { + requestId: uuid().references(() => requests.id, { onDelete: 'cascade' }).notNull(), + servicePackageId: uuid().references(() => servicePackages.id, { onDelete: 'cascade' }).notNull(), + packageRate: decimal({ precision: 10, scale: 2 }).notNull(), + centRate: integer().generatedAlwaysAs((): SQL => sql`${requestsToServicePackages.packageRate} * 100`), +}, (table) => [index().on(table.requestId), index().on(table.servicePackageId)]); + +export type RequestToServicePackage = typeof requestsToServicePackages.$inferSelect; +export type RequestToServicePackageToInsert = typeof requestsToServicePackages.$inferInsert; + +export const selections = pgTable('selections', { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + facilityId: uuid().references(() => facilities.id), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + index().on(table.profileId), + index().on(table.facilityId), + uniqueIndex().on(table.id, table.profileId), +]); + +export type Selection = typeof selections.$inferSelect; +export type SelectionToInsert = typeof selections.$inferInsert; + +export const selectionsToDataPoints = pgTable('selections_to_data_points', { + selectionId: uuid() + .references(() => selections.id, { onDelete: 'cascade' }) + .notNull(), + dataPointId: uuid() + .references(() => dataPoints.id, { onDelete: 'cascade' }) + .notNull(), +}, (table) => [ + index().on(table.selectionId), + index().on(table.dataPointId), + uniqueIndex().on(table.selectionId, table.dataPointId), +]); + +export type SelectionToDataPoint = typeof selectionsToDataPoints.$inferSelect; + +export const selectionsToServicePackages = pgTable('selections_to_service_packages', { + selectionId: uuid() + .references(() => selections.id, { onDelete: 'cascade' }) + .notNull(), + servicePackageId: uuid() + .references(() => servicePackages.id, { onDelete: 'cascade' }) + .notNull(), +}, (table) => [ + index().on(table.selectionId), + index().on(table.servicePackageId), + uniqueIndex().on(table.selectionId, table.servicePackageId), +]); + +export type SelectionToServicePackage = typeof selectionsToServicePackages.$inferSelect; + +export type ProcessorPaymentStatus = 'PENDING' | 'SUCCESS' | 'DECLINE' | 'UNKNOWN'; +export type PaymentProcessor = 'PROCESSOR_A' | 'PROCESSOR_B'; + +export const transactions = pgTable('transactions', { + id: uuid().defaultRandom().primaryKey(), + token: varchar(), + transactionId: varchar().notNull().unique(), + sourceId: varchar(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + requestId: uuid().references(() => requests.id).notNull(), + transactionStatus: varchar({ length: 50 }).notNull(), + amount: decimal({ precision: 10, scale: 2 }).notNull(), + centAmount: integer().generatedAlwaysAs((): SQL => sql`${transactions.amount} * 100`), + currency: varchar({ length: 10 }).notNull(), + responseData: jsonb(), + transactionMetadata: jsonb(), + processor: varchar().$type().notNull().default('PROCESSOR_A'), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.transactionId, table.processor), + index().on(table.token), + index().on(table.transactionId), + index().on(table.profileId), + index().on(table.requestId), + index().on(table.transactionStatus), +]); + +export type Transaction = typeof transactions.$inferSelect; +export type TransactionToInsert = typeof transactions.$inferInsert; + +export type TransactionEventType = 'transaction.created' | 'transaction.updated'; +export type ProcessorEventType = 'transaction.sale.success' | 'transaction.sale.failure' | 'transaction.sale.unknown'; + +export const transactionEvents = pgTable('transaction_events', { + id: uuid().defaultRandom().primaryKey(), + eventType: varchar({ length: 50 }).$type().notNull(), + eventId: varchar().notNull(), + transactionId: varchar().references(() => transactions.transactionId, { onDelete: 'cascade' }).notNull(), + eventMetadata: jsonb().notNull(), + processor: varchar().$type().notNull().default('PROCESSOR_A'), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.eventId), + index().on(table.eventType), + index().on(table.transactionId), +]); + +export type TransactionEvent = typeof transactionEvents.$inferSelect; +export type TransactionEventToInsert = typeof transactionEvents.$inferInsert; + +export const serviceEvents = pgTable('service_events', { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + serviceUserId: varchar().notNull(), + requestId: uuid().references(() => requests.id, { onDelete: 'cascade' }).notNull(), + serviceRequestId: varchar().notNull(), + eventType: varchar().notNull(), + eventId: integer().notNull(), + appointmentEventId: varchar(), + eventStatus: varchar().notNull(), + appointmentStatus: varchar(), + eventMetadata: jsonb().notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (serviceEvents) => [ + index().on(serviceEvents.profileId), + index().on(serviceEvents.serviceUserId), + index().on(serviceEvents.requestId), + index().on(serviceEvents.serviceRequestId), + index().on(serviceEvents.eventId), + index().on(serviceEvents.eventType), + index().on(serviceEvents.eventStatus), +]); + +export type ServiceEvent = typeof serviceEvents.$inferSelect; +export type ServiceEventToInsert = typeof serviceEvents.$inferInsert; + +export type PartnerSubscriptionType = 'promo' | 'referral' | 'custom_package'; + +export const partnerSubscriptions = pgTable('partner_subscriptions', { + id: uuid().defaultRandom().primaryKey(), + partnerId: uuid().references(() => partners.id, { onDelete: 'cascade' }).notNull(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + promoCode: varchar(), + referralCode: varchar(), + subscriptionType: varchar().$type().notNull(), + expiredAt: timestamp({ withTimezone: true }).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (partnerSubscriptions) => [ + uniqueIndex().on(partnerSubscriptions.profileId, partnerSubscriptions.partnerId), + index().on(partnerSubscriptions.profileId), + index().on(partnerSubscriptions.partnerId), + index().on(partnerSubscriptions.promoCode), + index().on(partnerSubscriptions.referralCode), + index().on(partnerSubscriptions.subscriptionType), + index().on(partnerSubscriptions.expiredAt), +]); + +export type PartnerSubscription = typeof partnerSubscriptions.$inferSelect; +export type PartnerSubscriptionToInsert = typeof partnerSubscriptions.$inferInsert; + +export const reversals = pgTable('reversals', { + id: uuid().defaultRandom().primaryKey(), + token: varchar().notNull(), + transactionId: uuid().notNull().references(() => transactions.id), + reversalId: varchar().notNull(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + requestId: uuid().references(() => requests.id).notNull(), + reversalStatus: varchar({ length: 50 }).notNull(), + amount: decimal({ precision: 10, scale: 2 }).notNull(), + centAmount: integer().generatedAlwaysAs((): SQL => sql`${reversals.amount} * 100`), + currency: varchar({ length: 10 }).notNull(), + reversalMetadata: jsonb().notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.token), + index().on(table.transactionId), + index().on(table.profileId), + index().on(table.requestId), + index().on(table.reversalStatus), + index().on(table.reversalId), +]); + +export type Reversal = typeof reversals.$inferSelect; +export type ReversalToInsert = typeof reversals.$inferInsert; + +export type ReversalEventType = 'reversal.created' | 'reversal.updated'; + +export const reversalEvents = pgTable('reversal_events', { + id: uuid().defaultRandom().primaryKey(), + eventType: varchar({ length: 50 }).$type().notNull(), + eventId: varchar().notNull(), + reversalId: uuid().references(() => reversals.id, { onDelete: 'cascade' }).notNull(), + transactionId: uuid().references(() => transactions.id, { onDelete: 'cascade' }).notNull(), + eventMetadata: jsonb().notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.eventId), + index().on(table.eventType), + index().on(table.transactionId), + index().on(table.reversalId), +]); + +export type ReversalEvent = typeof reversalEvents.$inferSelect; +export type ReversalEventToInsert = typeof reversalEvents.$inferInsert; + +export const schedules = pgTable('schedules', { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid() + .references(() => profiles.id, { onDelete: 'cascade' }) + .notNull(), + scheduleTitle: varchar({ length: 255 }).notNull(), + description: text(), + startDate: timestamp({ withTimezone: true }).notNull(), + endDate: timestamp({ withTimezone: true }), + isCurrent: boolean().default(false).notNull(), + themeColor: varchar({ length: 50 }).notNull(), + isPrivate: boolean().default(false).notNull(), + applyToAllCharts: boolean().default(false).notNull(), + isVisible: boolean().default(true).notNull(), + isArchived: boolean().default(false).notNull(), + profileActions: jsonb(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}, (table) => [ + index().on(table.profileId), + index().on(table.startDate, table.endDate), +]); + +export type Schedule = typeof schedules.$inferSelect; +export type ScheduleToInsert = typeof schedules.$inferInsert; + +export const schedulesToIdentifiers = pgTable('schedules_to_identifiers', { + scheduleId: uuid() + .references(() => schedules.id, { + onDelete: 'cascade', + }) + .notNull(), + identifierId: uuid() + .references(() => identifiers.id, { + onDelete: 'cascade', + }) + .notNull(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}, (table) => [ + primaryKey({ columns: [table.scheduleId, table.identifierId] }), + index().on(table.identifierId), +]); + +export type ScheduleToIdentifier = typeof schedulesToIdentifiers.$inferSelect; +export type ScheduleToIdentifierToInsert = typeof schedulesToIdentifiers.$inferInsert; + +export const scheduleShares = pgTable('schedule_shares', { + id: uuid().defaultRandom().primaryKey(), + shareToken: text().notNull().unique(), + scheduleId: uuid().references(() => schedules.id, { onDelete: 'cascade' }).notNull(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + accessCount: integer().default(0).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + index().on(table.shareToken), + index().on(table.scheduleId), + index().on(table.profileId), +]); + +export type ScheduleShare = typeof scheduleShares.$inferSelect; +export type ScheduleShareToInsert = typeof scheduleShares.$inferInsert; + +export const processingProviders = pgTable('processing_providers', { + id: uuid().defaultRandom().primaryKey(), + processor: varchar().$type().notNull(), + isActive: boolean().notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (processingProviders) => [ + index().on(processingProviders.processor), + index().on(processingProviders.isActive), +]); + +export type ProcessingProvider = typeof processingProviders.$inferSelect; diff --git a/drizzle-kit/tests/postgres/snapshots/schema02new.ts b/drizzle-kit/tests/postgres/snapshots/schema02new.ts new file mode 100644 index 0000000000..b0a10e912e --- /dev/null +++ b/drizzle-kit/tests/postgres/snapshots/schema02new.ts @@ -0,0 +1,772 @@ +import { SQL, sql } from 'drizzle-orm'; +import { + boolean, + date, + decimal, + index, + integer, + jsonb, + pgTable, + primaryKey, + text, + timestamp, + uniqueIndex, + uuid, + varchar, +} from 'drizzle-orm/pg-core'; + +// Enum types for entity classification +type EntityClass = 'ALPHA' | 'BETA' | 'GAMMA'; +type AccessLevel = 'STANDARD' | 'PREMIUM'; +type ProcessStage = 'INITIAL' | 'COMPLETE'; + +export const profiles = pgTable('profiles', { + id: uuid().defaultRandom().primaryKey(), + externalRef: varchar({ length: 255 }).notNull().unique(), + serviceRef: varchar().unique(), + contactEmail: varchar({ length: 255 }).notNull().unique(), + givenName: varchar({ length: 100 }).notNull(), + familyName: varchar({ length: 100 }).notNull(), + accessLevel: varchar().$type().notNull(), + birthDate: date(), + classification: varchar({ length: 50 }).$type(), + contactNumber: varchar({ length: 20 }), + currentStage: varchar().$type().default('INITIAL').notNull(), + // Location fields + recipientName: varchar({ length: 255 }), + primaryAddress: varchar({ length: 255 }), + secondaryAddress: varchar({ length: 255 }), + locality: varchar({ length: 100 }), + region: varchar({ length: 2 }), + postalCode: varchar({ length: 10 }), + territory: varchar({ length: 2 }).default('US').notNull(), + // Additional profile fields + avatarUrl: varchar({ length: 255 }), + lastAccessAt: timestamp({ withTimezone: true }), + emailConfirmed: boolean().default(false).notNull(), + phoneConfirmed: boolean().default(false).notNull(), + // Timestamps + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (profiles) => [ + index().on(profiles.serviceRef), + index().on(profiles.contactEmail), + index().on(profiles.externalRef), +]); + +export type Profile = typeof profiles.$inferSelect; +export type ProfileToInsert = typeof profiles.$inferInsert; + +export const profileAgreements = pgTable( + 'profile_agreements', + { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid() + .references(() => profiles.id, { onDelete: 'cascade' }) + .notNull(), + privacyConsent: boolean().default(false).notNull(), + serviceConsent: boolean().default(false).notNull(), + termsConsent: boolean().default(false).notNull(), + agreementDate: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + }, + (table) => [ + uniqueIndex().on(table.profileId), + ], +); + +export type ProfileAgreement = typeof profileAgreements.$inferSelect; +export type ProfileAgreementToInsert = typeof profileAgreements.$inferInsert; + +export const facilities = pgTable('facilities', { + id: uuid().defaultRandom().primaryKey(), + facilityName: varchar({ length: 255 }).notNull(), + serviceId: integer().notNull().unique(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}); + +export type Facility = typeof facilities.$inferSelect; +export type FacilityToInsert = typeof facilities.$inferInsert; + +export const identifiers = pgTable('identifiers', { + id: uuid().defaultRandom().primaryKey(), + code: varchar({ length: 50 }).notNull().unique(), + displayName: varchar({ length: 255 }).notNull(), + description: text(), + slug: varchar({ length: 255 }).notNull().unique(), + measurementUnit: varchar({ length: 50 }), + standardRanges: jsonb(), + guidelines: jsonb(), + evaluationRules: jsonb(), + isFeatured: boolean().default(false), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}); + +export type Identifier = typeof identifiers.$inferSelect; +export type IdentifierToInsert = typeof identifiers.$inferInsert; + +export const classifications = pgTable('classifications', { + id: uuid().defaultRandom().primaryKey(), + categoryName: varchar({ length: 255 }).notNull(), + iconType: varchar({ length: 255 }), + themeColor: varchar({ length: 255 }), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}); + +export type Classification = typeof classifications.$inferSelect; +export type ClassificationToInsert = typeof classifications.$inferInsert; + +export const identifierClassifications = pgTable('identifier_classifications', { + identifierId: uuid().references(() => identifiers.id), + classificationId: uuid().references(() => classifications.id), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [primaryKey({ columns: [table.identifierId, table.classificationId] })]); + +export type IdentifierClassification = typeof identifierClassifications.$inferSelect; +export type IdentifierClassificationToInsert = typeof identifierClassifications.$inferInsert; + +export const impactFactors = pgTable('impact_factors', { + id: uuid().defaultRandom().primaryKey(), + factorName: varchar({ length: 255 }).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}); + +export type ImpactFactor = typeof impactFactors.$inferSelect; +export type ImpactFactorToInsert = typeof impactFactors.$inferInsert; + +export const impactFactorsToIdentifiers = pgTable('impact_factors_to_identifiers', { + impactFactorId: uuid().references(() => impactFactors.id), + identifierId: uuid().references(() => identifiers.id), +}); + +export type ImpactFactorsToIdentifiers = typeof impactFactorsToIdentifiers.$inferSelect; +export type ImpactFactorsToIdentifiersToInsert = typeof impactFactorsToIdentifiers.$inferInsert; + +export const metricClusters = pgTable('metric_clusters', { + id: uuid().defaultRandom().primaryKey(), + clusterName: varchar({ length: 255 }).notNull(), + slug: varchar({ length: 255 }).notNull().unique(), + description: text(), + metricType: varchar({ length: 50 }).default('standard').notNull(), + measurementUnit: varchar({ length: 50 }), + isActive: boolean().default(true).notNull(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}); + +export type MetricCluster = typeof metricClusters.$inferSelect; +export type MetricClusterToInsert = typeof metricClusters.$inferInsert; + +export const metricPreferences = pgTable( + 'metric_preferences', + { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid().references(() => profiles.id), + identifierId: uuid().references(() => identifiers.id), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + }, + (table) => [ + index().on(table.profileId), + index().on(table.identifierId), + ], +); + +export type MetricPreference = typeof metricPreferences.$inferSelect; +export type MetricPreferenceToInsert = typeof metricPreferences.$inferInsert; + +export const dataPoints = pgTable('data_points', { + id: uuid().defaultRandom().primaryKey(), + pointId: integer().notNull(), + clusterId: uuid().references(() => metricClusters.id), + identifierId: uuid().references(() => identifiers.id), + pointName: varchar({ length: 255 }).notNull(), + description: text(), + dataType: varchar({ length: 50 }).default('standard').notNull(), + isParent: boolean().default(false).notNull(), + measurementUnit: varchar({ length: 50 }), + baseRate: decimal({ precision: 10, scale: 2 }), + baseCentRate: integer().generatedAlwaysAs((): SQL => sql`${dataPoints.baseRate} * 100`), + facilityId: uuid().references(() => facilities.id).notNull(), + isActive: boolean().default(true).notNull(), + visualType: varchar({ length: 50 }).default('numeric-trend'), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [index().on(table.clusterId, table.facilityId)]); + +export type DataPoint = typeof dataPoints.$inferSelect; +export type DataPointToInsert = typeof dataPoints.$inferInsert; + +export const dataPointRelationships = pgTable( + 'data_point_relationships', + { + parentId: uuid() + .references(() => dataPoints.id, { onDelete: 'cascade' }) + .notNull(), + childId: uuid() + .references(() => dataPoints.id, { onDelete: 'cascade' }) + .notNull(), + displayOrder: integer(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + }, + (table) => [ + primaryKey({ columns: [table.parentId, table.childId] }), + index().on(table.childId), + ], +); + +export type DataPointRelationship = typeof dataPointRelationships.$inferSelect; +export type DataPointRelationshipToInsert = typeof dataPointRelationships.$inferInsert; + +export const packageClusters = pgTable('package_clusters', { + id: uuid().defaultRandom().primaryKey(), + packageName: varchar({ length: 255 }).notNull(), + slug: varchar({ length: 255 }).notNull().unique(), + description: text(), + partnerId: text().references(() => partners.partnerId, { + onDelete: 'set null', + }), + isActive: boolean().default(true).notNull(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}); + +export type PackageCluster = typeof packageClusters.$inferSelect; +export type PackageClusterToInsert = typeof packageClusters.$inferInsert; + +export const servicePackages = pgTable('service_packages', { + id: uuid().defaultRandom().primaryKey(), + clusterId: uuid().references(() => packageClusters.id).notNull(), + packageTitle: varchar({ length: 255 }), + description: text(), + serviceRef: varchar({ length: 100 }).notNull().unique(), + baseRate: decimal({ precision: 10, scale: 2 }).notNull(), + baseCentRate: integer().generatedAlwaysAs((): SQL => sql`${servicePackages.baseRate} * 100`), + discountRate: decimal({ precision: 10, scale: 2 }), + discountCentRate: integer().generatedAlwaysAs((): SQL => sql`${servicePackages.discountRate} * 100`), + facilityId: uuid().references(() => facilities.id).notNull(), + isPartnerCreated: boolean().default(false).notNull(), + allowsRemoteCollection: boolean().default(false).notNull(), + partnerId: text().references(() => partners.partnerId), + isActive: boolean().default(true).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.clusterId, table.facilityId), +]); + +export type ServicePackage = typeof servicePackages.$inferSelect; +export type ServicePackageToInsert = typeof servicePackages.$inferInsert; + +export const servicePackageDataPoints = pgTable('service_package_data_points', { + packageId: uuid().references(() => servicePackages.id, { onDelete: 'cascade' }).notNull(), + dataPointId: uuid().references(() => dataPoints.id, { onDelete: 'cascade' }).notNull(), + displayOrder: integer(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [primaryKey({ columns: [table.packageId, table.dataPointId] })]); + +export type ServicePackageDataPoint = typeof servicePackageDataPoints.$inferSelect; +export type ServicePackageDataPointToInsert = typeof servicePackageDataPoints.$inferInsert; + +export const collectionEvents = pgTable('collection_events', { + id: uuid().defaultRandom().primaryKey(), + requestId: uuid().references(() => requests.id, { + onDelete: 'cascade', + }), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }), + facilityId: uuid().references(() => facilities.id), + collectionDate: timestamp({ withTimezone: true }), + reportDate: timestamp({ withTimezone: true }), + receivedDate: timestamp({ withTimezone: true }), + eventStatus: varchar({ length: 50 }).default('initiated'), + dataSource: varchar({ length: 50 }).default(''), + specimenRef: varchar({ length: 100 }), + eventMetadata: jsonb(), + documentUrl: varchar({ length: 255 }), + hasNewData: boolean().notNull().default(false), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}); + +export type CollectionEvent = typeof collectionEvents.$inferSelect; +export type CollectionEventToInsert = typeof collectionEvents.$inferInsert; + +export const measurements = pgTable( + 'measurements', + { + id: uuid().defaultRandom().primaryKey(), + measurementName: varchar(), + slug: varchar(), + eventId: uuid().references(() => collectionEvents.id, { + onDelete: 'cascade', + }), + profileId: uuid().references(() => profiles.id), + dataPointId: uuid().references(() => dataPoints.id), + identifierId: uuid().references(() => identifiers.id), + resultValue: text(), + numericResult: decimal({ precision: 10, scale: 2 }), + rawResult: varchar({ length: 50 }), + measurementUnit: varchar({ length: 50 }), + facilityInterpretation: varchar({ length: 50 }), + facilityMinRange: decimal({ precision: 10, scale: 2 }), + facilityMaxRange: decimal({ precision: 10, scale: 2 }), + systemNotes: text(), + profileNotes: text(), + profileActions: jsonb(), + measurementMetadata: jsonb(), + processingStatus: varchar({ length: 50 }).default('partial_data'), + recordedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + isNotified: boolean().default(false), + isArchived: boolean().default(false), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + }, + (table) => [ + index().on(table.eventId), + index().on(table.identifierId), + index().on(table.dataPointId), + ], +); + +export type Measurement = typeof measurements.$inferSelect; +export type MeasurementToInsert = typeof measurements.$inferInsert; + +export const partners = pgTable('partners', { + id: uuid().defaultRandom().primaryKey(), + partnerId: text().notNull().unique(), + slug: varchar({ length: 255 }).unique(), + promoCode: varchar(), + referralCode: varchar(), + partnerFirstName: varchar({ length: 255 }).notNull(), + partnerLastName: varchar({ length: 255 }).notNull(), + displayName: varchar({ length: 255 }), + description: text(), + logoUrl: varchar({ length: 255 }), + isActive: boolean().default(true), + partnerMetadata: jsonb(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (partners) => [ + index().on(partners.promoCode), + index().on(partners.partnerId), +]); + +export type Partner = typeof partners.$inferSelect; + +export const partnerRelationships = pgTable('partner_relationships', { + parentPartnerId: uuid().references(() => partners.id, { onDelete: 'cascade' }).notNull(), + childPartnerId: uuid().references(() => partners.id, { onDelete: 'cascade' }).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + primaryKey({ columns: [table.parentPartnerId, table.childPartnerId] }), + index().on(table.childPartnerId), +]); + +export type RequestStatus = + | 'pending' + | 'processed' + | 'processing_failed' + | 'service_creation_failed' + | 'service_results_failed' + | 'refund_pending' + | 'refunded' + | 'refund_failed' + | 'processing_cancellation' + | 'received.standard.ordered' + | 'received.standard.document_created' + | 'sample_processing.standard.partial_data' + | 'collecting_sample.standard.appointment_scheduled' + | 'completed.standard.completed' + | 'failed.standard.sample_error' + | 'cancelled.standard.cancelled' + | 'received.remote.ordered' + | 'received.remote.document_created' + | 'collecting_sample.remote.appointment_scheduled' + | 'sample_processing.remote.partial_data' + | 'completed.remote.completed' + | 'cancelled.remote.cancelled'; + +export const serviceRequestStatuses: RequestStatus[] = [ + 'service_results_failed', + 'received.standard.ordered', + 'received.standard.document_created', + 'sample_processing.standard.partial_data', + 'completed.standard.completed', + 'failed.standard.sample_error', + 'cancelled.standard.cancelled', + 'received.remote.ordered', + 'received.remote.document_created', + 'collecting_sample.remote.appointment_scheduled', + 'sample_processing.remote.partial_data', + 'completed.remote.completed', + 'cancelled.remote.cancelled', +]; + +export interface Location { + primaryAddress: string; + secondaryAddress?: string; + locality: string; + region: string; + postalCode: string; + territory: string; +} + +export type RequestType = 'standard' | 'remote'; + +export const requests = pgTable('requests', { + id: uuid().defaultRandom().primaryKey(), + requestNumber: integer().notNull(), + serviceRequestId: uuid(), + totalAmount: decimal({ precision: 10, scale: 2 }).notNull(), + centAmount: integer().generatedAlwaysAs((): SQL => sql`${requests.totalAmount} * 100`), + requestStatus: varchar({ length: 100 }).$type().notNull(), + promoCode: varchar(), + referralCode: varchar(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + facilityId: uuid().references(() => facilities.id, { onDelete: 'set null' }), + receiptUrl: varchar({ length: 255 }), + itemCount: integer().notNull(), + requestMetadata: jsonb(), + requestType: varchar().$type().default('standard').notNull(), + location: jsonb().$type(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + index().on(table.profileId), + index().on(table.requestNumber), + index().on(table.requestStatus), + index().on(table.serviceRequestId), + index().on(table.promoCode), + index().on(table.referralCode), + index().on(table.requestType), +]); + +export type Request = typeof requests.$inferSelect; +export type RequestToInsert = typeof requests.$inferInsert; + +export const requestsToDataPoints = pgTable('requests_to_data_points', { + requestId: uuid().references(() => requests.id, { onDelete: 'cascade' }).notNull(), + dataPointId: uuid().references(() => dataPoints.id, { onDelete: 'cascade' }).notNull(), + itemRate: decimal({ precision: 10, scale: 2 }).notNull(), + centRate: integer().generatedAlwaysAs((): SQL => sql`${requestsToDataPoints.itemRate} * 100`), +}, (table) => [index().on(table.requestId), index().on(table.dataPointId)]); + +export type RequestToDataPoint = typeof requestsToDataPoints.$inferSelect; +export type RequestToDataPointToInsert = typeof requestsToDataPoints.$inferInsert; + +export const requestsToServicePackages = pgTable('requests_to_service_packages', { + requestId: uuid().references(() => requests.id, { onDelete: 'cascade' }).notNull(), + servicePackageId: uuid().references(() => servicePackages.id, { onDelete: 'cascade' }).notNull(), + packageRate: decimal({ precision: 10, scale: 2 }).notNull(), + centRate: integer().generatedAlwaysAs((): SQL => sql`${requestsToServicePackages.packageRate} * 100`), +}, (table) => [index().on(table.requestId), index().on(table.servicePackageId)]); + +export type RequestToServicePackage = typeof requestsToServicePackages.$inferSelect; +export type RequestToServicePackageToInsert = typeof requestsToServicePackages.$inferInsert; + +export const selections = pgTable('selections', { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + facilityId: uuid().references(() => facilities.id), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + index().on(table.profileId), + index().on(table.facilityId), + uniqueIndex().on(table.id, table.profileId), +]); + +export type Selection = typeof selections.$inferSelect; +export type SelectionToInsert = typeof selections.$inferInsert; + +export const selectionsToDataPoints = pgTable('selections_to_data_points', { + selectionId: uuid() + .references(() => selections.id, { onDelete: 'cascade' }) + .notNull(), + dataPointId: uuid() + .references(() => dataPoints.id, { onDelete: 'cascade' }) + .notNull(), +}, (table) => [ + index().on(table.selectionId), + index().on(table.dataPointId), + uniqueIndex().on(table.selectionId, table.dataPointId), +]); + +export type SelectionToDataPoint = typeof selectionsToDataPoints.$inferSelect; + +export const selectionsToServicePackages = pgTable('selections_to_service_packages', { + selectionId: uuid() + .references(() => selections.id, { onDelete: 'cascade' }) + .notNull(), + servicePackageId: uuid() + .references(() => servicePackages.id, { onDelete: 'cascade' }) + .notNull(), +}, (table) => [ + index().on(table.selectionId), + index().on(table.servicePackageId), + uniqueIndex().on(table.selectionId, table.servicePackageId), +]); + +export type SelectionToServicePackage = typeof selectionsToServicePackages.$inferSelect; + +export type ProcessorPaymentStatus = 'PENDING' | 'SUCCESS' | 'DECLINE' | 'UNKNOWN'; +export type PaymentProcessor = 'PROCESSOR_A' | 'PROCESSOR_B'; + +export const transactions = pgTable('transactions', { + id: uuid().defaultRandom().primaryKey(), + token: varchar(), + transactionId: varchar().notNull().unique(), + sourceId: varchar(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + requestId: uuid().references(() => requests.id).notNull(), + transactionStatus: varchar({ length: 50 }).notNull(), + amount: decimal({ precision: 10, scale: 2 }).notNull(), + centAmount: integer().generatedAlwaysAs((): SQL => sql`${transactions.amount} * 100`), + currency: varchar({ length: 10 }).notNull(), + responseData: jsonb(), + transactionMetadata: jsonb(), + processor: varchar().$type().notNull().default('PROCESSOR_A'), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.transactionId, table.processor), + index().on(table.token), + index().on(table.transactionId), + index().on(table.profileId), + index().on(table.requestId), + index().on(table.transactionStatus), +]); + +export type Transaction = typeof transactions.$inferSelect; +export type TransactionToInsert = typeof transactions.$inferInsert; + +export type TransactionEventType = 'transaction.created' | 'transaction.updated'; +export type ProcessorEventType = 'transaction.sale.success' | 'transaction.sale.failure' | 'transaction.sale.unknown'; + +export const transactionEvents = pgTable('transaction_events', { + id: uuid().defaultRandom().primaryKey(), + eventType: varchar({ length: 50 }).$type().notNull(), + eventId: varchar().notNull(), + transactionId: varchar().references(() => transactions.transactionId, { onDelete: 'cascade' }).notNull(), + eventMetadata: jsonb().notNull(), + processor: varchar().$type().notNull().default('PROCESSOR_A'), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.eventId), + index().on(table.eventType), + index().on(table.transactionId), +]); + +export type TransactionEvent = typeof transactionEvents.$inferSelect; +export type TransactionEventToInsert = typeof transactionEvents.$inferInsert; + +export const serviceEvents = pgTable('service_events', { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + serviceUserId: varchar().notNull(), + requestId: uuid().references(() => requests.id, { onDelete: 'cascade' }).notNull(), + serviceRequestId: varchar().notNull(), + eventType: varchar().notNull(), + eventId: integer().notNull(), + appointmentEventId: varchar(), + eventStatus: varchar().notNull(), + appointmentStatus: varchar(), + eventMetadata: jsonb().notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (serviceEvents) => [ + index().on(serviceEvents.profileId), + index().on(serviceEvents.serviceUserId), + index().on(serviceEvents.requestId), + index().on(serviceEvents.serviceRequestId), + index().on(serviceEvents.eventId), + index().on(serviceEvents.eventType), + index().on(serviceEvents.eventStatus), +]); + +export type ServiceEvent = typeof serviceEvents.$inferSelect; +export type ServiceEventToInsert = typeof serviceEvents.$inferInsert; + +export type PartnerSubscriptionType = 'promo' | 'referral' | 'custom_package'; + +export const partnerSubscriptions = pgTable('partner_subscriptions', { + id: uuid().defaultRandom().primaryKey(), + partnerId: uuid().references(() => partners.id, { onDelete: 'cascade' }).notNull(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + promoCode: varchar(), + referralCode: varchar(), + subscriptionType: varchar().$type().notNull(), + expiredAt: timestamp({ withTimezone: true }).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (partnerSubscriptions) => [ + uniqueIndex().on(partnerSubscriptions.profileId, partnerSubscriptions.partnerId), + index().on(partnerSubscriptions.profileId), + index().on(partnerSubscriptions.partnerId), + index().on(partnerSubscriptions.promoCode), + index().on(partnerSubscriptions.referralCode), + index().on(partnerSubscriptions.subscriptionType), + index().on(partnerSubscriptions.expiredAt), +]); + +export type PartnerSubscription = typeof partnerSubscriptions.$inferSelect; +export type PartnerSubscriptionToInsert = typeof partnerSubscriptions.$inferInsert; + +export const reversals = pgTable('reversals', { + id: uuid().defaultRandom().primaryKey(), + token: varchar().notNull(), + transactionId: uuid().notNull().references(() => transactions.id), + reversalId: varchar().notNull(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + requestId: uuid().references(() => requests.id).notNull(), + reversalStatus: varchar({ length: 50 }).notNull(), + amount: decimal({ precision: 10, scale: 2 }).notNull(), + centAmount: integer().generatedAlwaysAs((): SQL => sql`${reversals.amount} * 100`), + currency: varchar({ length: 10 }).notNull(), + reversalMetadata: jsonb().notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), + updatedAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.token), + index().on(table.transactionId), + index().on(table.profileId), + index().on(table.requestId), + index().on(table.reversalStatus), + index().on(table.reversalId), +]); + +export type Reversal = typeof reversals.$inferSelect; +export type ReversalToInsert = typeof reversals.$inferInsert; + +export type ReversalEventType = 'reversal.created' | 'reversal.updated'; + +export const reversalEvents = pgTable('reversal_events', { + id: uuid().defaultRandom().primaryKey(), + eventType: varchar({ length: 50 }).$type().notNull(), + eventId: varchar().notNull(), + reversalId: uuid().references(() => reversals.id, { onDelete: 'cascade' }).notNull(), + transactionId: uuid().references(() => transactions.id, { onDelete: 'cascade' }).notNull(), + eventMetadata: jsonb().notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex().on(table.eventId), + index().on(table.eventType), + index().on(table.transactionId), + index().on(table.reversalId), +]); + +export type ReversalEvent = typeof reversalEvents.$inferSelect; +export type ReversalEventToInsert = typeof reversalEvents.$inferInsert; + +export const schedules = pgTable('schedules', { + id: uuid().defaultRandom().primaryKey(), + profileId: uuid() + .references(() => profiles.id, { onDelete: 'cascade' }) + .notNull(), + scheduleTitle: varchar({ length: 255 }).notNull(), + description: text(), + startDate: timestamp({ withTimezone: true }).notNull(), + endDate: timestamp({ withTimezone: true }), + isCurrent: boolean().default(false).notNull(), + themeColor: varchar({ length: 50 }).notNull(), + isPrivate: boolean().default(false).notNull(), + applyToAllCharts: boolean().default(false).notNull(), + isVisible: boolean().default(true).notNull(), + isArchived: boolean().default(false).notNull(), + profileActions: jsonb(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), + updatedAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}, (table) => [ + index().on(table.profileId), + index().on(table.startDate, table.endDate), +]); + +export type Schedule = typeof schedules.$inferSelect; +export type ScheduleToInsert = typeof schedules.$inferInsert; + +export const schedulesToIdentifiers = pgTable('schedules_to_identifiers', { + scheduleId: uuid() + .references(() => schedules.id, { + onDelete: 'cascade', + }) + .notNull(), + identifierId: uuid() + .references(() => identifiers.id, { + onDelete: 'cascade', + }) + .notNull(), + createdAt: timestamp({ withTimezone: true }) + .defaultNow() + .notNull(), +}, (table) => [ + primaryKey({ columns: [table.scheduleId, table.identifierId] }), + index().on(table.identifierId), +]); + +export type ScheduleToIdentifier = typeof schedulesToIdentifiers.$inferSelect; +export type ScheduleToIdentifierToInsert = typeof schedulesToIdentifiers.$inferInsert; + +export const scheduleShares = pgTable('schedule_shares', { + id: uuid().defaultRandom().primaryKey(), + shareToken: text().notNull().unique(), + scheduleId: uuid().references(() => schedules.id, { onDelete: 'cascade' }).notNull(), + profileId: uuid().references(() => profiles.id, { onDelete: 'cascade' }).notNull(), + accessCount: integer().default(0).notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (table) => [ + index().on(table.shareToken), + index().on(table.scheduleId), + index().on(table.profileId), +]); + +export type ScheduleShare = typeof scheduleShares.$inferSelect; +export type ScheduleShareToInsert = typeof scheduleShares.$inferInsert; + +export const processingProviders = pgTable('processing_providers', { + id: uuid().defaultRandom().primaryKey(), + processor: varchar().$type().notNull(), + isActive: boolean().notNull(), + createdAt: timestamp({ withTimezone: true }).defaultNow().notNull(), +}, (processingProviders) => [ + index().on(processingProviders.processor), + index().on(processingProviders.isActive), +]); + +export type ProcessingProvider = typeof processingProviders.$inferSelect; diff --git a/drizzle-kit/tests/postgres/snapshots/schema03.ts b/drizzle-kit/tests/postgres/snapshots/schema03.ts new file mode 100644 index 0000000000..ec7100812b --- /dev/null +++ b/drizzle-kit/tests/postgres/snapshots/schema03.ts @@ -0,0 +1,1138 @@ +import { eq, sql } from 'orm044'; +import { + AnyPgColumn, + bigint, + bigserial, + boolean, + char, + check, + decimal, + doublePrecision, + foreignKey, + index, + inet, + integer, + interval, + jsonb, + numeric, + pgEnum, + pgPolicy, + pgSchema, + pgSequence, + pgTable, + primaryKey, + serial, + smallint, + text, + timestamp, + unique, + uniqueIndex, + uuid, +} from 'orm044/pg-core'; + +// generated with AI and updated manually in some places + +export const core = pgSchema('core'); +export const analytics = pgSchema('analytics'); +export const billing = pgSchema('billing'); +export const monitoring = pgSchema('monitoring'); +export const alertAction = pgEnum('alert_action', ['email', 'pagerd/ut"\'y', 'slack', 'webhook']); +export const currencyCode = pgEnum('currency_code', ['USD', 'EUR', 'GBP', 'UAH', 'JPY']); +export const datasetVisibility = pgEnum('dataset_visibility', ['priv"ate', 'team', 'public']); +export const env = pgEnum('env', ['dev', 'staging', 'prod']); +export const featureState = pgEnum('feature_state', ['enabled', 'disabled', 'gradual']); +export const invoiceStatus = pgEnum('invoice_status', ['draft', "iss'ued", 'paid', 'voided', 'failed']); +export const jobState = pgEnum('job_state', ['queued', 'running', 'success', 'failed', 'cancelled']); +export const notificationChannel = pgEnum('notification_channel', ['email', 'sms', 'in_app', 'webhook']); +export const paymentMethod = pgEnum('payment_method', ['card', 'bank_transfer', 'paypal', 'crypto']); +export const pipelineStatus = pgEnum('pipeline_status', ['created', 'running', 'paused', 'completed', 'errored']); +export const roleKind = pgEnum('role_kind', ['system', 'custom']); +export const ruleConditionOperator = pgEnum('rule_condition_operator', [ + 'eq', + 'neq', + 'gt', + 'lt', + 'gte', + 'lte', + 'in', + 'nin', +]); +export const severityLevel = pgEnum('severity_level', ['low', 'medium', 'high', 'critical']); +export const userStatus = pgEnum('user_status', ['active', 'inactive', 'suspended', 'pending']); + +export const seqOrgCode = pgSequence('seq_org_code', { + startWith: '1000', + increment: '1', + minValue: '1', + maxValue: '9223372036854775807', + cache: '1', + cycle: false, +}); + +export const organizationsInCore = core.table('organizations', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: bigint({ mode: 'number' }).default(sql`nextval('seq_org_code'::regclass)`).notNull(), + name: text().notNull(), + domain: text(), + currency: currencyCode().default('EUR').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_org_name_idx').using('btree', table.name.asc().nullsLast().op('text_ops')), + index('organizations_code_idx').using('btree', table.code.asc().nullsLast().op('int8_ops')), + unique('organizations_domain_key').on(table.domain), + check('organizations_name_check', sql`char_length(name) > 1`), +]); + +export const usersInCore = core.table('users', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + username: text().notNull(), + status: userStatus().default('pending').notNull(), + locale: text().default('en-US').notNull(), + lastLogin: timestamp('last_login', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + bio: text().$onUpdate(() => sql`bio || 'some test'`), + profile: jsonb(), +}, (table) => [ + index('core_users_username_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.username.asc().nullsLast().op('text_ops'), + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'users_organization_id_fkey', + }).onDelete('cascade'), + unique('users_org_username_unique').on(table.organizationId, table.username), +]); + +export const rolesInCore = core.table('roles', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull().references(() => organizationsInCore.id, { onDelete: 'cascade' }), + name: text().notNull(), + kind: roleKind().default('custom').notNull(), + builtin: boolean().default(false).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + unique('roles_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const permissionsInCore = core.table('permissions', { + id: serial().primaryKey().notNull(), + code: text().notNull().unique(), + description: text(), +}); + +export const membershipsInCore = core.table('memberships', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + roleId: integer('role_id').notNull(), + organizationId: uuid('organization_id').notNull(), + joinedAt: timestamp('joined_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + active: boolean().default(true).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'memberships_user_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.roleId], + foreignColumns: [rolesInCore.id], + name: 'memberships_role_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'memberships_organization_id_fkey', + }).onDelete('cascade'), + unique('unique_membership').on(table.userId, table.organizationId), +]); + +export const apiKeysInCore = core.table('api_keys', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id'), + userId: uuid('user_id'), + name: text().notNull(), + keyHash: text('key_hash').notNull(), + revoked: boolean().default(false).notNull(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb().generatedAlwaysAs(sql`'{"some":"test"}'`), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_apikey_org_idx').using('btree', table.organizationId.asc().nullsLast().op('uuid_ops')).where( + sql`(revoked = false)`, + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'api_keys_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'api_keys_user_id_fkey', + }).onDelete('set null'), + unique('api_keys_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const sessionsInCore = core.table('sessions', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + ip: inet(), + userAgent: text('user_agent'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string' }).notNull(), + active: boolean().default(true).notNull(), +}, (table) => [ + index('core_sessions_user_expires').using( + 'btree', + table.userId.asc().nullsLast(), + table.expiresAt.asc().nullsLast().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'sessions_user_id_fkey', + }).onDelete('cascade'), +]); + +export const oauthProvidersInCore = core.table('oauth_providers', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + provider: text().notNull(), + clientId: text('client_id').notNull(), + clientSecret: text('client_secret').notNull(), + config: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'oauth_providers_organization_id_fkey', + }).onDelete('cascade'), + unique('oauth_providers_organization_id_provider_key').on(table.organizationId, table.provider), +]); + +export const featureFlagsInCore = core.table('feature_flags', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + description: text(), + state: featureState().default('disabled').notNull(), + rolloutPercent: smallint('rollout_percent').default(0), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'feature_flags_organization_id_fkey', + }).onDelete('cascade'), + unique('feature_flags_organization_id_key_key').on(table.organizationId, table.key), + check('feature_flags_rollout_percent_check', sql`(rollout_percent >= 0) AND (rollout_percent <= 100)`), +]); + +export const projectsInCore = core.table('projects', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + slug: text().notNull(), + description: text(), + visibility: datasetVisibility().default('priv"ate').notNull(), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_projects_org_name_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.name.asc().nullsLast().op('text_ops'), + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'projects_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'projects_created_by_fkey', + }), + unique('projects_org_slug_unique').on(table.organizationId, table.slug), +]); + +export const repositoriesInCore = core.table('repositories', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + provider: text().notNull(), + repoOwner: text('repo_owner').notNull(), + repoName: text('repo_name').notNull(), + defaultBranch: text('default_branch').default('main').notNull(), + cloneUrl: text('clone_url'), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'repositories_project_id_fkey', + }).onDelete('cascade'), + unique('repositories_project_id_provider_repo_owner_repo_name_key').on( + table.projectId, + table.provider, + table.repoOwner, + table.repoName, + ), +]); + +export const buildsInCore = core.table('builds', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + triggeredBy: uuid('triggered_by'), + commitSha: char('commit_sha', { length: 40 }).notNull(), + status: pipelineStatus().default('created').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb(), +}, (table) => [ + index('core_builds_project_status_idx').using( + 'btree', + table.projectId.asc().nullsLast().op('uuid_ops'), + table.status.asc().nullsLast(), + ), + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'builds_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.triggeredBy], + foreignColumns: [usersInCore.id], + name: 'builds_triggered_by_fkey', + }), + unique('builds_project_id_commit_sha_key').on(table.projectId, table.commitSha), +]); + +export const pipelinesInCore = core.table('pipelines', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + name: text().notNull(), + spec: jsonb().notNull(), + status: pipelineStatus().default('created').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'pipelines_project_id_fkey', + }).onDelete('cascade'), + unique('pipelines_project_id_name_key').on(table.projectId, table.name), +]); + +export const pipelineRunsInAnalytics = analytics.table('pipeline_runs', { + id: uuid().defaultRandom().primaryKey().notNull(), + pipelineId: uuid('pipeline_id').notNull(), + + runNumber: bigint('run_number', { mode: 'number' }).notNull(), + state: jobState().default('queued').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + logs: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('analytics_pipeline_runs_state_idx').using('btree', table.state.asc().nullsLast().op('enum_ops')), + foreignKey({ + columns: [table.pipelineId], + foreignColumns: [pipelinesInCore.id], + name: 'pipeline_runs_pipeline_id_fkey', + }).onDelete('cascade'), + unique('pipeline_runs_unique_run').on(table.pipelineId, table.runNumber), +]); + +export const jobsInAnalytics = analytics.table('jobs', { + id: uuid().defaultRandom().primaryKey().notNull(), + pipelineRunId: uuid('pipeline_run_id'), + name: text().notNull(), + state: jobState().default('queued').notNull(), + attempts: integer().default(0).notNull(), + lastError: text('last_error'), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('analytics_jobs_state_attempts_idx').using( + 'btree', + table.state.asc().nullsLast(), + table.attempts.asc().nullsLast().op('int4_ops'), + ), + foreignKey({ + columns: [table.pipelineRunId], + foreignColumns: [pipelineRunsInAnalytics.id], + name: 'jobs_pipeline_run_id_fkey', + }).onDelete('cascade'), +]); + +export const storageBucketsInCore = core.table('storage_buckets', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + region: text().notNull(), + config: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'storage_buckets_organization_id_fkey', + }).onDelete('cascade'), + unique('storage_buckets_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const objectsInCore = core.table('objects', { + id: uuid().defaultRandom().primaryKey().notNull(), + bucketId: uuid('bucket_id').notNull(), + path: text().notNull(), + + size: bigint({ mode: 'number' }).default(0).notNull(), + contentType: text('content_type'), + metadata: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_objects_bucket_path_gin').using('gin', table.metadata.asc().nullsLast().op('jsonb_ops')), + foreignKey({ + columns: [table.bucketId], + foreignColumns: [storageBucketsInCore.id], + name: 'objects_bucket_id_fkey', + }).onDelete('cascade'), + unique('objects_bucket_id_path_key').on(table.bucketId, table.path), +]); + +export const filesInCore = core.table('files', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id'), + name: text().notNull(), + latestObjectId: uuid('latest_object_id'), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'files_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.latestObjectId], + foreignColumns: [objectsInCore.id], + name: 'files_latest_object_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'files_created_by_fkey', + }), + unique('files_project_id_name_key').on(table.projectId, table.name), +]); + +export const fileVersionsInCore = core.table('file_versions', { + id: uuid().defaultRandom().primaryKey().notNull(), + fileId: uuid('file_id').notNull(), + objectId: uuid('object_id').notNull(), + versionNumber: integer('version_number').notNull(), + checksum: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.fileId], + foreignColumns: [filesInCore.id], + name: 'file_versions_file_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.objectId], + foreignColumns: [objectsInCore.id], + name: 'file_versions_object_id_fkey', + }).onDelete('cascade'), + unique('file_versions_file_id_version_number_key').on(table.fileId, table.versionNumber), +]); + +export const tagsInCore = core.table('tags', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + value: text(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'tags_organization_id_fkey', + }).onDelete('cascade'), + unique('tags_organization_id_key_value_key').on(table.organizationId, table.key, table.value), +]); + +export const conversationsInCore = core.table('conversations', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id'), + title: text(), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'conversations_project_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'conversations_created_by_fkey', + }), +]); + +export const chatMessagesInCore = core.table('chat_messages', { + id: uuid().defaultRandom().primaryKey().notNull(), + conversationId: uuid('conversation_id').notNull(), + senderId: uuid('sender_id'), + body: text().notNull(), + attachments: jsonb(), + sentAt: timestamp('sent_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + editedAt: timestamp('edited_at', { withTimezone: true, mode: 'string' }), +}, (table) => [ + index('core_chat_conv_sent_at_idx').using( + 'btree', + table.conversationId.asc().nullsLast(), + table.sentAt.desc().nullsFirst().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.conversationId], + foreignColumns: [conversationsInCore.id], + name: 'chat_messages_conversation_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.senderId], + foreignColumns: [usersInCore.id], + name: 'chat_messages_sender_id_fkey', + }).onDelete('set null'), +]); + +export const notificationsInCore = core.table('notifications', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + channel: notificationChannel().default('in_app').notNull(), + payload: jsonb().notNull(), + seen: boolean().default(false).notNull(), + deliveredAt: timestamp('delivered_at', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_notifications_unseen_idx').using('btree', table.userId.asc().nullsLast().op('uuid_ops')).where( + sql`(seen = false)`, + ), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'notifications_user_id_fkey', + }).onDelete('cascade'), +]); + +export const customersInBilling = billing.table('customers', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id'), + name: text().notNull(), + address: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'customers_organization_id_fkey', + }).onDelete('cascade'), + unique('customers_organization_id_key').on(table.organizationId), + unique('idnameunique').on(table.id, table.name), +]); + +export const subscriptionsInBilling = billing.table('subscriptions', { + id: uuid().defaultRandom().primaryKey().notNull(), + customerId: uuid('customer_id').notNull(), + plan: text().notNull(), + status: text().default('active').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + endedAt: timestamp('ended_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.customerId], + foreignColumns: [customersInBilling.id], + name: 'subscriptions_customer_id_fkey', + }).onDelete('cascade'), +]); + +export const paymentsInBilling = billing.table('payments', { + id: uuid().defaultRandom().primaryKey().notNull(), + invoiceId: uuid('invoice_id').notNull(), + paidAt: timestamp('paid_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + amount: numeric({ precision: 12, scale: 2 }).notNull(), + amount2: decimal({ precision: 12, scale: 2 }).notNull(), + method: paymentMethod().notNull(), + transactionRef: text('transaction_ref'), + metadata: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.invoiceId], + foreignColumns: [invoicesInBilling.id], + name: 'payments_invoice_id_fkey', + }).onDelete('cascade'), +]); + +export const couponsInBilling = billing.table('coupons', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: text().notNull(), + description: text(), + discountPercent: smallint('discount_percent'), + redeemableFrom: timestamp('redeemable_from', { withTimezone: true, mode: 'string' }), + redeemableTo: timestamp('redeemable_to', { withTimezone: true, mode: 'string' }), + maxRedemptions: integer('max_redemptions').generatedAlwaysAsIdentity(), + metadata: jsonb(), +}, (table) => [ + unique('coupons_code_key').on(table.code), + check('coupons_discount_percent_check', sql`(discount_percent >= 0) AND (discount_percent <= 100)`), +]); + +export const webhooksInCore = core.table('webhooks', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + url: text().notNull(), + secret: text(), + events: text().array().notNull(), + active: boolean().default(true).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_webhooks_org_active_idx').using('btree', table.organizationId.asc().nullsLast().op('uuid_ops')).where( + sql`(active = true)`, + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'webhooks_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const metricSourcesInAnalytics = analytics.table('metric_sources', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + config: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'metric_sources_organization_id_fkey', + }).onDelete('cascade'), + unique('metric_sources_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const metricsInAnalytics = analytics.table('metrics', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + sourceId: uuid('source_id').notNull(), + metricKey: text('metric_key').notNull(), + ts: timestamp({ withTimezone: true, mode: 'string' }).notNull(), + value: doublePrecision().notNull(), + tags: jsonb(), +}, (table) => [ + index('analytics_metrics_key_ts_idx').using( + 'btree', + table.metricKey.asc().nullsLast().op('text_ops'), + table.ts.desc().nullsFirst().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.sourceId], + foreignColumns: [metricSourcesInAnalytics.id], + name: 'metrics_source_id_fkey', + }).onDelete('cascade'), + unique('metrics_source_id_metric_key_ts_key').on(table.sourceId, table.metricKey, table.ts), +]); + +export const alertRulesInMonitoring = monitoring.table('alert_rules', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + description: text(), + severity: severityLevel().default('medium').notNull(), + enabled: boolean().default(true).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'alert_rules_organization_id_fkey', + }).onDelete('cascade'), + unique('alert_rules_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const ruleConditionsInMonitoring = monitoring.table('rule_conditions', { + id: uuid().defaultRandom().primaryKey().notNull(), + ruleId: uuid('rule_id').notNull(), + metricKey: text('metric_key').notNull(), + operator: ruleConditionOperator().notNull().unique('some_name', { nulls: 'not distinct' }), + threshold: doublePrecision().notNull(), + window: interval().default('00:05:00').notNull(), +}, (table) => [ + foreignKey({ + columns: [table.ruleId], + foreignColumns: [alertRulesInMonitoring.id], + name: 'rule_conditions_rule_id_fkey', + }).onDelete('cascade'), +]); + +export const alertsInMonitoring = monitoring.table('alerts', { + id: uuid().defaultRandom().primaryKey().notNull(), + ruleId: uuid('rule_id').notNull(), + triggeredAt: timestamp('triggered_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + resolvedAt: timestamp('resolved_at', { withTimezone: true, mode: 'string' }), + payload: jsonb(), + state: text().default('firing').notNull(), +}, (table) => [ + foreignKey({ + columns: [table.ruleId], + foreignColumns: [alertRulesInMonitoring.id], + name: 'alerts_rule_id_fkey', + }).onDelete('cascade'), +]); + +export const escalationsInMonitoring = monitoring.table('escalations', { + id: uuid().defaultRandom().primaryKey().notNull(), + alertId: uuid('alert_id').notNull(), + action: alertAction().notNull(), + target: text().notNull(), + executedAt: timestamp('executed_at', { withTimezone: true, mode: 'string' }), +}, (table) => [ + foreignKey({ + columns: [table.alertId], + foreignColumns: [alertsInMonitoring.id], + name: 'escalations_alert_id_fkey', + }).onDelete('cascade'), +]); + +export const ssoProvidersInCore = core.table('sso_providers', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + type: text().notNull(), + config: jsonb().notNull(), + enabled: boolean().default(false).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'sso_providers_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const auditLogsInCore = core.table('audit_logs', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + organizationId: uuid('organization_id'), + actorId: uuid('actor_id'), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').array().array().array(), + action: text().notNull(), + beforeState: jsonb('before_state'), + afterState: jsonb('after_state'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_audit_org_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.createdAt.desc().nullsFirst().op('timestamptz_ops'), + ), +]); + +export const rateLimitsInCore = core.table('rate_limits', { + id: uuid().defaultRandom().primaryKey().notNull(), + apiKeyId: uuid('api_key_id').notNull(), + windowStart: timestamp('window_start', { withTimezone: true, mode: 'string' }).notNull(), + requests: integer().generatedByDefaultAsIdentity().notNull().array(), + limit: integer().generatedAlwaysAs(() => sql`1`).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.apiKeyId], + foreignColumns: [apiKeysInCore.id], + name: 'rate_limits_api_key_id_fkey', + }).onDelete('cascade'), + unique('rate_limits_api_key_id_window_start_key').on(table.apiKeyId, table.windowStart).nullsNotDistinct(), +]); + +export const experimentsInCore = core.table('experiments', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + description: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'experiments_organization_id_fkey', + }).onDelete('cascade'), + unique('experiments_organization_id_key_key').on(table.organizationId, table.key), +]); + +export const experimentVariantsInCore = core.table('experiment_variants', { + id: uuid().defaultRandom().primaryKey().notNull(), + experimentId: uuid('experiment_id').notNull(), + name: text().notNull(), + allocationPercent: smallint('allocation_percent').default(0).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.experimentId], + foreignColumns: [experimentsInCore.id], + name: 'experiment_variants_experiment_id_fkey', + }).onDelete('cascade'), + unique('experiment_variants_experiment_id_name_key').on(table.experimentId, table.name), + check('experiment_variants_allocation_percent_check', sql`(allocation_percent >= 0) AND (allocation_percent <= 100)`), +]); + +export const experimentAssignmentsInCore = core.table('experiment_assignments', { + id: uuid().defaultRandom().primaryKey().notNull(), + experimentId: uuid('experiment_id').notNull(), + variantId: uuid('variant_id').notNull(), + userId: uuid('user_id').notNull(), + assignedAt: timestamp('assigned_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.experimentId], + foreignColumns: [experimentsInCore.id], + name: 'experiment_assignments_experiment_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.variantId], + foreignColumns: [experimentVariantsInCore.id], + name: 'experiment_assignments_variant_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'experiment_assignments_user_id_fkey', + }).onDelete('cascade'), + unique('experiment_assignments_experiment_id_user_id_key').on(table.experimentId, table.userId), +]); + +export const deploymentsInCore = core.table('deployments', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + environment: env().default('dev').notNull(), + version: text().notNull(), + deployedBy: uuid('deployed_by'), + deployedAt: timestamp('deployed_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + notes: text(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'deployments_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.deployedBy], + foreignColumns: [usersInCore.id], + name: 'deployments_deployed_by_fkey', + }), + unique('deployments_project_id_environment_version_key').on(table.projectId, table.environment, table.version), +]); + +export const servicesInCore = core.table('services', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + kind: text(), + ownerId: uuid('owner_id'), + metadata: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string', precision: 6 }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'services_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.ownerId], + foreignColumns: [usersInCore.id], + name: 'services_owner_id_fkey', + }), + unique('services_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const locksInCore = core.table('locks', { + name: text().primaryKey().notNull(), + owner: text(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string', precision: 2 }), +}); + +export const entitiesInCore = core.table('entities', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + type: text().notNull(), + data: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'entities_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const taskQueueInAnalytics = analytics.table('task_queue', { + id: uuid().defaultRandom().primaryKey().notNull(), + queueName: text('queue_name').default('default').notNull(), + payload: jsonb().notNull(), + priority: smallint().default(100).notNull(), + reserved: boolean().default(false).notNull(), + reservedUntil: timestamp('reserved_until', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex('analytics_task_queue_unique_unreserved').using( + 'btree', + sql`queue_name`, + sql`((payload ->> 'task_type'::text))`, + ).where(sql`(reserved = false)`), +]); + +export const invoicesInBilling = billing.table('invoices', { + id: uuid().defaultRandom().primaryKey().notNull(), + customerId: uuid('customer_id').notNull(), + number: text().notNull(), + issuedAt: timestamp('issued_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + dueAt: timestamp('due_at', { withTimezone: true, mode: 'string' }), + totalAmount: numeric('total_amount', { precision: 12, scale: 2 }).default('0.0').notNull(), + currency: currencyCode().default('USD').notNull(), + status: invoiceStatus().default('draft').notNull(), + notes: text(), +}, (table) => [ + index('billing_invoices_status_idx').using('btree', table.status.asc().nullsLast().op('enum_ops')), + foreignKey({ + columns: [table.customerId, table.number], + foreignColumns: [customersInBilling.id, customersInBilling.name], + name: 'invoices_customer_id_fkey', + }).onDelete('cascade'), + unique('invoices_customer_id_number_key').on(table.customerId, table.number), + check('invoices_total_nonnegative', sql`total_amount >= (0)::numeric`), +]); + +export const aliasesInCore = core.table('aliases', { + id: uuid().defaultRandom().primaryKey().notNull(), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').notNull(), + alias: text().notNull().unique('unique_with_name'), + organizationId: uuid('organization_id'), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'aliases_organization_id_fkey', + }).onUpdate('cascade'), + unique('aliases_object_type_object_id_alias_key').on(table.objectType, table.objectId, table.alias), +]); + +export const selfRef = core.table('self_ref', { + id: uuid().defaultRandom().primaryKey().notNull(), + objectType: text('object_type').notNull().unique().references((): AnyPgColumn => selfRef.organizationId), + organizationId: text('organization_id').notNull().unique(), +}); + +export const couponRedemptionsInBilling = billing.table('coupon_redemptions', { + couponId: uuid('coupon_id').notNull(), + customerId: uuid('customer_id').notNull(), + redeemedAt: timestamp('redeemed_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.couponId], + foreignColumns: [couponsInBilling.id], + name: 'coupon_redemptions_coupon_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.customerId], + foreignColumns: [customersInBilling.id], + name: 'coupon_redemptions_customer_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.couponId, table.customerId], name: 'coupon_redemptions_pkey' }), +]); + +export const entityLinksInCore = core.table('entity_links', { + parentEntityId: uuid('parent_entity_id').notNull(), + childEntityId: uuid('child_entity_id').notNull(), + relationship: text().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.parentEntityId], + foreignColumns: [entitiesInCore.id], + name: 'entity_links_parent_entity_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.childEntityId], + foreignColumns: [entitiesInCore.id], + name: 'entity_links_child_entity_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.parentEntityId, table.childEntityId, table.relationship], name: 'entity_links_pkey' }), +]); + +export const rolePermissionsInCore = core.table('role_permissions', { + roleId: integer('role_id').notNull(), + permissionId: integer('permission_id').notNull(), + assignedBy: uuid('assigned_by'), + assignedAt: timestamp('assigned_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.roleId], + foreignColumns: [rolesInCore.id], + name: 'role_permissions_role_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.permissionId], + foreignColumns: [permissionsInCore.id], + name: 'role_permissions_permission_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.assignedBy], + foreignColumns: [usersInCore.id], + name: 'role_permissions_assigned_by_fkey', + }), + primaryKey({ columns: [table.roleId, table.permissionId], name: 'role_permissions_pkey' }), +]); + +export const taggingsInCore = core.table('taggings', { + tagId: integer('tag_id').notNull(), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.tagId], + foreignColumns: [tagsInCore.id], + name: 'taggings_tag_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.tagId, table.objectType, table.objectId], name: 'taggings_pkey' }), +]); + +export const reactionsInCore = core.table('reactions', { + messageId: uuid('message_id').notNull(), + userId: uuid('user_id').notNull(), + reaction: text().notNull().array(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.messageId], + foreignColumns: [chatMessagesInCore.id], + name: 'reactions_message_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'reactions_user_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.messageId, table.userId, table.reaction], name: 'reactions_pkey' }), +]); + +// views +export const projectSearchInAnalytics = analytics.materializedView('project_search', { + id: uuid(), + name: text(), + slug: text(), + description: text(), +}).with({ autovacuumEnabled: true, autovacuumMultixactFreezeTableAge: 12 }) + .withNoData().as( + sql`SELECT id, name, slug, description FROM core.projects p`, + ); + +export const projectSearchInAnalytics2 = analytics.materializedView('project_search2', { + id: uuid(), + name: text(), + slug: text(), + description: text(), +}).with({ autovacuumEnabled: true, autovacuumMultixactFreezeTableAge: 12 }) + .withNoData().existing(); + +export const vActiveUsersInCore = core.view('v_active_users').as((qb) => + qb.select({ + id: usersInCore.id, + username: usersInCore.username, + organization_id: usersInCore.organizationId, + }).from(usersInCore).where(eq(usersInCore.status, 'active')) +); +export const vActiveUsersInCore2 = core.view('v_active_users2', {}).existing(); + +// polices +export const rls = pgSchema('rls'); +export const documentsInRls = rls.table('documents', { + docId: uuid('doc_id').defaultRandom().primaryKey().notNull(), + ownerId: uuid('owner_id').notNull(), + title: text().notNull(), + content: text().notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('documents_delete_own', { + as: 'permissive', + for: 'delete', + to: ['public'], + using: sql`(owner_id = (CURRENT_USER)::uuid)`, + }), + pgPolicy('documents_update_own', { as: 'permissive', for: 'update', to: ['public'] }), + pgPolicy('documents_select_own', { as: 'permissive', for: 'select', to: ['public'] }), +]); + +export const messagesInRls = rls.table('messages', { + msgId: uuid('msg_id').defaultRandom().primaryKey().notNull(), + senderId: uuid('sender_id').notNull(), + recipientId: uuid('recipient_id').notNull(), + message: text().notNull(), + sentAt: timestamp('sent_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('messages_delete_own', { + as: 'permissive', + for: 'delete', + to: ['public'], + using: sql`(sender_id = (CURRENT_USER)::uuid)`, + }), + pgPolicy('messages_visibility', { as: 'permissive', for: 'select', to: ['public'] }), +]).enableRLS(); + +export const projectsInRls = rls.table('projects', { + projectId: uuid('project_id').defaultRandom().primaryKey().notNull(), + name: text().notNull(), + description: text(), + ownerId: uuid('owner_id').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('projects_visibility', { + as: 'permissive', + for: 'select', + to: ['public'], + using: sql`((owner_id = (CURRENT_USER)::uuid) OR (project_id IN ( SELECT pm.project_id + FROM rls.project_members pm + WHERE (pm.user_id = (CURRENT_USER)::uuid))))`, + }), +]); + +export const projectMembersInRls = rls.table('project_members', { + projectId: uuid('project_id').notNull(), + userId: uuid('user_id').notNull(), + role: text().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInRls.projectId], + name: 'project_members_project_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.projectId, table.userId], name: 'project_members_pkey' }), + pgPolicy('project_members_manage', { + as: 'permissive', + for: 'all', + to: ['public'], + using: sql`(project_id IN ( SELECT p.project_id + FROM rls.projects p + WHERE (p.owner_id = (CURRENT_USER)::uuid)))`, + }), + pgPolicy('project_members_visibility', { as: 'permissive', for: 'select', to: ['public'] }), + check('project_members_role_check', sql`role = ANY (ARRAY['member'::text, 'admin'::text])`), +]).enableRLS(); + +export const policy = pgPolicy('new_policy', { + as: 'restrictive', + to: 'postgres', + withCheck: sql`1 = 1`, + for: 'all', +}).link(organizationsInCore); diff --git a/drizzle-kit/tests/postgres/snapshots/schema03new.ts b/drizzle-kit/tests/postgres/snapshots/schema03new.ts new file mode 100644 index 0000000000..3ad9126469 --- /dev/null +++ b/drizzle-kit/tests/postgres/snapshots/schema03new.ts @@ -0,0 +1,1138 @@ +import { eq, sql } from 'drizzle-orm'; +import { decimal } from 'drizzle-orm/cockroach-core'; +import { + AnyPgColumn, + bigint, + bigserial, + boolean, + char, + check, + doublePrecision, + foreignKey, + index, + inet, + integer, + interval, + jsonb, + numeric, + pgEnum, + pgPolicy, + pgSchema, + pgSequence, + pgTable, + primaryKey, + serial, + smallint, + text, + timestamp, + unique, + uniqueIndex, + uuid, +} from 'drizzle-orm/pg-core'; + +// generated with AI and updated manually in some places + +export const core = pgSchema('core'); +export const analytics = pgSchema('analytics'); +export const billing = pgSchema('billing'); +export const monitoring = pgSchema('monitoring'); +export const alertAction = pgEnum('alert_action', ['email', 'pagerd/ut"\'y', 'slack', 'webhook']); +export const currencyCode = pgEnum('currency_code', ['USD', 'EUR', 'GBP', 'UAH', 'JPY']); +export const datasetVisibility = pgEnum('dataset_visibility', ['priv"ate', 'team', 'public']); +export const env = pgEnum('env', ['dev', 'staging', 'prod']); +export const featureState = pgEnum('feature_state', ['enabled', 'disabled', 'gradual']); +export const invoiceStatus = pgEnum('invoice_status', ['draft', "iss'ued", 'paid', 'voided', 'failed']); +export const jobState = pgEnum('job_state', ['queued', 'running', 'success', 'failed', 'cancelled']); +export const notificationChannel = pgEnum('notification_channel', ['email', 'sms', 'in_app', 'webhook']); +export const paymentMethod = pgEnum('payment_method', ['card', 'bank_transfer', 'paypal', 'crypto']); +export const pipelineStatus = pgEnum('pipeline_status', ['created', 'running', 'paused', 'completed', 'errored']); +export const roleKind = pgEnum('role_kind', ['system', 'custom']); +export const ruleConditionOperator = pgEnum('rule_condition_operator', [ + 'eq', + 'neq', + 'gt', + 'lt', + 'gte', + 'lte', + 'in', + 'nin', +]); +export const severityLevel = pgEnum('severity_level', ['low', 'medium', 'high', 'critical']); +export const userStatus = pgEnum('user_status', ['active', 'inactive', 'suspended', 'pending']); + +export const seqOrgCode = pgSequence('seq_org_code', { + startWith: '1000', + increment: '1', + minValue: '1', + maxValue: '9223372036854775807', + cache: '1', + cycle: false, +}); + +export const organizationsInCore = core.table('organizations', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: bigint({ mode: 'number' }).default(sql`nextval('seq_org_code'::regclass)`).notNull(), + name: text().notNull(), + domain: text(), + currency: currencyCode().default('EUR').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_org_name_idx').using('btree', table.name.asc().nullsLast().op('text_ops')), + index('organizations_code_idx').using('btree', table.code.asc().nullsLast().op('int8_ops')), + unique('organizations_domain_key').on(table.domain), + check('organizations_name_check', sql`char_length(name) > 1`), +]); + +export const usersInCore = core.table('users', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + username: text().notNull(), + status: userStatus().default('pending').notNull(), + locale: text().default('en-US').notNull(), + lastLogin: timestamp('last_login', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + bio: text().$onUpdate(() => sql`bio || 'some test'`), + profile: jsonb(), +}, (table) => [ + index('core_users_username_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.username.asc().nullsLast().op('text_ops'), + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'users_organization_id_fkey', + }).onDelete('cascade'), + unique('users_org_username_unique').on(table.organizationId, table.username), +]); + +export const rolesInCore = core.table('roles', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull().references(() => organizationsInCore.id, { onDelete: 'cascade' }), + name: text().notNull(), + kind: roleKind().default('custom').notNull(), + builtin: boolean().default(false).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + unique('roles_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const permissionsInCore = core.table('permissions', { + id: serial().primaryKey().notNull(), + code: text().notNull().unique(), + description: text(), +}); + +export const membershipsInCore = core.table('memberships', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + roleId: integer('role_id').notNull(), + organizationId: uuid('organization_id').notNull(), + joinedAt: timestamp('joined_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + active: boolean().default(true).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'memberships_user_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.roleId], + foreignColumns: [rolesInCore.id], + name: 'memberships_role_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'memberships_organization_id_fkey', + }).onDelete('cascade'), + unique('unique_membership').on(table.userId, table.organizationId), +]); + +export const apiKeysInCore = core.table('api_keys', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id'), + userId: uuid('user_id'), + name: text().notNull(), + keyHash: text('key_hash').notNull(), + revoked: boolean().default(false).notNull(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb().generatedAlwaysAs(sql`'{"some":"test"}'`), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_apikey_org_idx').using('btree', table.organizationId.asc().nullsLast().op('uuid_ops')).where( + sql`(revoked = false)`, + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'api_keys_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'api_keys_user_id_fkey', + }).onDelete('set null'), + unique('api_keys_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const sessionsInCore = core.table('sessions', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + ip: inet(), + userAgent: text('user_agent'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string' }).notNull(), + active: boolean().default(true).notNull(), +}, (table) => [ + index('core_sessions_user_expires').using( + 'btree', + table.userId.asc().nullsLast(), + table.expiresAt.asc().nullsLast().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'sessions_user_id_fkey', + }).onDelete('cascade'), +]); + +export const oauthProvidersInCore = core.table('oauth_providers', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + provider: text().notNull(), + clientId: text('client_id').notNull(), + clientSecret: text('client_secret').notNull(), + config: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'oauth_providers_organization_id_fkey', + }).onDelete('cascade'), + unique('oauth_providers_organization_id_provider_key').on(table.organizationId, table.provider), +]); + +export const featureFlagsInCore = core.table('feature_flags', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + description: text(), + state: featureState().default('disabled').notNull(), + rolloutPercent: smallint('rollout_percent').default(0), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'feature_flags_organization_id_fkey', + }).onDelete('cascade'), + unique('feature_flags_organization_id_key_key').on(table.organizationId, table.key), + check('feature_flags_rollout_percent_check', sql`(rollout_percent >= 0) AND (rollout_percent <= 100)`), +]); + +export const projectsInCore = core.table('projects', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + slug: text().notNull(), + description: text(), + visibility: datasetVisibility().default('priv"ate').notNull(), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_projects_org_name_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.name.asc().nullsLast().op('text_ops'), + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'projects_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'projects_created_by_fkey', + }), + unique('projects_org_slug_unique').on(table.organizationId, table.slug), +]); + +export const repositoriesInCore = core.table('repositories', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + provider: text().notNull(), + repoOwner: text('repo_owner').notNull(), + repoName: text('repo_name').notNull(), + defaultBranch: text('default_branch').default('main').notNull(), + cloneUrl: text('clone_url'), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'repositories_project_id_fkey', + }).onDelete('cascade'), + unique('repositories_project_id_provider_repo_owner_repo_name_key').on( + table.projectId, + table.provider, + table.repoOwner, + table.repoName, + ), +]); + +export const buildsInCore = core.table('builds', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + triggeredBy: uuid('triggered_by'), + commitSha: char('commit_sha', { length: 40 }).notNull(), + status: pipelineStatus().default('created').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb(), +}, (table) => [ + index('core_builds_project_status_idx').using( + 'btree', + table.projectId.asc().nullsLast().op('uuid_ops'), + table.status.asc().nullsLast(), + ), + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'builds_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.triggeredBy], + foreignColumns: [usersInCore.id], + name: 'builds_triggered_by_fkey', + }), + unique('builds_project_id_commit_sha_key').on(table.projectId, table.commitSha), +]); + +export const pipelinesInCore = core.table('pipelines', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + name: text().notNull(), + spec: jsonb().notNull(), + status: pipelineStatus().default('created').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'pipelines_project_id_fkey', + }).onDelete('cascade'), + unique('pipelines_project_id_name_key').on(table.projectId, table.name), +]); + +export const pipelineRunsInAnalytics = analytics.table('pipeline_runs', { + id: uuid().defaultRandom().primaryKey().notNull(), + pipelineId: uuid('pipeline_id').notNull(), + + runNumber: bigint('run_number', { mode: 'number' }).notNull(), + state: jobState().default('queued').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + logs: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('analytics_pipeline_runs_state_idx').using('btree', table.state.asc().nullsLast().op('enum_ops')), + foreignKey({ + columns: [table.pipelineId], + foreignColumns: [pipelinesInCore.id], + name: 'pipeline_runs_pipeline_id_fkey', + }).onDelete('cascade'), + unique('pipeline_runs_unique_run').on(table.pipelineId, table.runNumber), +]); + +export const jobsInAnalytics = analytics.table('jobs', { + id: uuid().defaultRandom().primaryKey().notNull(), + pipelineRunId: uuid('pipeline_run_id'), + name: text().notNull(), + state: jobState().default('queued').notNull(), + attempts: integer().default(0).notNull(), + lastError: text('last_error'), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }), + finishedAt: timestamp('finished_at', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('analytics_jobs_state_attempts_idx').using( + 'btree', + table.state.asc().nullsLast(), + table.attempts.asc().nullsLast().op('int4_ops'), + ), + foreignKey({ + columns: [table.pipelineRunId], + foreignColumns: [pipelineRunsInAnalytics.id], + name: 'jobs_pipeline_run_id_fkey', + }).onDelete('cascade'), +]); + +export const storageBucketsInCore = core.table('storage_buckets', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + region: text().notNull(), + config: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'storage_buckets_organization_id_fkey', + }).onDelete('cascade'), + unique('storage_buckets_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const objectsInCore = core.table('objects', { + id: uuid().defaultRandom().primaryKey().notNull(), + bucketId: uuid('bucket_id').notNull(), + path: text().notNull(), + + size: bigint({ mode: 'number' }).default(0).notNull(), + contentType: text('content_type'), + metadata: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_objects_bucket_path_gin').using('gin', table.metadata.asc().nullsLast().op('jsonb_ops')), + foreignKey({ + columns: [table.bucketId], + foreignColumns: [storageBucketsInCore.id], + name: 'objects_bucket_id_fkey', + }).onDelete('cascade'), + unique('objects_bucket_id_path_key').on(table.bucketId, table.path), +]); + +export const filesInCore = core.table('files', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id'), + name: text().notNull(), + latestObjectId: uuid('latest_object_id'), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'files_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.latestObjectId], + foreignColumns: [objectsInCore.id], + name: 'files_latest_object_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'files_created_by_fkey', + }), + unique('files_project_id_name_key').on(table.projectId, table.name), +]); + +export const fileVersionsInCore = core.table('file_versions', { + id: uuid().defaultRandom().primaryKey().notNull(), + fileId: uuid('file_id').notNull(), + objectId: uuid('object_id').notNull(), + versionNumber: integer('version_number').notNull(), + checksum: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.fileId], + foreignColumns: [filesInCore.id], + name: 'file_versions_file_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.objectId], + foreignColumns: [objectsInCore.id], + name: 'file_versions_object_id_fkey', + }).onDelete('cascade'), + unique('file_versions_file_id_version_number_key').on(table.fileId, table.versionNumber), +]); + +export const tagsInCore = core.table('tags', { + id: serial().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + value: text(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'tags_organization_id_fkey', + }).onDelete('cascade'), + unique('tags_organization_id_key_value_key').on(table.organizationId, table.key, table.value), +]); + +export const conversationsInCore = core.table('conversations', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id'), + title: text(), + createdBy: uuid('created_by'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'conversations_project_id_fkey', + }).onDelete('set null'), + foreignKey({ + columns: [table.createdBy], + foreignColumns: [usersInCore.id], + name: 'conversations_created_by_fkey', + }), +]); + +export const chatMessagesInCore = core.table('chat_messages', { + id: uuid().defaultRandom().primaryKey().notNull(), + conversationId: uuid('conversation_id').notNull(), + senderId: uuid('sender_id'), + body: text().notNull(), + attachments: jsonb(), + sentAt: timestamp('sent_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + editedAt: timestamp('edited_at', { withTimezone: true, mode: 'string' }), +}, (table) => [ + index('core_chat_conv_sent_at_idx').using( + 'btree', + table.conversationId.asc().nullsLast(), + table.sentAt.desc().nullsFirst().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.conversationId], + foreignColumns: [conversationsInCore.id], + name: 'chat_messages_conversation_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.senderId], + foreignColumns: [usersInCore.id], + name: 'chat_messages_sender_id_fkey', + }).onDelete('set null'), +]); + +export const notificationsInCore = core.table('notifications', { + id: uuid().defaultRandom().primaryKey().notNull(), + userId: uuid('user_id').notNull(), + channel: notificationChannel().default('in_app').notNull(), + payload: jsonb().notNull(), + seen: boolean().default(false).notNull(), + deliveredAt: timestamp('delivered_at', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_notifications_unseen_idx').using('btree', table.userId.asc().nullsLast().op('uuid_ops')).where( + sql`(seen = false)`, + ), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'notifications_user_id_fkey', + }).onDelete('cascade'), +]); + +export const customersInBilling = billing.table('customers', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id'), + name: text().notNull(), + address: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'customers_organization_id_fkey', + }).onDelete('cascade'), + unique('customers_organization_id_key').on(table.organizationId), + unique('idnameunique').on(table.id, table.name), +]); + +export const subscriptionsInBilling = billing.table('subscriptions', { + id: uuid().defaultRandom().primaryKey().notNull(), + customerId: uuid('customer_id').notNull(), + plan: text().notNull(), + status: text().default('active').notNull(), + startedAt: timestamp('started_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + endedAt: timestamp('ended_at', { withTimezone: true, mode: 'string' }), + metadata: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.customerId], + foreignColumns: [customersInBilling.id], + name: 'subscriptions_customer_id_fkey', + }).onDelete('cascade'), +]); + +export const paymentsInBilling = billing.table('payments', { + id: uuid().defaultRandom().primaryKey().notNull(), + invoiceId: uuid('invoice_id').notNull(), + paidAt: timestamp('paid_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + amount: numeric({ precision: 12, scale: 2 }).notNull(), + amount2: decimal({ precision: 12, scale: 2 }).notNull(), + method: paymentMethod().notNull(), + transactionRef: text('transaction_ref'), + metadata: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.invoiceId], + foreignColumns: [invoicesInBilling.id], + name: 'payments_invoice_id_fkey', + }).onDelete('cascade'), +]); + +export const couponsInBilling = billing.table('coupons', { + id: uuid().defaultRandom().primaryKey().notNull(), + code: text().notNull(), + description: text(), + discountPercent: smallint('discount_percent'), + redeemableFrom: timestamp('redeemable_from', { withTimezone: true, mode: 'string' }), + redeemableTo: timestamp('redeemable_to', { withTimezone: true, mode: 'string' }), + maxRedemptions: integer('max_redemptions').generatedAlwaysAsIdentity(), + metadata: jsonb(), +}, (table) => [ + unique('coupons_code_key').on(table.code), + check('coupons_discount_percent_check', sql`(discount_percent >= 0) AND (discount_percent <= 100)`), +]); + +export const webhooksInCore = core.table('webhooks', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + url: text().notNull(), + secret: text(), + events: text().array().notNull(), + active: boolean().default(true).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_webhooks_org_active_idx').using('btree', table.organizationId.asc().nullsLast().op('uuid_ops')).where( + sql`(active = true)`, + ), + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'webhooks_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const metricSourcesInAnalytics = analytics.table('metric_sources', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + config: jsonb(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'metric_sources_organization_id_fkey', + }).onDelete('cascade'), + unique('metric_sources_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const metricsInAnalytics = analytics.table('metrics', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + sourceId: uuid('source_id').notNull(), + metricKey: text('metric_key').notNull(), + ts: timestamp({ withTimezone: true, mode: 'string' }).notNull(), + value: doublePrecision().notNull(), + tags: jsonb(), +}, (table) => [ + index('analytics_metrics_key_ts_idx').using( + 'btree', + table.metricKey.asc().nullsLast().op('text_ops'), + table.ts.desc().nullsFirst().op('timestamptz_ops'), + ), + foreignKey({ + columns: [table.sourceId], + foreignColumns: [metricSourcesInAnalytics.id], + name: 'metrics_source_id_fkey', + }).onDelete('cascade'), + unique('metrics_source_id_metric_key_ts_key').on(table.sourceId, table.metricKey, table.ts), +]); + +export const alertRulesInMonitoring = monitoring.table('alert_rules', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + description: text(), + severity: severityLevel().default('medium').notNull(), + enabled: boolean().default(true).notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'alert_rules_organization_id_fkey', + }).onDelete('cascade'), + unique('alert_rules_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const ruleConditionsInMonitoring = monitoring.table('rule_conditions', { + id: uuid().defaultRandom().primaryKey().notNull(), + ruleId: uuid('rule_id').notNull(), + metricKey: text('metric_key').notNull(), + operator: ruleConditionOperator().notNull().unique('some_name', { nulls: 'not distinct' }), + threshold: doublePrecision().notNull(), + window: interval().default('00:05:00').notNull(), +}, (table) => [ + foreignKey({ + columns: [table.ruleId], + foreignColumns: [alertRulesInMonitoring.id], + name: 'rule_conditions_rule_id_fkey', + }).onDelete('cascade'), +]); + +export const alertsInMonitoring = monitoring.table('alerts', { + id: uuid().defaultRandom().primaryKey().notNull(), + ruleId: uuid('rule_id').notNull(), + triggeredAt: timestamp('triggered_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + resolvedAt: timestamp('resolved_at', { withTimezone: true, mode: 'string' }), + payload: jsonb(), + state: text().default('firing').notNull(), +}, (table) => [ + foreignKey({ + columns: [table.ruleId], + foreignColumns: [alertRulesInMonitoring.id], + name: 'alerts_rule_id_fkey', + }).onDelete('cascade'), +]); + +export const escalationsInMonitoring = monitoring.table('escalations', { + id: uuid().defaultRandom().primaryKey().notNull(), + alertId: uuid('alert_id').notNull(), + action: alertAction().notNull(), + target: text().notNull(), + executedAt: timestamp('executed_at', { withTimezone: true, mode: 'string' }), +}, (table) => [ + foreignKey({ + columns: [table.alertId], + foreignColumns: [alertsInMonitoring.id], + name: 'escalations_alert_id_fkey', + }).onDelete('cascade'), +]); + +export const ssoProvidersInCore = core.table('sso_providers', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + type: text().notNull(), + config: jsonb().notNull(), + enabled: boolean().default(false).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'sso_providers_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const auditLogsInCore = core.table('audit_logs', { + id: bigserial({ mode: 'bigint' }).primaryKey().notNull(), + organizationId: uuid('organization_id'), + actorId: uuid('actor_id'), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').array().array().array(), + action: text().notNull(), + beforeState: jsonb('before_state'), + afterState: jsonb('after_state'), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + index('core_audit_org_idx').using( + 'btree', + table.organizationId.asc().nullsLast(), + table.createdAt.desc().nullsFirst().op('timestamptz_ops'), + ), +]); + +export const rateLimitsInCore = core.table('rate_limits', { + id: uuid().defaultRandom().primaryKey().notNull(), + apiKeyId: uuid('api_key_id').notNull(), + windowStart: timestamp('window_start', { withTimezone: true, mode: 'string' }).notNull(), + requests: integer().generatedByDefaultAsIdentity().notNull().array(), + limit: integer().generatedAlwaysAs(() => sql`1`).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.apiKeyId], + foreignColumns: [apiKeysInCore.id], + name: 'rate_limits_api_key_id_fkey', + }).onDelete('cascade'), + unique('rate_limits_api_key_id_window_start_key').on(table.apiKeyId, table.windowStart).nullsNotDistinct(), +]); + +export const experimentsInCore = core.table('experiments', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + key: text().notNull(), + description: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'experiments_organization_id_fkey', + }).onDelete('cascade'), + unique('experiments_organization_id_key_key').on(table.organizationId, table.key), +]); + +export const experimentVariantsInCore = core.table('experiment_variants', { + id: uuid().defaultRandom().primaryKey().notNull(), + experimentId: uuid('experiment_id').notNull(), + name: text().notNull(), + allocationPercent: smallint('allocation_percent').default(0).notNull(), +}, (table) => [ + foreignKey({ + columns: [table.experimentId], + foreignColumns: [experimentsInCore.id], + name: 'experiment_variants_experiment_id_fkey', + }).onDelete('cascade'), + unique('experiment_variants_experiment_id_name_key').on(table.experimentId, table.name), + check('experiment_variants_allocation_percent_check', sql`(allocation_percent >= 0) AND (allocation_percent <= 100)`), +]); + +export const experimentAssignmentsInCore = core.table('experiment_assignments', { + id: uuid().defaultRandom().primaryKey().notNull(), + experimentId: uuid('experiment_id').notNull(), + variantId: uuid('variant_id').notNull(), + userId: uuid('user_id').notNull(), + assignedAt: timestamp('assigned_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.experimentId], + foreignColumns: [experimentsInCore.id], + name: 'experiment_assignments_experiment_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.variantId], + foreignColumns: [experimentVariantsInCore.id], + name: 'experiment_assignments_variant_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'experiment_assignments_user_id_fkey', + }).onDelete('cascade'), + unique('experiment_assignments_experiment_id_user_id_key').on(table.experimentId, table.userId), +]); + +export const deploymentsInCore = core.table('deployments', { + id: uuid().defaultRandom().primaryKey().notNull(), + projectId: uuid('project_id').notNull(), + environment: env().default('dev').notNull(), + version: text().notNull(), + deployedBy: uuid('deployed_by'), + deployedAt: timestamp('deployed_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + notes: text(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInCore.id], + name: 'deployments_project_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.deployedBy], + foreignColumns: [usersInCore.id], + name: 'deployments_deployed_by_fkey', + }), + unique('deployments_project_id_environment_version_key').on(table.projectId, table.environment, table.version), +]); + +export const servicesInCore = core.table('services', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + name: text().notNull(), + kind: text(), + ownerId: uuid('owner_id'), + metadata: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string', precision: 6 }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'services_organization_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.ownerId], + foreignColumns: [usersInCore.id], + name: 'services_owner_id_fkey', + }), + unique('services_organization_id_name_key').on(table.organizationId, table.name), +]); + +export const locksInCore = core.table('locks', { + name: text().primaryKey().notNull(), + owner: text(), + expiresAt: timestamp('expires_at', { withTimezone: true, mode: 'string', precision: 2 }), +}); + +export const entitiesInCore = core.table('entities', { + id: uuid().defaultRandom().primaryKey().notNull(), + organizationId: uuid('organization_id').notNull(), + type: text().notNull(), + data: jsonb(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'entities_organization_id_fkey', + }).onDelete('cascade'), +]); + +export const taskQueueInAnalytics = analytics.table('task_queue', { + id: uuid().defaultRandom().primaryKey().notNull(), + queueName: text('queue_name').default('default').notNull(), + payload: jsonb().notNull(), + priority: smallint().default(100).notNull(), + reserved: boolean().default(false).notNull(), + reservedUntil: timestamp('reserved_until', { withTimezone: true, mode: 'string' }), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + uniqueIndex('analytics_task_queue_unique_unreserved').using( + 'btree', + sql`queue_name`, + sql`((payload ->> 'task_type'::text))`, + ).where(sql`(reserved = false)`), +]); + +export const invoicesInBilling = billing.table('invoices', { + id: uuid().defaultRandom().primaryKey().notNull(), + customerId: uuid('customer_id').notNull(), + number: text().notNull(), + issuedAt: timestamp('issued_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), + dueAt: timestamp('due_at', { withTimezone: true, mode: 'string' }), + totalAmount: numeric('total_amount', { precision: 12, scale: 2 }).default('0.0').notNull(), + currency: currencyCode().default('USD').notNull(), + status: invoiceStatus().default('draft').notNull(), + notes: text(), +}, (table) => [ + index('billing_invoices_status_idx').using('btree', table.status.asc().nullsLast().op('enum_ops')), + foreignKey({ + columns: [table.customerId, table.number], + foreignColumns: [customersInBilling.id, customersInBilling.name], + name: 'invoices_customer_id_fkey', + }).onDelete('cascade'), + unique('invoices_customer_id_number_key').on(table.customerId, table.number), + check('invoices_total_nonnegative', sql`total_amount >= (0)::numeric`), +]); + +export const aliasesInCore = core.table('aliases', { + id: uuid().defaultRandom().primaryKey().notNull(), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').notNull(), + alias: text().notNull().unique('unique_with_name'), + organizationId: uuid('organization_id'), +}, (table) => [ + foreignKey({ + columns: [table.organizationId], + foreignColumns: [organizationsInCore.id], + name: 'aliases_organization_id_fkey', + }).onUpdate('cascade'), + unique('aliases_object_type_object_id_alias_key').on(table.objectType, table.objectId, table.alias), +]); + +export const selfRef = core.table('self_ref', { + id: uuid().defaultRandom().primaryKey().notNull(), + objectType: text('object_type').notNull().unique().references((): AnyPgColumn => selfRef.organizationId), + organizationId: text('organization_id').notNull().unique(), +}); + +export const couponRedemptionsInBilling = billing.table('coupon_redemptions', { + couponId: uuid('coupon_id').notNull(), + customerId: uuid('customer_id').notNull(), + redeemedAt: timestamp('redeemed_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.couponId], + foreignColumns: [couponsInBilling.id], + name: 'coupon_redemptions_coupon_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.customerId], + foreignColumns: [customersInBilling.id], + name: 'coupon_redemptions_customer_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.couponId, table.customerId], name: 'coupon_redemptions_pkey' }), +]); + +export const entityLinksInCore = core.table('entity_links', { + parentEntityId: uuid('parent_entity_id').notNull(), + childEntityId: uuid('child_entity_id').notNull(), + relationship: text().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.parentEntityId], + foreignColumns: [entitiesInCore.id], + name: 'entity_links_parent_entity_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.childEntityId], + foreignColumns: [entitiesInCore.id], + name: 'entity_links_child_entity_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.parentEntityId, table.childEntityId, table.relationship], name: 'entity_links_pkey' }), +]); + +export const rolePermissionsInCore = core.table('role_permissions', { + roleId: integer('role_id').notNull(), + permissionId: integer('permission_id').notNull(), + assignedBy: uuid('assigned_by'), + assignedAt: timestamp('assigned_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.roleId], + foreignColumns: [rolesInCore.id], + name: 'role_permissions_role_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.permissionId], + foreignColumns: [permissionsInCore.id], + name: 'role_permissions_permission_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.assignedBy], + foreignColumns: [usersInCore.id], + name: 'role_permissions_assigned_by_fkey', + }), + primaryKey({ columns: [table.roleId, table.permissionId], name: 'role_permissions_pkey' }), +]); + +export const taggingsInCore = core.table('taggings', { + tagId: integer('tag_id').notNull(), + objectType: text('object_type').notNull(), + objectId: uuid('object_id').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.tagId], + foreignColumns: [tagsInCore.id], + name: 'taggings_tag_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.tagId, table.objectType, table.objectId], name: 'taggings_pkey' }), +]); + +export const reactionsInCore = core.table('reactions', { + messageId: uuid('message_id').notNull(), + userId: uuid('user_id').notNull(), + reaction: text().notNull().array(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.messageId], + foreignColumns: [chatMessagesInCore.id], + name: 'reactions_message_id_fkey', + }).onDelete('cascade'), + foreignKey({ + columns: [table.userId], + foreignColumns: [usersInCore.id], + name: 'reactions_user_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.messageId, table.userId, table.reaction], name: 'reactions_pkey' }), +]); + +// views +export const projectSearchInAnalytics = analytics.materializedView('project_search', { + id: uuid(), + name: text(), + slug: text(), + description: text(), +}).with({ autovacuumEnabled: true, autovacuumMultixactFreezeTableAge: 12 }) + .withNoData().as( + sql`SELECT id, name, slug, description FROM core.projects p`, + ); + +export const projectSearchInAnalytics2 = analytics.materializedView('project_search2', { + id: uuid(), + name: text(), + slug: text(), + description: text(), +}).with({ autovacuumEnabled: true, autovacuumMultixactFreezeTableAge: 12 }) + .withNoData().existing(); + +export const vActiveUsersInCore = core.view('v_active_users').as((qb) => + qb.select({ + id: usersInCore.id, + username: usersInCore.username, + organization_id: usersInCore.organizationId, + }).from(usersInCore).where(eq(usersInCore.status, 'active')) +); +export const vActiveUsersInCore2 = core.view('v_active_users2', {}).existing(); + +// polices +export const rls = pgSchema('rls'); +export const documentsInRls = rls.table('documents', { + docId: uuid('doc_id').defaultRandom().primaryKey().notNull(), + ownerId: uuid('owner_id').notNull(), + title: text().notNull(), + content: text().notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('documents_delete_own', { + as: 'permissive', + for: 'delete', + to: ['public'], + using: sql`(owner_id = (CURRENT_USER)::uuid)`, + }), + pgPolicy('documents_update_own', { as: 'permissive', for: 'update', to: ['public'] }), + pgPolicy('documents_select_own', { as: 'permissive', for: 'select', to: ['public'] }), +]); + +export const messagesInRls = rls.table.withRLS('messages', { + msgId: uuid('msg_id').defaultRandom().primaryKey().notNull(), + senderId: uuid('sender_id').notNull(), + recipientId: uuid('recipient_id').notNull(), + message: text().notNull(), + sentAt: timestamp('sent_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('messages_delete_own', { + as: 'permissive', + for: 'delete', + to: ['public'], + using: sql`(sender_id = (CURRENT_USER)::uuid)`, + }), + pgPolicy('messages_visibility', { as: 'permissive', for: 'select', to: ['public'] }), +]); + +export const projectsInRls = rls.table('projects', { + projectId: uuid('project_id').defaultRandom().primaryKey().notNull(), + name: text().notNull(), + description: text(), + ownerId: uuid('owner_id').notNull(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }).defaultNow(), +}, (table) => [ + pgPolicy('projects_visibility', { + as: 'permissive', + for: 'select', + to: ['public'], + using: sql`((owner_id = (CURRENT_USER)::uuid) OR (project_id IN ( SELECT pm.project_id + FROM rls.project_members pm + WHERE (pm.user_id = (CURRENT_USER)::uuid))))`, + }), +]); + +export const projectMembersInRls = rls.table.withRLS('project_members', { + projectId: uuid('project_id').notNull(), + userId: uuid('user_id').notNull(), + role: text().notNull(), +}, (table) => [ + foreignKey({ + columns: [table.projectId], + foreignColumns: [projectsInRls.projectId], + name: 'project_members_project_id_fkey', + }).onDelete('cascade'), + primaryKey({ columns: [table.projectId, table.userId], name: 'project_members_pkey' }), + pgPolicy('project_members_manage', { + as: 'permissive', + for: 'all', + to: ['public'], + using: sql`(project_id IN ( SELECT p.project_id + FROM rls.projects p + WHERE (p.owner_id = (CURRENT_USER)::uuid)))`, + }), + pgPolicy('project_members_visibility', { as: 'permissive', for: 'select', to: ['public'] }), + check('project_members_role_check', sql`role = ANY (ARRAY['member'::text, 'admin'::text])`), +]); + +export const policy = pgPolicy('new_policy', { + as: 'restrictive', + to: 'postgres', + withCheck: sql`1 = 1`, + for: 'all', +}).link(organizationsInCore); diff --git a/drizzle-kit/tests/postgres/snapshots/schema04.ts b/drizzle-kit/tests/postgres/snapshots/schema04.ts new file mode 100644 index 0000000000..05cc7512df --- /dev/null +++ b/drizzle-kit/tests/postgres/snapshots/schema04.ts @@ -0,0 +1,596 @@ +// src/db/schema.ts +import { sql } from 'orm044'; +import { + bigint, + bigserial, + bit, + boolean, + char, + cidr, + customType, + date, + decimal, + doublePrecision, + foreignKey, + geometry, + halfvec, + index, + inet, + integer, + interval, + json, + jsonb, + line, + macaddr, + macaddr8, + numeric, + pgEnum, + pgMaterializedView, + pgSchema, + pgSequence, + pgTable, + pgView, + point, + primaryKey, + real, + serial, + smallint, + smallserial, + sparsevec, + text, + time, + timestamp, + uniqueIndex, + uuid, + varchar, + vector, +} from 'orm044/pg-core'; + +export const citext = customType<{ data: string }>({ + dataType() { + return 'citext'; + }, +}); + +export const customSchema = pgSchema('schemass'); +export const transactionStatusEnum = customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']); +export const enumname = pgEnum('enumname', ['three', 'two', 'one']); +export const test = pgEnum('test', ['ds']); +export const testHello = pgEnum('test_hello', ['ds']); + +export const invoiceSeqCustom = customSchema.sequence('invoice_seq', { + increment: 1, + startWith: 1000, + minValue: 1000, + cache: 1, + cycle: false, +}); +export const invoiceSeq = pgSequence('invoice_seq', { + increment: 1, + startWith: 1000, + minValue: 1000, + cache: 1, + cycle: false, +}); + +export const schemaTest = pgTable('schema_test', { + columnAll: uuid('column_all').defaultRandom(), + column: transactionStatusEnum('column').notNull(), +}); + +export const allSmallIntsCustom = customSchema.table( + 'schema_test2_custom', + { + column: smallint('column').notNull().array().generatedAlwaysAs([1]).default([124]), + column1: smallint('column1').default(1), + column2: smallint('column2').notNull().array().array(), + column3: smallint('column3').notNull().array().array(), + column4: smallint('column4').notNull().array().default([1]), + }, + ( + t, + ) => [ + uniqueIndex().on(t.column1), + uniqueIndex().on(t.column2), + uniqueIndex('testdfds').on(t.column3), + uniqueIndex('testdfds1').on(t.column4), + ], +); + +export const allEnumsCustom = customSchema.table( + 'all_enums_custom', + { + columnAll: enumname('column_all').default('three').notNull(), + column: enumname('columns').array().generatedAlwaysAs(['three']), + }, + (t: any) => [index('ds').on(t.column)], +); + +export const allTimestampsCustom = customSchema.table('all_timestamps_custom', { + columnDateNow: timestamp('column_date_now', { + precision: 1, + withTimezone: true, + mode: 'string', + }).defaultNow(), + columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), + column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), + column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), +}); + +export const allUuidsCustom = customSchema.table('all_uuids_custom', { + columnAll: uuid('column_all').defaultRandom().notNull(), + column: uuid('column'), +}); + +export const allDatesCustom = customSchema.table('all_dates_custom', { + column_date_now: date('column_date_now').defaultNow(), + column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), + column: date('column'), +}); + +export const allRealsCustom = customSchema.table('all_reals_custom', { + columnAll: real('column_all').default(32).notNull(), + column: real('column'), + columnPrimary: real('column_primary').primaryKey().notNull(), +}); + +export const allBigintsCustom = pgTable('all_bigints_custom', { + columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), + column: bigint('column', { mode: 'number' }), +}); + +export const allBigserialsCustom = customSchema.table('all_bigserials_custom', { + columnAll: bigserial('column_all', { mode: 'bigint' }).notNull(), + column: bigserial('column', { mode: 'bigint' }).notNull(), +}); + +export const allIntervalsCustom = customSchema.table('all_intervals_custom', { + columnAllConstrains: interval('column_all_constrains', { + fields: 'month', + }) + .default('1 mon') + .notNull(), + columnMinToSec: interval('column_min_to_sec', { + fields: 'minute to second', + }), + columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), + column: interval('column'), + column5: interval('column5', { + fields: 'minute to second', + precision: 3, + }), + column6: interval('column6'), +}); + +export const allSerialsCustom = customSchema.table('all_serials_custom', { + columnAll: serial('column_all').notNull(), + column: serial('column').notNull(), +}); + +export const allSmallserialsCustom = pgTable('all_smallserials_custom', { + columnAll: smallserial('column_all').notNull(), + column: smallserial('column').notNull(), +}); + +export const allTextsCustom = customSchema.table( + 'all_texts_custom', + { + columnAll: text('column_all').default('text').notNull(), + column: text('columns').primaryKey(), + }, + (t: any) => [index('test').on(t.column)], +); + +export const allBoolsCustom = customSchema.table('all_bools_custom', { + columnAll: boolean('column_all').default(true).notNull(), + column: boolean('column'), +}); + +export const allVarcharsCustom = customSchema.table('all_varchars_custom', { + columnAll: varchar('column_all').default('text').notNull(), + column: varchar('column', { length: 200 }), +}); + +export const allTimesCustom = customSchema.table('all_times_custom', { + columnDateNow: time('column_date_now').defaultNow(), + columnAll: time('column_all').default('22:12:12').notNull(), + column: time('column'), +}); + +export const allCharsCustom = customSchema.table('all_chars_custom', { + columnAll: char('column_all', { length: 1 }).default('text').notNull(), + column: char('column', { length: 1 }), +}); + +export const allDoublePrecisionCustom = customSchema.table('all_double_precision_custom', { + columnAll: doublePrecision('column_all').default(33.2).notNull(), + column: doublePrecision('column'), +}); + +export const allJsonbCustom = customSchema.table('all_jsonb_custom', { + columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: jsonb('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + }), + column: jsonb('column'), +}); + +export const allJsonCustom = customSchema.table('all_json_custom', { + columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: json('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + foo: 'bar', + fe: 23, + }), + column: json('column'), +}); + +export const allIntegersCustom = customSchema.table('all_integers_custom', { + columnAll: integer('column_all').primaryKey(), + column: integer('column'), + columnPrimary: integer('column_primary'), +}); + +export const allNumericsCustom = customSchema.table('all_numerics_custom', { + columnAll: numeric('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: numeric('column'), + columnPrimary: numeric('column_primary').primaryKey().notNull(), +}); + +export const allCidrCustom = customSchema.table('all_cidr_custom', { + columnAll: cidr('column_all').notNull().array().generatedAlwaysAs(['0.0.0.0/0']), + column: cidr('column').default('0.0.0.0/0'), + columnPrimary: cidr('column_primary').primaryKey().notNull(), +}); + +export const allCustomCustom = customSchema.table('all_custom_custom', { + columnAll: citext('column_all').notNull().array().generatedAlwaysAs(['0.0.0.0/0']), + column: citext('column').default('test{}\'://`"'), + columnPrimary: citext('column_primary').primaryKey().notNull(), +}); + +export const allInetCustom = customSchema.table('all_inet_custom', { + columnAll: inet('column_all').notNull().array().generatedAlwaysAs(['127.0.0.1']), + column: inet('column').default('127.0.0.1'), + columnPrimary: inet('column_primary').primaryKey().notNull(), +}); + +export const allLineCustom = customSchema.table('all_line_custom', { + columnAll: line('column_all').notNull().array().generatedAlwaysAs([[1, 1, 1]]), + column: line('column').default([1, 1, 1]), + columnPrimary: line('column_primary').primaryKey().notNull(), +}); + +export const allMacaddrCustom = customSchema.table('all_macaddr_custom', { + columnAll: macaddr('column_all').notNull().array().generatedAlwaysAs(['08:00:2b:01:02:03']), + column: macaddr('column').default('08:00:2b:01:02:03'), + columnPrimary: macaddr('column_primary').primaryKey().notNull(), +}); + +export const allMacaddr8Custom = customSchema.table('all_macaddr8_custom', { + columnAll: macaddr('column_all').notNull().array().generatedAlwaysAs(['08:00:2b:01:02:03:04:05']), + column: macaddr('column').default('08:00:2b:01:02:03:04:05'), + columnPrimary: macaddr('column_primary').primaryKey().notNull(), +}); + +export const allPointCustom = customSchema.table('all_point_custom', { + columnAll: point('column_all', { mode: 'xy' }).notNull().array().generatedAlwaysAs([{ x: 1, y: 2 }]), + columnAll1: point('column_all1', { mode: 'tuple' }).notNull().array().generatedAlwaysAs([[1, 2]]), + column: point('column', { mode: 'xy' }).default({ x: 1, y: 2 }), + column1: point('column1', { mode: 'tuple' }).default([1, 2]), + columnPrimary: point('column_primary').primaryKey().notNull(), +}); + +export const allDecimalsCustom = customSchema.table('all_decimals_custom', { + columnAll: decimal('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: decimal('column'), + columnPrimary: decimal('column_primary').primaryKey().notNull(), +}); + +export const allGeometryCustom = pgTable('all_geometry_custom', { + columnAll: geometry('column_all', { mode: 'xy', srid: 4326, type: 'point' }).default({ x: 30.5234, y: 50.4501 }) + .notNull(), + columnAll1: geometry('column_all1', { mode: 'xy', type: 'point' }).default({ x: 30.5234, y: 50.4501 }).notNull(), + columnAll2: geometry('column_all2', { mode: 'tuple', srid: 4326, type: 'point' }).default([30.5234, 50.4501]) + .notNull(), + columnAll3: geometry('column_all3', { mode: 'tuple', type: 'point' }).default([30.5234, 50.4501]).notNull(), + column: geometry('column').array(), + columnPrimary: geometry('column_primary').primaryKey().notNull(), +}); + +export const allBitCustom = pgTable('all_bit_custom', { + columnAll: bit('column_all', { dimensions: 1 }).default('1').notNull(), + columnAll1: bit('column_all1', { dimensions: 2 }).default('11').notNull(), + column: bit('column', { dimensions: 3 }).array(), + columnPrimary: bit('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allHalfvecCustom = pgTable('all_halfvec_custom', { + columnAll: halfvec('column_all', { dimensions: 1 }).default([0, -2, 3]).notNull(), + columnAll1: halfvec('column_all1', { dimensions: 2 }).default([0, -2, 3]).notNull(), + column: halfvec('column', { dimensions: 3 }).array(), + columnPrimary: halfvec('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allVecCustom = pgTable('all_vec_custom', { + columnAll: vector('column_all', { dimensions: 1 }).default([0, -2, 3]).notNull(), + columnAll1: vector('column_all1', { dimensions: 2 }).default([0, -2, 3]).notNull(), + columnAll2: vector('column_all2', { dimensions: 2 }).array().default([[0, -2, 3]]).notNull(), + columnPrimary: vector('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allSparcevecCustom = pgTable('all_sparcevec_custom', { + columnAll: sparsevec('column_all', { dimensions: 1 }).default('{1:-1,3:2,5:3}/5').notNull(), + columnAll1: sparsevec('column_all1', { dimensions: 2 }).default('{1:-1,3:2,5:3}/5').notNull(), + columnAll3: sparsevec('column_all3', { dimensions: 2 }).array().default(['{1:-1,3:2,5:3}/5']).notNull(), + columnPrimary: sparsevec('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allSmallInts = pgTable( + 'schema_test2', + { + columnAll: smallint('column_all').default(124).notNull(), + column: smallint('columns').array(), + column1: smallint('column1').array().array(), + column2: smallint('column2').array().array(), + column3: smallint('column3').array(), + column4: smallint('column4').array().notNull(), + }, + (t: any) => [uniqueIndex('testdfds').on(t.column)], +); + +export const allEnums = pgTable( + 'all_enums', + { + columnAll: enumname('column_all').default('three').notNull(), + column: enumname('columns'), + column3: enumname('column3').array().notNull(), + }, + (t: any) => [index('ds').on(t.column)], +); + +export const allTimestamps = pgTable('all_timestamps', { + columnDateNow: timestamp('column_date_now', { + precision: 1, + withTimezone: true, + mode: 'string', + }).defaultNow(), + columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), + column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), + column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), + column3: timestamp('column3').array().notNull(), +}); + +export const allUuids = pgTable('all_uuids', { + columnAll: uuid('column_all').defaultRandom().notNull(), + column: uuid('column'), + column3: uuid('column3').array().notNull(), +}); + +export const allDates = pgTable('all_dates', { + column_date_now: date('column_date_now').defaultNow(), + column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), + column: date('column'), + column3: date('column3').array().notNull(), +}); + +export const allReals = pgTable('all_reals', { + columnAll: real('column_all').default(32).notNull(), + column: real('column'), + columnPrimary: real('column_primary').primaryKey().notNull(), + column3: real('column3').array().notNull(), +}); + +export const allBigints = pgTable('all_bigints', { + columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), + column: bigint('column', { mode: 'number' }), + column3: bigint('column3', { mode: 'number' }).array().notNull(), +}); + +export const allBigserials = pgTable('all_bigserials', { + columnAll: bigserial('column_all', { mode: 'bigint' }).notNull(), + column: bigserial('column', { mode: 'bigint' }).notNull(), + column3: bigserial('column3', { mode: 'number' }).array().notNull(), +}); + +export const allIntervals = pgTable('all_intervals', { + columnAllConstrains: interval('column_all_constrains', { + fields: 'month', + }) + .default('1 mon') + .notNull(), + columnMinToSec: interval('column_min_to_sec', { + fields: 'minute to second', + }), + columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), + column: interval('column'), + column5: interval('column5', { + fields: 'minute to second', + precision: 3, + }), + column6: interval('column6'), + column3: interval('column3').array().notNull(), +}); + +export const allSerials = pgTable('all_serials', { + columnAll: serial('column_all').notNull(), + column: serial('column').notNull(), + column3: serial('column3').array().notNull(), +}); + +export const allSmallserials = pgTable('all_smallserials', { + columnAll: smallserial('column_all').notNull(), + column: smallserial('column').notNull(), + column3: smallserial('column3').array().notNull(), +}); + +export const allTexts = pgTable( + 'all_texts', + { + columnAll: text('column_all').default('text').notNull(), + column: text('columns').primaryKey(), + column3: text('column3').array().notNull(), + }, + (t: any) => [index('test').on(t.column)], +); + +export const allBools = pgTable('all_bools', { + columnAll: boolean('column_all').default(true).notNull(), + column: boolean('column'), + column3: boolean('column3').array().notNull(), +}); + +export const allVarchars = pgTable('all_varchars', { + columnAll: varchar('column_all').default('text').notNull(), + column: varchar('column', { length: 200 }), + column3: varchar('column3').array().notNull(), +}); + +export const allTimes = pgTable('all_times', { + columnDateNow: time('column_date_now').defaultNow(), + columnAll: time('column_all').default('22:12:12').notNull(), + column: time('column'), + column3: time('column3').array().notNull(), +}); + +export const allChars = pgTable('all_chars', { + columnAll: char('column_all', { length: 1 }).default('text').notNull(), + column: char('column', { length: 1 }), + column3: char('column3').array().notNull(), +}); + +export const allDoublePrecision = pgTable('all_double_precision', { + columnAll: doublePrecision('column_all').default(33.2).notNull(), + column: doublePrecision('column'), + column3: doublePrecision('column3').array().notNull(), +}); + +export const allJsonb = pgTable('all_jsonb', { + columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: jsonb('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + }), + column: jsonb('column'), + column3: jsonb('column3').array().notNull(), +}); + +export const allJson = pgTable('all_json', { + columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: json('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + foo: 'bar', + fe: 23, + }), + column: json('column'), + column3: json('column3').array().notNull(), +}); + +export const allIntegers = pgTable('all_integers', { + columnAll: integer('column_all').primaryKey(), + column: integer('column').default(1), + columnPrimary: integer('column_primary'), + column3: integer('column3').array().notNull(), +}); + +export const allNumerics = pgTable('all_numerics', { + columnAll: numeric('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: numeric('column'), + columnPrimary: numeric('column_primary').primaryKey().notNull(), + column3: numeric('column3').array().notNull(), +}); + +export const allCidr = pgTable('all_cidr', { + columnAll: cidr('column_all').notNull().array().generatedAlwaysAs(['0.0.0.0/0']), + column: cidr('column').default('0.0.0.0/0'), + columnPrimary: cidr('column_primary').primaryKey().notNull(), + column3: cidr('column3').array().notNull(), +}); + +export const allCustom = pgTable('all_custom', { + columnAll: citext('column_all').notNull().array().generatedAlwaysAs(['0.0.0.0/0']), + column: citext('column').default('test{}\'://`"'), + columnPrimary: citext('column_primary').primaryKey().notNull(), + column3: citext('column3').array().notNull(), +}); + +export const allInet = pgTable('all_inet', { + columnAll: inet('column_all').notNull().array().generatedAlwaysAs(['127.0.0.1']), + column: inet('column').default('127.0.0.1'), + columnPrimary: inet('column_primary').primaryKey().notNull(), + column3: inet('column3').array().notNull(), +}); + +export const allLine = pgTable('all_line', { + columnAll: line('column_all').notNull().array().generatedAlwaysAs([[1, 1, 1]]), + column: line('column').default([1, 1, 1]), + columnPrimary: line('column_primary').primaryKey().notNull(), + column3: line('column3').array().notNull(), +}); + +export const allMacaddr = pgTable('all_macaddr', { + columnAll: macaddr('column_all').notNull().array().generatedAlwaysAs(['08:00:2b:01:02:03']), + column: macaddr('column').default('08:00:2b:01:02:03'), + columnPrimary: macaddr('column_primary').primaryKey().notNull(), + column3: macaddr('column3').notNull().array(), +}); + +export const allMacaddr8 = pgTable('all_macaddr8', { + columnAll: macaddr8('column_all').notNull().array().generatedAlwaysAs(['08:00:2b:01:02:03:04:05']), + column: macaddr8('column').default('08:00:2b:01:02:03:04:05'), + columnPrimary: macaddr8('column_primary').primaryKey().notNull(), + column3: macaddr8('column3').notNull().array(), +}); + +export const allPoint = pgTable('all_point', { + columnAll: point('column_all', { mode: 'xy' }).notNull().array().generatedAlwaysAs([{ x: 1, y: 2 }]), + columnAll1: point('column_all1', { mode: 'tuple' }).notNull().array().generatedAlwaysAs([[1, 2]]), + column: point('column', { mode: 'xy' }).default({ x: 1, y: 2 }), + column1: point('column1', { mode: 'tuple' }).default([1, 2]), + columnPrimary: point('column_primary').primaryKey().notNull(), + column3: point('column3').notNull().array(), +}); + +export const allDecimals = pgTable('all_decimals', { + columnAll: decimal('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: decimal('column').array(), + columnPrimary: decimal('column_primary').primaryKey().notNull(), +}); + +export const allGeometry = pgTable('all_geometry', { + columnAll: geometry('column_all', { mode: 'xy', srid: 4326, type: 'point' }).default({ x: 30.5234, y: 50.4501 }) + .notNull(), + columnAll1: geometry('column_all1', { mode: 'xy', type: 'point' }).default({ x: 30.5234, y: 50.4501 }).notNull(), + columnAll2: geometry('column_all2', { mode: 'tuple', srid: 4326, type: 'point' }).default([30.5234, 50.4501]) + .notNull(), + columnAll3: geometry('column_all3', { mode: 'tuple', type: 'point' }).default([30.5234, 50.4501]).notNull(), + column: geometry('column').array(), + columnPrimary: geometry('column_primary').primaryKey().notNull(), +}); + +export const allBit = pgTable('all_bit', { + columnAll: bit('column_all', { dimensions: 1 }).default('1').notNull(), + columnAll1: bit('column_all1', { dimensions: 2 }).default('11').notNull(), + column: bit('column', { dimensions: 3 }).array(), + columnPrimary: bit('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allHalfvec = pgTable('all_halfvec', { + columnAll: halfvec('column_all', { dimensions: 1 }).default([0, -2, 3]).notNull(), + columnAll1: halfvec('column_all1', { dimensions: 2 }).default([0, -2, 3]).notNull(), + column: halfvec('column', { dimensions: 3 }).array(), + columnPrimary: halfvec('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allVec = pgTable('all_vec', { + columnAll: vector('column_all', { dimensions: 1 }).default([0, -2, 3]).notNull(), + columnAll1: vector('column_all1', { dimensions: 2 }).default([0, -2, 3]).notNull(), + columnAll2: vector('column_all2', { dimensions: 2 }).array().default([[0, -2, 3]]).notNull(), + columnPrimary: vector('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allSparcevec = pgTable('all_sparcevec', { + columnAll: sparsevec('column_all', { dimensions: 1 }).default('{1:-1,3:2,5:3}/5').notNull(), + columnAll1: sparsevec('column_all1', { dimensions: 2 }).default('{1:-1,3:2,5:3}/5').notNull(), + columnAll3: sparsevec('column_all3', { dimensions: 2 }).array().default(['{1:-1,3:2,5:3}/5']).notNull(), + columnPrimary: sparsevec('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); diff --git a/drizzle-kit/tests/postgres/snapshots/schema04new.ts b/drizzle-kit/tests/postgres/snapshots/schema04new.ts new file mode 100644 index 0000000000..b911e943cb --- /dev/null +++ b/drizzle-kit/tests/postgres/snapshots/schema04new.ts @@ -0,0 +1,596 @@ +// src/db/schema.ts +import { sql } from 'drizzle-orm'; +import { + bigint, + bigserial, + bit, + boolean, + char, + cidr, + customType, + date, + decimal, + doublePrecision, + foreignKey, + geometry, + halfvec, + index, + inet, + integer, + interval, + json, + jsonb, + line, + macaddr, + macaddr8, + numeric, + pgEnum, + pgMaterializedView, + pgSchema, + pgSequence, + pgTable, + pgView, + point, + primaryKey, + real, + serial, + smallint, + smallserial, + sparsevec, + text, + time, + timestamp, + uniqueIndex, + uuid, + varchar, + vector, +} from 'drizzle-orm/pg-core'; + +export const citext = customType<{ data: string }>({ + dataType() { + return 'citext'; + }, +}); + +export const customSchema = pgSchema('schemass'); +export const transactionStatusEnum = customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']); +export const enumname = pgEnum('enumname', ['three', 'two', 'one']); +export const test = pgEnum('test', ['ds']); +export const testHello = pgEnum('test_hello', ['ds']); + +export const invoiceSeqCustom = customSchema.sequence('invoice_seq', { + increment: 1, + startWith: 1000, + minValue: 1000, + cache: 1, + cycle: false, +}); +export const invoiceSeq = pgSequence('invoice_seq', { + increment: 1, + startWith: 1000, + minValue: 1000, + cache: 1, + cycle: false, +}); + +export const schemaTest = pgTable('schema_test', { + columnAll: uuid('column_all').defaultRandom(), + column: transactionStatusEnum('column').notNull(), +}); + +export const allSmallIntsCustom = customSchema.table( + 'schema_test2_custom', + { + column: smallint('column').notNull().array().generatedAlwaysAs([1]).default([124]), + column1: smallint('column1').default(1), + column2: smallint('column2').notNull().array().array(), + column3: smallint('column3').notNull().array().array(), + column4: smallint('column4').notNull().array().default([1]), + }, + ( + t, + ) => [ + uniqueIndex().on(t.column1), + uniqueIndex().on(t.column2), + uniqueIndex('testdfds').on(t.column3), + uniqueIndex('testdfds1').on(t.column4), + ], +); + +export const allEnumsCustom = customSchema.table( + 'all_enums_custom', + { + columnAll: enumname('column_all').default('three').notNull(), + column: enumname('columns').array().generatedAlwaysAs(['three']), + }, + (t: any) => [index('ds').on(t.column)], +); + +export const allTimestampsCustom = customSchema.table('all_timestamps_custom', { + columnDateNow: timestamp('column_date_now', { + precision: 1, + withTimezone: true, + mode: 'string', + }).defaultNow(), + columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), + column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), + column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), +}); + +export const allUuidsCustom = customSchema.table('all_uuids_custom', { + columnAll: uuid('column_all').defaultRandom().notNull(), + column: uuid('column'), +}); + +export const allDatesCustom = customSchema.table('all_dates_custom', { + column_date_now: date('column_date_now').defaultNow(), + column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), + column: date('column'), +}); + +export const allRealsCustom = customSchema.table('all_reals_custom', { + columnAll: real('column_all').default(32).notNull(), + column: real('column'), + columnPrimary: real('column_primary').primaryKey().notNull(), +}); + +export const allBigintsCustom = pgTable('all_bigints_custom', { + columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), + column: bigint('column', { mode: 'number' }), +}); + +export const allBigserialsCustom = customSchema.table('all_bigserials_custom', { + columnAll: bigserial('column_all', { mode: 'bigint' }).notNull(), + column: bigserial('column', { mode: 'bigint' }).notNull(), +}); + +export const allIntervalsCustom = customSchema.table('all_intervals_custom', { + columnAllConstrains: interval('column_all_constrains', { + fields: 'month', + }) + .default('1 mon') + .notNull(), + columnMinToSec: interval('column_min_to_sec', { + fields: 'minute to second', + }), + columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), + column: interval('column'), + column5: interval('column5', { + fields: 'minute to second', + precision: 3, + }), + column6: interval('column6'), +}); + +export const allSerialsCustom = customSchema.table('all_serials_custom', { + columnAll: serial('column_all').notNull(), + column: serial('column').notNull(), +}); + +export const allSmallserialsCustom = pgTable('all_smallserials_custom', { + columnAll: smallserial('column_all').notNull(), + column: smallserial('column').notNull(), +}); + +export const allTextsCustom = customSchema.table( + 'all_texts_custom', + { + columnAll: text('column_all').default('text').notNull(), + column: text('columns').primaryKey(), + }, + (t: any) => [index('test').on(t.column)], +); + +export const allBoolsCustom = customSchema.table('all_bools_custom', { + columnAll: boolean('column_all').default(true).notNull(), + column: boolean('column'), +}); + +export const allVarcharsCustom = customSchema.table('all_varchars_custom', { + columnAll: varchar('column_all').default('text').notNull(), + column: varchar('column', { length: 200 }), +}); + +export const allTimesCustom = customSchema.table('all_times_custom', { + columnDateNow: time('column_date_now').defaultNow(), + columnAll: time('column_all').default('22:12:12').notNull(), + column: time('column'), +}); + +export const allCharsCustom = customSchema.table('all_chars_custom', { + columnAll: char('column_all', { length: 1 }).default('text').notNull(), + column: char('column', { length: 1 }), +}); + +export const allDoublePrecisionCustom = customSchema.table('all_double_precision_custom', { + columnAll: doublePrecision('column_all').default(33.2).notNull(), + column: doublePrecision('column'), +}); + +export const allJsonbCustom = customSchema.table('all_jsonb_custom', { + columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: jsonb('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + }), + column: jsonb('column'), +}); + +export const allJsonCustom = customSchema.table('all_json_custom', { + columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: json('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + foo: 'bar', + fe: 23, + }), + column: json('column'), +}); + +export const allIntegersCustom = customSchema.table('all_integers_custom', { + columnAll: integer('column_all').primaryKey(), + column: integer('column'), + columnPrimary: integer('column_primary'), +}); + +export const allNumericsCustom = customSchema.table('all_numerics_custom', { + columnAll: numeric('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: numeric('column'), + columnPrimary: numeric('column_primary').primaryKey().notNull(), +}); + +export const allCidrCustom = customSchema.table('all_cidr_custom', { + columnAll: cidr('column_all').notNull().array().generatedAlwaysAs(['0.0.0.0/0']), + column: cidr('column').default('0.0.0.0/0'), + columnPrimary: cidr('column_primary').primaryKey().notNull(), +}); + +export const allCustomCustom = customSchema.table('all_custom_custom', { + columnAll: citext('column_all').notNull().array().generatedAlwaysAs(['0.0.0.0/0']), + column: citext('column').default('test{}\'://`"'), + columnPrimary: citext('column_primary').primaryKey().notNull(), +}); + +export const allInetCustom = customSchema.table('all_inet_custom', { + columnAll: inet('column_all').notNull().array().generatedAlwaysAs(['127.0.0.1']), + column: inet('column').default('127.0.0.1'), + columnPrimary: inet('column_primary').primaryKey().notNull(), +}); + +export const allLineCustom = customSchema.table('all_line_custom', { + columnAll: line('column_all').notNull().array().generatedAlwaysAs([[1, 1, 1]]), + column: line('column').default([1, 1, 1]), + columnPrimary: line('column_primary').primaryKey().notNull(), +}); + +export const allMacaddrCustom = customSchema.table('all_macaddr_custom', { + columnAll: macaddr('column_all').notNull().array().generatedAlwaysAs(['08:00:2b:01:02:03']), + column: macaddr('column').default('08:00:2b:01:02:03'), + columnPrimary: macaddr('column_primary').primaryKey().notNull(), +}); + +export const allMacaddr8Custom = customSchema.table('all_macaddr8_custom', { + columnAll: macaddr('column_all').notNull().array().generatedAlwaysAs(['08:00:2b:01:02:03:04:05']), + column: macaddr('column').default('08:00:2b:01:02:03:04:05'), + columnPrimary: macaddr('column_primary').primaryKey().notNull(), +}); + +export const allPointCustom = customSchema.table('all_point_custom', { + columnAll: point('column_all', { mode: 'xy' }).notNull().array().generatedAlwaysAs([{ x: 1, y: 2 }]), + columnAll1: point('column_all1', { mode: 'tuple' }).notNull().array().generatedAlwaysAs([[1, 2]]), + column: point('column', { mode: 'xy' }).default({ x: 1, y: 2 }), + column1: point('column1', { mode: 'tuple' }).default([1, 2]), + columnPrimary: point('column_primary').primaryKey().notNull(), +}); + +export const allDecimalsCustom = customSchema.table('all_decimals_custom', { + columnAll: decimal('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: decimal('column'), + columnPrimary: decimal('column_primary').primaryKey().notNull(), +}); + +export const allGeometryCustom = pgTable('all_geometry_custom', { + columnAll: geometry('column_all', { mode: 'xy', srid: 4326, type: 'point' }).default({ x: 30.5234, y: 50.4501 }) + .notNull(), + columnAll1: geometry('column_all1', { mode: 'xy', type: 'point' }).default({ x: 30.5234, y: 50.4501 }).notNull(), + columnAll2: geometry('column_all2', { mode: 'tuple', srid: 4326, type: 'point' }).default([30.5234, 50.4501]) + .notNull(), + columnAll3: geometry('column_all3', { mode: 'tuple', type: 'point' }).default([30.5234, 50.4501]).notNull(), + column: geometry('column').array(), + columnPrimary: geometry('column_primary').primaryKey().notNull(), +}); + +export const allBitCustom = pgTable('all_bit_custom', { + columnAll: bit('column_all', { dimensions: 1 }).default('1').notNull(), + columnAll1: bit('column_all1', { dimensions: 2 }).default('11').notNull(), + column: bit('column', { dimensions: 3 }).array(), + columnPrimary: bit('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allHalfvecCustom = pgTable('all_halfvec_custom', { + columnAll: halfvec('column_all', { dimensions: 1 }).default([0, -2, 3]).notNull(), + columnAll1: halfvec('column_all1', { dimensions: 2 }).default([0, -2, 3]).notNull(), + column: halfvec('column', { dimensions: 3 }).array(), + columnPrimary: halfvec('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allVecCustom = pgTable('all_vec_custom', { + columnAll: vector('column_all', { dimensions: 1 }).default([0, -2, 3]).notNull(), + columnAll1: vector('column_all1', { dimensions: 2 }).default([0, -2, 3]).notNull(), + columnAll2: vector('column_all2', { dimensions: 2 }).array().default([[0, -2, 3]]).notNull(), + columnPrimary: vector('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allSparcevecCustom = pgTable('all_sparcevec_custom', { + columnAll: sparsevec('column_all', { dimensions: 1 }).default('{1:-1,3:2,5:3}/5').notNull(), + columnAll1: sparsevec('column_all1', { dimensions: 2 }).default('{1:-1,3:2,5:3}/5').notNull(), + columnAll3: sparsevec('column_all3', { dimensions: 2 }).array().default(['{1:-1,3:2,5:3}/5']).notNull(), + columnPrimary: sparsevec('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allSmallInts = pgTable( + 'schema_test2', + { + columnAll: smallint('column_all').default(124).notNull(), + column: smallint('columns').array(), + column1: smallint('column1').array().array(), + column2: smallint('column2').array().array(), + column3: smallint('column3').array(), + column4: smallint('column4').array().notNull(), + }, + (t: any) => [uniqueIndex('testdfds').on(t.column)], +); + +export const allEnums = pgTable( + 'all_enums', + { + columnAll: enumname('column_all').default('three').notNull(), + column: enumname('columns'), + column3: enumname('column3').array().notNull(), + }, + (t: any) => [index('ds').on(t.column)], +); + +export const allTimestamps = pgTable('all_timestamps', { + columnDateNow: timestamp('column_date_now', { + precision: 1, + withTimezone: true, + mode: 'string', + }).defaultNow(), + columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), + column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), + column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), + column3: timestamp('column3').array().notNull(), +}); + +export const allUuids = pgTable('all_uuids', { + columnAll: uuid('column_all').defaultRandom().notNull(), + column: uuid('column'), + column3: uuid('column3').array().notNull(), +}); + +export const allDates = pgTable('all_dates', { + column_date_now: date('column_date_now').defaultNow(), + column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), + column: date('column'), + column3: date('column3').array().notNull(), +}); + +export const allReals = pgTable('all_reals', { + columnAll: real('column_all').default(32).notNull(), + column: real('column'), + columnPrimary: real('column_primary').primaryKey().notNull(), + column3: real('column3').array().notNull(), +}); + +export const allBigints = pgTable('all_bigints', { + columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), + column: bigint('column', { mode: 'number' }), + column3: bigint('column3', { mode: 'number' }).array().notNull(), +}); + +export const allBigserials = pgTable('all_bigserials', { + columnAll: bigserial('column_all', { mode: 'bigint' }).notNull(), + column: bigserial('column', { mode: 'bigint' }).notNull(), + column3: bigserial('column3', { mode: 'number' }).array().notNull(), +}); + +export const allIntervals = pgTable('all_intervals', { + columnAllConstrains: interval('column_all_constrains', { + fields: 'month', + }) + .default('1 mon') + .notNull(), + columnMinToSec: interval('column_min_to_sec', { + fields: 'minute to second', + }), + columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), + column: interval('column'), + column5: interval('column5', { + fields: 'minute to second', + precision: 3, + }), + column6: interval('column6'), + column3: interval('column3').array().notNull(), +}); + +export const allSerials = pgTable('all_serials', { + columnAll: serial('column_all').notNull(), + column: serial('column').notNull(), + column3: serial('column3').array().notNull(), +}); + +export const allSmallserials = pgTable('all_smallserials', { + columnAll: smallserial('column_all').notNull(), + column: smallserial('column').notNull(), + column3: smallserial('column3').array().notNull(), +}); + +export const allTexts = pgTable( + 'all_texts', + { + columnAll: text('column_all').default('text').notNull(), + column: text('columns').primaryKey(), + column3: text('column3').array().notNull(), + }, + (t: any) => [index('test').on(t.column)], +); + +export const allBools = pgTable('all_bools', { + columnAll: boolean('column_all').default(true).notNull(), + column: boolean('column'), + column3: boolean('column3').array().notNull(), +}); + +export const allVarchars = pgTable('all_varchars', { + columnAll: varchar('column_all').default('text').notNull(), + column: varchar('column', { length: 200 }), + column3: varchar('column3').array().notNull(), +}); + +export const allTimes = pgTable('all_times', { + columnDateNow: time('column_date_now').defaultNow(), + columnAll: time('column_all').default('22:12:12').notNull(), + column: time('column'), + column3: time('column3').array().notNull(), +}); + +export const allChars = pgTable('all_chars', { + columnAll: char('column_all', { length: 1 }).default('text').notNull(), + column: char('column', { length: 1 }), + column3: char('column3').array().notNull(), +}); + +export const allDoublePrecision = pgTable('all_double_precision', { + columnAll: doublePrecision('column_all').default(33.2).notNull(), + column: doublePrecision('column'), + column3: doublePrecision('column3').array().notNull(), +}); + +export const allJsonb = pgTable('all_jsonb', { + columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: jsonb('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + }), + column: jsonb('column'), + column3: jsonb('column3').array().notNull(), +}); + +export const allJson = pgTable('all_json', { + columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), + columnDefaultArray: json('column_default_array').default({ + hello: { 'world world': ['foo', 'bar'] }, + foo: 'bar', + fe: 23, + }), + column: json('column'), + column3: json('column3').array().notNull(), +}); + +export const allIntegers = pgTable('all_integers', { + columnAll: integer('column_all').primaryKey(), + column: integer('column').default(1), + columnPrimary: integer('column_primary'), + column3: integer('column3').array().notNull(), +}); + +export const allNumerics = pgTable('all_numerics', { + columnAll: numeric('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: numeric('column'), + columnPrimary: numeric('column_primary').primaryKey().notNull(), + column3: numeric('column3').array().notNull(), +}); + +export const allCidr = pgTable('all_cidr', { + columnAll: cidr('column_all').notNull().array().generatedAlwaysAs(['0.0.0.0/0']), + column: cidr('column').default('0.0.0.0/0'), + columnPrimary: cidr('column_primary').primaryKey().notNull(), + column3: cidr('column3').array().notNull(), +}); + +export const allCustom = pgTable('all_custom', { + columnAll: citext('column_all').notNull().array().generatedAlwaysAs(['0.0.0.0/0']), + column: citext('column').default('test{}\'://`"'), + columnPrimary: citext('column_primary').primaryKey().notNull(), + column3: citext('column3').array().notNull(), +}); + +export const allInet = pgTable('all_inet', { + columnAll: inet('column_all').notNull().array().generatedAlwaysAs(['127.0.0.1']), + column: inet('column').default('127.0.0.1'), + columnPrimary: inet('column_primary').primaryKey().notNull(), + column3: inet('column3').array().notNull(), +}); + +export const allLine = pgTable('all_line', { + columnAll: line('column_all').notNull().array().generatedAlwaysAs([[1, 1, 1]]), + column: line('column').default([1, 1, 1]), + columnPrimary: line('column_primary').primaryKey().notNull(), + column3: line('column3').array().notNull(), +}); + +export const allMacaddr = pgTable('all_macaddr', { + columnAll: macaddr('column_all').notNull().array().generatedAlwaysAs(['08:00:2b:01:02:03']), + column: macaddr('column').default('08:00:2b:01:02:03'), + columnPrimary: macaddr('column_primary').primaryKey().notNull(), + column3: macaddr('column3').notNull().array(), +}); + +export const allMacaddr8 = pgTable('all_macaddr8', { + columnAll: macaddr8('column_all').notNull().array().generatedAlwaysAs(['08:00:2b:01:02:03:04:05']), + column: macaddr8('column').default('08:00:2b:01:02:03:04:05'), + columnPrimary: macaddr8('column_primary').primaryKey().notNull(), + column3: macaddr8('column3').notNull().array(), +}); + +export const allPoint = pgTable('all_point', { + columnAll: point('column_all', { mode: 'xy' }).notNull().array().generatedAlwaysAs([{ x: 1, y: 2 }]), + columnAll1: point('column_all1', { mode: 'tuple' }).notNull().array().generatedAlwaysAs([[1, 2]]), + column: point('column', { mode: 'xy' }).default({ x: 1, y: 2 }), + column1: point('column1', { mode: 'tuple' }).default([1, 2]), + columnPrimary: point('column_primary').primaryKey().notNull(), + column3: point('column3').notNull().array(), +}); + +export const allDecimals = pgTable('all_decimals', { + columnAll: decimal('column_all', { precision: 1, scale: 1 }).default('32').notNull(), + column: decimal('column').array(), + columnPrimary: decimal('column_primary').primaryKey().notNull(), +}); + +export const allGeometry = pgTable('all_geometry', { + columnAll: geometry('column_all', { mode: 'xy', srid: 4326, type: 'point' }).default({ x: 30.5234, y: 50.4501 }) + .notNull(), + columnAll1: geometry('column_all1', { mode: 'xy', type: 'point' }).default({ x: 30.5234, y: 50.4501 }).notNull(), + columnAll2: geometry('column_all2', { mode: 'tuple', srid: 4326, type: 'point' }).default([30.5234, 50.4501]) + .notNull(), + columnAll3: geometry('column_all3', { mode: 'tuple', type: 'point' }).default([30.5234, 50.4501]).notNull(), + column: geometry('column').array(), + columnPrimary: geometry('column_primary').primaryKey().notNull(), +}); + +export const allBit = pgTable('all_bit', { + columnAll: bit('column_all', { dimensions: 1 }).default('1').notNull(), + columnAll1: bit('column_all1', { dimensions: 2 }).default('11').notNull(), + column: bit('column', { dimensions: 3 }).array(), + columnPrimary: bit('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allHalfvec = pgTable('all_halfvec', { + columnAll: halfvec('column_all', { dimensions: 1 }).default([0, -2, 3]).notNull(), + columnAll1: halfvec('column_all1', { dimensions: 2 }).default([0, -2, 3]).notNull(), + column: halfvec('column', { dimensions: 3 }).array(), + columnPrimary: halfvec('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allVec = pgTable('all_vec', { + columnAll: vector('column_all', { dimensions: 1 }).default([0, -2, 3]).notNull(), + columnAll1: vector('column_all1', { dimensions: 2 }).default([0, -2, 3]).notNull(), + columnAll2: vector('column_all2', { dimensions: 2 }).array().default([[0, -2, 3]]).notNull(), + columnPrimary: vector('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); + +export const allSparcevec = pgTable('all_sparcevec', { + columnAll: sparsevec('column_all', { dimensions: 1 }).default('{1:-1,3:2,5:3}/5').notNull(), + columnAll1: sparsevec('column_all1', { dimensions: 2 }).default('{1:-1,3:2,5:3}/5').notNull(), + columnAll3: sparsevec('column_all3', { dimensions: 2 }).array().default(['{1:-1,3:2,5:3}/5']).notNull(), + columnPrimary: sparsevec('column_primary', { dimensions: 5 }).primaryKey().notNull(), +}); diff --git a/drizzle-kit/tests/push/common.ts b/drizzle-kit/tests/push/common.ts index 627070f117..1d8b817184 100644 --- a/drizzle-kit/tests/push/common.ts +++ b/drizzle-kit/tests/push/common.ts @@ -38,12 +38,9 @@ export const run = ( test('Indexes properties that should not trigger push changes', () => suite.indexesToBeNotTriggered(context)); test('Indexes test case #1', () => suite.indexesTestCase1(context)); test('Drop column', () => suite.case1()); - test('Add not null to a column', () => suite.addNotNull()); test('Add not null to a column with null data. Should rollback', () => suite.addNotNullWithDataNoRollback()); - test('Add basic sequences', () => suite.addBasicSequences()); - test('Add generated column', () => suite.addGeneratedColumn(context)); test('Add generated constraint to an existing column', () => suite.addGeneratedToColumn(context)); test('Drop generated constraint from a column', () => suite.dropGeneratedConstraint(context)); diff --git a/drizzle-kit/tests/push/libsql.test.ts b/drizzle-kit/tests/push/libsql.test.ts deleted file mode 100644 index 2ae2e38110..0000000000 --- a/drizzle-kit/tests/push/libsql.test.ts +++ /dev/null @@ -1,1400 +0,0 @@ -import { createClient } from '@libsql/client'; -import chalk from 'chalk'; -import { sql } from 'drizzle-orm'; -import { - blob, - check, - foreignKey, - getTableConfig, - index, - int, - integer, - numeric, - real, - sqliteTable, - sqliteView, - text, - uniqueIndex, -} from 'drizzle-orm/sqlite-core'; -import { diffTestSchemasPushLibSQL } from 'tests/schemaDiffer'; -import { expect, test } from 'vitest'; - -test('nothing changed in schema', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const users = sqliteTable('users', { - id: integer('id').primaryKey().notNull(), - name: text('name').notNull(), - email: text('email'), - textJson: text('text_json', { mode: 'json' }), - blobJon: blob('blob_json', { mode: 'json' }), - blobBigInt: blob('blob_bigint', { mode: 'bigint' }), - numeric: numeric('numeric'), - createdAt: integer('created_at', { mode: 'timestamp' }), - createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), - real: real('real'), - text: text('text', { length: 255 }), - role: text('role', { enum: ['admin', 'user'] }).default('user'), - isConfirmed: integer('is_confirmed', { - mode: 'boolean', - }), - }); - - const schema1 = { - users, - - customers: sqliteTable('customers', { - id: integer('id').primaryKey(), - address: text('address').notNull(), - isConfirmed: integer('is_confirmed', { mode: 'boolean' }), - registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) - .notNull() - .$defaultFn(() => new Date()), - userId: integer('user_id') - .references(() => users.id) - .notNull(), - }), - - posts: sqliteTable('posts', { - id: integer('id').primaryKey(), - content: text('content'), - authorId: integer('author_id'), - }), - }; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL(turso, schema1, schema1, [], false); - expect(sqlStatements.length).toBe(0); - expect(statements.length).toBe(0); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); -}); - -test('added, dropped index', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const users = sqliteTable('users', { - id: integer('id').primaryKey().notNull(), - name: text('name').notNull(), - email: text('email'), - textJson: text('text_json', { mode: 'json' }), - blobJon: blob('blob_json', { mode: 'json' }), - blobBigInt: blob('blob_bigint', { mode: 'bigint' }), - numeric: numeric('numeric'), - createdAt: integer('created_at', { mode: 'timestamp' }), - createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), - real: real('real'), - text: text('text', { length: 255 }), - role: text('role', { enum: ['admin', 'user'] }).default('user'), - isConfirmed: integer('is_confirmed', { - mode: 'boolean', - }), - }); - - const schema1 = { - users, - customers: sqliteTable( - 'customers', - { - id: integer('id').primaryKey(), - address: text('address').notNull(), - isConfirmed: integer('is_confirmed', { mode: 'boolean' }), - registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) - .notNull() - .$defaultFn(() => new Date()), - userId: integer('user_id').notNull(), - }, - (table) => ({ - uniqueIndex: uniqueIndex('customers_address_unique').on(table.address), - }), - ), - - posts: sqliteTable('posts', { - id: integer('id').primaryKey(), - content: text('content'), - authorId: integer('author_id'), - }), - }; - - const schema2 = { - users, - customers: sqliteTable( - 'customers', - { - id: integer('id').primaryKey(), - address: text('address').notNull(), - isConfirmed: integer('is_confirmed', { mode: 'boolean' }), - registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) - .notNull() - .$defaultFn(() => new Date()), - userId: integer('user_id').notNull(), - }, - (table) => ({ - uniqueIndex: uniqueIndex('customers_is_confirmed_unique').on( - table.isConfirmed, - ), - }), - ), - - posts: sqliteTable('posts', { - id: integer('id').primaryKey(), - content: text('content'), - authorId: integer('author_id'), - }), - }; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, [], false); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'drop_index', - tableName: 'customers', - data: 'customers_address_unique;address;true;', - schema: '', - }); - expect(statements[1]).toStrictEqual({ - type: 'create_index', - tableName: 'customers', - data: 'customers_is_confirmed_unique;is_confirmed;true;', - schema: '', - internal: { indexes: {} }, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `DROP INDEX \`customers_address_unique\`;`, - ); - expect(sqlStatements[1]).toBe( - `CREATE UNIQUE INDEX \`customers_is_confirmed_unique\` ON \`customers\` (\`is_confirmed\`);`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('added column not null and without default to table with data', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }), - }; - - const schema2 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - age: integer('age').notNull(), - }), - }; - - const table = getTableConfig(schema1.companies); - - const seedStatements = [ - `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('drizzle');`, - `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('turso');`, - ]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'companies', - column: { - name: 'age', - type: 'integer', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - referenceData: undefined, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`delete from companies;`); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about to add not-null ${ - chalk.underline( - 'age', - ) - } column without default value, which contains 2 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(1); - expect(tablesToTruncate![0]).toBe('companies'); -}); - -test('added column not null and without default to table without data', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }), - }; - - const schema2 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - age: integer('age').notNull(), - }), - }; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, [], false); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'companies', - column: { - name: 'age', - type: 'integer', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - referenceData: undefined, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`, - ); - - expect(infoToPrint!.length).toBe(0); - expect(columnsToRemove!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('drop autoincrement. drop column with data', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema2 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: false }), - }), - }; - - const table = getTableConfig(schema1.companies); - const seedStatements = [ - `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`, - `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`, - ]; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - tableName: 'companies', - columns: [ - { - name: 'id', - type: 'integer', - autoincrement: false, - notNull: true, - primaryKey: true, - generated: undefined, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( - `CREATE TABLE \`__new_companies\` ( -\t\`id\` integer PRIMARY KEY NOT NULL -);\n`, - ); - expect(sqlStatements[1]).toBe(`INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`); - expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`); - expect(sqlStatements[3]).toBe( - `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, - ); - - expect(columnsToRemove!.length).toBe(1); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about to delete ${ - chalk.underline( - 'name', - ) - } column in companies table with 2 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('change autoincrement. table is part of foreign key', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const companies1 = sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: true }), - }); - const users1 = sqliteTable('users', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name').unique(), - companyId: integer('company_id').references(() => companies1.id), - }); - const schema1 = { - companies: companies1, - users: users1, - }; - - const companies2 = sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: false }), - }); - const users2 = sqliteTable('users', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name').unique(), - companyId: integer('company_id').references(() => companies2.id), - }); - const schema2 = { - companies: companies2, - users: users2, - }; - - const { name: usersTableName } = getTableConfig(users1); - const { name: companiesTableName } = getTableConfig(companies1); - const seedStatements = [ - `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('drizzle');`, - `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('turso');`, - `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES (1);`, - `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES (2);`, - ]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - tableName: 'companies', - columns: [ - { - name: 'id', - type: 'integer', - autoincrement: false, - notNull: true, - primaryKey: true, - generated: undefined, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( - `CREATE TABLE \`__new_companies\` ( -\t\`id\` integer PRIMARY KEY NOT NULL -);\n`, - ); - expect(sqlStatements[1]).toBe( - `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`, - ); - expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`); - expect(sqlStatements[3]).toBe( - `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('drop not null, add not null', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }), - posts: sqliteTable( - 'posts', - { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - userId: int('user_id'), - }, - ), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - posts: sqliteTable( - 'posts', - { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - userId: int('user_id'), - }, - ), - }; - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - ); - - expect(statements!.length).toBe(2); - expect(statements![0]).toStrictEqual({ - columnAutoIncrement: false, - columnDefault: undefined, - columnName: 'name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_notnull', - }); - expect(statements![1]).toStrictEqual({ - columnAutoIncrement: false, - columnDefault: undefined, - columnName: 'name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'posts', - type: 'alter_table_alter_column_set_notnull', - }); - expect(sqlStatements!.length).toBe(2); - expect(sqlStatements![0]).toBe(`ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`); - expect(sqlStatements![1]).toBe(`ALTER TABLE \`posts\` ALTER COLUMN "name" TO "name" text NOT NULL;`); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('drop table with data', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }), - posts: sqliteTable( - 'posts', - { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - userId: int('user_id'), - }, - ), - }; - - const schema2 = { - posts: sqliteTable( - 'posts', - { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - userId: int('user_id'), - }, - ), - }; - - const seedStatements = [ - `INSERT INTO \`users\` ("name") VALUES ('drizzle')`, - ]; - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - policies: [], - schema: undefined, - tableName: 'users', - type: 'drop_table', - }); - - expect(sqlStatements!.length).toBe(1); - expect(sqlStatements![0]).toBe(`DROP TABLE \`users\`;`); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe(`· You're about to delete ${chalk.underline('users')} table with 1 items`); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(1); - expect(tablesToRemove![0]).toBe('users'); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('recreate table with nested references', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - let users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - age: integer('age'), - }); - let subscriptions = sqliteTable('subscriptions', { - id: int('id').primaryKey({ autoIncrement: true }), - userId: integer('user_id').references(() => users.id), - customerId: text('customer_id'), - }); - const schema1 = { - users: users, - subscriptions: subscriptions, - subscriptionMetadata: sqliteTable('subscriptions_metadata', { - id: int('id').primaryKey({ autoIncrement: true }), - subscriptionId: text('subscription_id').references( - () => subscriptions.id, - ), - }), - }; - - users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }); - const schema2 = { - users: users, - subscriptions: subscriptions, - subscriptionMetadata: sqliteTable('subscriptions_metadata', { - id: int('id').primaryKey({ autoIncrement: true }), - subscriptionId: text('subscription_id').references( - () => subscriptions.id, - ), - }), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, []); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements!.length).toBe(4); - expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer -);\n`); - expect(sqlStatements![1]).toBe( - `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`, - ); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('recreate table with added column not null and without default', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - age: integer('age'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - newColumn: text('new_column').notNull(), - }), - }; - - const seedStatements = [ - `INSERT INTO \`users\` ("name", "age") VALUES ('drizzle', 12)`, - `INSERT INTO \`users\` ("name", "age") VALUES ('turso', 12)`, - ]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'new_column', - notNull: true, - generated: undefined, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements!.length).toBe(4); - expect(sqlStatements[0]).toBe('DELETE FROM \`users\`;'); - expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer, -\t\`new_column\` text NOT NULL -);\n`); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about to add not-null ${ - chalk.underline('new_column') - } column without default value to table, which contains 2 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(1); - expect(tablesToTruncate![0]).toBe('users'); -}); - -test('set not null with index', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }, (table) => ({ - someIndex: index('users_name_index').on(table.name), - })), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }, (table) => ({ - someIndex: index('users_name_index').on(table.name), - })), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columnAutoIncrement: false, - columnDefault: undefined, - columnName: 'name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_notnull', - }); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_set_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: false, - }); - - expect(sqlStatements.length).toBe(3); - expect(sqlStatements[0]).toBe( - `DROP INDEX "users_name_index";`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`, - ); - expect(sqlStatements[2]).toBe( - `CREATE INDEX \`users_name_index\` ON \`users\` (\`name\`);`, - ); - expect(columnsToRemove!.length).toBe(0), expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('drop not null with two indexes', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - age: int('age').notNull(), - }, (table) => ({ - someUniqeIndex: uniqueIndex('users_name_unique').on(table.name), - someIndex: index('users_age_index').on(table.age), - })), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - age: int('age').notNull(), - }, (table) => ({ - someUniqeIndex: uniqueIndex('users_name_unique').on(table.name), - someIndex: index('users_age_index').on(table.age), - })), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_alter_column_drop_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }); - - expect(sqlStatements.length).toBe(5); - expect(sqlStatements[0]).toBe( - `DROP INDEX "users_name_unique";`, - ); - expect(sqlStatements[1]).toBe( - `DROP INDEX "users_age_index";`, - ); - expect(sqlStatements[2]).toBe( - `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`, - ); - expect(sqlStatements[3]).toBe( - `CREATE UNIQUE INDEX \`users_name_unique\` ON \`users\` (\`name\`);`, - ); - expect(sqlStatements[4]).toBe( - `CREATE INDEX \`users_age_index\` ON \`users\` (\`age\`);`, - ); - expect(columnsToRemove!.length).toBe(0), expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('add check constraint to table', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: ['some_check;"users"."age" > 21'], - }); - - expect(sqlStatements!.length).toBe(4); - expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer, -\tCONSTRAINT "some_check" CHECK("__new_users"."age" > 21) -);\n`); - expect(sqlStatements[1]).toBe( - 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', - ); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('drop check constraint', async (t) => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements!.length).toBe(4); - expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer -);\n`); - expect(sqlStatements[1]).toBe( - 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', - ); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('db has checks. Push with same names', async () => { - const turso = createClient({ - url: ':memory:', - }); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`some new value`), - })), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - false, - [], - ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('create view', async () => { - const turso = createClient({ - url: ':memory:', - }); - - const table = sqliteTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - }; - - const schema2 = { - test: table, - view: sqliteView('view').as((qb) => qb.select().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - ); - - expect(statements).toStrictEqual([ - { - definition: 'select "id" from "test"', - name: 'view', - type: 'sqlite_create_view', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `CREATE VIEW \`view\` AS select "id" from "test";`, - ]); -}); - -test('drop view', async () => { - const turso = createClient({ - url: ':memory:', - }); - - const table = sqliteTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: sqliteView('view').as((qb) => qb.select().from(table)), - }; - - const schema2 = { - test: table, - }; - - const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - ); - - expect(statements).toStrictEqual([ - { - name: 'view', - type: 'drop_view', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'DROP VIEW \`view\`;', - ]); -}); - -test('alter view ".as"', async () => { - const turso = createClient({ - url: ':memory:', - }); - - const table = sqliteTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: sqliteView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), - }; - - const schema2 = { - test: table, - view: sqliteView('view').as((qb) => qb.select().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( - turso, - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); diff --git a/drizzle-kit/tests/push/mysql-push.test.ts b/drizzle-kit/tests/push/mysql-push.test.ts deleted file mode 100644 index ba64ccddb0..0000000000 --- a/drizzle-kit/tests/push/mysql-push.test.ts +++ /dev/null @@ -1,345 +0,0 @@ -import Docker from 'dockerode'; -import { sql } from 'drizzle-orm'; -import { check, int, mysqlTable, mysqlView } from 'drizzle-orm/mysql-core'; -import fs from 'fs'; -import getPort from 'get-port'; -import { Connection, createConnection } from 'mysql2/promise'; -import { diffTestSchemasPushMysql } from 'tests/schemaDiffer'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, expect, test } from 'vitest'; - -let client: Connection; -let mysqlContainer: Docker.Container; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -beforeAll(async () => { - const connectionString = process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await createConnection(connectionString); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); - throw lastError; - } -}); - -afterAll(async () => { - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); -}); - -if (!fs.existsSync('tests/push/mysql')) { - fs.mkdirSync('tests/push/mysql'); -} - -test('add check constraint to table', async () => { - const schema1 = { - test: mysqlTable('test', { - id: int('id').primaryKey(), - values: int('values'), - }), - }; - const schema2 = { - test: mysqlTable('test', { - id: int('id').primaryKey(), - values: int('values'), - }, (table) => ({ - checkConstraint1: check('some_check1', sql`${table.values} < 100`), - checkConstraint2: check('some_check2', sql`'test' < 100`), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - type: 'create_check_constraint', - tableName: 'test', - schema: '', - data: 'some_check1;\`test\`.\`values\` < 100', - }, - { - data: "some_check2;'test' < 100", - schema: '', - tableName: 'test', - type: 'create_check_constraint', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE \`test\` ADD CONSTRAINT \`some_check1\` CHECK (\`test\`.\`values\` < 100);', - `ALTER TABLE \`test\` ADD CONSTRAINT \`some_check2\` CHECK ('test' < 100);`, - ]); - - await client.query(`DROP TABLE \`test\`;`); -}); - -test('drop check constraint to table', async () => { - const schema1 = { - test: mysqlTable('test', { - id: int('id').primaryKey(), - values: int('values'), - }, (table) => ({ - checkConstraint1: check('some_check1', sql`${table.values} < 100`), - checkConstraint2: check('some_check2', sql`'test' < 100`), - })), - }; - const schema2 = { - test: mysqlTable('test', { - id: int('id').primaryKey(), - values: int('values'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - type: 'delete_check_constraint', - tableName: 'test', - schema: '', - constraintName: 'some_check1', - }, - { - constraintName: 'some_check2', - schema: '', - tableName: 'test', - type: 'delete_check_constraint', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE \`test\` DROP CONSTRAINT \`some_check1\`;', - `ALTER TABLE \`test\` DROP CONSTRAINT \`some_check2\`;`, - ]); - - await client.query(`DROP TABLE \`test\`;`); -}); - -test('db has checks. Push with same names', async () => { - const schema1 = { - test: mysqlTable('test', { - id: int('id').primaryKey(), - values: int('values').default(1), - }, (table) => ({ - checkConstraint: check('some_check', sql`${table.values} < 100`), - })), - }; - const schema2 = { - test: mysqlTable('test', { - id: int('id').primaryKey(), - values: int('values').default(1), - }, (table) => ({ - checkConstraint: check('some_check', sql`some new value`), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - client, - schema1, - schema2, - [], - 'drizzle', - ); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); - - await client.query(`DROP TABLE \`test\`;`); -}); - -test('create view', async () => { - const table = mysqlTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - }; - - const schema2 = { - test: table, - view: mysqlView('view').as((qb) => qb.select().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - definition: 'select \`id\` from \`test\`', - name: 'view', - type: 'mysql_create_view', - replace: false, - sqlSecurity: 'definer', - withCheckOption: undefined, - algorithm: 'undefined', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `CREATE ALGORITHM = undefined -SQL SECURITY definer -VIEW \`view\` AS (select \`id\` from \`test\`);`, - ]); - - await client.query(`DROP TABLE \`test\`;`); -}); - -test('drop view', async () => { - const table = mysqlTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: mysqlView('view').as((qb) => qb.select().from(table)), - }; - - const schema2 = { - test: table, - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - name: 'view', - type: 'drop_view', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'DROP VIEW \`view\`;', - ]); - await client.query(`DROP TABLE \`test\`;`); - await client.query(`DROP VIEW \`view\`;`); -}); - -test('alter view ".as"', async () => { - const table = mysqlTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: mysqlView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), - }; - - const schema2 = { - test: table, - view: mysqlView('view').as((qb) => qb.select().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); - - await client.query(`DROP TABLE \`test\`;`); - await client.query(`DROP VIEW \`view\`;`); -}); - -test('alter meta options with distinct in definition', async () => { - const table = mysqlTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: mysqlView('view').withCheckOption('cascaded').sqlSecurity('definer').algorithm('merge').as(( - qb, - ) => qb.selectDistinct().from(table).where(sql`${table.id} = 1`)), - }; - - const schema2 = { - test: table, - view: mysqlView('view').withCheckOption('cascaded').sqlSecurity('definer').algorithm('undefined').as((qb) => - qb.selectDistinct().from(table) - ), - }; - - await expect(diffTestSchemasPushMysql( - client, - schema1, - schema2, - [], - 'drizzle', - false, - )).rejects.toThrowError(); - - await client.query(`DROP TABLE \`test\`;`); -}); diff --git a/drizzle-kit/tests/push/mysql.test.ts b/drizzle-kit/tests/push/mysql.test.ts deleted file mode 100644 index 6c7f5efc2c..0000000000 --- a/drizzle-kit/tests/push/mysql.test.ts +++ /dev/null @@ -1,788 +0,0 @@ -import 'dotenv/config'; -import Docker from 'dockerode'; -import { SQL, sql } from 'drizzle-orm'; -import { - bigint, - binary, - char, - date, - datetime, - decimal, - double, - float, - int, - json, - mediumint, - mysqlEnum, - mysqlTable, - primaryKey, - serial, - smallint, - text, - time, - timestamp, - tinyint, - varbinary, - varchar, - year, -} from 'drizzle-orm/mysql-core'; -import getPort from 'get-port'; -import { Connection, createConnection } from 'mysql2/promise'; -import { diffTestSchemasMysql, diffTestSchemasPushMysql } from 'tests/schemaDiffer'; -import { v4 as uuid } from 'uuid'; -import { expect, test } from 'vitest'; -import { DialectSuite, run } from './common'; - -async function createDockerDB(context: any): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - context.mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await context.mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -const mysqlSuite: DialectSuite = { - allTypes: async function(context: any): Promise { - const schema1 = { - allBigInts: mysqlTable('all_big_ints', { - simple: bigint('simple', { mode: 'number' }), - columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(), - columnDefault: bigint('column_default', { mode: 'number' }).default(12), - columnDefaultSql: bigint('column_default_sql', { - mode: 'number', - }).default(12), - }), - allBools: mysqlTable('all_bools', { - simple: tinyint('simple'), - columnNotNull: tinyint('column_not_null').notNull(), - columnDefault: tinyint('column_default').default(1), - }), - allChars: mysqlTable('all_chars', { - simple: char('simple', { length: 1 }), - columnNotNull: char('column_not_null', { length: 45 }).notNull(), - // columnDefault: char("column_default", { length: 1 }).default("h"), - columnDefaultSql: char('column_default_sql', { length: 1 }).default( - 'h', - ), - }), - allDateTimes: mysqlTable('all_date_times', { - simple: datetime('simple', { mode: 'string', fsp: 1 }), - columnNotNull: datetime('column_not_null', { - mode: 'string', - }).notNull(), - columnDefault: datetime('column_default', { mode: 'string' }).default( - '2023-03-01 14:05:29', - ), - }), - allDates: mysqlTable('all_dates', { - simple: date('simple', { mode: 'string' }), - column_not_null: date('column_not_null', { mode: 'string' }).notNull(), - column_default: date('column_default', { mode: 'string' }).default( - '2023-03-01', - ), - }), - allDecimals: mysqlTable('all_decimals', { - simple: decimal('simple', { precision: 1, scale: 0 }), - columnNotNull: decimal('column_not_null', { - precision: 45, - scale: 3, - }).notNull(), - columnDefault: decimal('column_default', { - precision: 10, - scale: 0, - }).default('100'), - columnDefaultSql: decimal('column_default_sql', { - precision: 10, - scale: 0, - }).default('101'), - }), - - allDoubles: mysqlTable('all_doubles', { - simple: double('simple'), - columnNotNull: double('column_not_null').notNull(), - columnDefault: double('column_default').default(100), - columnDefaultSql: double('column_default_sql').default(101), - }), - - allEnums: mysqlTable('all_enums', { - simple: mysqlEnum('simple', ['hi', 'hello']), - }), - - allEnums1: mysqlTable('all_enums1', { - simple: mysqlEnum('simple', ['hi', 'hello']).default('hi'), - }), - - allFloats: mysqlTable('all_floats', { - columnNotNull: float('column_not_null').notNull(), - columnDefault: float('column_default').default(100), - columnDefaultSql: float('column_default_sql').default(101), - }), - - allInts: mysqlTable('all_ints', { - simple: int('simple'), - columnNotNull: int('column_not_null').notNull(), - columnDefault: int('column_default').default(100), - columnDefaultSql: int('column_default_sql').default(101), - }), - - allIntsRef: mysqlTable('all_ints_ref', { - simple: int('simple'), - columnNotNull: int('column_not_null').notNull(), - columnDefault: int('column_default').default(100), - columnDefaultSql: int('column_default_sql').default(101), - }), - - allJsons: mysqlTable('all_jsons', { - columnDefaultObject: json('column_default_object') - .default({ hello: 'world world' }) - .notNull(), - columnDefaultArray: json('column_default_array').default({ - hello: { 'world world': ['foo', 'bar'] }, - foo: 'bar', - fe: 23, - }), - column: json('column'), - }), - - allMInts: mysqlTable('all_m_ints', { - simple: mediumint('simple'), - columnNotNull: mediumint('column_not_null').notNull(), - columnDefault: mediumint('column_default').default(100), - columnDefaultSql: mediumint('column_default_sql').default(101), - }), - - allReals: mysqlTable('all_reals', { - simple: double('simple', { precision: 5, scale: 2 }), - columnNotNull: double('column_not_null').notNull(), - columnDefault: double('column_default').default(100), - columnDefaultSql: double('column_default_sql').default(101), - }), - - allSInts: mysqlTable('all_s_ints', { - simple: smallint('simple'), - columnNotNull: smallint('column_not_null').notNull(), - columnDefault: smallint('column_default').default(100), - columnDefaultSql: smallint('column_default_sql').default(101), - }), - - allSmallSerials: mysqlTable('all_small_serials', { - columnAll: serial('column_all').primaryKey().notNull(), - }), - - allTInts: mysqlTable('all_t_ints', { - simple: tinyint('simple'), - columnNotNull: tinyint('column_not_null').notNull(), - columnDefault: tinyint('column_default').default(10), - columnDefaultSql: tinyint('column_default_sql').default(11), - }), - - allTexts: mysqlTable('all_texts', { - simple: text('simple'), - columnNotNull: text('column_not_null').notNull(), - columnDefault: text('column_default').default('hello'), - columnDefaultSql: text('column_default_sql').default('hello'), - }), - - allTimes: mysqlTable('all_times', { - simple: time('simple', { fsp: 1 }), - columnNotNull: time('column_not_null').notNull(), - columnDefault: time('column_default').default('22:12:12'), - }), - - allTimestamps: mysqlTable('all_timestamps', { - columnDateNow: timestamp('column_date_now', { - fsp: 1, - mode: 'string', - }).default(sql`(now())`), - columnAll: timestamp('column_all', { mode: 'string' }) - .default('2023-03-01 14:05:29') - .notNull(), - column: timestamp('column', { mode: 'string' }).default( - '2023-02-28 16:18:31', - ), - }), - - allVarChars: mysqlTable('all_var_chars', { - simple: varchar('simple', { length: 100 }), - columnNotNull: varchar('column_not_null', { length: 45 }).notNull(), - columnDefault: varchar('column_default', { length: 100 }).default( - 'hello', - ), - columnDefaultSql: varchar('column_default_sql', { - length: 100, - }).default('hello'), - }), - - allVarbinaries: mysqlTable('all_varbinaries', { - simple: varbinary('simple', { length: 100 }), - columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(), - columnDefault: varbinary('column_default', { length: 12 }).default( - sql`(uuid_to_bin(uuid()))`, - ), - }), - - allYears: mysqlTable('all_years', { - simple: year('simple'), - columnNotNull: year('column_not_null').notNull(), - columnDefault: year('column_default').default(2022), - }), - - binafry: mysqlTable('binary', { - simple: binary('simple', { length: 1 }), - columnNotNull: binary('column_not_null', { length: 1 }).notNull(), - columnDefault: binary('column_default', { length: 12 }).default( - sql`(uuid_to_bin(uuid()))`, - ), - }), - }; - - const { statements } = await diffTestSchemasPushMysql( - context.client as Connection, - schema1, - schema1, - [], - 'drizzle', - false, - ); - expect(statements.length).toBe(2); - expect(statements).toEqual([ - { - type: 'delete_unique_constraint', - tableName: 'all_small_serials', - data: 'column_all;column_all', - schema: '', - }, - { - type: 'delete_unique_constraint', - tableName: 'all_small_serials', - data: 'column_all;column_all', - schema: '', - }, - ]); - - const { sqlStatements: dropStatements } = await diffTestSchemasMysql( - schema1, - {}, - [], - false, - ); - - for (const st of dropStatements) { - await context.client.query(st); - } - }, - addBasicIndexes: function(context?: any): Promise { - return {} as any; - }, - changeIndexFields: function(context?: any): Promise { - return {} as any; - }, - dropIndex: function(context?: any): Promise { - return {} as any; - }, - indexesToBeNotTriggered: function(context?: any): Promise { - return {} as any; - }, - indexesTestCase1: function(context?: any): Promise { - return {} as any; - }, - async case1() { - // TODO: implement if needed - expect(true).toBe(true); - }, - addNotNull: function(context?: any): Promise { - return {} as any; - }, - addNotNullWithDataNoRollback: function(context?: any): Promise { - return {} as any; - }, - addBasicSequences: function(context?: any): Promise { - return {} as any; - }, - addGeneratedColumn: async function(context: any): Promise { - const schema1 = { - users: mysqlTable('users', { - id: int('id'), - id2: int('id2'), - name: text('name'), - }), - }; - const schema2 = { - users: mysqlTable('users', { - id: int('id'), - id2: int('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name} || 'hello'`, - { mode: 'stored' }, - ), - generatedName1: text('gen_name1').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name} || 'hello'`, - { mode: 'virtual' }, - ), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - context.client as Connection, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - column: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - { - column: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - name: 'gen_name1', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ - "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", - "ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", - ]); - - for (const st of sqlStatements) { - await context.client.query(st); - } - - const { sqlStatements: dropStatements } = await diffTestSchemasMysql( - schema2, - {}, - [], - false, - ); - - for (const st of dropStatements) { - await context.client.query(st); - } - }, - addGeneratedToColumn: async function(context: any): Promise { - const schema1 = { - users: mysqlTable('users', { - id: int('id'), - id2: int('id2'), - name: text('name'), - generatedName: text('gen_name'), - generatedName1: text('gen_name1'), - }), - }; - const schema2 = { - users: mysqlTable('users', { - id: int('id'), - id2: int('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name} || 'hello'`, - { mode: 'stored' }, - ), - generatedName1: text('gen_name1').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name} || 'hello'`, - { mode: 'virtual' }, - ), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - context.client as Connection, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name1', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", - 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', - "ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", - ]); - - for (const st of sqlStatements) { - await context.client.query(st); - } - - const { sqlStatements: dropStatements } = await diffTestSchemasMysql( - schema2, - {}, - [], - false, - ); - - for (const st of dropStatements) { - await context.client.query(st); - } - }, - dropGeneratedConstraint: async function(context: any): Promise { - const schema1 = { - users: mysqlTable('users', { - id: int('id'), - id2: int('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name}`, - { mode: 'stored' }, - ), - generatedName1: text('gen_name1').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name}`, - { mode: 'virtual' }, - ), - }), - }; - const schema2 = { - users: mysqlTable('users', { - id: int('id'), - id2: int('id2'), - name: text('name'), - generatedName: text('gen_name'), - generatedName1: text('gen_name1'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - context.client as Connection, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - oldColumn: { - autoincrement: false, - default: undefined, - generated: { - as: '`name`', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name1', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - oldColumn: { - autoincrement: false, - default: undefined, - generated: { - as: '`name`', - type: 'virtual', - }, - name: 'gen_name1', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', - 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', - 'ALTER TABLE `users` ADD `gen_name1` text;', - ]); - - for (const st of sqlStatements) { - await context.client.query(st); - } - - const { sqlStatements: dropStatements } = await diffTestSchemasMysql( - schema2, - {}, - [], - false, - ); - - for (const st of dropStatements) { - await context.client.query(st); - } - }, - alterGeneratedConstraint: async function(context: any): Promise { - const schema1 = { - users: mysqlTable('users', { - id: int('id'), - id2: int('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name}`, - { mode: 'stored' }, - ), - generatedName1: text('gen_name1').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name}`, - { mode: 'virtual' }, - ), - }), - }; - const schema2 = { - users: mysqlTable('users', { - id: int('id'), - id2: int('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name} || 'hello'`, - { mode: 'stored' }, - ), - generatedName1: text('gen_name1').generatedAlwaysAs( - (): SQL => sql`${schema2.users.name} || 'hello'`, - { mode: 'virtual' }, - ), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - context.client as Connection, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); - - const { sqlStatements: dropStatements } = await diffTestSchemasMysql( - schema2, - {}, - [], - false, - ); - - for (const st of dropStatements) { - await context.client.query(st); - } - }, - createTableWithGeneratedConstraint: function(context?: any): Promise { - return {} as any; - }, - createCompositePrimaryKey: async function(context: any): Promise { - const schema1 = {}; - - const schema2 = { - table: mysqlTable('table', { - col1: int('col1').notNull(), - col2: int('col2').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.col1, t.col2], - }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushMysql( - context.client as Connection, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - type: 'create_table', - tableName: 'table', - schema: undefined, - internals: { - indexes: {}, - tables: {}, - }, - compositePKs: ['table_col1_col2_pk;col1,col2'], - compositePkName: 'table_col1_col2_pk', - uniqueConstraints: [], - checkConstraints: [], - columns: [ - { name: 'col1', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, - { name: 'col2', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, - ], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `table` (\n\t`col1` int NOT NULL,\n\t`col2` int NOT NULL,\n\tCONSTRAINT `table_col1_col2_pk` PRIMARY KEY(`col1`,`col2`)\n);\n', - ]); - }, - renameTableWithCompositePrimaryKey: async function(context?: any): Promise { - const productsCategoriesTable = (tableName: string) => { - return mysqlTable(tableName, { - productId: varchar('product_id', { length: 10 }).notNull(), - categoryId: varchar('category_id', { length: 10 }).notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.productId, t.categoryId], - }), - })); - }; - - const schema1 = { - table: productsCategoriesTable('products_categories'), - }; - const schema2 = { - test: productsCategoriesTable('products_to_categories'), - }; - - const { sqlStatements } = await diffTestSchemasPushMysql( - context.client as Connection, - schema1, - schema2, - ['public.products_categories->public.products_to_categories'], - 'drizzle', - false, - ); - - expect(sqlStatements).toStrictEqual([ - 'RENAME TABLE `products_categories` TO `products_to_categories`;', - 'ALTER TABLE `products_to_categories` DROP PRIMARY KEY;', - 'ALTER TABLE `products_to_categories` ADD PRIMARY KEY(`product_id`,`category_id`);', - ]); - - await context.client.query(`DROP TABLE \`products_categories\``); - }, -}; - -run( - mysqlSuite, - async (context: any) => { - const connectionString = process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(context); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - context.client = await createConnection(connectionString); - await context.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await context.client?.end().catch(console.error); - await context.mysqlContainer?.stop().catch(console.error); - throw lastError; - } - }, - async (context: any) => { - await context.client?.end().catch(console.error); - await context.mysqlContainer?.stop().catch(console.error); - }, - async (context: any) => { - await context.client?.query(`drop database if exists \`drizzle\`;`); - await context.client?.query(`create database \`drizzle\`;`); - await context.client?.query(`use \`drizzle\`;`); - }, -); diff --git a/drizzle-kit/tests/push/pg.test.ts b/drizzle-kit/tests/push/pg.test.ts deleted file mode 100644 index a5c8b30287..0000000000 --- a/drizzle-kit/tests/push/pg.test.ts +++ /dev/null @@ -1,4412 +0,0 @@ -import { PGlite } from '@electric-sql/pglite'; -import chalk from 'chalk'; -import { - bigint, - bigserial, - boolean, - char, - check, - date, - doublePrecision, - index, - integer, - interval, - json, - jsonb, - numeric, - pgEnum, - pgMaterializedView, - pgPolicy, - pgRole, - pgSchema, - pgSequence, - pgTable, - pgView, - primaryKey, - real, - serial, - smallint, - text, - time, - timestamp, - uniqueIndex, - uuid, - varchar, -} from 'drizzle-orm/pg-core'; -import { drizzle } from 'drizzle-orm/pglite'; -import { eq, SQL, sql } from 'drizzle-orm/sql'; -import { pgSuggestions } from 'src/cli/commands/pgPushUtils'; -import { diffTestSchemas, diffTestSchemasPush } from 'tests/schemaDiffer'; -import { expect, test } from 'vitest'; -import { DialectSuite, run } from './common'; - -const pgSuite: DialectSuite = { - async allTypes() { - const client = new PGlite(); - - const customSchema = pgSchema('schemass'); - - const transactionStatusEnum = customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']); - - const enumname = pgEnum('enumname', ['three', 'two', 'one']); - - const schema1 = { - test: pgEnum('test', ['ds']), - testHello: pgEnum('test_hello', ['ds']), - enumname: pgEnum('enumname', ['three', 'two', 'one']), - - customSchema: customSchema, - transactionStatusEnum: customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']), - - allSmallSerials: pgTable('schema_test', { - columnAll: uuid('column_all').defaultRandom(), - column: transactionStatusEnum('column').notNull(), - }), - - allSmallInts: customSchema.table( - 'schema_test2', - { - columnAll: smallint('column_all').default(124).notNull(), - column: smallint('columns').array(), - column1: smallint('column1').array().array(), - column2: smallint('column2').array().array(), - column3: smallint('column3').array(), - }, - (t) => ({ - cd: uniqueIndex('testdfds').on(t.column), - }), - ), - - allEnums: customSchema.table( - 'all_enums', - { - columnAll: enumname('column_all').default('three').notNull(), - column: enumname('columns'), - }, - (t) => ({ - d: index('ds').on(t.column), - }), - ), - - allTimestamps: customSchema.table('all_timestamps', { - columnDateNow: timestamp('column_date_now', { - precision: 1, - withTimezone: true, - mode: 'string', - }).defaultNow(), - columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), - column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), - column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), - }), - - allUuids: customSchema.table('all_uuids', { - columnAll: uuid('column_all').defaultRandom().notNull(), - column: uuid('column'), - }), - - allDates: customSchema.table('all_dates', { - column_date_now: date('column_date_now').defaultNow(), - column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), - column: date('column'), - }), - - allReals: customSchema.table('all_reals', { - columnAll: real('column_all').default(32).notNull(), - column: real('column'), - columnPrimary: real('column_primary').primaryKey().notNull(), - }), - - allBigints: pgTable('all_bigints', { - columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), - column: bigint('column', { mode: 'number' }), - }), - - allBigserials: customSchema.table('all_bigserials', { - columnAll: bigserial('column_all', { mode: 'bigint' }).notNull(), - column: bigserial('column', { mode: 'bigint' }).notNull(), - }), - - allIntervals: customSchema.table('all_intervals', { - columnAllConstrains: interval('column_all_constrains', { - fields: 'month', - }) - .default('1 mon') - .notNull(), - columnMinToSec: interval('column_min_to_sec', { - fields: 'minute to second', - }), - columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), - column: interval('column'), - column5: interval('column5', { - fields: 'minute to second', - precision: 3, - }), - column6: interval('column6'), - }), - - allSerials: customSchema.table('all_serials', { - columnAll: serial('column_all').notNull(), - column: serial('column').notNull(), - }), - - allTexts: customSchema.table( - 'all_texts', - { - columnAll: text('column_all').default('text').notNull(), - column: text('columns').primaryKey(), - }, - (t) => ({ - cd: index('test').on(t.column), - }), - ), - - allBools: customSchema.table('all_bools', { - columnAll: boolean('column_all').default(true).notNull(), - column: boolean('column'), - }), - - allVarchars: customSchema.table('all_varchars', { - columnAll: varchar('column_all').default('text').notNull(), - column: varchar('column', { length: 200 }), - }), - - allTimes: customSchema.table('all_times', { - columnDateNow: time('column_date_now').defaultNow(), - columnAll: time('column_all').default('22:12:12').notNull(), - column: time('column'), - }), - - allChars: customSchema.table('all_chars', { - columnAll: char('column_all', { length: 1 }).default('text').notNull(), - column: char('column', { length: 1 }), - }), - - allDoublePrecision: customSchema.table('all_double_precision', { - columnAll: doublePrecision('column_all').default(33.2).notNull(), - column: doublePrecision('column'), - }), - - allJsonb: customSchema.table('all_jsonb', { - columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), - columnDefaultArray: jsonb('column_default_array').default({ - hello: { 'world world': ['foo', 'bar'] }, - }), - column: jsonb('column'), - }), - - allJson: customSchema.table('all_json', { - columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), - columnDefaultArray: json('column_default_array').default({ - hello: { 'world world': ['foo', 'bar'] }, - foo: 'bar', - fe: 23, - }), - column: json('column'), - }), - - allIntegers: customSchema.table('all_integers', { - columnAll: integer('column_all').primaryKey(), - column: integer('column'), - columnPrimary: integer('column_primary'), - }), - - allNumerics: customSchema.table('all_numerics', { - columnAll: numeric('column_all', { precision: 1, scale: 1 }).default('32').notNull(), - column: numeric('column'), - columnPrimary: numeric('column_primary').primaryKey().notNull(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema1, [], false, [ - 'public', - 'schemass', - ]); - expect(statements.length).toBe(0); - }, - - async addBasicIndexes() { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - indx: index() - .on(t.name.desc(), t.id.asc().nullsLast()) - .with({ fillfactor: 70 }) - .where(sql`select 1`), - indx1: index('indx1') - .using('hash', t.name.desc(), sql`${t.name}`) - .with({ fillfactor: 70 }), - }), - ), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - schema: '', - tableName: 'users', - type: 'create_index_pg', - data: { - columns: [ - { - asc: false, - expression: 'name', - isExpression: false, - nulls: 'last', - opclass: undefined, - }, - { - asc: true, - expression: 'id', - isExpression: false, - nulls: 'last', - opclass: undefined, - }, - ], - concurrently: false, - isUnique: false, - method: 'btree', - name: 'users_name_id_index', - where: 'select 1', - with: { - fillfactor: 70, - }, - }, - }); - expect(statements[1]).toStrictEqual({ - schema: '', - tableName: 'users', - type: 'create_index_pg', - data: { - columns: [ - { - asc: false, - expression: 'name', - isExpression: false, - nulls: 'last', - opclass: undefined, - }, - { - asc: true, - expression: '"name"', - isExpression: true, - nulls: 'last', - }, - ], - concurrently: false, - isUnique: false, - method: 'hash', - name: 'indx1', - where: undefined, - with: { - fillfactor: 70, - }, - }, - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `CREATE INDEX "users_name_id_index" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70) WHERE select 1;`, - ); - expect(sqlStatements[1]).toBe( - `CREATE INDEX "indx1" ON "users" USING hash ("name" DESC NULLS LAST,"name") WITH (fillfactor=70);`, - ); - }, - - async addGeneratedColumn() { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - }), - }; - const schema2 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: '"users"."name"', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name") STORED;', - ]); - - // for (const st of sqlStatements) { - // await client.query(st); - // } - }, - - async addGeneratedToColumn() { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - generatedName: text('gen_name'), - }), - }; - const schema2 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: { - as: '"users"."name"', - type: 'stored', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" drop column "gen_name";', - 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name") STORED;', - ]); - - // for (const st of sqlStatements) { - // await client.query(st); - // } - }, - - async dropGeneratedConstraint() { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), - }), - }; - const schema2 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - generatedName: text('gen_name'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: undefined, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" ALTER COLUMN "gen_name" DROP EXPRESSION;']); - }, - - async alterGeneratedConstraint() { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), - }), - }; - const schema2 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); - }, - - async createTableWithGeneratedConstraint() { - const client = new PGlite(); - - const schema1 = {}; - const schema2 = { - users: pgTable('users', { - id: integer('id'), - id2: integer('id2'), - name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columns: [ - { - name: 'id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - name: 'id2', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - generated: { - as: '"users"."name" || \'hello\'', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - compositePkName: '', - isRLSEnabled: false, - schema: '', - tableName: 'users', - policies: [], - type: 'create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users" (\n\t"id" integer,\n\t"id2" integer,\n\t"name" text,\n\t"gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', - ]); - }, - - async addBasicSequences() { - const client = new PGlite(); - - const schema1 = { - seq: pgSequence('my_seq', { startWith: 100 }), - }; - - const schema2 = { - seq: pgSequence('my_seq', { startWith: 100 }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - expect(statements.length).toBe(0); - }, - - async changeIndexFields() { - const client = new PGlite(); - - const schema1 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - removeColumn: index('removeColumn').on(t.name, t.id), - addColumn: index('addColumn').on(t.name.desc()).with({ fillfactor: 70 }), - removeExpression: index('removeExpression') - .on(t.name.desc(), sql`name`) - .concurrently(), - addExpression: index('addExpression').on(t.id.desc()), - changeExpression: index('changeExpression').on(t.id.desc(), sql`name`), - changeName: index('changeName').on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }), - changeWith: index('changeWith').on(t.name).with({ fillfactor: 70 }), - changeUsing: index('changeUsing').on(t.name), - }), - ), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - removeColumn: index('removeColumn').on(t.name), - addColumn: index('addColumn').on(t.name.desc(), t.id.nullsLast()).with({ fillfactor: 70 }), - removeExpression: index('removeExpression').on(t.name.desc()).concurrently(), - addExpression: index('addExpression').on(t.id.desc()), - changeExpression: index('changeExpression').on(t.id.desc(), sql`name desc`), - changeName: index('newName') - .on(t.name.desc(), sql`name`) - .with({ fillfactor: 70 }), - changeWith: index('changeWith').on(t.name).with({ fillfactor: 90 }), - changeUsing: index('changeUsing').using('hash', t.name), - }), - ), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(sqlStatements).toStrictEqual([ - 'DROP INDEX "changeName";', - 'DROP INDEX "addColumn";', - 'DROP INDEX "changeExpression";', - 'DROP INDEX "changeUsing";', - 'DROP INDEX "changeWith";', - 'DROP INDEX "removeColumn";', - 'DROP INDEX "removeExpression";', - 'CREATE INDEX "newName" ON "users" USING btree ("name" DESC NULLS LAST,name) WITH (fillfactor=70);', - 'CREATE INDEX "addColumn" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70);', - 'CREATE INDEX "changeExpression" ON "users" USING btree ("id" DESC NULLS LAST,name desc);', - 'CREATE INDEX "changeUsing" ON "users" USING hash ("name");', - 'CREATE INDEX "changeWith" ON "users" USING btree ("name") WITH (fillfactor=90);', - 'CREATE INDEX "removeColumn" ON "users" USING btree ("name");', - 'CREATE INDEX CONCURRENTLY "removeExpression" ON "users" USING btree ("name" DESC NULLS LAST);', - ]); - }, - - async dropIndex() { - const client = new PGlite(); - - const schema1 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - indx: index().on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }), - }), - ), - }; - - const schema2 = { - users: pgTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - schema: '', - tableName: 'users', - type: 'drop_index', - data: 'users_name_id_index;name--false--last,,id--true--last;false;btree;{"fillfactor":"70"}', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP INDEX "users_name_id_index";`); - }, - - async indexesToBeNotTriggered() { - const client = new PGlite(); - - const schema1 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - indx: index('indx').on(t.name.desc()).concurrently(), - indx1: index('indx1') - .on(t.name.desc()) - .where(sql`true`), - indx2: index('indx2') - .on(t.name.op('text_ops')) - .where(sql`true`), - indx3: index('indx3') - .on(sql`lower(name)`) - .where(sql`true`), - }), - ), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id: serial('id').primaryKey(), - name: text('name'), - }, - (t) => ({ - indx: index('indx').on(t.name.desc()), - indx1: index('indx1') - .on(t.name.desc()) - .where(sql`false`), - indx2: index('indx2') - .on(t.name.op('test')) - .where(sql`true`), - indx3: index('indx3') - .on(sql`lower(id)`) - .where(sql`true`), - }), - ), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements.length).toBe(0); - }, - - async indexesTestCase1() { - const client = new PGlite(); - - const schema1 = { - users: pgTable( - 'users', - { - id: uuid('id').defaultRandom().primaryKey(), - name: text('name').notNull(), - description: text('description'), - imageUrl: text('image_url'), - inStock: boolean('in_stock').default(true), - }, - (t) => ({ - indx: index().on(t.id.desc().nullsFirst()), - indx1: index('indx1').on(t.id, t.imageUrl), - indx2: index('indx4').on(t.id), - }), - ), - }; - - const schema2 = { - users: pgTable( - 'users', - { - id: uuid('id').defaultRandom().primaryKey(), - name: text('name').notNull(), - description: text('description'), - imageUrl: text('image_url'), - inStock: boolean('in_stock').default(true), - }, - (t) => ({ - indx: index().on(t.id.desc().nullsFirst()), - indx1: index('indx1').on(t.id, t.imageUrl), - indx2: index('indx4').on(t.id), - }), - ), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements.length).toBe(0); - }, - - async addNotNull() { - const client = new PGlite(); - - const schema1 = { - users: pgTable( - 'User', - { - id: text('id').primaryKey().notNull(), - name: text('name'), - username: text('username'), - gh_username: text('gh_username'), - email: text('email'), - emailVerified: timestamp('emailVerified', { - precision: 3, - mode: 'date', - }), - image: text('image'), - createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) - .default(sql`CURRENT_TIMESTAMP`) - .notNull(), - updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) - .notNull() - .$onUpdate(() => new Date()), - }, - (table) => { - return { - emailKey: uniqueIndex('User_email_key').on(table.email), - }; - }, - ), - }; - - const schema2 = { - users: pgTable( - 'User', - { - id: text('id').primaryKey().notNull(), - name: text('name'), - username: text('username'), - gh_username: text('gh_username'), - email: text('email').notNull(), - emailVerified: timestamp('emailVerified', { - precision: 3, - mode: 'date', - }), - image: text('image'), - createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) - .default(sql`CURRENT_TIMESTAMP`) - .notNull(), - updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) - .notNull() - .$onUpdate(() => new Date()), - }, - (table) => { - return { - emailKey: uniqueIndex('User_email_key').on(table.email), - }; - }, - ), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - const query = async (sql: string, params?: any[]) => { - const result = await client.query(sql, params ?? []); - return result.rows as any[]; - }; - - const { statementsToExecute } = await pgSuggestions({ query }, statements); - - expect(statementsToExecute).toStrictEqual(['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']); - }, - - async addNotNullWithDataNoRollback() { - const client = new PGlite(); - const db = drizzle(client); - - const schema1 = { - users: pgTable( - 'User', - { - id: text('id').primaryKey().notNull(), - name: text('name'), - username: text('username'), - gh_username: text('gh_username'), - email: text('email'), - emailVerified: timestamp('emailVerified', { - precision: 3, - mode: 'date', - }), - image: text('image'), - createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) - .default(sql`CURRENT_TIMESTAMP`) - .notNull(), - updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) - .notNull() - .$onUpdate(() => new Date()), - }, - (table) => { - return { - emailKey: uniqueIndex('User_email_key').on(table.email), - }; - }, - ), - }; - - const schema2 = { - users: pgTable( - 'User', - { - id: text('id').primaryKey().notNull(), - name: text('name'), - username: text('username'), - gh_username: text('gh_username'), - email: text('email').notNull(), - emailVerified: timestamp('emailVerified', { - precision: 3, - mode: 'date', - }), - image: text('image'), - createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) - .default(sql`CURRENT_TIMESTAMP`) - .notNull(), - updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) - .notNull() - .$onUpdate(() => new Date()), - }, - (table) => { - return { - emailKey: uniqueIndex('User_email_key').on(table.email), - }; - }, - ), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - const query = async (sql: string, params?: any[]) => { - const result = await client.query(sql, params ?? []); - return result.rows as any[]; - }; - - await db.insert(schema1.users).values({ id: 'str', email: 'email@gmail' }); - - const { statementsToExecute, shouldAskForApprove } = await pgSuggestions({ query }, statements); - - expect(statementsToExecute).toStrictEqual(['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']); - - expect(shouldAskForApprove).toBeFalsy(); - }, - - async createCompositePrimaryKey() { - const client = new PGlite(); - - const schema1 = {}; - - const schema2 = { - table: pgTable('table', { - col1: integer('col1').notNull(), - col2: integer('col2').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.col1, t.col2], - }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { - type: 'create_table', - tableName: 'table', - schema: '', - compositePKs: ['col1,col2;table_col1_col2_pk'], - compositePkName: 'table_col1_col2_pk', - isRLSEnabled: false, - policies: [], - uniqueConstraints: [], - checkConstraints: [], - columns: [ - { name: 'col1', type: 'integer', primaryKey: false, notNull: true }, - { name: 'col2', type: 'integer', primaryKey: false, notNull: true }, - ], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "table" (\n\t"col1" integer NOT NULL,\n\t"col2" integer NOT NULL,\n\tCONSTRAINT "table_col1_col2_pk" PRIMARY KEY("col1","col2")\n);\n', - ]); - }, - - async renameTableWithCompositePrimaryKey() { - const client = new PGlite(); - - const productsCategoriesTable = (tableName: string) => { - return pgTable(tableName, { - productId: text('product_id').notNull(), - categoryId: text('category_id').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.productId, t.categoryId], - }), - })); - }; - - const schema1 = { - table: productsCategoriesTable('products_categories'), - }; - const schema2 = { - test: productsCategoriesTable('products_to_categories'), - }; - - const { sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - ['public.products_categories->public.products_to_categories'], - false, - ['public'], - ); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "products_categories" RENAME TO "products_to_categories";', - 'ALTER TABLE "products_to_categories" DROP CONSTRAINT "products_categories_product_id_category_id_pk";', - 'ALTER TABLE "products_to_categories" ADD CONSTRAINT "products_to_categories_product_id_category_id_pk" PRIMARY KEY("product_id","category_id");', - ]); - }, - - // async addVectorIndexes() { - // const client = new PGlite(); - - // const schema1 = { - // users: pgTable("users", { - // id: serial("id").primaryKey(), - // name: vector("name", { dimensions: 3 }), - // }), - // }; - - // const schema2 = { - // users: pgTable( - // "users", - // { - // id: serial("id").primaryKey(), - // embedding: vector("name", { dimensions: 3 }), - // }, - // (t) => ({ - // indx2: index("vector_embedding_idx") - // .using("hnsw", t.embedding.op("vector_ip_ops")) - // .with({ m: 16, ef_construction: 64 }), - // }) - // ), - // }; - - // const { statements, sqlStatements } = await diffTestSchemasPush( - // client, - // schema1, - // schema2, - // [], - // false, - // ["public"] - // ); - // expect(statements.length).toBe(1); - // expect(statements[0]).toStrictEqual({ - // schema: "", - // tableName: "users", - // type: "create_index", - // data: 'vector_embedding_idx;name,true,last,vector_ip_ops;false;false;hnsw;undefined;{"m":16,"ef_construction":64}', - // }); - // expect(sqlStatements.length).toBe(1); - // expect(sqlStatements[0]).toBe( - // `CREATE INDEX IF NOT EXISTS "vector_embedding_idx" ON "users" USING hnsw (name vector_ip_ops) WITH (m=16,ef_construction=64);` - // ); - // }, - async case1() { - // TODO: implement if needed - expect(true).toBe(true); - }, -}; - -run(pgSuite); - -test('full sequence: no changes', async () => { - const client = new PGlite(); - - const schema1 = { - seq: pgSequence('my_seq', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 2, - }), - }; - - const schema2 = { - seq: pgSequence('my_seq', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 2, - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('basic sequence: change fields', async () => { - const client = new PGlite(); - - const schema1 = { - seq: pgSequence('my_seq', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 2, - }), - }; - - const schema2 = { - seq: pgSequence('my_seq', { - startWith: 100, - maxValue: 100000, - minValue: 100, - cycle: true, - cache: 10, - increment: 4, - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - type: 'alter_sequence', - schema: 'public', - name: 'my_seq', - values: { - minValue: '100', - maxValue: '100000', - increment: '4', - startWith: '100', - cache: '10', - cycle: true, - }, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER SEQUENCE "public"."my_seq" INCREMENT BY 4 MINVALUE 100 MAXVALUE 100000 START WITH 100 CACHE 10 CYCLE;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('basic sequence: change name', async () => { - const client = new PGlite(); - - const schema1 = { - seq: pgSequence('my_seq', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 2, - }), - }; - - const schema2 = { - seq: pgSequence('my_seq2', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 2, - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - ['public.my_seq->public.my_seq2'], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { - nameFrom: 'my_seq', - nameTo: 'my_seq2', - schema: 'public', - type: 'rename_sequence', - }, - ]); - expect(sqlStatements).toStrictEqual(['ALTER SEQUENCE "public"."my_seq" RENAME TO "my_seq2";']); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('basic sequence: change name and fields', async () => { - const client = new PGlite(); - - const schema1 = { - seq: pgSequence('my_seq', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 2, - }), - }; - - const schema2 = { - seq: pgSequence('my_seq2', { - startWith: 100, - maxValue: 10000, - minValue: 100, - cycle: true, - cache: 10, - increment: 4, - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - ['public.my_seq->public.my_seq2'], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { - nameFrom: 'my_seq', - nameTo: 'my_seq2', - schema: 'public', - type: 'rename_sequence', - }, - { - name: 'my_seq2', - schema: 'public', - type: 'alter_sequence', - values: { - cache: '10', - cycle: true, - increment: '4', - maxValue: '10000', - minValue: '100', - startWith: '100', - }, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER SEQUENCE "public"."my_seq" RENAME TO "my_seq2";', - 'ALTER SEQUENCE "public"."my_seq2" INCREMENT BY 4 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -// identity push tests -test('create table: identity always/by default - no params', async () => { - const client = new PGlite(); - - const schema1 = {}; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity(), - id2: smallint('id2').generatedByDefaultAsIdentity(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columns: [ - { - identity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', - name: 'id', - notNull: true, - primaryKey: false, - type: 'integer', - }, - { - identity: 'users_id1_seq;byDefault;1;9223372036854775807;1;1;1;false', - name: 'id1', - notNull: true, - primaryKey: false, - type: 'bigint', - }, - { - identity: 'users_id2_seq;byDefault;1;32767;1;1;1;false', - name: 'id2', - notNull: true, - primaryKey: false, - type: 'smallint', - }, - ], - compositePKs: [], - compositePkName: '', - schema: '', - tableName: 'users', - policies: [], - type: 'create_table', - uniqueConstraints: [], - isRLSEnabled: false, - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1)\n);\n', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('create table: identity always/by default - few params', async () => { - const client = new PGlite(); - - const schema1 = {}; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ increment: 4 }), - id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ - startWith: 120, - maxValue: 17000, - }), - id2: smallint('id2').generatedByDefaultAsIdentity({ cycle: true }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columns: [ - { - identity: 'users_id_seq;byDefault;1;2147483647;4;1;1;false', - name: 'id', - notNull: true, - primaryKey: false, - type: 'integer', - }, - { - identity: 'users_id1_seq;byDefault;1;17000;1;120;1;false', - name: 'id1', - notNull: true, - primaryKey: false, - type: 'bigint', - }, - { - identity: 'users_id2_seq;byDefault;1;32767;1;1;1;true', - name: 'id2', - notNull: true, - primaryKey: false, - type: 'smallint', - }, - ], - compositePKs: [], - compositePkName: '', - policies: [], - schema: '', - tableName: 'users', - type: 'create_table', - isRLSEnabled: false, - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 1),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1 CYCLE)\n);\n', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('create table: identity always/by default - all params', async () => { - const client = new PGlite(); - - const schema1 = {}; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - increment: 4, - minValue: 100, - }), - id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ - startWith: 120, - maxValue: 17000, - increment: 3, - cycle: true, - cache: 100, - }), - id2: smallint('id2').generatedByDefaultAsIdentity({ cycle: true }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columns: [ - { - identity: 'users_id_seq;byDefault;100;2147483647;4;100;1;false', - name: 'id', - notNull: true, - primaryKey: false, - type: 'integer', - }, - { - identity: 'users_id1_seq;byDefault;1;17000;3;120;100;true', - name: 'id1', - notNull: true, - primaryKey: false, - type: 'bigint', - }, - { - identity: 'users_id2_seq;byDefault;1;32767;1;1;1;true', - name: 'id2', - notNull: true, - primaryKey: false, - type: 'smallint', - }, - ], - compositePKs: [], - compositePkName: '', - schema: '', - tableName: 'users', - type: 'create_table', - policies: [], - isRLSEnabled: false, - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 4 MINVALUE 100 MAXVALUE 2147483647 START WITH 100 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 3 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 100 CYCLE),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1 CYCLE)\n);\n', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('no diff: identity always/by default - no params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - id2: integer('id2').generatedAlwaysAsIdentity(), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - id2: integer('id2').generatedAlwaysAsIdentity(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('no diff: identity always/by default - few params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_name', - }), - id2: integer('id2').generatedAlwaysAsIdentity({ - increment: 1, - startWith: 3, - }), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_name', - }), - id2: integer('id2').generatedAlwaysAsIdentity({ - increment: 1, - startWith: 3, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('no diff: identity always/by default - all params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_name', - startWith: 10, - minValue: 10, - maxValue: 1000, - cycle: true, - cache: 10, - increment: 2, - }), - id2: integer('id2').generatedAlwaysAsIdentity({ - startWith: 10, - minValue: 10, - maxValue: 1000, - cycle: true, - cache: 10, - increment: 2, - }), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - name: 'custom_name', - startWith: 10, - minValue: 10, - maxValue: 1000, - cycle: true, - cache: 10, - increment: 2, - }), - id2: integer('id2').generatedAlwaysAsIdentity({ - startWith: 10, - minValue: 10, - maxValue: 1000, - cycle: true, - cache: 10, - increment: 2, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('drop identity from a column - no params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([`ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('drop identity from a column - few params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), - id1: integer('id1').generatedByDefaultAsIdentity({ - name: 'custom_name1', - increment: 4, - }), - id2: integer('id2').generatedAlwaysAsIdentity({ - name: 'custom_name2', - increment: 4, - }), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id'), - id1: integer('id1'), - id2: integer('id2'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - { - columnName: 'id1', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - { - columnName: 'id2', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, - 'ALTER TABLE "users" ALTER COLUMN "id1" DROP IDENTITY;', - 'ALTER TABLE "users" ALTER COLUMN "id2" DROP IDENTITY;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('drop identity from a column - all params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - id1: integer('id1').generatedByDefaultAsIdentity({ - name: 'custom_name1', - startWith: 10, - minValue: 10, - maxValue: 1000, - cycle: true, - cache: 10, - increment: 2, - }), - id2: integer('id2').generatedAlwaysAsIdentity({ - name: 'custom_name2', - startWith: 10, - minValue: 10, - maxValue: 1000, - cycle: true, - cache: 10, - increment: 2, - }), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id'), - id1: integer('id1'), - id2: integer('id2'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - { - columnName: 'id1', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - { - columnName: 'id2', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, - 'ALTER TABLE "users" ALTER COLUMN "id1" DROP IDENTITY;', - 'ALTER TABLE "users" ALTER COLUMN "id2" DROP IDENTITY;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter identity from a column - no params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity(), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', - oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_change_identity', - }, - ]); - expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;']); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter identity from a column - few params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - startWith: 100, - increment: 4, - maxValue: 10000, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'users_id_seq;byDefault;1;10000;4;100;1;false', - oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_change_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter identity from a column - by default to always', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedAlwaysAsIdentity({ - startWith: 100, - increment: 4, - maxValue: 10000, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'users_id_seq;always;1;10000;4;100;1;false', - oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_change_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED ALWAYS;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter identity from a column - always to by default', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').generatedAlwaysAsIdentity({ startWith: 100 }), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ - startWith: 100, - increment: 4, - maxValue: 10000, - cycle: true, - cache: 100, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'users_id_seq;byDefault;1;10000;4;100;100;true', - oldIdentity: 'users_id_seq;always;1;2147483647;1;100;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_change_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 100;', - 'ALTER TABLE "users" ALTER COLUMN "id" SET CYCLE;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('add column with identity - few params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - email: text('email'), - }), - }; - - const schema2 = { - users: pgTable('users', { - email: text('email'), - id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), - id1: integer('id1').generatedAlwaysAsIdentity({ - name: 'custom_name1', - increment: 4, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - column: { - identity: 'custom_name;byDefault;1;2147483647;1;1;1;false', - name: 'id', - notNull: true, - primaryKey: false, - type: 'integer', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - { - column: { - identity: 'custom_name1;always;1;2147483647;4;1;1;false', - name: 'id1', - notNull: true, - primaryKey: false, - type: 'integer', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ADD COLUMN "id" integer NOT NULL GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', - 'ALTER TABLE "users" ADD COLUMN "id1" integer NOT NULL GENERATED ALWAYS AS IDENTITY (sequence name "custom_name1" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', - ]); - - // for (const st of sqlStatements) { - // await client.query(st); - // } -}); - -test('add identity to column - few params', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id'), - id1: integer('id1'), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), - id1: integer('id1').generatedAlwaysAsIdentity({ - name: 'custom_name1', - increment: 4, - }), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - columnName: 'id', - identity: 'custom_name;byDefault;1;2147483647;1;1;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_identity', - }, - { - columnName: 'id1', - identity: 'custom_name1;always;1;2147483647;4;1;1;false', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_identity', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ALTER COLUMN "id" ADD GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', - 'ALTER TABLE "users" ALTER COLUMN "id1" ADD GENERATED ALWAYS AS IDENTITY (sequence name "custom_name1" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', - ]); - - // for (const st of sqlStatements) { - // await client.query(st); - // } -}); - -test('add array column - empty array default', async () => { - const client = new PGlite(); - - const schema1 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const schema2 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').array().default([]), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{}'" }, - }, - ]); - expect(sqlStatements).toStrictEqual(['ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{}\';']); -}); - -test('add array column - default', async () => { - const client = new PGlite(); - - const schema1 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - }), - }; - const schema2 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').array().default([1, 2, 3]), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - type: 'alter_table_add_column', - tableName: 'test', - schema: '', - column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, - }, - ]); - expect(sqlStatements).toStrictEqual(['ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{1,2,3}\';']); -}); - -test('create view', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - }; - - const schema2 = { - test: table, - view: pgView('view').as((qb) => qb.selectDistinct().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - definition: 'select distinct "id" from "test"', - name: 'view', - schema: 'public', - type: 'create_view', - with: undefined, - materialized: false, - tablespace: undefined, - using: undefined, - withNoData: false, - }, - ]); - expect(sqlStatements).toStrictEqual(['CREATE VIEW "public"."view" AS (select distinct "id" from "test");']); -}); - -test('add check constraint to table', async () => { - const client = new PGlite(); - - const schema1 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').array().default([1, 2, 3]), - }), - }; - const schema2 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').array().default([1, 2, 3]), - }, (table) => ({ - checkConstraint1: check('some_check1', sql`${table.values} < 100`), - checkConstraint2: check('some_check2', sql`'test' < 100`), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - type: 'create_check_constraint', - tableName: 'test', - schema: '', - data: 'some_check1;"test"."values" < 100', - }, - { - data: "some_check2;'test' < 100", - schema: '', - tableName: 'test', - type: 'create_check_constraint', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "test" ADD CONSTRAINT "some_check1" CHECK ("test"."values" < 100);', - `ALTER TABLE "test" ADD CONSTRAINT "some_check2" CHECK ('test' < 100);`, - ]); -}); - -test('create materialized view', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - }; - - const schema2 = { - test: table, - view: pgMaterializedView('view') - .withNoData() - .using('heap') - .as((qb) => qb.selectDistinct().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - definition: 'select distinct "id" from "test"', - name: 'view', - schema: 'public', - type: 'create_view', - with: undefined, - materialized: true, - tablespace: undefined, - using: 'heap', - withNoData: true, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE MATERIALIZED VIEW "public"."view" USING "heap" AS (select distinct "id" from "test") WITH NO DATA;', - ]); -}); - -test('drop check constraint', async () => { - const client = new PGlite(); - - const schema1 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').default(1), - }, (table) => ({ - checkConstraint: check('some_check', sql`${table.values} < 100`), - })), - }; - const schema2 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').default(1), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { - type: 'delete_check_constraint', - tableName: 'test', - schema: '', - constraintName: 'some_check', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "test" DROP CONSTRAINT "some_check";', - ]); -}); - -test('Column with same name as enum', async () => { - const client = new PGlite(); - const statusEnum = pgEnum('status', ['inactive', 'active', 'banned']); - - const schema1 = { - statusEnum, - table1: pgTable('table1', { - id: serial('id').primaryKey(), - }), - }; - - const schema2 = { - statusEnum, - table1: pgTable('table1', { - id: serial('id').primaryKey(), - status: statusEnum('status').default('inactive'), - }), - table2: pgTable('table2', { - id: serial('id').primaryKey(), - status: statusEnum('status').default('inactive'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { - type: 'create_table', - tableName: 'table2', - schema: '', - compositePKs: [], - compositePkName: '', - isRLSEnabled: false, - policies: [], - uniqueConstraints: [], - checkConstraints: [], - columns: [ - { name: 'id', type: 'serial', primaryKey: true, notNull: true }, - { - name: 'status', - type: 'status', - typeSchema: 'public', - primaryKey: false, - notNull: false, - default: "'inactive'", - }, - ], - }, - { - type: 'alter_table_add_column', - tableName: 'table1', - schema: '', - column: { - name: 'status', - type: 'status', - typeSchema: 'public', - primaryKey: false, - notNull: false, - default: "'inactive'", - }, - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "table2" (\n\t"id" serial PRIMARY KEY NOT NULL,\n\t"status" "status" DEFAULT \'inactive\'\n);\n', - 'ALTER TABLE "table1" ADD COLUMN "status" "status" DEFAULT \'inactive\';', - ]); -}); - -test('db has checks. Push with same names', async () => { - const client = new PGlite(); - - const schema1 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').default(1), - }, (table) => ({ - checkConstraint: check('some_check', sql`${table.values} < 100`), - })), - }; - const schema2 = { - test: pgTable('test', { - id: serial('id').primaryKey(), - values: integer('values').default(1), - }, (table) => ({ - checkConstraint: check('some_check', sql`some new value`), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('drop view', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgView('view').as((qb) => qb.selectDistinct().from(table)), - }; - - const schema2 = { - test: table, - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - name: 'view', - schema: 'public', - type: 'drop_view', - }, - ]); - expect(sqlStatements).toStrictEqual(['DROP VIEW "public"."view";']); -}); - -test('drop materialized view', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), - }; - - const schema2 = { - test: table, - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([ - { - name: 'view', - schema: 'public', - type: 'drop_view', - materialized: true, - }, - ]); - expect(sqlStatements).toStrictEqual(['DROP MATERIALIZED VIEW "public"."view";']); -}); - -test('push view with same name', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgView('view').as((qb) => qb.selectDistinct().from(table)), - }; - - const schema2 = { - test: table, - view: pgView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('push materialized view with same name', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), - }; - - const schema2 = { - test: table, - view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); -}); - -test('add with options for materialized view', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), - }; - - const schema2 = { - test: table, - view: pgMaterializedView('view') - .with({ autovacuumFreezeTableAge: 1, autovacuumEnabled: false }) - .as((qb) => qb.selectDistinct().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'view', - schema: 'public', - type: 'alter_view_add_with_option', - with: { - autovacuumFreezeTableAge: 1, - autovacuumEnabled: false, - }, - materialized: true, - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."view" SET (autovacuum_enabled = false, autovacuum_freeze_table_age = 1);`, - ); -}); - -test('add with options to materialized', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), - }; - - const schema2 = { - test: table, - view: pgMaterializedView('view') - .with({ autovacuumVacuumCostDelay: 100, vacuumTruncate: false }) - .as((qb) => qb.selectDistinct().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'view', - schema: 'public', - type: 'alter_view_add_with_option', - with: { - autovacuumVacuumCostDelay: 100, - vacuumTruncate: false, - }, - materialized: true, - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER MATERIALIZED VIEW "public"."view" SET (vacuum_truncate = false, autovacuum_vacuum_cost_delay = 100);`, - ); -}); - -test('add with options to materialized with existing flag', async () => { - const client = new PGlite(); - - const table = pgTable('test', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgMaterializedView('view', {}).as(sql`SELECT id FROM "test"`), - }; - - const schema2 = { - test: table, - view: pgMaterializedView('view', {}).with({ autovacuumVacuumCostDelay: 100, vacuumTruncate: false }).existing(), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop mat view with data', async () => { - const client = new PGlite(); - - const table = pgTable('table', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), - }; - - const schema2 = { - test: table, - }; - - const seedStatements = [`INSERT INTO "public"."table" ("id") VALUES (1), (2), (3)`]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - schemasToRemove, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - matViewsToRemove, - } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - undefined, - { after: seedStatements }, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - materialized: true, - name: 'view', - schema: 'public', - type: 'drop_view', - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."view";`); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe(`· You're about to delete "${chalk.underline('view')}" materialized view with 3 items`); - expect(columnsToRemove!.length).toBe(0); - expect(schemasToRemove!.length).toBe(0); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(matViewsToRemove!.length).toBe(1); -}); - -test('drop mat view without data', async () => { - const client = new PGlite(); - - const table = pgTable('table', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), - }; - - const schema2 = { - test: table, - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - schemasToRemove, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - matViewsToRemove, - } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - materialized: true, - name: 'view', - schema: 'public', - type: 'drop_view', - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."view";`); - expect(infoToPrint!.length).toBe(0); - expect(columnsToRemove!.length).toBe(0); - expect(schemasToRemove!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(matViewsToRemove!.length).toBe(0); -}); - -test('drop view with data', async () => { - const client = new PGlite(); - - const table = pgTable('table', { - id: serial('id').primaryKey(), - }); - const schema1 = { - test: table, - view: pgView('view', {}).as(sql`SELECT * FROM ${table}`), - }; - - const schema2 = { - test: table, - }; - - const seedStatements = [`INSERT INTO "public"."table" ("id") VALUES (1), (2), (3)`]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - schemasToRemove, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - matViewsToRemove, - } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - undefined, - { after: seedStatements }, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'view', - schema: 'public', - type: 'drop_view', - }); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`DROP VIEW "public"."view";`); - expect(infoToPrint!.length).toBe(0); - expect(columnsToRemove!.length).toBe(0); - expect(schemasToRemove!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(matViewsToRemove!.length).toBe(0); -}); - -test('enums ordering', async () => { - const enum1 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ - 'custAll', - 'custAdmin', - 'custClerk', - 'custInvoiceManager', - 'custMgf', - 'custApprover', - 'custOrderWriter', - 'custBuyer', - ]); - const schema1 = {}; - - const schema2 = { - enum1, - }; - - const { sqlStatements: createEnum } = await diffTestSchemas(schema1, schema2, []); - - const enum2 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ - 'addedToTop', - 'custAll', - 'custAdmin', - 'custClerk', - 'custInvoiceManager', - 'custMgf', - 'custApprover', - 'custOrderWriter', - 'custBuyer', - ]); - const schema3 = { - enum2, - }; - - const { sqlStatements: addedValueSql } = await diffTestSchemas(schema2, schema3, []); - - const enum3 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ - 'addedToTop', - 'custAll', - 'custAdmin', - 'custClerk', - 'custInvoiceManager', - 'addedToMiddle', - 'custMgf', - 'custApprover', - 'custOrderWriter', - 'custBuyer', - ]); - const schema4 = { - enum3, - }; - - const client = new PGlite(); - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema3, - schema4, - [], - false, - ['public'], - undefined, - undefined, - { before: [...createEnum, ...addedValueSql], runApply: false }, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - before: 'custMgf', - name: 'enum_users_customer_and_ship_to_settings_roles', - schema: 'public', - type: 'alter_type_add_value', - value: 'addedToMiddle', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TYPE "public"."enum_users_customer_and_ship_to_settings_roles" ADD VALUE 'addedToMiddle' BEFORE 'custMgf';`, - ); -}); - -test('drop enum values', async () => { - const newSchema = pgSchema('mySchema'); - const enum3 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ - 'addedToTop', - 'custAll', - 'custAdmin', - 'custClerk', - 'custInvoiceManager', - 'addedToMiddle', - 'custMgf', - 'custApprover', - 'custOrderWriter', - 'custBuyer', - ]); - const schema1 = { - enum3, - table: pgTable('enum_table', { - id: enum3(), - }), - newSchema, - table1: newSchema.table('enum_table', { - id: enum3(), - }), - }; - - const enum4 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ - 'addedToTop', - 'custAll', - 'custAdmin', - 'custClerk', - 'custInvoiceManager', - 'custApprover', - 'custOrderWriter', - 'custBuyer', - ]); - const schema2 = { - enum4, - table: pgTable('enum_table', { - id: enum4(), - }), - newSchema, - table1: newSchema.table('enum_table', { - id: enum4(), - }), - }; - - const client = new PGlite(); - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public', 'mySchema'], - undefined, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'enum_users_customer_and_ship_to_settings_roles', - enumSchema: 'public', - type: 'alter_type_drop_value', - newValues: [ - 'addedToTop', - 'custAll', - 'custAdmin', - 'custClerk', - 'custInvoiceManager', - 'custApprover', - 'custOrderWriter', - 'custBuyer', - ], - deletedValues: ['addedToMiddle', 'custMgf'], - columnsWithEnum: [{ - column: 'id', - tableSchema: '', - table: 'enum_table', - columnType: 'enum_users_customer_and_ship_to_settings_roles', - default: undefined, - }, { - column: 'id', - tableSchema: 'mySchema', - table: 'enum_table', - columnType: 'enum_users_customer_and_ship_to_settings_roles', - default: undefined, - }], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe( - `ALTER TABLE "enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, - ); - expect(sqlStatements[1]).toBe( - `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, - ); - expect(sqlStatements[2]).toBe( - `DROP TYPE "public"."enum_users_customer_and_ship_to_settings_roles";`, - ); - expect(sqlStatements[3]).toBe( - `CREATE TYPE "public"."enum_users_customer_and_ship_to_settings_roles" AS ENUM('addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'custApprover', 'custOrderWriter', 'custBuyer');`, - ); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "enum_table" ALTER COLUMN "id" SET DATA TYPE "public"."enum_users_customer_and_ship_to_settings_roles" USING "id"::"public"."enum_users_customer_and_ship_to_settings_roles";`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE "public"."enum_users_customer_and_ship_to_settings_roles" USING "id"::"public"."enum_users_customer_and_ship_to_settings_roles";`, - ); -}); - -test('column is enum type with default value. shuffle enum', async () => { - const client = new PGlite(); - - const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); - - const from = { - enum1, - table: pgTable('table', { - column: enum1('column').default('value2'), - }), - }; - - const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); - const to = { - enum2, - table: pgTable('table', { - column: enum2('column').default('value2'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - from, - to, - [], - false, - ['public'], - undefined, - ); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::text;`); - expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); - expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"public"."enum";`, - ); - expect(sqlStatements[5]).toBe( - `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnsWithEnum: [ - { - column: 'column', - tableSchema: '', - table: 'table', - default: "'value2'", - columnType: 'enum', - }, - ], - deletedValues: [ - 'value3', - ], - name: 'enum', - newValues: [ - 'value1', - 'value3', - 'value2', - ], - enumSchema: 'public', - type: 'alter_type_drop_value', - }); -}); - -// Policies and Roles push test -test('full policy: no changes', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('add policy', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { type: 'enable_rls', tableName: 'users', schema: '' }, - { - type: 'create_policy', - tableName: 'users', - data: { - name: 'test', - as: 'PERMISSIVE', - for: 'ALL', - to: ['public'], - on: undefined, - }, - schema: '', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('drop policy', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { type: 'disable_rls', tableName: 'users', schema: '' }, - { - schema: '', - tableName: 'users', - type: 'disable_rls', - }, - { - type: 'drop_policy', - tableName: 'users', - data: { - name: 'test', - as: 'PERMISSIVE', - for: 'ALL', - to: ['public'], - on: undefined, - }, - schema: '', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', - 'DROP POLICY "test" ON "users" CASCADE;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('add policy without enable rls', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - newrls: pgPolicy('newRls'), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { - type: 'create_policy', - tableName: 'users', - data: { - name: 'newRls', - as: 'PERMISSIVE', - for: 'ALL', - to: ['public'], - on: undefined, - }, - schema: '', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE POLICY "newRls" ON "users" AS PERMISSIVE FOR ALL TO public;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('drop policy without disable rls', async () => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - oldRls: pgPolicy('oldRls'), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(statements).toStrictEqual([ - { - type: 'drop_policy', - tableName: 'users', - data: { - name: 'oldRls', - as: 'PERMISSIVE', - for: 'ALL', - to: ['public'], - on: undefined, - }, - schema: '', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "oldRls" ON "users" CASCADE;', - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -//// - -test('alter policy without recreation: changing roles', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" TO current_role;', - ]); - expect(statements).toStrictEqual([ - { - newData: 'test--PERMISSIVE--ALL--current_role--undefined', - oldData: 'test--PERMISSIVE--ALL--public--undefined', - schema: '', - tableName: 'users', - type: 'alter_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter policy without recreation: changing using', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', using: sql`true` }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([]); - expect(statements).toStrictEqual([]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter policy without recreation: changing with check', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([]); - expect(statements).toStrictEqual([]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter policy with recreation: changing as', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'restrictive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'RESTRICTIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter policy with recreation: changing for', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', for: 'delete' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'PERMISSIVE', - for: 'DELETE', - name: 'test', - to: ['public'], - on: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter policy with recreation: changing both "as" and "for"', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'restrictive', for: 'insert' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR INSERT TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'RESTRICTIVE', - for: 'INSERT', - name: 'test', - to: ['public'], - on: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter policy with recreation: changing all fields', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', for: 'select', using: sql`true` }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'restrictive', to: 'current_role', withCheck: sql`true` }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO current_role;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'SELECT', - name: 'test', - to: ['public'], - on: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'RESTRICTIVE', - for: 'ALL', - name: 'test', - to: ['current_role'], - on: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('rename policy', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('newName', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - ['public.users.test->public.users.newName'], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" RENAME TO "newName";', - ]); - expect(statements).toStrictEqual([ - { - newName: 'newName', - oldName: 'test', - schema: '', - tableName: 'users', - type: 'rename_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('rename policy in renamed table', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users2', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('newName', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [ - 'public.users->public.users2', - 'public.users2.test->public.users2.newName', - ], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" RENAME TO "users2";', - 'ALTER POLICY "test" ON "users2" RENAME TO "newName";', - ]); - expect(statements).toStrictEqual([ - { - fromSchema: '', - tableNameFrom: 'users', - tableNameTo: 'users2', - toSchema: '', - type: 'rename_table', - }, - { - newName: 'newName', - oldName: 'test', - schema: '', - tableName: 'users2', - type: 'rename_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('create table with a policy', async (t) => { - const client = new PGlite(); - - const schema1 = {}; - - const schema2 = { - users: pgTable('users2', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users2" (\n\t"id" integer PRIMARY KEY NOT NULL\n);\n', - 'ALTER TABLE "users2" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test" ON "users2" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - columns: [ - { - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - ], - checkConstraints: [], - compositePKs: [], - isRLSEnabled: false, - compositePkName: '', - policies: [ - 'test--PERMISSIVE--ALL--public--undefined', - ], - schema: '', - tableName: 'users2', - type: 'create_table', - uniqueConstraints: [], - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: [ - 'public', - ], - on: undefined, - }, - schema: '', - tableName: 'users2', - type: 'create_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('drop table with a policy', async (t) => { - const client = new PGlite(); - - const schema1 = { - users: pgTable('users2', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = {}; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users2" CASCADE;', - 'DROP TABLE "users2" CASCADE;', - ]); - expect(statements).toStrictEqual([ - { - policies: [ - 'test--PERMISSIVE--ALL--public--undefined', - ], - schema: '', - tableName: 'users2', - type: 'drop_table', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('add policy with multiple "to" roles', async (t) => { - const client = new PGlite(); - - client.query(`CREATE ROLE manager;`); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const role = pgRole('manager').existing(); - - const schema2 = { - role, - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { to: ['current_role', role] }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - ); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_role, "manager";', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'enable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: undefined, - to: ['current_role', 'manager'], - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('rename policy that is linked', async (t) => { - const client = new PGlite(); - - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const schema2 = { - users, - rls: pgPolicy('newName', { as: 'permissive' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - ['public.users.test->public.users.newName'], - false, - ['public'], - undefined, - undefined, - { before: createUsers }, - ); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" RENAME TO "newName";', - ]); - expect(statements).toStrictEqual([ - { - newName: 'newName', - oldName: 'test', - schema: '', - tableName: 'users', - type: 'rename_policy', - }, - ]); -}); - -test('alter policy that is linked', async (t) => { - const client = new PGlite(); - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const schema2 = { - users, - rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }).link(users), - }; - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - undefined, - { before: createUsers }, - ); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" TO current_role;', - ]); - expect(statements).toStrictEqual([{ - newData: 'test--PERMISSIVE--ALL--current_role--undefined', - oldData: 'test--PERMISSIVE--ALL--public--undefined', - schema: '', - tableName: 'users', - type: 'alter_policy', - }]); -}); - -test('alter policy that is linked: withCheck', async (t) => { - const client = new PGlite(); - - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }).link(users), - }; - - const schema2 = { - users, - rls: pgPolicy('test', { as: 'permissive', withCheck: sql`false` }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - undefined, - { before: createUsers }, - ); - - expect(sqlStatements).toStrictEqual([]); - expect(statements).toStrictEqual([]); -}); - -test('alter policy that is linked: using', async (t) => { - const client = new PGlite(); - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive', using: sql`true` }).link(users), - }; - - const schema2 = { - users, - rls: pgPolicy('test', { as: 'permissive', using: sql`false` }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - undefined, - { before: createUsers }, - ); - - expect(sqlStatements).toStrictEqual([]); - expect(statements).toStrictEqual([]); -}); - -test('alter policy that is linked: using', async (t) => { - const client = new PGlite(); - - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); - - const schema1 = { - rls: pgPolicy('test', { for: 'insert' }).link(users), - }; - - const schema2 = { - users, - rls: pgPolicy('test', { for: 'delete' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - undefined, - { before: createUsers }, - ); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'INSERT', - name: 'test', - on: undefined, - to: [ - 'public', - ], - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'PERMISSIVE', - for: 'DELETE', - name: 'test', - on: undefined, - to: [ - 'public', - ], - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -//// - -test('create role', async (t) => { - const client = new PGlite(); - - const schema1 = {}; - - const schema2 = { - manager: pgRole('manager'), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager'] } }, - ); - - expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager";']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'create_role', - values: { - createDb: false, - createRole: false, - inherit: true, - }, - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('create role with properties', async (t) => { - const client = new PGlite(); - - const schema1 = {}; - - const schema2 = { - manager: pgRole('manager', { createDb: true, inherit: false, createRole: true }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager'] } }, - ); - - expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'create_role', - values: { - createDb: true, - createRole: true, - inherit: false, - }, - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('create role with some properties', async (t) => { - const client = new PGlite(); - - const schema1 = {}; - - const schema2 = { - manager: pgRole('manager', { createDb: true, inherit: false }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager'] } }, - ); - - expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager" WITH CREATEDB NOINHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'create_role', - values: { - createDb: true, - createRole: false, - inherit: false, - }, - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('drop role', async (t) => { - const client = new PGlite(); - - const schema1 = { manager: pgRole('manager') }; - - const schema2 = {}; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager'] } }, - ); - - expect(sqlStatements).toStrictEqual(['DROP ROLE "manager";']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'drop_role', - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('create and drop role', async (t) => { - const client = new PGlite(); - - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - admin: pgRole('admin'), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager', 'admin'] } }, - ); - - expect(sqlStatements).toStrictEqual(['DROP ROLE "manager";', 'CREATE ROLE "admin";']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'drop_role', - }, - { - name: 'admin', - type: 'create_role', - values: { - createDb: false, - createRole: false, - inherit: true, - }, - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('rename role', async (t) => { - const client = new PGlite(); - - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - admin: pgRole('admin'), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - ['manager->admin'], - false, - ['public'], - undefined, - { roles: { include: ['manager', 'admin'] } }, - ); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" RENAME TO "admin";']); - expect(statements).toStrictEqual([ - { nameFrom: 'manager', nameTo: 'admin', type: 'rename_role' }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter all role field', async (t) => { - const client = new PGlite(); - - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - manager: pgRole('manager', { createDb: true, createRole: true, inherit: false }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager'] } }, - ); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'alter_role', - values: { - createDb: true, - createRole: true, - inherit: false, - }, - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter createdb in role', async (t) => { - const client = new PGlite(); - - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - manager: pgRole('manager', { createDb: true }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager'] } }, - ); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH CREATEDB NOCREATEROLE INHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'alter_role', - values: { - createDb: true, - createRole: false, - inherit: true, - }, - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter createrole in role', async (t) => { - const client = new PGlite(); - - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - manager: pgRole('manager', { createRole: true }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager'] } }, - ); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH NOCREATEDB CREATEROLE INHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'alter_role', - values: { - createDb: false, - createRole: true, - inherit: true, - }, - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); - -test('alter inherit in role', async (t) => { - const client = new PGlite(); - - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - manager: pgRole('manager', { inherit: false }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPush( - client, - schema1, - schema2, - [], - false, - ['public'], - undefined, - { roles: { include: ['manager'] } }, - ); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH NOCREATEDB NOCREATEROLE NOINHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'alter_role', - values: { - createDb: false, - createRole: false, - inherit: false, - }, - }, - ]); - - for (const st of sqlStatements) { - await client.query(st); - } -}); diff --git a/drizzle-kit/tests/push/singlestore-push.test.ts b/drizzle-kit/tests/push/singlestore-push.test.ts deleted file mode 100644 index 2cc8931cab..0000000000 --- a/drizzle-kit/tests/push/singlestore-push.test.ts +++ /dev/null @@ -1,894 +0,0 @@ -import chalk from 'chalk'; -import Docker from 'dockerode'; -import { getTableConfig, index, int, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; -import fs from 'fs'; -import getPort from 'get-port'; -import { Connection, createConnection } from 'mysql2/promise'; -import { diffTestSchemasPushSingleStore } from 'tests/schemaDiffer'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, expect, test } from 'vitest'; - -let client: Connection; -let singlestoreContainer: Docker.Container; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - singlestoreContainer = await docker.createContainer({ - Image: image, - Env: ['ROOT_PASSWORD=singlestore'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await singlestoreContainer.start(); - await new Promise((resolve) => setTimeout(resolve, 4000)); - - return `singlestore://root:singlestore@localhost:${port}/`; -} - -beforeAll(async () => { - const connectionString = process.env.MYSQL_CONNECTION_STRING ?? (await createDockerDB()); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await createConnection(connectionString); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await singlestoreContainer?.stop().catch(console.error); - throw lastError; - } - - await client.query('DROP DATABASE IF EXISTS drizzle;'); - await client.query('CREATE DATABASE drizzle;'); - await client.query('USE drizzle;'); -}); - -afterAll(async () => { - await client?.end().catch(console.error); - await singlestoreContainer?.stop().catch(console.error); -}); - -if (!fs.existsSync('tests/push/singlestore')) { - fs.mkdirSync('tests/push/singlestore'); -} - -test('db has checks. Push with same names', async () => { - const schema1 = { - test: singlestoreTable('test', { - id: int('id').primaryKey(), - values: int('values').default(1), - }), - }; - const schema2 = { - test: singlestoreTable('test', { - id: int('id').primaryKey(), - values: int('values').default(1), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - ); - - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); - - await client.query(`DROP TABLE \`test\`;`); -}); - -// TODO: Unskip this test when views are implemented -/* test.skip.skip('create view', async () => { - const table = singlestoreTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - }; - - const schema2 = { - test: table, - view: singlestoreView('view').as((qb) => qb.select().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - definition: 'select `id` from `test`', - name: 'view', - type: 'singlestore_create_view', - replace: false, - sqlSecurity: 'definer', - withCheckOption: undefined, - algorithm: 'undefined', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `CREATE ALGORITHM = undefined -SQL SECURITY definer -VIEW \`view\` AS (select \`id\` from \`test\`);`, - ]); - - await client.query(`DROP TABLE \`test\`;`); -}); */ - -// TODO: Unskip this test when views are implemented -/* test.skip('drop view', async () => { - const table = singlestoreTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: singlestoreView('view').as((qb) => qb.select().from(table)), - }; - - const schema2 = { - test: table, - }; - - const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - name: 'view', - type: 'drop_view', - }, - ]); - expect(sqlStatements).toStrictEqual(['DROP VIEW `view`;']); - await client.query(`DROP TABLE \`test\`;`); - await client.query(`DROP VIEW \`view\`;`); -}); */ - -// TODO: Unskip this test when views are implemented -/* test.skip('alter view ".as"', async () => { - const table = singlestoreTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: singlestoreView('view').as((qb) => - qb - .select() - .from(table) - .where(sql`${table.id} = 1`) - ), - }; - - const schema2 = { - test: table, - view: singlestoreView('view').as((qb) => qb.select().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); - - await client.query(`DROP TABLE \`test\`;`); - await client.query(`DROP VIEW \`view\`;`); -}); */ - -// TODO: Unskip this test when views are implemented -/* test.skip('alter meta options with distinct in definition', async () => { - const table = singlestoreTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: singlestoreView('view') - .withCheckOption('cascaded') - .sqlSecurity('definer') - .algorithm('merge') - .as((qb) => - qb - .selectDistinct() - .from(table) - .where(sql`${table.id} = 1`) - ), - }; - - const schema2 = { - test: table, - view: singlestoreView('view') - .withCheckOption('cascaded') - .sqlSecurity('definer') - .algorithm('undefined') - .as((qb) => qb.selectDistinct().from(table)), - }; - - await expect( - diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - ), - ).rejects.toThrowError(); - - await client.query(`DROP TABLE \`test\`;`); -}); */ - -test('added column not null and without default to table with data', async (t) => { - const schema1 = { - companies: singlestoreTable('companies', { - id: int('id'), - name: text('name'), - }), - }; - - const schema2 = { - companies: singlestoreTable('companies', { - id: int('id'), - name: text('name'), - age: int('age').notNull(), - }), - }; - - const table = getTableConfig(schema1.companies); - - const seedStatements = [ - `INSERT INTO \`${table.name}\` (\`${schema1.companies.name.name}\`) VALUES ('drizzle');`, - `INSERT INTO \`${table.name}\` (\`${schema1.companies.name.name}\`) VALUES ('turso');`, - ]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - undefined, - { - after: seedStatements, - }, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'companies', - column: { - name: 'age', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - schema: '', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`truncate table companies;`); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`companies\` ADD \`age\` int NOT NULL;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about to add not-null ${ - chalk.underline( - 'age', - ) - } column without default value, which contains 2 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(1); - expect(tablesToTruncate![0]).toBe('companies'); - - await client.query(`DROP TABLE \`companies\`;`); -}); - -test('added column not null and without default to table without data', async (t) => { - const schema1 = { - companies: singlestoreTable('companies', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }), - }; - - const schema2 = { - companies: singlestoreTable('companies', { - id: int('id').primaryKey(), - name: text('name').notNull(), - age: int('age').notNull(), - }), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - undefined, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'companies', - column: { - name: 'age', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - schema: '', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`companies\` ADD \`age\` int NOT NULL;`, - ); - - expect(infoToPrint!.length).toBe(0); - expect(columnsToRemove!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); - - await client.query(`DROP TABLE \`companies\`;`); -}); - -test('drop not null, add not null', async (t) => { - const schema1 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }), - posts: singlestoreTable( - 'posts', - { - id: int('id').primaryKey(), - name: text('name'), - userId: int('user_id'), - }, - ), - }; - - const schema2 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: text('name'), - }), - posts: singlestoreTable( - 'posts', - { - id: int('id').primaryKey(), - name: text('name').notNull(), - userId: int('user_id'), - }, - ), - }; - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - undefined, - ); - - expect(statements!.length).toBe(2); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - onUpdate: undefined, - primaryKey: false, - type: 'int', - }, - { - autoincrement: false, - generated: undefined, - name: 'name', - notNull: true, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: undefined, - name: 'user_id', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'int', - }, - ], - compositePKs: [ - 'posts_id;id', - ], - tableName: 'posts', - type: 'singlestore_recreate_table', - uniqueConstraints: [], - }); - expect(statements![1]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - onUpdate: undefined, - primaryKey: false, - type: 'int', - }, - { - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [ - 'users_id;id', - ], - tableName: 'users', - type: 'singlestore_recreate_table', - uniqueConstraints: [], - }); - expect(sqlStatements!.length).toBe(8); - expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_posts\` ( -\t\`id\` int NOT NULL, -\t\`name\` text NOT NULL, -\t\`user_id\` int, -\tCONSTRAINT \`posts_id\` PRIMARY KEY(\`id\`) -);\n`); - expect(sqlStatements![1]).toBe( - `INSERT INTO \`__new_posts\`(\`id\`, \`name\`, \`user_id\`) SELECT \`id\`, \`name\`, \`user_id\` FROM \`posts\`;`, - ); - expect(sqlStatements![2]).toBe(`DROP TABLE \`posts\`;`); - expect(sqlStatements![3]).toBe(`ALTER TABLE \`__new_posts\` RENAME TO \`posts\`;`); - expect(sqlStatements![4]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` int NOT NULL, -\t\`name\` text, -\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) -);\n`); - expect(sqlStatements![5]).toBe( - `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, - ); - expect(sqlStatements![6]).toBe( - `DROP TABLE \`users\`;`, - ); - expect(sqlStatements![7]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); - - await client.query(`DROP TABLE \`users\`;`); - await client.query(`DROP TABLE \`posts\`;`); -}); - -test('drop table with data', async (t) => { - const schema1 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }), - posts: singlestoreTable( - 'posts', - { - id: int('id').primaryKey(), - name: text('name'), - userId: int('user_id'), - }, - ), - }; - - const schema2 = { - posts: singlestoreTable( - 'posts', - { - id: int('id').primaryKey(), - name: text('name'), - userId: int('user_id'), - }, - ), - }; - - const seedStatements = [ - `INSERT INTO \`users\` (\`id\`, \`name\`) VALUES (1, 'drizzle')`, - ]; - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - undefined, - { after: seedStatements }, - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - policies: [], - schema: undefined, - tableName: 'users', - type: 'drop_table', - }); - - expect(sqlStatements!.length).toBe(1); - expect(sqlStatements![0]).toBe(`DROP TABLE \`users\`;`); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe(`· You're about to delete ${chalk.underline('users')} table with 1 items`); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(1); - expect(tablesToRemove![0]).toBe('users'); - expect(tablesToTruncate!.length).toBe(0); - - await client.query(`DROP TABLE \`users\`;`); - await client.query(`DROP TABLE \`posts\`;`); -}); - -test('change data type. db has indexes. table does not have values', async (t) => { - const schema1 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: int('name').notNull(), - }, (table) => [index('index').on(table.name)]), - }; - - const schema2 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }, (table) => [index('index').on(table.name)]), - }; - - const seedStatements = [`INSERT INTO users VALUES (1, 12)`]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - undefined, - ); - - expect(statements!.length).toBe(2); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - onUpdate: undefined, - primaryKey: false, - type: 'int', - }, - { - autoincrement: false, - generated: undefined, - name: 'name', - notNull: true, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [ - 'users_id;id', - ], - tableName: 'users', - type: 'singlestore_recreate_table', - uniqueConstraints: [], - }); - expect(statements![1]).toStrictEqual({ - data: 'index;name;false;;;', - internal: undefined, - schema: '', - tableName: 'users', - type: 'create_index', - }); - - expect(sqlStatements!.length).toBe(5); - expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` int NOT NULL, -\t\`name\` text NOT NULL, -\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) -);\n`); - expect(sqlStatements![1]).toBe( - `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, - ); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements![4]).toBe(`CREATE INDEX \`index\` ON \`users\` (\`name\`);`); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); - - await client.query(`DROP TABLE \`users\`;`); -}); - -test('change data type. db has indexes. table has values', async (t) => { - const schema1 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: int('name'), - }, (table) => [index('index').on(table.name)]), - }; - - const schema2 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: text('name'), - }, (table) => [index('index').on(table.name)]), - }; - - const seedStatements = [`INSERT INTO users VALUES (1, 12);`, `INSERT INTO users (id) VALUES (2);`]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - undefined, - { after: seedStatements }, - ); - - expect(statements!.length).toBe(2); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - onUpdate: undefined, - primaryKey: false, - type: 'int', - }, - { - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [ - 'users_id;id', - ], - tableName: 'users', - type: 'singlestore_recreate_table', - uniqueConstraints: [], - }); - expect(statements![1]).toStrictEqual({ - data: 'index;name;false;;;', - internal: undefined, - schema: '', - tableName: 'users', - type: 'create_index', - }); - - expect(sqlStatements!.length).toBe(6); - expect(sqlStatements![0]).toBe(`TRUNCATE TABLE \`users\`;`); - expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` int NOT NULL, -\t\`name\` text, -\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) -);\n`); - expect(sqlStatements![2]).toBe( - `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, - ); - expect(sqlStatements![3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements![5]).toBe(`CREATE INDEX \`index\` ON \`users\` (\`name\`);`); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about recreate ${chalk.underline('users')} table with data type changing for ${ - chalk.underline('name') - } column, which contains 1 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(1); - expect(tablesToTruncate![0]).toBe(`users`); - - await client.query(`DROP TABLE \`users\`;`); -}); - -test('add column. add default to column without not null', async (t) => { - const schema1 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: text('name'), - }), - }; - - const schema2 = { - users: singlestoreTable('users', { - id: int('id').primaryKey(), - name: text('name').default('drizzle'), - age: int('age'), - }), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSingleStore( - client, - schema1, - schema2, - [], - 'drizzle', - false, - undefined, - ); - - expect(statements!.length).toBe(2); - expect(statements![0]).toStrictEqual({ - columnAutoIncrement: false, - columnName: 'name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - newDefaultValue: "'drizzle'", - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_default', - }); - expect(statements![1]).toStrictEqual({ - type: 'alter_table_add_column', - tableName: 'users', - schema: '', - column: { - notNull: false, - primaryKey: false, - autoincrement: false, - name: 'age', - type: 'int', - }, - }); - expect(sqlStatements!.length).toBe(2); - expect(sqlStatements![0]).toBe(`ALTER TABLE \`users\` MODIFY COLUMN \`name\` text DEFAULT 'drizzle';`); - expect(sqlStatements![1]).toBe(`ALTER TABLE \`users\` ADD \`age\` int;`); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); - - await client.query(`DROP TABLE \`users\`;`); -}); diff --git a/drizzle-kit/tests/push/singlestore.test.ts b/drizzle-kit/tests/push/singlestore.test.ts deleted file mode 100644 index 0fe04f03b7..0000000000 --- a/drizzle-kit/tests/push/singlestore.test.ts +++ /dev/null @@ -1,444 +0,0 @@ -import Docker from 'dockerode'; -import { SQL, sql } from 'drizzle-orm'; -import { - bigint, - binary, - char, - date, - decimal, - double, - float, - int, - mediumint, - primaryKey, - singlestoreEnum, - singlestoreTable, - smallint, - text, - time, - timestamp, - tinyint, - varbinary, - varchar, - vector, - year, -} from 'drizzle-orm/singlestore-core'; -import getPort from 'get-port'; -import { Connection, createConnection } from 'mysql2/promise'; -import { diffTestSchemasPushSingleStore, diffTestSchemasSingleStore } from 'tests/schemaDiffer'; -import { v4 as uuid } from 'uuid'; -import { expect } from 'vitest'; -import { DialectSuite, run } from './common'; - -async function createDockerDB(context: any): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - context.singlestoreContainer = await docker.createContainer({ - Image: image, - Env: ['ROOT_PASSWORD=singlestore'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await context.singlestoreContainer.start(); - await new Promise((resolve) => setTimeout(resolve, 4000)); - - return `singlestore://root:singlestore@localhost:${port}/`; -} - -const singlestoreSuite: DialectSuite = { - allTypes: async function(context: any): Promise { - const schema1 = { - allBigInts: singlestoreTable('all_big_ints', { - simple: bigint('simple', { mode: 'number' }), - columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(), - columnDefault: bigint('column_default', { mode: 'number' }).default(12), - columnDefaultSql: bigint('column_default_sql', { - mode: 'number', - }).default(12), - }), - allBools: singlestoreTable('all_bools', { - simple: tinyint('simple'), - columnNotNull: tinyint('column_not_null').notNull(), - columnDefault: tinyint('column_default').default(1), - }), - allChars: singlestoreTable('all_chars', { - simple: char('simple', { length: 1 }), - columnNotNull: char('column_not_null', { length: 45 }).notNull(), - // columnDefault: char("column_default", { length: 1 }).default("h"), - columnDefaultSql: char('column_default_sql', { length: 1 }).default( - 'h', - ), - }), - // allDateTimes: singlestoreTable("all_date_times", { - // simple: datetime("simple", { mode: "string", fsp: 1 }), - // columnNotNull: datetime("column_not_null", { - // mode: "string", - // }).notNull(), - // columnDefault: datetime("column_default", { mode: "string" }).default( - // "2023-03-01 14:05:29" - // ), - // }), - allDates: singlestoreTable('all_dates', { - simple: date('simple', { mode: 'string' }), - column_not_null: date('column_not_null', { mode: 'string' }).notNull(), - column_default: date('column_default', { mode: 'string' }).default( - '2023-03-01', - ), - }), - allDecimals: singlestoreTable('all_decimals', { - simple: decimal('simple', { precision: 1, scale: 0 }), - columnNotNull: decimal('column_not_null', { - precision: 45, - scale: 3, - }).notNull(), - columnDefault: decimal('column_default', { - precision: 10, - scale: 0, - }).default('100'), - columnDefaultSql: decimal('column_default_sql', { - precision: 10, - scale: 0, - }).default('101'), - }), - - allDoubles: singlestoreTable('all_doubles', { - simple: double('simple'), - columnNotNull: double('column_not_null').notNull(), - columnDefault: double('column_default').default(100), - columnDefaultSql: double('column_default_sql').default(101), - }), - - allEnums: singlestoreTable('all_enums', { - simple: singlestoreEnum('simple', ['hi', 'hello']), - }), - - allEnums1: singlestoreTable('all_enums1', { - simple: singlestoreEnum('simple', ['hi', 'hello']).default('hi'), - }), - - allFloats: singlestoreTable('all_floats', { - columnNotNull: float('column_not_null').notNull(), - columnDefault: float('column_default').default(100), - columnDefaultSql: float('column_default_sql').default(101), - }), - - allInts: singlestoreTable('all_ints', { - simple: int('simple'), - columnNotNull: int('column_not_null').notNull(), - columnDefault: int('column_default').default(100), - columnDefaultSql: int('column_default_sql').default(101), - }), - - allIntsRef: singlestoreTable('all_ints_ref', { - simple: int('simple'), - columnNotNull: int('column_not_null').notNull(), - columnDefault: int('column_default').default(100), - columnDefaultSql: int('column_default_sql').default(101), - }), - - // allJsons: singlestoreTable("all_jsons", { - // columnDefaultObject: json("column_default_object") - // .default({ hello: "world world" }) - // .notNull(), - // columnDefaultArray: json("column_default_array").default({ - // hello: { "world world": ["foo", "bar"] }, - // foo: "bar", - // fe: 23, - // }), - // column: json("column"), - // }), - - allMInts: singlestoreTable('all_m_ints', { - simple: mediumint('simple'), - columnNotNull: mediumint('column_not_null').notNull(), - columnDefault: mediumint('column_default').default(100), - columnDefaultSql: mediumint('column_default_sql').default(101), - }), - - allReals: singlestoreTable('all_reals', { - simple: double('simple', { precision: 5, scale: 2 }), - columnNotNull: double('column_not_null').notNull(), - columnDefault: double('column_default').default(100), - columnDefaultSql: double('column_default_sql').default(101), - }), - - allSInts: singlestoreTable('all_s_ints', { - simple: smallint('simple'), - columnNotNull: smallint('column_not_null').notNull(), - columnDefault: smallint('column_default').default(100), - columnDefaultSql: smallint('column_default_sql').default(101), - }), - - // allSmallSerials: singlestoreTable("all_small_serials", { - // columnAll: serial("column_all").notNull(), - // }), - - allTInts: singlestoreTable('all_t_ints', { - simple: tinyint('simple'), - columnNotNull: tinyint('column_not_null').notNull(), - columnDefault: tinyint('column_default').default(10), - columnDefaultSql: tinyint('column_default_sql').default(11), - }), - - allTexts: singlestoreTable('all_texts', { - simple: text('simple'), - columnNotNull: text('column_not_null').notNull(), - columnDefault: text('column_default').default('hello'), - columnDefaultSql: text('column_default_sql').default('hello'), - }), - - allTimes: singlestoreTable('all_times', { - // simple: time("simple", { fsp: 1 }), - columnNotNull: time('column_not_null').notNull(), - columnDefault: time('column_default').default('22:12:12'), - }), - - allTimestamps: singlestoreTable('all_timestamps', { - // columnDateNow: timestamp("column_date_now", { - // fsp: 1, - // mode: "string", - // }).default(sql`(now())`), - columnAll: timestamp('column_all', { mode: 'string' }) - .default('2023-03-01 14:05:29') - .notNull(), - column: timestamp('column', { mode: 'string' }).default( - '2023-02-28 16:18:31', - ), - }), - - allVarChars: singlestoreTable('all_var_chars', { - simple: varchar('simple', { length: 100 }), - columnNotNull: varchar('column_not_null', { length: 45 }).notNull(), - columnDefault: varchar('column_default', { length: 100 }).default( - 'hello', - ), - columnDefaultSql: varchar('column_default_sql', { - length: 100, - }).default('hello'), - }), - - allVarbinaries: singlestoreTable('all_varbinaries', { - simple: varbinary('simple', { length: 100 }), - columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(), - columnDefault: varbinary('column_default', { length: 12 }), - }), - - allYears: singlestoreTable('all_years', { - simple: year('simple'), - columnNotNull: year('column_not_null').notNull(), - columnDefault: year('column_default').default(2022), - }), - - binafry: singlestoreTable('binary', { - simple: binary('simple', { length: 1 }), - columnNotNull: binary('column_not_null', { length: 1 }).notNull(), - columnDefault: binary('column_default', { length: 12 }), - }), - - allVectors: singlestoreTable('all_vectors', { - vectorSimple: vector('vector_simple', { dimensions: 1 }), - vectorElementType: vector('vector_element_type', { dimensions: 1, elementType: 'I8' }), - vectorNotNull: vector('vector_not_null', { dimensions: 1 }).notNull(), - vectorDefault: vector('vector_default', { dimensions: 1 }).default([1]), - }), - }; - - const { statements } = await diffTestSchemasPushSingleStore( - context.client as Connection, - schema1, - schema1, - [], - 'drizzle', - false, - ); - console.log(statements); - expect(statements.length).toBe(0); - expect(statements).toEqual([]); - - const { sqlStatements: dropStatements } = await diffTestSchemasSingleStore( - schema1, - {}, - [], - false, - ); - - for (const st of dropStatements) { - await context.client.query(st); - } - }, - addBasicIndexes: function(context?: any): Promise { - return {} as any; - }, - changeIndexFields: function(context?: any): Promise { - return {} as any; - }, - dropIndex: function(context?: any): Promise { - return {} as any; - }, - indexesToBeNotTriggered: function(context?: any): Promise { - return {} as any; - }, - indexesTestCase1: function(context?: any): Promise { - return {} as any; - }, - async case1() { - // TODO: implement if needed - expect(true).toBe(true); - }, - addNotNull: function(context?: any): Promise { - return {} as any; - }, - addNotNullWithDataNoRollback: function(context?: any): Promise { - return {} as any; - }, - addBasicSequences: function(context?: any): Promise { - return {} as any; - }, - addGeneratedColumn: async function(context: any): Promise { - return {} as any; - }, - addGeneratedToColumn: async function(context: any): Promise { - return {} as any; - }, - dropGeneratedConstraint: async function(context: any): Promise { - return {} as any; - }, - alterGeneratedConstraint: async function(context: any): Promise { - return {} as any; - }, - createTableWithGeneratedConstraint: function(context?: any): Promise { - return {} as any; - }, - createCompositePrimaryKey: async function(context: any): Promise { - const schema1 = {}; - - const schema2 = { - table: singlestoreTable('table', { - col1: int('col1').notNull(), - col2: int('col2').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.col1, t.col2], - }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( - context.client as Connection, - schema1, - schema2, - [], - 'drizzle', - false, - ); - - expect(statements).toStrictEqual([ - { - type: 'create_table', - tableName: 'table', - schema: undefined, - internals: { - indexes: {}, - tables: {}, - }, - compositePKs: ['table_col1_col2_pk;col1,col2'], - compositePkName: 'table_col1_col2_pk', - uniqueConstraints: [], - columns: [ - { name: 'col1', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, - { name: 'col2', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, - ], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `table` (\n\t`col1` int NOT NULL,\n\t`col2` int NOT NULL,\n\tCONSTRAINT `table_col1_col2_pk` PRIMARY KEY(`col1`,`col2`)\n);\n', - ]); - }, - renameTableWithCompositePrimaryKey: async function(context?: any): Promise { - const productsCategoriesTable = (tableName: string) => { - return singlestoreTable(tableName, { - productId: varchar('product_id', { length: 10 }).notNull(), - categoryId: varchar('category_id', { length: 10 }).notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.productId, t.categoryId], - }), - })); - }; - - const schema1 = { - table: productsCategoriesTable('products_categories'), - }; - const schema2 = { - test: productsCategoriesTable('products_to_categories'), - }; - - const { sqlStatements } = await diffTestSchemasPushSingleStore( - context.client as Connection, - schema1, - schema2, - ['public.products_categories->public.products_to_categories'], - 'drizzle', - false, - ); - - // It's not possible to create/alter/drop primary keys in SingleStore - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `products_categories` RENAME TO `products_to_categories`;', - ]); - - await context.client.query(`DROP TABLE \`products_categories\``); - }, -}; - -run( - singlestoreSuite, - async (context: any) => { - const connectionString = process.env.SINGLESTORE_CONNECTION_STRING - ?? (await createDockerDB(context)); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - context.client = await createConnection(connectionString); - await context.client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to SingleStore'); - await context.client?.end().catch(console.error); - await context.singlestoreContainer?.stop().catch(console.error); - throw lastError; - } - - await context.client.query(`DROP DATABASE IF EXISTS \`drizzle\`;`); - await context.client.query('CREATE DATABASE drizzle;'); - await context.client.query('USE drizzle;'); - }, - async (context: any) => { - await context.client?.end().catch(console.error); - await context.singlestoreContainer?.stop().catch(console.error); - }, -); diff --git a/drizzle-kit/tests/push/sqlite.test.ts b/drizzle-kit/tests/push/sqlite.test.ts deleted file mode 100644 index e2c85233a3..0000000000 --- a/drizzle-kit/tests/push/sqlite.test.ts +++ /dev/null @@ -1,1613 +0,0 @@ -import Database from 'better-sqlite3'; -import chalk from 'chalk'; -import { sql } from 'drizzle-orm'; -import { - blob, - check, - foreignKey, - getTableConfig, - int, - integer, - numeric, - primaryKey, - real, - sqliteTable, - sqliteView, - text, - uniqueIndex, -} from 'drizzle-orm/sqlite-core'; -import { diffTestSchemasPushSqlite, introspectSQLiteToFile } from 'tests/schemaDiffer'; -import { expect, test } from 'vitest'; - -test('nothing changed in schema', async (t) => { - const client = new Database(':memory:'); - - const users = sqliteTable('users', { - id: integer('id').primaryKey().notNull(), - name: text('name').notNull(), - email: text('email'), - textJson: text('text_json', { mode: 'json' }), - blobJon: blob('blob_json', { mode: 'json' }), - blobBigInt: blob('blob_bigint', { mode: 'bigint' }), - numeric: numeric('numeric'), - createdAt: integer('created_at', { mode: 'timestamp' }), - createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), - real: real('real'), - text: text('text', { length: 255 }), - role: text('role', { enum: ['admin', 'user'] }).default('user'), - isConfirmed: integer('is_confirmed', { - mode: 'boolean', - }), - }); - - const schema1 = { - users, - - customers: sqliteTable('customers', { - id: integer('id').primaryKey(), - address: text('address').notNull(), - isConfirmed: integer('is_confirmed', { mode: 'boolean' }), - registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) - .notNull() - .$defaultFn(() => new Date()), - userId: integer('user_id') - .references(() => users.id) - .notNull(), - }), - - posts: sqliteTable('posts', { - id: integer('id').primaryKey(), - content: text('content'), - authorId: integer('author_id'), - }), - }; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite(client, schema1, schema1, [], false); - - expect(sqlStatements.length).toBe(0); - expect(statements.length).toBe(0); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); -}); - -test('dropped, added unique index', async (t) => { - const client = new Database(':memory:'); - - const users = sqliteTable('users', { - id: integer('id').primaryKey().notNull(), - name: text('name').notNull(), - email: text('email'), - textJson: text('text_json', { mode: 'json' }), - blobJon: blob('blob_json', { mode: 'json' }), - blobBigInt: blob('blob_bigint', { mode: 'bigint' }), - numeric: numeric('numeric'), - createdAt: integer('created_at', { mode: 'timestamp' }), - createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), - real: real('real'), - text: text('text', { length: 255 }), - role: text('role', { enum: ['admin', 'user'] }).default('user'), - isConfirmed: integer('is_confirmed', { - mode: 'boolean', - }), - }); - - const schema1 = { - users, - - customers: sqliteTable( - 'customers', - { - id: integer('id').primaryKey(), - address: text('address').notNull().unique(), - isConfirmed: integer('is_confirmed', { mode: 'boolean' }), - registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) - .notNull() - .$defaultFn(() => new Date()), - userId: integer('user_id').notNull(), - }, - (table) => ({ - uniqueIndex: uniqueIndex('customers_address_unique').on(table.address), - }), - ), - - posts: sqliteTable('posts', { - id: integer('id').primaryKey(), - content: text('content'), - authorId: integer('author_id'), - }), - }; - - const schema2 = { - users, - - customers: sqliteTable( - 'customers', - { - id: integer('id').primaryKey(), - address: text('address').notNull(), - isConfirmed: integer('is_confirmed', { mode: 'boolean' }), - registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) - .notNull() - .$defaultFn(() => new Date()), - userId: integer('user_id').notNull(), - }, - (table) => ({ - uniqueIndex: uniqueIndex('customers_is_confirmed_unique').on( - table.isConfirmed, - ), - }), - ), - - posts: sqliteTable('posts', { - id: integer('id').primaryKey(), - content: text('content'), - authorId: integer('author_id'), - }), - }; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite(client, schema1, schema2, [], false); - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'drop_index', - tableName: 'customers', - data: 'customers_address_unique;address;true;', - schema: '', - }); - expect(statements[1]).toStrictEqual({ - type: 'create_index', - tableName: 'customers', - data: 'customers_is_confirmed_unique;is_confirmed;true;', - schema: '', - internal: { - indexes: {}, - }, - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `DROP INDEX \`customers_address_unique\`;`, - ); - expect(sqlStatements[1]).toBe( - `CREATE UNIQUE INDEX \`customers_is_confirmed_unique\` ON \`customers\` (\`is_confirmed\`);`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('added column not null and without default to table with data', async (t) => { - const client = new Database(':memory:'); - - const schema1 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }), - }; - - const schema2 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - age: integer('age').notNull(), - }), - }; - - const table = getTableConfig(schema1.companies); - const seedStatements = [ - `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('drizzle');`, - `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('turso');`, - ]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'companies', - column: { - name: 'age', - type: 'integer', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - referenceData: undefined, - }); - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`delete from companies;`); - expect(sqlStatements[1]).toBe( - `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about to add not-null ${ - chalk.underline( - 'age', - ) - } column without default value, which contains 2 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(1); - expect(tablesToTruncate![0]).toBe('companies'); -}); - -test('added column not null and without default to table without data', async (t) => { - const turso = new Database(':memory:'); - - const schema1 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - }), - }; - - const schema2 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey(), - name: text('name').notNull(), - age: integer('age').notNull(), - }), - }; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite(turso, schema1, schema2, [], false); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'companies', - column: { - name: 'age', - type: 'integer', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - referenceData: undefined, - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`, - ); - - expect(infoToPrint!.length).toBe(0); - expect(columnsToRemove!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('drop autoincrement. drop column with data', async (t) => { - const turso = new Database(':memory:'); - - const schema1 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema2 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: false }), - }), - }; - - const table = getTableConfig(schema1.companies); - const seedStatements = [ - `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`, - `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`, - ]; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite( - turso, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - tableName: 'companies', - columns: [ - { - name: 'id', - type: 'integer', - autoincrement: false, - notNull: true, - primaryKey: true, - generated: undefined, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( - `CREATE TABLE \`__new_companies\` ( -\t\`id\` integer PRIMARY KEY NOT NULL -);\n`, - ); - expect(sqlStatements[1]).toBe( - `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`, - ); - expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`); - expect(sqlStatements[3]).toBe( - `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, - ); - - expect(columnsToRemove!.length).toBe(1); - expect(columnsToRemove![0]).toBe('name'); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about to delete ${ - chalk.underline( - 'name', - ) - } column in companies table with 2 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('drop autoincrement. drop column with data with pragma off', async (t) => { - const client = new Database(':memory:'); - - client.exec('PRAGMA foreign_keys=OFF;'); - - const users = sqliteTable('users', { - id: integer('id').primaryKey({ autoIncrement: true }), - }); - const schema1 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name'), - user_id: integer('user_id').references(() => users.id), - }), - }; - - const schema2 = { - companies: sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: false }), - user_id: integer('user_id').references(() => users.id), - }), - }; - - const table = getTableConfig(schema1.companies); - const seedStatements = [ - `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`, - `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`, - ]; - - const { - sqlStatements, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - tableName: 'companies', - columns: [ - { - name: 'id', - type: 'integer', - autoincrement: false, - notNull: true, - primaryKey: true, - generated: undefined, - }, - { - name: 'user_id', - type: 'integer', - autoincrement: false, - notNull: false, - primaryKey: false, - generated: undefined, - }, - ], - compositePKs: [], - referenceData: [ - { - columnsFrom: [ - 'user_id', - ], - columnsTo: [ - 'id', - ], - name: '', - onDelete: 'no action', - onUpdate: 'no action', - tableFrom: 'companies', - tableTo: 'users', - }, - ], - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( - `CREATE TABLE \`__new_companies\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`user_id\` integer, -\tFOREIGN KEY (\`user_id\`) REFERENCES \`users\`(\`id\`) ON UPDATE no action ON DELETE no action -);\n`, - ); - expect(sqlStatements[1]).toBe( - `INSERT INTO \`__new_companies\`("id", "user_id") SELECT "id", "user_id" FROM \`companies\`;`, - ); - expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`); - expect(sqlStatements[3]).toBe( - `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, - ); - - expect(columnsToRemove!.length).toBe(1); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about to delete ${ - chalk.underline( - 'name', - ) - } column in companies table with 2 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('change autoincrement. other table references current', async (t) => { - const client = new Database(':memory:'); - - const companies1 = sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: true }), - }); - const users1 = sqliteTable('users', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name').unique(), - companyId: text('company_id').references(() => companies1.id), - }); - const schema1 = { - companies: companies1, - users: users1, - }; - - const companies2 = sqliteTable('companies', { - id: integer('id').primaryKey({ autoIncrement: false }), - }); - const users2 = sqliteTable('users', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name').unique(), - companyId: text('company_id').references(() => companies1.id), - }); - const schema2 = { - companies: companies2, - users: users2, - }; - - const { name: usersTableName } = getTableConfig(users1); - const { name: companiesTableName } = getTableConfig(companies1); - const seedStatements = [ - `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('drizzle');`, - `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('turso');`, - `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES ('1');`, - `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES ('2');`, - ]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - tableName: 'companies', - columns: [ - { - name: 'id', - type: 'integer', - autoincrement: false, - notNull: true, - primaryKey: true, - generated: undefined, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe( - `CREATE TABLE \`__new_companies\` ( -\t\`id\` integer PRIMARY KEY NOT NULL -);\n`, - ); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`, - ); - expect(sqlStatements[3]).toBe(`DROP TABLE \`companies\`;`); - expect(sqlStatements[4]).toBe( - `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, - ); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('create table with custom name references', async (t) => { - const client = new Database(':memory:'); - - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }); - - const schema1 = { - users, - posts: sqliteTable( - 'posts', - { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - userId: int('user_id'), - }, - (t) => ({ - fk: foreignKey({ - columns: [t.id], - foreignColumns: [users.id], - name: 'custom_name_fk', - }), - }), - ), - }; - - const schema2 = { - users, - posts: sqliteTable( - 'posts', - { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - userId: int('user_id'), - }, - (t) => ({ - fk: foreignKey({ - columns: [t.id], - foreignColumns: [users.id], - name: 'custom_name_fk', - }), - }), - ), - }; - - const { sqlStatements } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - ); - - expect(sqlStatements!.length).toBe(0); -}); - -test('drop not null, add not null', async (t) => { - const client = new Database(':memory:'); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }), - posts: sqliteTable('posts', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - userId: int('user_id'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - posts: sqliteTable('posts', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - userId: int('user_id'), - }), - }; - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite(client, schema1, schema2, []); - - expect(statements!.length).toBe(2); - expect(statements![0]).toStrictEqual({ - checkConstraints: [], - columns: [ - { - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - }); - expect(statements![1]).toStrictEqual({ - checkConstraints: [], - columns: [ - { - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'name', - notNull: true, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: undefined, - name: 'user_id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'posts', - type: 'recreate_table', - uniqueConstraints: [], - }); - - expect(sqlStatements.length).toBe(8); - expect(sqlStatements[0]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, -\t\`name\` text -);\n`); - expect(sqlStatements[1]).toBe( - `INSERT INTO \`__new_users\`("id", "name") SELECT "id", "name" FROM \`users\`;`, - ); - expect(sqlStatements[2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(sqlStatements![4]).toBe(`CREATE TABLE \`__new_posts\` ( -\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, -\t\`name\` text NOT NULL, -\t\`user_id\` integer -);\n`); - expect(sqlStatements![5]).toBe( - `INSERT INTO \`__new_posts\`("id", "name", "user_id") SELECT "id", "name", "user_id" FROM \`posts\`;`, - ); - expect(sqlStatements![6]).toBe(`DROP TABLE \`posts\`;`); - expect(sqlStatements![7]).toBe( - `ALTER TABLE \`__new_posts\` RENAME TO \`posts\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('rename table and change data type', async (t) => { - const client = new Database(':memory:'); - - const schema1 = { - users: sqliteTable('old_users', { - id: int('id').primaryKey({ autoIncrement: true }), - age: text('age'), - }), - }; - - const schema2 = { - users: sqliteTable('new_users', { - id: int('id').primaryKey({ autoIncrement: true }), - age: integer('age'), - }), - }; - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite(client, schema1, schema2, [ - 'public.old_users->public.new_users', - ]); - - expect(statements!.length).toBe(2); - expect(statements![0]).toStrictEqual({ - fromSchema: undefined, - tableNameFrom: 'old_users', - tableNameTo: 'new_users', - toSchema: undefined, - type: 'rename_table', - }); - expect(statements![1]).toStrictEqual({ - columns: [ - { - autoincrement: true, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'new_users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements!.length).toBe(5); - expect(sqlStatements![0]).toBe( - `ALTER TABLE \`old_users\` RENAME TO \`new_users\`;`, - ); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_new_users\` ( -\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, -\t\`age\` integer -);\n`); - expect(sqlStatements![2]).toBe( - `INSERT INTO \`__new_new_users\`("id", "age") SELECT "id", "age" FROM \`new_users\`;`, - ); - expect(sqlStatements![3]).toBe(`DROP TABLE \`new_users\`;`); - expect(sqlStatements![4]).toBe( - `ALTER TABLE \`__new_new_users\` RENAME TO \`new_users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('rename column and change data type', async (t) => { - const client = new Database(':memory:'); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - age: integer('age'), - }), - }; - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite(client, schema1, schema2, [ - 'public.users.name->public.users.age', - ]); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: true, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements!.length).toBe(4); - expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, -\t\`age\` integer -);\n`); - expect(sqlStatements![1]).toBe( - `INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`, - ); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('recreate table with nested references', async (t) => { - const client = new Database(':memory:'); - - let users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - age: integer('age'), - }); - let subscriptions = sqliteTable('subscriptions', { - id: int('id').primaryKey({ autoIncrement: true }), - userId: integer('user_id').references(() => users.id), - customerId: text('customer_id'), - }); - const schema1 = { - users: users, - subscriptions: subscriptions, - subscriptionMetadata: sqliteTable('subscriptions_metadata', { - id: int('id').primaryKey({ autoIncrement: true }), - subscriptionId: text('subscription_id').references( - () => subscriptions.id, - ), - }), - }; - - users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }); - const schema2 = { - users: users, - subscriptions: subscriptions, - subscriptionMetadata: sqliteTable('subscriptions_metadata', { - id: int('id').primaryKey({ autoIncrement: true }), - subscriptionId: text('subscription_id').references( - () => subscriptions.id, - ), - }), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite(client, schema1, schema2, [ - 'public.users.name->public.users.age', - ]); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements!.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer -);\n`); - expect(sqlStatements![2]).toBe( - `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`, - ); - expect(sqlStatements![3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![4]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - expect(sqlStatements[5]).toBe('PRAGMA foreign_keys=ON;'); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('recreate table with added column not null and without default with data', async (t) => { - const client = new Database(':memory:'); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - age: integer('age'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - newColumn: text('new_column').notNull(), - }), - }; - - const seedStatements = [ - `INSERT INTO \`users\` ("name", "age") VALUES ('drizzle', 12)`, - `INSERT INTO \`users\` ("name", "age") VALUES ('turso', 12)`, - ]; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - false, - seedStatements, - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'new_column', - notNull: true, - generated: undefined, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements!.length).toBe(4); - expect(sqlStatements[0]).toBe('DELETE FROM \`users\`;'); - expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer, -\t\`new_column\` text NOT NULL -);\n`); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(1); - expect(infoToPrint![0]).toBe( - `· You're about to add not-null ${ - chalk.underline('new_column') - } column without default value to table, which contains 2 items`, - ); - expect(shouldAskForApprove).toBe(true); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(1); - expect(tablesToTruncate![0]).toBe('users'); -}); - -test('add check constraint to table', async (t) => { - const client = new Database(':memory:'); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: ['some_check;"users"."age" > 21'], - }); - - expect(sqlStatements!.length).toBe(4); - expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer, -\tCONSTRAINT "some_check" CHECK("__new_users"."age" > 21) -);\n`); - expect(sqlStatements[1]).toBe( - 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', - ); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('drop check constraint', async (t) => { - const client = new Database(':memory:'); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - ); - - expect(statements!.length).toBe(1); - expect(statements![0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - name: 'id', - notNull: true, - generated: undefined, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'age', - notNull: false, - generated: undefined, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements!.length).toBe(4); - expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer -);\n`); - expect(sqlStatements[1]).toBe( - 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', - ); - expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements![3]).toBe( - `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, - ); - - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('db has checks. Push with same names', async () => { - const client = new Database(':memory:'); - - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`${table.age} > 21`), - })), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }, (table) => ({ - someCheck: check('some_check', sql`some new value`), - })), - }; - - const { - statements, - sqlStatements, - columnsToRemove, - infoToPrint, - schemasToRemove, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - false, - [], - ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); - expect(columnsToRemove!.length).toBe(0); - expect(infoToPrint!.length).toBe(0); - expect(shouldAskForApprove).toBe(false); - expect(tablesToRemove!.length).toBe(0); - expect(tablesToTruncate!.length).toBe(0); -}); - -test('create view', async () => { - const client = new Database(':memory:'); - - const table = sqliteTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - }; - - const schema2 = { - test: table, - view: sqliteView('view').as((qb) => qb.select().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - ); - - expect(statements).toStrictEqual([ - { - definition: 'select "id" from "test"', - name: 'view', - type: 'sqlite_create_view', - }, - ]); - expect(sqlStatements).toStrictEqual([ - `CREATE VIEW \`view\` AS select "id" from "test";`, - ]); -}); - -test('drop view', async () => { - const client = new Database(':memory:'); - - const table = sqliteTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: sqliteView('view').as((qb) => qb.select().from(table)), - }; - - const schema2 = { - test: table, - }; - - const { statements, sqlStatements } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - ); - - expect(statements).toStrictEqual([ - { - name: 'view', - type: 'drop_view', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'DROP VIEW \`view\`;', - ]); -}); - -test('alter view ".as"', async () => { - const client = new Database(':memory:'); - - const table = sqliteTable('test', { - id: int('id').primaryKey(), - }); - - const schema1 = { - test: table, - view: sqliteView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), - }; - - const schema2 = { - test: table, - view: sqliteView('view').as((qb) => qb.select().from(table)), - }; - - const { statements, sqlStatements } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('create composite primary key', async (t) => { - const client = new Database(':memory:'); - - const schema1 = {}; - - const schema2 = { - table: sqliteTable('table', { - col1: integer('col1').notNull(), - col2: integer('col2').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.col1, t.col2], - }), - })), - }; - - const { - statements, - sqlStatements, - } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - [], - ); - - expect(statements).toStrictEqual([{ - type: 'sqlite_create_table', - tableName: 'table', - compositePKs: [['col1', 'col2']], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - columns: [ - { name: 'col1', type: 'integer', primaryKey: false, notNull: true, autoincrement: false }, - { name: 'col2', type: 'integer', primaryKey: false, notNull: true, autoincrement: false }, - ], - }]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `table` (\n\t`col1` integer NOT NULL,\n\t`col2` integer NOT NULL,\n\tPRIMARY KEY(`col1`, `col2`)\n);\n', - ]); -}); - -test('rename table with composite primary key', async () => { - const client = new Database(':memory:'); - - const productsCategoriesTable = (tableName: string) => { - return sqliteTable(tableName, { - productId: text('product_id').notNull(), - categoryId: text('category_id').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.productId, t.categoryId], - }), - })); - }; - - const schema1 = { - table: productsCategoriesTable('products_categories'), - }; - const schema2 = { - test: productsCategoriesTable('products_to_categories'), - }; - - const { sqlStatements } = await diffTestSchemasPushSqlite( - client, - schema1, - schema2, - ['public.products_categories->public.products_to_categories'], - false, - ); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `products_categories` RENAME TO `products_to_categories`;', - ]); -}); diff --git a/drizzle-kit/tests/rls/pg-policy.test.ts b/drizzle-kit/tests/rls/pg-policy.test.ts deleted file mode 100644 index 3d5dcbd140..0000000000 --- a/drizzle-kit/tests/rls/pg-policy.test.ts +++ /dev/null @@ -1,1656 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { integer, pgPolicy, pgRole, pgSchema, pgTable } from 'drizzle-orm/pg-core'; -import { diffTestSchemas } from 'tests/schemaDiffer'; -import { expect, test } from 'vitest'; - -test('add policy + enable rls', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'enable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('drop policy + disable rls', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', - 'DROP POLICY "test" ON "users" CASCADE;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'disable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - ]); -}); - -test('add policy without enable rls', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - newrls: pgPolicy('newRls'), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE POLICY "newRls" ON "users" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'newRls', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('drop policy without disable rls', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - oldRls: pgPolicy('oldRls'), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "oldRls" ON "users" CASCADE;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'oldRls', - to: ['public'], - using: undefined, - on: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - ]); -}); - -test('alter policy without recreation: changing roles', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" TO current_role;', - ]); - expect(statements).toStrictEqual([ - { - newData: 'test--PERMISSIVE--ALL--current_role--undefined--undefined--undefined', - oldData: 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', - schema: '', - tableName: 'users', - type: 'alter_policy', - }, - ]); -}); - -test('alter policy without recreation: changing using', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', using: sql`true` }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" TO public USING (true);', - ]); - expect(statements).toStrictEqual([ - { - newData: 'test--PERMISSIVE--ALL--public--true--undefined--undefined', - oldData: 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', - schema: '', - tableName: 'users', - type: 'alter_policy', - }, - ]); -}); - -test('alter policy without recreation: changing with check', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" TO public WITH CHECK (true);', - ]); - expect(statements).toStrictEqual([ - { - newData: 'test--PERMISSIVE--ALL--public--undefined--true--undefined', - oldData: 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', - schema: '', - tableName: 'users', - type: 'alter_policy', - }, - ]); -}); - -/// - -test('alter policy with recreation: changing as', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'restrictive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - using: undefined, - on: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'RESTRICTIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('alter policy with recreation: changing for', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', for: 'delete' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'PERMISSIVE', - for: 'DELETE', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('alter policy with recreation: changing both "as" and "for"', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'restrictive', for: 'insert' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR INSERT TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - using: undefined, - on: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'RESTRICTIVE', - for: 'INSERT', - name: 'test', - to: ['public'], - using: undefined, - on: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('alter policy with recreation: changing all fields', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive', for: 'select', using: sql`true` }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'restrictive', to: 'current_role', withCheck: sql`true` }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO current_role WITH CHECK (true);', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'SELECT', - name: 'test', - to: ['public'], - using: 'true', - on: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'RESTRICTIVE', - for: 'ALL', - name: 'test', - on: undefined, - to: ['current_role'], - using: undefined, - withCheck: 'true', - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('rename policy', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('newName', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, [ - 'public.users.test->public.users.newName', - ]); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" RENAME TO "newName";', - ]); - expect(statements).toStrictEqual([ - { - newName: 'newName', - oldName: 'test', - schema: '', - tableName: 'users', - type: 'rename_policy', - }, - ]); -}); - -test('rename policy in renamed table', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = { - users: pgTable('users2', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('newName', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, [ - 'public.users->public.users2', - 'public.users2.test->public.users2.newName', - ]); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" RENAME TO "users2";', - 'ALTER POLICY "test" ON "users2" RENAME TO "newName";', - ]); - expect(statements).toStrictEqual([ - { - fromSchema: '', - tableNameFrom: 'users', - tableNameTo: 'users2', - toSchema: '', - type: 'rename_table', - }, - { - newName: 'newName', - oldName: 'test', - schema: '', - tableName: 'users2', - type: 'rename_policy', - }, - ]); -}); - -test('create table with a policy', async (t) => { - const schema1 = {}; - - const schema2 = { - users: pgTable('users2', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE "users2" (\n\t"id" integer PRIMARY KEY NOT NULL\n);\n', - 'ALTER TABLE "users2" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test" ON "users2" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - columns: [ - { - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - ], - compositePKs: [], - checkConstraints: [], - compositePkName: '', - policies: [ - 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', - ], - schema: '', - tableName: 'users2', - isRLSEnabled: false, - type: 'create_table', - uniqueConstraints: [], - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: [ - 'public', - ], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users2', - type: 'create_policy', - }, - ]); -}); - -test('drop table with a policy', async (t) => { - const schema1 = { - users: pgTable('users2', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { as: 'permissive' }), - })), - }; - - const schema2 = {}; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users2" CASCADE;', - 'DROP TABLE "users2" CASCADE;', - ]); - expect(statements).toStrictEqual([ - { - policies: [ - 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', - ], - schema: '', - tableName: 'users2', - type: 'drop_table', - }, - ]); -}); - -test('add policy with multiple "to" roles', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const role = pgRole('manager').existing(); - - const schema2 = { - role, - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { to: ['current_role', role] }), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_role, "manager";', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'enable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: undefined, - to: ['current_role', 'manager'], - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('create table with rls enabled', async (t) => { - const schema1 = {}; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }).enableRLS(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY NOT NULL\n); -`, - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - ]); -}); - -test('enable rls force', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }).enableRLS(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;']); -}); - -test('disable rls force', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }).enableRLS(), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;']); -}); - -test('drop policy with enabled rls', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { to: ['current_role', role] }), - })).enableRLS(), - }; - - const role = pgRole('manager').existing(); - - const schema2 = { - role, - users: pgTable('users', { - id: integer('id').primaryKey(), - }).enableRLS(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - ]); -}); - -test('add policy with enabled rls', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }).enableRLS(), - }; - - const role = pgRole('manager').existing(); - - const schema2 = { - role, - users: pgTable('users', { - id: integer('id').primaryKey(), - }, () => ({ - rls: pgPolicy('test', { to: ['current_role', role] }), - })).enableRLS(), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_role, "manager";', - ]); -}); - -test('add policy + link table', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema2 = { - users, - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'enable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('link table', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - rls: pgPolicy('test', { as: 'permissive' }), - }; - - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema2 = { - users, - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'enable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('unlink table', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - users, - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const schema2 = { - users, - rls: pgPolicy('test', { as: 'permissive' }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', - 'DROP POLICY "test" ON "users" CASCADE;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'disable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - ]); -}); - -test('drop policy with link', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - users, - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const schema2 = { - users, - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', - 'DROP POLICY "test" ON "users" CASCADE;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'disable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - ]); -}); - -test('add policy in table and with link table', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - const users = pgTable('users', { - id: integer('id').primaryKey(), - }, () => [ - pgPolicy('test1', { to: 'current_user' }), - ]); - - const schema2 = { - users, - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test1" ON "users" AS PERMISSIVE FOR ALL TO current_user;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'enable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test1', - to: ['current_user'], - on: undefined, - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - to: ['public'], - using: undefined, - on: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); - -test('link non-schema table', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = {}; - - const schema2 = { - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE POLICY "test" ON "public"."users" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - tableName: '"public"."users"', - type: 'create_ind_policy', - }, - ]); -}); - -test('unlink non-schema table', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const schema2 = { - rls: pgPolicy('test', { as: 'permissive' }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "public"."users" CASCADE;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - tableName: '"public"."users"', - type: 'drop_ind_policy', - }, - ]); -}); - -test('add policy + link non-schema table', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const cities = pgTable('cities', { - id: integer('id').primaryKey(), - }); - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test2'), - ]), - rls: pgPolicy('test', { as: 'permissive' }).link(cities), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', - 'CREATE POLICY "test" ON "public"."cities" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'enable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test2', - on: undefined, - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."cities"', - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - tableName: '"public"."cities"', - type: 'create_ind_policy', - }, - ]); -}); - -test('add policy + link non-schema table from auth schema', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }), - }; - - const authSchema = pgSchema('auth'); - - const cities = authSchema.table('cities', { - id: integer('id').primaryKey(), - }); - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test2'), - ]), - rls: pgPolicy('test', { as: 'permissive' }).link(cities), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', - 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', - 'CREATE POLICY "test" ON "auth"."cities" AS PERMISSIVE FOR ALL TO public;', - ]); - expect(statements).toStrictEqual([ - { - schema: '', - tableName: 'users', - type: 'enable_rls', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test2', - on: undefined, - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - { - data: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"auth"."cities"', - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - tableName: '"auth"."cities"', - type: 'create_ind_policy', - }, - ]); -}); - -test('rename policy that is linked', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const schema2 = { - rls: pgPolicy('newName', { as: 'permissive' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, [ - '"public"."users".test->"public"."users".newName', - ]); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "public"."users" RENAME TO "newName";', - ]); - expect(statements).toStrictEqual([ - { - newName: 'newName', - oldName: 'test', - tableKey: '"public"."users"', - type: 'rename_ind_policy', - }, - ]); -}); - -test('alter policy that is linked', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive' }).link(users), - }; - - const schema2 = { - rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "public"."users" TO current_role;', - ]); - expect(statements).toStrictEqual([ - { - newData: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."users"', - to: [ - 'current_role', - ], - using: undefined, - withCheck: undefined, - }, - oldData: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - type: 'alter_ind_policy', - }, - ]); -}); - -test('alter policy that is linked: withCheck', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }).link(users), - }; - - const schema2 = { - rls: pgPolicy('test', { as: 'permissive', withCheck: sql`false` }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "public"."users" TO public WITH CHECK (false);', - ]); - expect(statements).toStrictEqual([ - { - newData: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: undefined, - withCheck: 'false', - }, - oldData: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: undefined, - withCheck: 'true', - }, - type: 'alter_ind_policy', - }, - ]); -}); - -test('alter policy that is linked: using', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - rls: pgPolicy('test', { as: 'permissive', using: sql`true` }).link(users), - }; - - const schema2 = { - rls: pgPolicy('test', { as: 'permissive', using: sql`false` }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "public"."users" TO public USING (false);', - ]); - expect(statements).toStrictEqual([ - { - newData: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: 'false', - withCheck: undefined, - }, - oldData: { - as: 'PERMISSIVE', - for: 'ALL', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: 'true', - withCheck: undefined, - }, - type: 'alter_ind_policy', - }, - ]); -}); - -test('alter policy that is linked: using', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - rls: pgPolicy('test', { for: 'insert' }).link(users), - }; - - const schema2 = { - rls: pgPolicy('test', { for: 'delete' }).link(users), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "public"."users" CASCADE;', - 'CREATE POLICY "test" ON "public"."users" AS PERMISSIVE FOR DELETE TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'INSERT', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - tableName: '"public"."users"', - type: 'drop_ind_policy', - }, - { - data: { - as: 'PERMISSIVE', - for: 'DELETE', - name: 'test', - on: '"public"."users"', - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - tableName: '"public"."users"', - type: 'create_ind_policy', - }, - ]); -}); - -//// - -test('alter policy in the table', async (t) => { - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test', { as: 'permissive' }), - ]), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test', { as: 'permissive', to: 'current_role' }), - ]), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" TO current_role;', - ]); - expect(statements).toStrictEqual([ - { - newData: 'test--PERMISSIVE--ALL--current_role--undefined--undefined--undefined', - oldData: 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', - schema: '', - tableName: 'users', - type: 'alter_policy', - }, - ]); -}); - -test('alter policy in the table: withCheck', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test', { as: 'permissive', withCheck: sql`true` }), - ]), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test', { as: 'permissive', withCheck: sql`false` }), - ]), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" TO public WITH CHECK (false);', - ]); - expect(statements).toStrictEqual([ - { - newData: 'test--PERMISSIVE--ALL--public--undefined--false--undefined', - oldData: 'test--PERMISSIVE--ALL--public--undefined--true--undefined', - schema: '', - tableName: 'users', - type: 'alter_policy', - }, - ]); -}); - -test('alter policy in the table: using', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test', { as: 'permissive', using: sql`true` }), - ]), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test', { as: 'permissive', using: sql`false` }), - ]), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER POLICY "test" ON "users" TO public USING (false);', - ]); - expect(statements).toStrictEqual([ - { - newData: 'test--PERMISSIVE--ALL--public--false--undefined--undefined', - oldData: 'test--PERMISSIVE--ALL--public--true--undefined--undefined', - schema: '', - tableName: 'users', - type: 'alter_policy', - }, - ]); -}); - -test('alter policy in the table: using', async (t) => { - const users = pgTable('users', { - id: integer('id').primaryKey(), - }); - - const schema1 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test', { for: 'insert' }), - ]), - }; - - const schema2 = { - users: pgTable('users', { - id: integer('id').primaryKey(), - }, (t) => [ - pgPolicy('test', { for: 'delete' }), - ]), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual([ - 'DROP POLICY "test" ON "users" CASCADE;', - 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', - ]); - expect(statements).toStrictEqual([ - { - data: { - as: 'PERMISSIVE', - for: 'INSERT', - name: 'test', - on: undefined, - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'drop_policy', - }, - { - data: { - as: 'PERMISSIVE', - for: 'DELETE', - name: 'test', - on: undefined, - to: [ - 'public', - ], - using: undefined, - withCheck: undefined, - }, - schema: '', - tableName: 'users', - type: 'create_policy', - }, - ]); -}); diff --git a/drizzle-kit/tests/rls/pg-role.test.ts b/drizzle-kit/tests/rls/pg-role.test.ts deleted file mode 100644 index a6b7629557..0000000000 --- a/drizzle-kit/tests/rls/pg-role.test.ts +++ /dev/null @@ -1,234 +0,0 @@ -import { pgRole } from 'drizzle-orm/pg-core'; -import { diffTestSchemas } from 'tests/schemaDiffer'; -import { expect, test } from 'vitest'; - -test('create role', async (t) => { - const schema1 = {}; - - const schema2 = { - manager: pgRole('manager'), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager";']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'create_role', - values: { - createDb: false, - createRole: false, - inherit: true, - }, - }, - ]); -}); - -test('create role with properties', async (t) => { - const schema1 = {}; - - const schema2 = { - manager: pgRole('manager', { createDb: true, inherit: false, createRole: true }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'create_role', - values: { - createDb: true, - createRole: true, - inherit: false, - }, - }, - ]); -}); - -test('create role with some properties', async (t) => { - const schema1 = {}; - - const schema2 = { - manager: pgRole('manager', { createDb: true, inherit: false }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager" WITH CREATEDB NOINHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'create_role', - values: { - createDb: true, - createRole: false, - inherit: false, - }, - }, - ]); -}); - -test('drop role', async (t) => { - const schema1 = { manager: pgRole('manager') }; - - const schema2 = {}; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['DROP ROLE "manager";']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'drop_role', - }, - ]); -}); - -test('create and drop role', async (t) => { - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - admin: pgRole('admin'), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['DROP ROLE "manager";', 'CREATE ROLE "admin";']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'drop_role', - }, - { - name: 'admin', - type: 'create_role', - values: { - createDb: false, - createRole: false, - inherit: true, - }, - }, - ]); -}); - -test('rename role', async (t) => { - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - admin: pgRole('admin'), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, ['manager->admin']); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" RENAME TO "admin";']); - expect(statements).toStrictEqual([ - { nameFrom: 'manager', nameTo: 'admin', type: 'rename_role' }, - ]); -}); - -test('alter all role field', async (t) => { - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - manager: pgRole('manager', { createDb: true, createRole: true, inherit: false }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'alter_role', - values: { - createDb: true, - createRole: true, - inherit: false, - }, - }, - ]); -}); - -test('alter createdb in role', async (t) => { - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - manager: pgRole('manager', { createDb: true }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH CREATEDB NOCREATEROLE INHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'alter_role', - values: { - createDb: true, - createRole: false, - inherit: true, - }, - }, - ]); -}); - -test('alter createrole in role', async (t) => { - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - manager: pgRole('manager', { createRole: true }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH NOCREATEDB CREATEROLE INHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'alter_role', - values: { - createDb: false, - createRole: true, - inherit: true, - }, - }, - ]); -}); - -test('alter inherit in role', async (t) => { - const schema1 = { - manager: pgRole('manager'), - }; - - const schema2 = { - manager: pgRole('manager', { inherit: false }), - }; - - const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); - - expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH NOCREATEDB NOCREATEROLE NOINHERIT;']); - expect(statements).toStrictEqual([ - { - name: 'manager', - type: 'alter_role', - values: { - createDb: false, - createRole: false, - inherit: false, - }, - }, - ]); -}); diff --git a/drizzle-kit/tests/schemaDiffer.ts b/drizzle-kit/tests/schemaDiffer.ts deleted file mode 100644 index c756875ef8..0000000000 --- a/drizzle-kit/tests/schemaDiffer.ts +++ /dev/null @@ -1,2784 +0,0 @@ -import { PGlite } from '@electric-sql/pglite'; -import { Client } from '@libsql/client/.'; -import { Database } from 'better-sqlite3'; -import { is } from 'drizzle-orm'; -import { MySqlSchema, MySqlTable, MySqlView } from 'drizzle-orm/mysql-core'; -import { - getMaterializedViewConfig, - isPgEnum, - isPgMaterializedView, - isPgSequence, - isPgView, - PgEnum, - PgEnumObject, - PgMaterializedView, - PgPolicy, - PgRole, - PgSchema, - PgSequence, - PgTable, - PgView, -} from 'drizzle-orm/pg-core'; -import { SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-core'; -import { SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; -import * as fs from 'fs'; -import { type Client as GelClient } from 'gel'; -import { Connection } from 'mysql2/promise'; -import { libSqlLogSuggestionsAndReturn } from 'src/cli/commands/libSqlPushUtils'; -import { - columnsResolver, - enumsResolver, - indPolicyResolver, - mySqlViewsResolver, - Named, - policyResolver, - roleResolver, - schemasResolver, - sequencesResolver, - sqliteViewsResolver, - tablesResolver, - viewsResolver, -} from 'src/cli/commands/migrate'; -import { pgSuggestions } from 'src/cli/commands/pgPushUtils'; -import { logSuggestionsAndReturn as singleStoreLogSuggestionsAndReturn } from 'src/cli/commands/singlestorePushUtils'; -import { logSuggestionsAndReturn } from 'src/cli/commands/sqlitePushUtils'; -import { Entities } from 'src/cli/validations/cli'; -import { CasingType } from 'src/cli/validations/common'; -import { schemaToTypeScript as schemaToTypeScriptGel } from 'src/introspect-gel'; -import { schemaToTypeScript as schemaToTypeScriptMySQL } from 'src/introspect-mysql'; -import { schemaToTypeScript } from 'src/introspect-pg'; -import { schemaToTypeScript as schemaToTypeScriptSingleStore } from 'src/introspect-singlestore'; -import { schemaToTypeScript as schemaToTypeScriptSQLite } from 'src/introspect-sqlite'; -import { fromDatabase as fromGelDatabase } from 'src/serializer/gelSerializer'; -import { prepareFromMySqlImports } from 'src/serializer/mysqlImports'; -import { mysqlSchema, squashMysqlScheme, ViewSquashed } from 'src/serializer/mysqlSchema'; -import { fromDatabase as fromMySqlDatabase, generateMySqlSnapshot } from 'src/serializer/mysqlSerializer'; -import { prepareFromPgImports } from 'src/serializer/pgImports'; -import { pgSchema, Policy, Role, squashPgScheme, View } from 'src/serializer/pgSchema'; -import { fromDatabase, generatePgSnapshot } from 'src/serializer/pgSerializer'; -import { prepareFromSingleStoreImports } from 'src/serializer/singlestoreImports'; -import { singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; -import { - fromDatabase as fromSingleStoreDatabase, - generateSingleStoreSnapshot, -} from 'src/serializer/singlestoreSerializer'; -import { prepareFromSqliteImports } from 'src/serializer/sqliteImports'; -import { sqliteSchema, squashSqliteScheme, View as SqliteView } from 'src/serializer/sqliteSchema'; -import { fromDatabase as fromSqliteDatabase, generateSqliteSnapshot } from 'src/serializer/sqliteSerializer'; -import { - applyLibSQLSnapshotsDiff, - applyMysqlSnapshotsDiff, - applyPgSnapshotsDiff, - applySingleStoreSnapshotsDiff, - applySqliteSnapshotsDiff, - Column, - ColumnsResolverInput, - ColumnsResolverOutput, - Enum, - PolicyResolverInput, - PolicyResolverOutput, - ResolverInput, - ResolverOutput, - ResolverOutputWithMoved, - RolesResolverInput, - RolesResolverOutput, - Sequence, - Table, - TablePolicyResolverInput, - TablePolicyResolverOutput, -} from 'src/snapshotsDiffer'; - -export type PostgresSchema = Record< - string, - | PgTable - | PgEnum - | PgEnumObject - | PgSchema - | PgSequence - | PgView - | PgMaterializedView - | PgRole - | PgPolicy ->; -export type MysqlSchema = Record< - string, - MySqlTable | MySqlSchema | MySqlView ->; -export type SqliteSchema = Record | SQLiteView>; -export type SinglestoreSchema = Record< - string, - SingleStoreTable | SingleStoreSchema /* | SingleStoreView */ ->; - -export const testSchemasResolver = - (renames: Set) => async (input: ResolverInput): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - renamed: [], - deleted: input.deleted, - }; - } - - let createdSchemas = [...input.created]; - let deletedSchemas = [...input.deleted]; - - const result: { - created: Named[]; - renamed: { from: Named; to: Named }[]; - deleted: Named[]; - } = { created: [], renamed: [], deleted: [] }; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedSchemas.findIndex((it) => { - return it.name === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdSchemas.findIndex((it) => { - return it.name === to; - }); - - result.renamed.push({ - from: deletedSchemas[idxFrom], - to: createdSchemas[idxTo], - }); - - delete createdSchemas[idxTo]; - delete deletedSchemas[idxFrom]; - - createdSchemas = createdSchemas.filter(Boolean); - deletedSchemas = deletedSchemas.filter(Boolean); - } - } - - result.created = createdSchemas; - result.deleted = deletedSchemas; - - return result; - } catch (e) { - console.error(e); - throw e; - } - }; - -export const testSequencesResolver = (renames: Set) => -async ( - input: ResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } - - let createdSequences = [...input.created]; - let deletedSequences = [...input.deleted]; - - const result: { - created: Sequence[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Sequence; to: Sequence }[]; - deleted: Sequence[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedSequences.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdSequences.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; - }); - - const tableFrom = deletedSequences[idxFrom]; - const tableTo = createdSequences[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: tableFrom.schema, - schemaTo: tableTo.schema, - }); - } - - if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedSequences[idxFrom], - to: createdSequences[idxTo], - }); - } - - delete createdSequences[idxTo]; - delete deletedSequences[idxFrom]; - - createdSequences = createdSequences.filter(Boolean); - deletedSequences = deletedSequences.filter(Boolean); - } - } - - result.created = createdSequences; - result.deleted = deletedSequences; - - return result; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testEnumsResolver = (renames: Set) => -async ( - input: ResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } - - let createdEnums = [...input.created]; - let deletedEnums = [...input.deleted]; - - const result: { - created: Enum[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Enum; to: Enum }[]; - deleted: Enum[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedEnums.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdEnums.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; - }); - - const tableFrom = deletedEnums[idxFrom]; - const tableTo = createdEnums[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: tableFrom.schema, - schemaTo: tableTo.schema, - }); - } - - if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedEnums[idxFrom], - to: createdEnums[idxTo], - }); - } - - delete createdEnums[idxTo]; - delete deletedEnums[idxFrom]; - - createdEnums = createdEnums.filter(Boolean); - deletedEnums = deletedEnums.filter(Boolean); - } - } - - result.created = createdEnums; - result.deleted = deletedEnums; - - return result; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testTablesResolver = (renames: Set) => -async ( - input: ResolverInput
, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } - - let createdTables = [...input.created]; - let deletedTables = [...input.deleted]; - - const result: { - created: Table[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: Table; to: Table }[]; - deleted: Table[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedTables.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdTables.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; - }); - - const tableFrom = deletedTables[idxFrom]; - const tableTo = createdTables[idxFrom]; - - if (tableFrom.schema !== tableTo.schema) { - result.moved.push({ - name: tableFrom.name, - schemaFrom: tableFrom.schema, - schemaTo: tableTo.schema, - }); - } - - if (tableFrom.name !== tableTo.name) { - result.renamed.push({ - from: deletedTables[idxFrom], - to: createdTables[idxTo], - }); - } - - delete createdTables[idxTo]; - delete deletedTables[idxFrom]; - - createdTables = createdTables.filter(Boolean); - deletedTables = deletedTables.filter(Boolean); - } - } - - result.created = createdTables; - result.deleted = deletedTables; - - return result; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testColumnsResolver = (renames: Set) => -async ( - input: ColumnsResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - tableName: input.tableName, - schema: input.schema, - created: input.created, - renamed: [], - deleted: input.deleted, - }; - } - - let createdColumns = [...input.created]; - let deletedColumns = [...input.deleted]; - - const renamed: { from: Column; to: Column }[] = []; - - const schema = input.schema || 'public'; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedColumns.findIndex((it) => { - return `${schema}.${input.tableName}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdColumns.findIndex((it) => { - return `${schema}.${input.tableName}.${it.name}` === to; - }); - - renamed.push({ - from: deletedColumns[idxFrom], - to: createdColumns[idxTo], - }); - - delete createdColumns[idxTo]; - delete deletedColumns[idxFrom]; - - createdColumns = createdColumns.filter(Boolean); - deletedColumns = deletedColumns.filter(Boolean); - } - } - - return { - tableName: input.tableName, - schema: input.schema, - created: createdColumns, - deleted: deletedColumns, - renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testPolicyResolver = (renames: Set) => -async ( - input: TablePolicyResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - tableName: input.tableName, - schema: input.schema, - created: input.created, - renamed: [], - deleted: input.deleted, - }; - } - - let createdPolicies = [...input.created]; - let deletedPolicies = [...input.deleted]; - - const renamed: { from: Policy; to: Policy }[] = []; - - const schema = input.schema || 'public'; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedPolicies.findIndex((it) => { - return `${schema}.${input.tableName}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdPolicies.findIndex((it) => { - return `${schema}.${input.tableName}.${it.name}` === to; - }); - - renamed.push({ - from: deletedPolicies[idxFrom], - to: createdPolicies[idxTo], - }); - - delete createdPolicies[idxTo]; - delete deletedPolicies[idxFrom]; - - createdPolicies = createdPolicies.filter(Boolean); - deletedPolicies = deletedPolicies.filter(Boolean); - } - } - - return { - tableName: input.tableName, - schema: input.schema, - created: createdPolicies, - deleted: deletedPolicies, - renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testIndPolicyResolver = (renames: Set) => -async ( - input: PolicyResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - renamed: [], - deleted: input.deleted, - }; - } - - let createdPolicies = [...input.created]; - let deletedPolicies = [...input.deleted]; - - const renamed: { from: Policy; to: Policy }[] = []; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedPolicies.findIndex((it) => { - return `${it.on}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdPolicies.findIndex((it) => { - return `${it.on}.${it.name}` === to; - }); - - renamed.push({ - from: deletedPolicies[idxFrom], - to: createdPolicies[idxTo], - }); - - delete createdPolicies[idxTo]; - delete deletedPolicies[idxFrom]; - - createdPolicies = createdPolicies.filter(Boolean); - deletedPolicies = deletedPolicies.filter(Boolean); - } - } - - return { - created: createdPolicies, - deleted: deletedPolicies, - renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testRolesResolver = (renames: Set) => -async ( - input: RolesResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - renamed: [], - deleted: input.deleted, - }; - } - - let createdPolicies = [...input.created]; - let deletedPolicies = [...input.deleted]; - - const renamed: { from: Policy; to: Policy }[] = []; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedPolicies.findIndex((it) => { - return `${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdPolicies.findIndex((it) => { - return `${it.name}` === to; - }); - - renamed.push({ - from: deletedPolicies[idxFrom], - to: createdPolicies[idxTo], - }); - - delete createdPolicies[idxTo]; - delete deletedPolicies[idxFrom]; - - createdPolicies = createdPolicies.filter(Boolean); - deletedPolicies = deletedPolicies.filter(Boolean); - } - } - - return { - created: createdPolicies, - deleted: deletedPolicies, - renamed, - }; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testViewsResolver = (renames: Set) => -async ( - input: ResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } - - let createdViews = [...input.created]; - let deletedViews = [...input.deleted]; - - const result: { - created: View[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: View; to: View }[]; - deleted: View[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; - }); - - const viewFrom = deletedViews[idxFrom]; - const viewTo = createdViews[idxFrom]; - - if (viewFrom.schema !== viewTo.schema) { - result.moved.push({ - name: viewFrom.name, - schemaFrom: viewFrom.schema, - schemaTo: viewTo.schema, - }); - } - - if (viewFrom.name !== viewTo.name) { - result.renamed.push({ - from: deletedViews[idxFrom], - to: createdViews[idxTo], - }); - } - - delete createdViews[idxTo]; - delete deletedViews[idxFrom]; - - createdViews = createdViews.filter(Boolean); - deletedViews = deletedViews.filter(Boolean); - } - } - - result.created = createdViews; - result.deleted = deletedViews; - - return result; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testViewsResolverMySql = (renames: Set) => -async ( - input: ResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } - - let createdViews = [...input.created]; - let deletedViews = [...input.deleted]; - - const result: { - created: ViewSquashed[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: ViewSquashed; to: ViewSquashed }[]; - deleted: ViewSquashed[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; - }); - - const viewFrom = deletedViews[idxFrom]; - const viewTo = createdViews[idxFrom]; - - if (viewFrom.schema !== viewTo.schema) { - result.moved.push({ - name: viewFrom.name, - schemaFrom: viewFrom.schema, - schemaTo: viewTo.schema, - }); - } - - if (viewFrom.name !== viewTo.name) { - result.renamed.push({ - from: deletedViews[idxFrom], - to: createdViews[idxTo], - }); - } - - delete createdViews[idxTo]; - delete deletedViews[idxFrom]; - - createdViews = createdViews.filter(Boolean); - deletedViews = deletedViews.filter(Boolean); - } - } - - result.created = createdViews; - result.deleted = deletedViews; - - return result; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testViewsResolverSingleStore = (renames: Set) => -async ( - input: ResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } - - let createdViews = [...input.created]; - let deletedViews = [...input.deleted]; - - const result: { - created: ViewSquashed[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: ViewSquashed; to: ViewSquashed }[]; - deleted: ViewSquashed[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdViews.findIndex((it) => { - return `${it.schema || 'public'}.${it.name}` === to; - }); - - const viewFrom = deletedViews[idxFrom]; - const viewTo = createdViews[idxFrom]; - - if (viewFrom.schema !== viewTo.schema) { - result.moved.push({ - name: viewFrom.name, - schemaFrom: viewFrom.schema, - schemaTo: viewTo.schema, - }); - } - - if (viewFrom.name !== viewTo.name) { - result.renamed.push({ - from: deletedViews[idxFrom], - to: createdViews[idxTo], - }); - } - - delete createdViews[idxTo]; - delete deletedViews[idxFrom]; - - createdViews = createdViews.filter(Boolean); - deletedViews = deletedViews.filter(Boolean); - } - } - - result.created = createdViews; - result.deleted = deletedViews; - - return result; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const testViewsResolverSqlite = (renames: Set) => -async ( - input: ResolverInput, -): Promise> => { - try { - if ( - input.created.length === 0 - || input.deleted.length === 0 - || renames.size === 0 - ) { - return { - created: input.created, - moved: [], - renamed: [], - deleted: input.deleted, - }; - } - - let createdViews = [...input.created]; - let deletedViews = [...input.deleted]; - - const result: { - created: SqliteView[]; - moved: { name: string; schemaFrom: string; schemaTo: string }[]; - renamed: { from: SqliteView; to: SqliteView }[]; - deleted: SqliteView[]; - } = { created: [], renamed: [], deleted: [], moved: [] }; - - for (let rename of renames) { - const [from, to] = rename.split('->'); - - const idxFrom = deletedViews.findIndex((it) => { - return it.name === from; - }); - - if (idxFrom >= 0) { - const idxTo = createdViews.findIndex((it) => { - return it.name === to; - }); - - const viewFrom = deletedViews[idxFrom]; - const viewTo = createdViews[idxFrom]; - - if (viewFrom.name !== viewTo.name) { - result.renamed.push({ - from: deletedViews[idxFrom], - to: createdViews[idxTo], - }); - } - - delete createdViews[idxTo]; - delete deletedViews[idxFrom]; - - createdViews = createdViews.filter(Boolean); - deletedViews = deletedViews.filter(Boolean); - } - } - - result.created = createdViews; - result.deleted = deletedViews; - - return result; - } catch (e) { - console.error(e); - throw e; - } -}; - -export const diffTestSchemasPush = async ( - client: PGlite, - left: PostgresSchema, - right: PostgresSchema, - renamesArr: string[], - cli: boolean = false, - schemas: string[] = ['public'], - casing?: CasingType | undefined, - entities?: Entities, - sqlStatementsToRun: { - before?: string[]; - after?: string[]; - runApply?: boolean; - } = { - before: [], - after: [], - runApply: true, - }, -) => { - const shouldRunApply = sqlStatementsToRun.runApply === undefined - ? true - : sqlStatementsToRun.runApply; - - for (const st of sqlStatementsToRun.before ?? []) { - await client.query(st); - } - - if (shouldRunApply) { - const res = await applyPgDiffs(left, casing); - for (const st of res.sqlStatements) { - await client.query(st); - } - } - - for (const st of sqlStatementsToRun.after ?? []) { - await client.query(st); - } - - const materializedViewsForRefresh = Object.values(left).filter((it) => - isPgMaterializedView(it) - ) as PgMaterializedView[]; - - // refresh all mat views - for (const view of materializedViewsForRefresh) { - const viewConf = getMaterializedViewConfig(view); - if (viewConf.isExisting) continue; - - await client.exec( - `REFRESH MATERIALIZED VIEW "${viewConf.schema ?? 'public'}"."${viewConf.name}"${ - viewConf.withNoData ? ' WITH NO DATA;' : ';' - }`, - ); - } - - // do introspect into PgSchemaInternal - const introspectedSchema = await fromDatabase( - { - query: async (query: string, values?: any[] | undefined) => { - const res = await client.query(query, values); - return res.rows as any[]; - }, - }, - undefined, - schemas, - entities, - ); - - const leftTables = Object.values(right).filter((it) => is(it, PgTable)) as PgTable[]; - - const leftSchemas = Object.values(right).filter((it) => is(it, PgSchema)) as PgSchema[]; - - const leftEnums = Object.values(right).filter((it) => isPgEnum(it)) as PgEnum[]; - - const leftSequences = Object.values(right).filter((it) => isPgSequence(it)) as PgSequence[]; - - const leftRoles = Object.values(right).filter((it) => is(it, PgRole)) as PgRole[]; - - const leftPolicies = Object.values(right).filter((it) => is(it, PgPolicy)) as PgPolicy[]; - - const leftViews = Object.values(right).filter((it) => isPgView(it)) as PgView[]; - - const leftMaterializedViews = Object.values(right).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; - - const serialized2 = generatePgSnapshot( - leftTables, - leftEnums, - leftSchemas, - leftSequences, - leftRoles, - leftPolicies, - leftViews, - leftMaterializedViews, - casing, - ); - - const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '7', - dialect: 'postgresql', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '7', - dialect: 'postgresql', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashPgScheme(sch1, 'push'); - const sn2 = squashPgScheme(sch2, 'push'); - - const validatedPrev = pgSchema.parse(sch1); - const validatedCur = pgSchema.parse(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applyPgSnapshotsDiff( - sn1, - sn2, - testSchemasResolver(renames), - testEnumsResolver(renames), - testSequencesResolver(renames), - testPolicyResolver(renames), - testIndPolicyResolver(renames), - testRolesResolver(renames), - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolver(renames), - validatedPrev, - validatedCur, - 'push', - ); - - const { - shouldAskForApprove, - statementsToExecute, - columnsToRemove, - tablesToRemove, - tablesToTruncate, - infoToPrint, - schemasToRemove, - matViewsToRemove, - } = await pgSuggestions( - { - query: async (sql: string, params: any[] = []) => { - return (await client.query(sql, params)).rows as T[]; - }, - }, - statements, - ); - - return { - sqlStatements: statementsToExecute, - statements, - shouldAskForApprove, - columnsToRemove, - tablesToRemove, - tablesToTruncate, - infoToPrint, - schemasToRemove, - matViewsToRemove, - }; - } else { - const { sqlStatements, statements } = await applyPgSnapshotsDiff( - sn1, - sn2, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - return { sqlStatements, statements }; - } -}; - -export const applyPgDiffs = async ( - sn: PostgresSchema, - casing: CasingType | undefined, -) => { - const dryRun = { - version: '7', - dialect: 'postgresql', - id: '0', - prevId: '0', - tables: {}, - enums: {}, - views: {}, - schemas: {}, - sequences: {}, - policies: {}, - roles: {}, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - } as const; - - const tables = Object.values(sn).filter((it) => is(it, PgTable)) as PgTable[]; - - const schemas = Object.values(sn).filter((it) => is(it, PgSchema)) as PgSchema[]; - - const enums = Object.values(sn).filter((it) => isPgEnum(it)) as PgEnum[]; - - const sequences = Object.values(sn).filter((it) => isPgSequence(it)) as PgSequence[]; - - const roles = Object.values(sn).filter((it) => is(it, PgRole)) as PgRole[]; - - const views = Object.values(sn).filter((it) => isPgView(it)) as PgView[]; - - const policies = Object.values(sn).filter((it) => is(it, PgPolicy)) as PgPolicy[]; - - const materializedViews = Object.values(sn).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; - - const serialized1 = generatePgSnapshot( - tables, - enums, - schemas, - sequences, - roles, - policies, - views, - materializedViews, - casing, - ); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - - const sch1 = { - version: '7', - dialect: 'postgresql', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sn1 = squashPgScheme(sch1); - - const validatedPrev = pgSchema.parse(dryRun); - const validatedCur = pgSchema.parse(sch1); - - const { sqlStatements, statements } = await applyPgSnapshotsDiff( - dryRun, - sn1, - testSchemasResolver(new Set()), - testEnumsResolver(new Set()), - testSequencesResolver(new Set()), - testPolicyResolver(new Set()), - testIndPolicyResolver(new Set()), - testRolesResolver(new Set()), - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - testViewsResolver(new Set()), - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; -}; - -export const diffTestSchemas = async ( - left: PostgresSchema, - right: PostgresSchema, - renamesArr: string[], - cli: boolean = false, - casing?: CasingType | undefined, -) => { - const leftTables = Object.values(left).filter((it) => is(it, PgTable)) as PgTable[]; - - const rightTables = Object.values(right).filter((it) => is(it, PgTable)) as PgTable[]; - - const leftSchemas = Object.values(left).filter((it) => is(it, PgSchema)) as PgSchema[]; - - const rightSchemas = Object.values(right).filter((it) => is(it, PgSchema)) as PgSchema[]; - - const leftEnums = Object.values(left).filter((it) => isPgEnum(it)) as PgEnum[]; - - const rightEnums = Object.values(right).filter((it) => isPgEnum(it)) as PgEnum[]; - - const leftSequences = Object.values(left).filter((it) => isPgSequence(it)) as PgSequence[]; - - const rightSequences = Object.values(right).filter((it) => isPgSequence(it)) as PgSequence[]; - - const leftRoles = Object.values(left).filter((it) => is(it, PgRole)) as PgRole[]; - - const rightRoles = Object.values(right).filter((it) => is(it, PgRole)) as PgRole[]; - - const leftPolicies = Object.values(left).filter((it) => is(it, PgPolicy)) as PgPolicy[]; - - const rightPolicies = Object.values(right).filter((it) => is(it, PgPolicy)) as PgPolicy[]; - - const leftViews = Object.values(left).filter((it) => isPgView(it)) as PgView[]; - - const rightViews = Object.values(right).filter((it) => isPgView(it)) as PgView[]; - - const leftMaterializedViews = Object.values(left).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; - - const rightMaterializedViews = Object.values(right).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; - - const serialized1 = generatePgSnapshot( - leftTables, - leftEnums, - leftSchemas, - leftSequences, - leftRoles, - leftPolicies, - leftViews, - leftMaterializedViews, - casing, - ); - const serialized2 = generatePgSnapshot( - rightTables, - rightEnums, - rightSchemas, - rightSequences, - rightRoles, - rightPolicies, - rightViews, - rightMaterializedViews, - casing, - ); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '7', - dialect: 'postgresql', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '7', - dialect: 'postgresql', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashPgScheme(sch1); - const sn2 = squashPgScheme(sch2); - - const validatedPrev = pgSchema.parse(sch1); - const validatedCur = pgSchema.parse(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applyPgSnapshotsDiff( - sn1, - sn2, - testSchemasResolver(renames), - testEnumsResolver(renames), - testSequencesResolver(renames), - testPolicyResolver(renames), - testIndPolicyResolver(renames), - testRolesResolver(renames), - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolver(renames), - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; - } else { - const { sqlStatements, statements } = await applyPgSnapshotsDiff( - sn1, - sn2, - schemasResolver, - enumsResolver, - sequencesResolver, - policyResolver, - indPolicyResolver, - roleResolver, - tablesResolver, - columnsResolver, - viewsResolver, - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; - } -}; - -export const diffTestSchemasPushMysql = async ( - client: Connection, - left: MysqlSchema, - right: MysqlSchema, - renamesArr: string[], - schema: string, - cli: boolean = false, - casing?: CasingType | undefined, -) => { - const { sqlStatements } = await applyMySqlDiffs(left, casing); - for (const st of sqlStatements) { - await client.query(st); - } - // do introspect into PgSchemaInternal - const introspectedSchema = await fromMySqlDatabase( - { - query: async (sql: string, params?: any[]) => { - const res = await client.execute(sql, params); - return res[0] as any; - }, - }, - schema, - ); - - const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - - const leftViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; - - const serialized2 = generateMySqlSnapshot(leftTables, leftViews, casing); - - const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashMysqlScheme(sch1); - const sn2 = squashMysqlScheme(sch2); - - const validatedPrev = mysqlSchema.parse(sch1); - const validatedCur = mysqlSchema.parse(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolverMySql(renames), - validatedPrev, - validatedCur, - 'push', - ); - return { sqlStatements, statements }; - } else { - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - mySqlViewsResolver, - validatedPrev, - validatedCur, - 'push', - ); - return { sqlStatements, statements }; - } -}; - -export const applyMySqlDiffs = async ( - sn: MysqlSchema, - casing: CasingType | undefined, -) => { - const dryRun = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - views: {}, - tables: {}, - enums: {}, - schemas: {}, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - } as const; - - const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - - const views = Object.values(sn).filter((it) => is(it, MySqlView)) as MySqlView[]; - - const serialized1 = generateMySqlSnapshot(tables, views, casing); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - - const sch1 = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sn1 = squashMysqlScheme(sch1); - - const validatedPrev = mysqlSchema.parse(dryRun); - const validatedCur = mysqlSchema.parse(sch1); - - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( - dryRun, - sn1, - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - testViewsResolverMySql(new Set()), - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; -}; - -export const diffTestSchemasMysql = async ( - left: MysqlSchema, - right: MysqlSchema, - renamesArr: string[], - cli: boolean = false, - casing?: CasingType | undefined, -) => { - const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - - const leftViews = Object.values(left).filter((it) => is(it, MySqlView)) as MySqlView[]; - - const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; - - const rightViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; - - const serialized1 = generateMySqlSnapshot(leftTables, leftViews, casing); - const serialized2 = generateMySqlSnapshot(rightTables, rightViews, casing); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashMysqlScheme(sch1); - const sn2 = squashMysqlScheme(sch2); - - const validatedPrev = mysqlSchema.parse(sch1); - const validatedCur = mysqlSchema.parse(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolverMySql(renames), - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; - } - - const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - mySqlViewsResolver, - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; -}; - -export const diffTestSchemasSingleStore = async ( - left: SinglestoreSchema, - right: SinglestoreSchema, - renamesArr: string[], - cli: boolean = false, - casing?: CasingType | undefined, -) => { - const leftTables = Object.values(left).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; - - /* const leftViews = Object.values(left).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ - - const rightTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; - - /* const rightViews = Object.values(right).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ - - const serialized1 = generateSingleStoreSnapshot( - leftTables, - /* leftViews, */ - casing, - ); - const serialized2 = generateSingleStoreSnapshot( - rightTables, - /* rightViews, */ - casing, - ); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '1', - dialect: 'singlestore', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '1', - dialect: 'singlestore', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashSingleStoreScheme(sch1); - const sn2 = squashSingleStoreScheme(sch2); - - const validatedPrev = singlestoreSchema.parse(sch1); - const validatedCur = singlestoreSchema.parse(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - /* testViewsResolverSingleStore(renames), */ - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; - } - - const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - /* singleStoreViewsResolver, */ - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; -}; - -export const diffTestSchemasPushSingleStore = async ( - client: Connection, - left: SinglestoreSchema, - right: SinglestoreSchema, - renamesArr: string[], - schema: string, - cli: boolean = false, - casing?: CasingType | undefined, - sqlStatementsToRun: { - before?: string[]; - after?: string[]; - runApply?: boolean; - } = { - before: [], - after: [], - runApply: true, - }, -) => { - const shouldRunApply = sqlStatementsToRun.runApply === undefined - ? true - : sqlStatementsToRun.runApply; - - for (const st of sqlStatementsToRun.before ?? []) { - await client.query(st); - } - - if (shouldRunApply) { - const res = await applySingleStoreDiffs(left, casing); - for (const st of res.sqlStatements) { - await client.query(st); - } - } - - for (const st of sqlStatementsToRun.after ?? []) { - await client.query(st); - } - - // do introspect into PgSchemaInternal - const introspectedSchema = await fromSingleStoreDatabase( - { - query: async (sql: string, params?: any[]) => { - const res = await client.execute(sql, params); - return res[0] as any; - }, - }, - schema, - ); - - const leftTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; - - /* const leftViews = Object.values(right).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ - - const serialized2 = generateSingleStoreSnapshot( - leftTables, - /* leftViews, */ - casing, - ); - - const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '1', - dialect: 'singlestore', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '1', - dialect: 'singlestore', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashSingleStoreScheme(sch1); - const sn2 = squashSingleStoreScheme(sch2); - - const validatedPrev = singlestoreSchema.parse(sch1); - const validatedCur = singlestoreSchema.parse(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - /* testViewsResolverSingleStore(renames), */ - validatedPrev, - validatedCur, - 'push', - ); - - const { - statementsToExecute, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await singleStoreLogSuggestionsAndReturn( - { - query: async (sql: string, params?: any[]) => { - const res = await client.execute(sql, params); - return res[0] as T[]; - }, - }, - statements, - sn1, - sn2, - ); - - return { - sqlStatements: statementsToExecute, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - }; - } else { - const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - /* singleStoreViewsResolver, */ - validatedPrev, - validatedCur, - 'push', - ); - return { sqlStatements, statements }; - } -}; - -export const applySingleStoreDiffs = async ( - sn: SinglestoreSchema, - casing: CasingType | undefined, -) => { - const dryRun = { - version: '1', - dialect: 'singlestore', - id: '0', - prevId: '0', - tables: {}, - views: {}, - enums: {}, - schemas: {}, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - } as const; - - const tables = Object.values(sn).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; - - /* const views = Object.values(sn).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ - - const serialized1 = generateSingleStoreSnapshot(tables, /* views, */ casing); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - - const sch1 = { - version: '1', - dialect: 'singlestore', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sn1 = squashSingleStoreScheme(sch1); - - const validatedPrev = singlestoreSchema.parse(dryRun); - const validatedCur = singlestoreSchema.parse(sch1); - - const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( - dryRun, - sn1, - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - /* testViewsResolverSingleStore(new Set()), */ - validatedPrev, - validatedCur, - ); - return { sqlStatements, statements }; -}; - -export const diffTestSchemasPushSqlite = async ( - client: Database, - left: SqliteSchema, - right: SqliteSchema, - renamesArr: string[], - cli: boolean = false, - seedStatements: string[] = [], - casing?: CasingType | undefined, -) => { - const { sqlStatements } = await applySqliteDiffs(left, 'push'); - - for (const st of sqlStatements) { - client.exec(st); - } - - for (const st of seedStatements) { - client.exec(st); - } - - // do introspect into PgSchemaInternal - const introspectedSchema = await fromSqliteDatabase( - { - query: async (sql: string, params: any[] = []) => { - return client.prepare(sql).bind(params).all() as T[]; - }, - run: async (query: string) => { - client.prepare(query).run(); - }, - }, - undefined, - ); - - const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - - const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; - - const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); - - const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashSqliteScheme(sch1, 'push'); - const sn2 = squashSqliteScheme(sch2, 'push'); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements, _meta } = await applySqliteSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolverSqlite(renames), - sch1, - sch2, - 'push', - ); - - const { - statementsToExecute, - columnsToRemove, - infoToPrint, - schemasToRemove, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await logSuggestionsAndReturn( - { - query: async (sql: string, params: any[] = []) => { - return client.prepare(sql).bind(params).all() as T[]; - }, - run: async (query: string) => { - client.prepare(query).run(); - }, - }, - statements, - sn1, - sn2, - _meta!, - ); - - return { - sqlStatements: statementsToExecute, - statements, - columnsToRemove, - infoToPrint, - schemasToRemove, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - }; - } else { - const { sqlStatements, statements } = await applySqliteSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - sch1, - sch2, - 'push', - ); - return { sqlStatements, statements }; - } -}; - -export async function diffTestSchemasPushLibSQL( - client: Client, - left: SqliteSchema, - right: SqliteSchema, - renamesArr: string[], - cli: boolean = false, - seedStatements: string[] = [], - casing?: CasingType | undefined, -) { - const { sqlStatements } = await applyLibSQLDiffs(left, 'push'); - - for (const st of sqlStatements) { - await client.execute(st); - } - - for (const st of seedStatements) { - await client.execute(st); - } - - const introspectedSchema = await fromSqliteDatabase( - { - query: async (sql: string, params?: any[]) => { - const res = await client.execute({ sql, args: params || [] }); - return res.rows as T[]; - }, - run: async (query: string) => { - await client.execute(query); - }, - }, - undefined, - ); - - const leftTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - - const leftViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; - - const serialized2 = generateSqliteSnapshot(leftTables, leftViews, casing); - - const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashSqliteScheme(sch1, 'push'); - const sn2 = squashSqliteScheme(sch2, 'push'); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements, _meta } = await applyLibSQLSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolverSqlite(renames), - sch1, - sch2, - 'push', - ); - - const { - statementsToExecute, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - } = await libSqlLogSuggestionsAndReturn( - { - query: async (sql: string, params?: any[]) => { - const res = await client.execute({ sql, args: params || [] }); - return res.rows as T[]; - }, - run: async (query: string) => { - await client.execute(query); - }, - }, - statements, - sn1, - sn2, - _meta!, - ); - - return { - sqlStatements: statementsToExecute, - statements, - columnsToRemove, - infoToPrint, - shouldAskForApprove, - tablesToRemove, - tablesToTruncate, - }; - } else { - const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - sch1, - sch2, - 'push', - ); - return { sqlStatements, statements }; - } -} - -export const applySqliteDiffs = async ( - sn: SqliteSchema, - action?: 'push' | undefined, - casing?: CasingType | undefined, -) => { - const dryRun = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - tables: {}, - enums: {}, - views: {}, - schemas: {}, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - } as const; - - const tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - - const views = Object.values(sn).filter((it) => is(it, SQLiteView)) as SQLiteView[]; - - const serialized1 = generateSqliteSnapshot(tables, views, casing); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - - const sch1 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sn1 = squashSqliteScheme(sch1, action); - - const { sqlStatements, statements } = await applySqliteSnapshotsDiff( - dryRun, - sn1, - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - testViewsResolverSqlite(new Set()), - dryRun, - sch1, - action, - ); - - return { sqlStatements, statements }; -}; - -export const applyLibSQLDiffs = async ( - sn: SqliteSchema, - action?: 'push' | undefined, - casing?: CasingType | undefined, -) => { - const dryRun = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - tables: {}, - views: {}, - enums: {}, - schemas: {}, - _meta: { - schemas: {}, - tables: {}, - columns: {}, - }, - } as const; - - const tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - - const views = Object.values(sn).filter((it) => is(it, SQLiteView)) as SQLiteView[]; - - const serialized1 = generateSqliteSnapshot(tables, views, casing); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - - const sch1 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sn1 = squashSqliteScheme(sch1, action); - - const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff( - dryRun, - sn1, - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - testViewsResolverSqlite(new Set()), - dryRun, - sch1, - action, - ); - - return { sqlStatements, statements }; -}; - -export const diffTestSchemasSqlite = async ( - left: SqliteSchema, - right: SqliteSchema, - renamesArr: string[], - cli: boolean = false, - casing?: CasingType | undefined, -) => { - const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - - const leftViews = Object.values(left).filter((it) => is(it, SQLiteView)) as SQLiteView[]; - - const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - - const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; - - const serialized1 = generateSqliteSnapshot(leftTables, leftViews, casing); - const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashSqliteScheme(sch1); - const sn2 = squashSqliteScheme(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applySqliteSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolverSqlite(renames), - sch1, - sch2, - ); - return { sqlStatements, statements }; - } - - const { sqlStatements, statements } = await applySqliteSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - sch1, - sch2, - ); - return { sqlStatements, statements }; -}; - -export const diffTestSchemasLibSQL = async ( - left: SqliteSchema, - right: SqliteSchema, - renamesArr: string[], - cli: boolean = false, - casing?: CasingType | undefined, -) => { - const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - - const leftViews = Object.values(left).filter((it) => is(it, SQLiteView)) as SQLiteView[]; - - const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; - - const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; - - const serialized1 = generateSqliteSnapshot(leftTables, leftViews, casing); - const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); - - const { version: v1, dialect: d1, ...rest1 } = serialized1; - const { version: v2, dialect: d2, ...rest2 } = serialized2; - - const sch1 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest1, - } as const; - - const sch2 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn1 = squashSqliteScheme(sch1); - const sn2 = squashSqliteScheme(sch2); - - const renames = new Set(renamesArr); - - if (!cli) { - const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff( - sn1, - sn2, - testTablesResolver(renames), - testColumnsResolver(renames), - testViewsResolverSqlite(renames), - sch1, - sch2, - ); - return { sqlStatements, statements }; - } - - const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff( - sn1, - sn2, - tablesResolver, - columnsResolver, - sqliteViewsResolver, - sch1, - sch2, - ); - return { sqlStatements, statements }; -}; - -// --- Introspect to file helpers --- - -export const introspectPgToFile = async ( - client: PGlite, - initSchema: PostgresSchema, - testName: string, - schemas: string[] = ['public'], - entities?: Entities, - casing?: CasingType | undefined, -) => { - // put in db - const { sqlStatements } = await applyPgDiffs(initSchema, casing); - for (const st of sqlStatements) { - await client.query(st); - } - - // introspect to schema - const introspectedSchema = await fromDatabase( - { - query: async (query: string, values?: any[] | undefined) => { - const res = await client.query(query, values); - return res.rows as any[]; - }, - }, - undefined, - schemas, - entities, - ); - - const { version: initV, dialect: initD, ...initRest } = introspectedSchema; - - const initSch = { - version: '7', - dialect: 'postgresql', - id: '0', - prevId: '0', - ...initRest, - } as const; - - const initSn = squashPgScheme(initSch); - const validatedCur = pgSchema.parse(initSch); - - // write to ts file - const file = schemaToTypeScript(introspectedSchema, 'camel'); - - fs.writeFileSync(`tests/introspect/postgres/${testName}.ts`, file.file); - - // generate snapshot from ts file - const response = await prepareFromPgImports([ - `tests/introspect/postgres/${testName}.ts`, - ]); - - const afterFileImports = generatePgSnapshot( - response.tables, - response.enums, - response.schemas, - response.sequences, - response.roles, - response.policies, - response.views, - response.matViews, - casing, - ); - - const { version: v2, dialect: d2, ...rest2 } = afterFileImports; - - const sch2 = { - version: '7', - dialect: 'postgresql', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn2AfterIm = squashPgScheme(sch2); - const validatedCurAfterImport = pgSchema.parse(sch2); - - const { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - } = await applyPgSnapshotsDiff( - initSn, - sn2AfterIm, - testSchemasResolver(new Set()), - testEnumsResolver(new Set()), - testSequencesResolver(new Set()), - testPolicyResolver(new Set()), - testIndPolicyResolver(new Set()), - testRolesResolver(new Set()), - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - testViewsResolver(new Set()), - validatedCur, - validatedCurAfterImport, - ); - - fs.rmSync(`tests/introspect/postgres/${testName}.ts`); - - return { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - }; -}; - -export const introspectGelToFile = async ( - client: GelClient, - testName: string, - schemas: string[] = ['public'], - entities?: Entities, - casing?: CasingType | undefined, -) => { - // introspect to schema - const introspectedSchema = await fromGelDatabase( - { - query: async (query: string, values?: any[] | undefined) => { - const res = await client.querySQL(query, values); - return res as any[]; - }, - }, - undefined, - schemas, - entities, - ); - - // write to ts file - const file = schemaToTypeScriptGel(introspectedSchema, 'camel'); - - const path = `tests/introspect/gel/${testName}.ts`; - fs.writeFileSync(path, file.file); - - return path; -}; - -export const introspectMySQLToFile = async ( - client: Connection, - initSchema: MysqlSchema, - testName: string, - schema: string, - casing?: CasingType | undefined, -) => { - // put in db - const { sqlStatements } = await applyMySqlDiffs(initSchema, casing); - for (const st of sqlStatements) { - await client.query(st); - } - - // introspect to schema - const introspectedSchema = await fromMySqlDatabase( - { - query: async (sql: string, params?: any[] | undefined) => { - const res = await client.execute(sql, params); - return res[0] as any; - }, - }, - schema, - ); - - const { version: initV, dialect: initD, ...initRest } = introspectedSchema; - - const initSch = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...initRest, - } as const; - - const initSn = squashMysqlScheme(initSch); - const validatedCur = mysqlSchema.parse(initSch); - - const file = schemaToTypeScriptMySQL(introspectedSchema, 'camel'); - - fs.writeFileSync(`tests/introspect/mysql/${testName}.ts`, file.file); - - const response = await prepareFromMySqlImports([ - `tests/introspect/mysql/${testName}.ts`, - ]); - - const afterFileImports = generateMySqlSnapshot( - response.tables, - response.views, - casing, - ); - - const { version: v2, dialect: d2, ...rest2 } = afterFileImports; - - const sch2 = { - version: '5', - dialect: 'mysql', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn2AfterIm = squashMysqlScheme(sch2); - const validatedCurAfterImport = mysqlSchema.parse(sch2); - - const { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - } = await applyMysqlSnapshotsDiff( - sn2AfterIm, - initSn, - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - testViewsResolverMySql(new Set()), - validatedCurAfterImport, - validatedCur, - ); - - fs.rmSync(`tests/introspect/mysql/${testName}.ts`); - - return { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - }; -}; - -export const introspectSingleStoreToFile = async ( - client: Connection, - initSchema: SinglestoreSchema, - testName: string, - schema: string, - casing?: CasingType | undefined, -) => { - // put in db - const { sqlStatements } = await applySingleStoreDiffs(initSchema, casing); - for (const st of sqlStatements) { - await client.query(st); - } - - // introspect to schema - const introspectedSchema = await fromSingleStoreDatabase( - { - query: async (sql: string, params?: any[] | undefined) => { - const res = await client.execute(sql, params); - return res[0] as any; - }, - }, - schema, - ); - - const file = schemaToTypeScriptSingleStore(introspectedSchema, 'camel'); - - fs.writeFileSync(`tests/introspect/singlestore/${testName}.ts`, file.file); - - const response = await prepareFromSingleStoreImports([ - `tests/introspect/singlestore/${testName}.ts`, - ]); - - const afterFileImports = generateSingleStoreSnapshot( - response.tables, - /* response.views, */ - casing, - ); - - const { version: v2, dialect: d2, ...rest2 } = afterFileImports; - - const sch2 = { - version: '1', - dialect: 'singlestore', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn2AfterIm = squashSingleStoreScheme(sch2); - const validatedCurAfterImport = singlestoreSchema.parse(sch2); - - const leftTables = Object.values(initSchema).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; - - const initSnapshot = generateSingleStoreSnapshot( - leftTables, - /* response.views, */ - casing, - ); - - const { version: initV, dialect: initD, ...initRest } = initSnapshot; - - const initSch = { - version: '1', - dialect: 'singlestore', - id: '0', - prevId: '0', - ...initRest, - } as const; - - const initSn = squashSingleStoreScheme(initSch); - const validatedCur = singlestoreSchema.parse(initSch); - - const { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - } = await applySingleStoreSnapshotsDiff( - sn2AfterIm, - initSn, - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - /* testViewsResolverSingleStore(new Set()), */ - validatedCurAfterImport, - validatedCur, - ); - - fs.rmSync(`tests/introspect/singlestore/${testName}.ts`); - - return { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - }; -}; - -export const introspectSQLiteToFile = async ( - client: Database, - initSchema: SqliteSchema, - testName: string, - casing?: CasingType | undefined, -) => { - // put in db - const { sqlStatements } = await applySqliteDiffs(initSchema); - for (const st of sqlStatements) { - client.exec(st); - } - - // introspect to schema - const introspectedSchema = await fromSqliteDatabase( - { - query: async (sql: string, params: any[] = []) => { - return client.prepare(sql).bind(params).all() as T[]; - }, - run: async (query: string) => { - client.prepare(query).run(); - }, - }, - undefined, - ); - - const { version: initV, dialect: initD, ...initRest } = introspectedSchema; - - const initSch = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...initRest, - } as const; - - const initSn = squashSqliteScheme(initSch); - - const validatedCur = sqliteSchema.parse(initSch); - - const file = schemaToTypeScriptSQLite(introspectedSchema, 'camel'); - - fs.writeFileSync(`tests/introspect/sqlite/${testName}.ts`, file.file); - - const response = await prepareFromSqliteImports([ - `tests/introspect/sqlite/${testName}.ts`, - ]); - - const afterFileImports = generateSqliteSnapshot( - response.tables, - response.views, - casing, - ); - - const { version: v2, dialect: d2, ...rest2 } = afterFileImports; - - const sch2 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn2AfterIm = squashSqliteScheme(sch2); - const validatedCurAfterImport = sqliteSchema.parse(sch2); - - const { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - } = await applySqliteSnapshotsDiff( - sn2AfterIm, - initSn, - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - testViewsResolverSqlite(new Set()), - validatedCurAfterImport, - validatedCur, - ); - - fs.rmSync(`tests/introspect/sqlite/${testName}.ts`); - - return { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - }; -}; - -export const introspectLibSQLToFile = async ( - client: Client, - initSchema: SqliteSchema, - testName: string, - casing?: CasingType | undefined, -) => { - // put in db - const { sqlStatements } = await applyLibSQLDiffs(initSchema); - for (const st of sqlStatements) { - client.execute(st); - } - - // introspect to schema - const introspectedSchema = await fromSqliteDatabase( - { - query: async (sql: string, params: any[] = []) => { - return (await client.execute({ sql, args: params })).rows as T[]; - }, - run: async (query: string) => { - client.execute(query); - }, - }, - undefined, - ); - - const { version: initV, dialect: initD, ...initRest } = introspectedSchema; - - const initSch = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...initRest, - } as const; - - const initSn = squashSqliteScheme(initSch); - - const validatedCur = sqliteSchema.parse(initSch); - - const file = schemaToTypeScriptSQLite(introspectedSchema, 'camel'); - - fs.writeFileSync(`tests/introspect/libsql/${testName}.ts`, file.file); - - const response = await prepareFromSqliteImports([ - `tests/introspect/libsql/${testName}.ts`, - ]); - - const afterFileImports = generateSqliteSnapshot( - response.tables, - response.views, - casing, - ); - - const { version: v2, dialect: d2, ...rest2 } = afterFileImports; - - const sch2 = { - version: '6', - dialect: 'sqlite', - id: '0', - prevId: '0', - ...rest2, - } as const; - - const sn2AfterIm = squashSqliteScheme(sch2); - const validatedCurAfterImport = sqliteSchema.parse(sch2); - - const { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - } = await applyLibSQLSnapshotsDiff( - sn2AfterIm, - initSn, - testTablesResolver(new Set()), - testColumnsResolver(new Set()), - testViewsResolverSqlite(new Set()), - validatedCurAfterImport, - validatedCur, - ); - - fs.rmSync(`tests/introspect/libsql/${testName}.ts`); - - return { - sqlStatements: afterFileSqlStatements, - statements: afterFileStatements, - }; -}; diff --git a/drizzle-kit/tests/singlestore/mocks.ts b/drizzle-kit/tests/singlestore/mocks.ts new file mode 100644 index 0000000000..1eaf72d5a3 --- /dev/null +++ b/drizzle-kit/tests/singlestore/mocks.ts @@ -0,0 +1,219 @@ +import Docker, { Container } from 'dockerode'; +import { is } from 'drizzle-orm'; +import { SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import { mkdirSync, writeFileSync } from 'fs'; +import getPort from 'get-port'; +import { Connection, createConnection } from 'mysql2/promise'; +import { suggestions } from 'src/cli/commands/push-mysql'; +import { CasingType } from 'src/cli/validations/common'; +import { explain } from 'src/cli/views'; +import { createDDL, interimToDDL } from 'src/dialects/mysql/ddl'; +import { ddlDiff, ddlDiffDry } from 'src/dialects/mysql/diff'; +import { fromDatabaseForDrizzle } from 'src/dialects/mysql/introspect'; +import { ddlToTypeScript } from 'src/dialects/mysql/typescript'; +import { fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/singlestore/drizzle'; +import { DB } from 'src/utils'; +import { mockResolver } from 'src/utils/mocks'; +import { v4 as uuid } from 'uuid'; + +export type SinglestoreSchema = Record | SingleStoreSchema>; + +export const drizzleToDDL = (sch: SinglestoreSchema, casing?: CasingType | undefined) => { + const tables = Object.values(sch).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; + return interimToDDL(fromDrizzleSchema(tables, casing)); +}; + +export const diff = async ( + left: SinglestoreSchema, + right: SinglestoreSchema, + renamesArr: string[], + casing?: CasingType | undefined, +) => { + const { ddl: ddl1 } = drizzleToDDL(left, casing); + const { ddl: ddl2 } = drizzleToDDL(right, casing); + + const renames = new Set(renamesArr); + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'default', + ); + return { sqlStatements, statements }; +}; + +export const pullDiff = async ( + db: DB, + initSchema: SinglestoreSchema, + testName: string, + casing?: CasingType | undefined, +) => { + mkdirSync('tests/mysql/tmp', { recursive: true }); + const { ddl: initDDL } = drizzleToDDL(initSchema, casing); + const { sqlStatements: init } = await ddlDiffDry(createDDL(), initDDL); + for (const st of init) await db.query(st); + + // introspect to schema + const schema = await fromDatabaseForDrizzle(db, 'drizzle'); + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const filePath = `tests/singlestore/tmp/${testName}.ts`; + const file = ddlToTypeScript(ddl1, schema.viewColumns, 'camel', 'singlestore'); + writeFileSync(filePath, file.file); + + const typeCheckResult = await $`pnpm exec tsc --noEmit --skipLibCheck ${filePath}`.nothrow(); + if (typeCheckResult.exitCode !== 0) { + throw new Error(typeCheckResult.stderr || typeCheckResult.stdout); + } + + // generate snapshot from ts file + const response = await prepareFromSchemaFiles([filePath]); + + const interim = fromDrizzleSchema(response.tables, casing); + const { ddl: ddl2, errors: e3 } = interimToDDL(interim); + + // TODO: handle errors + const renames = new Set(); + + const { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + + // rmSync(`tests/mysql/tmp/${testName}.ts`); + + return { + sqlStatements: afterFileSqlStatements, + statements: afterFileStatements, + }; +}; + +export const diffPush = async (config: { + db: DB; + init: SinglestoreSchema; + destination: SinglestoreSchema; + renames?: string[]; + casing?: CasingType; + before?: string[]; + after?: string[]; + apply?: boolean; +}) => { + const { db, init: initSchema, destination, casing, before, after, renames: rens } = config; + const apply = config.apply ?? true; + const { ddl: initDDL } = drizzleToDDL(initSchema, casing); + const { sqlStatements: inits } = await ddlDiffDry(createDDL(), initDDL, 'default'); + + const init = [] as string[]; + if (before) init.push(...before); + if (apply) init.push(...inits); + if (after) init.push(...after); + + for (const st of init) { + await db.query(st); + } + + // do introspect into PgSchemaInternal + const introspectedSchema = await fromDatabaseForDrizzle(db, 'drizzle'); + + const { ddl: ddl1, errors: err3 } = interimToDDL(introspectedSchema); + const { ddl: ddl2, errors: err2 } = drizzleToDDL(destination, casing); + + // TODO: handle errors + + const renames = new Set(rens); + const { sqlStatements, statements, groupedStatements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + mockResolver(renames), + 'push', + ); + + const explainMessage = explain('singlestore', groupedStatements, false, []); + if (explainMessage) console.log(explainMessage); + + return { sqlStatements, statements, hints: [] }; +}; + +async function createDockerDB(): Promise<{ url: string; container: Container }> { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) + ); + + const mysqlContainer = await docker.createContainer({ + Image: image, + Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], + name: `drizzle-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await mysqlContainer.start(); + return { url: `singlestore://root:singlestore@localhost:${port}/`, container: mysqlContainer }; +} + +export type TestDatabase = { + db: DB; + close: () => Promise; + clear: () => Promise; +}; + +export const prepareTestDatabase = async (): Promise => { + const envUrl = process.env.MYSQL_CONNECTION_STRING; + const { url, container } = envUrl ? { url: envUrl, container: null } : await createDockerDB(); + + const sleep = 1000; + let timeLeft = 20000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + const client: Connection = await createConnection(url); + await client.connect(); + const db = { + query: async (sql: string, params: any[]) => { + const [res] = await client.query(sql); + return res as any[]; + }, + }; + connected = true; + const close = async () => { + await client?.end().catch(console.error); + await container?.stop().catch(console.error); + }; + const clear = async () => { + await client.query(`drop database if exists \`drizzle\`;`); + await client.query(`create database \`drizzle\`;`); + await client.query(`use \`drizzle\`;`); + }; + return { db, close, clear }; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + + throw new Error(); +}; diff --git a/drizzle-kit/tests/introspect/singlestore.test.ts b/drizzle-kit/tests/singlestore/pull.test.ts similarity index 60% rename from drizzle-kit/tests/introspect/singlestore.test.ts rename to drizzle-kit/tests/singlestore/pull.test.ts index 8d4940662a..489e9d0267 100644 --- a/drizzle-kit/tests/introspect/singlestore.test.ts +++ b/drizzle-kit/tests/singlestore/pull.test.ts @@ -1,6 +1,4 @@ -import Docker from 'dockerode'; import 'dotenv/config'; -import { SQL, sql } from 'drizzle-orm'; import { bigint, char, @@ -11,88 +9,32 @@ import { mediumint, singlestoreTable, smallint, - text, tinyint, varchar, } from 'drizzle-orm/singlestore-core'; import * as fs from 'fs'; -import getPort from 'get-port'; -import { Connection, createConnection } from 'mysql2/promise'; -import { introspectSingleStoreToFile } from 'tests/schemaDiffer'; -import { v4 as uuid } from 'uuid'; +import { DB } from 'src/utils'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { prepareTestDatabase, pullDiff, TestDatabase } from './mocks'; -let client: Connection; -let singlestoreContainer: Docker.Container; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - singlestoreContainer = await docker.createContainer({ - Image: image, - Env: ['ROOT_PASSWORD=singlestore'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await singlestoreContainer.start(); - await new Promise((resolve) => setTimeout(resolve, 4000)); - - return `singlestore://root:singlestore@localhost:${port}/`; -} +let _: TestDatabase; +let db: DB; beforeAll(async () => { - const connectionString = process.env.SINGLESTORE_CONNECTION_STRING ?? await createDockerDB(); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await createConnection(connectionString); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to SingleStore'); - await client?.end().catch(console.error); - await singlestoreContainer?.stop().catch(console.error); - throw lastError; - } + _ = await prepareTestDatabase(); + db = _.db; }); afterAll(async () => { - await client?.end().catch(console.error); - await singlestoreContainer?.stop().catch(console.error); + await _.close(); }); beforeEach(async () => { - await client.query(`drop database if exists \`drizzle\`;`); - await client.query(`create database \`drizzle\`;`); - await client.query(`use \`drizzle\`;`); + await _.clear(); }); -if (!fs.existsSync('tests/introspect/singlestore')) { - fs.mkdirSync('tests/introspect/singlestore'); +if (!fs.existsSync('tests/mysql/tmp')) { + fs.mkdirSync('tests/mysql/tmp', { recursive: true }); } // TODO: Unskip this test when generated column is implemented @@ -108,7 +50,7 @@ if (!fs.existsSync('tests/introspect/singlestore')) { }; const { statements, sqlStatements } = await introspectSingleStoreToFile( - client, + db, schema, 'generated-link-column', 'drizzle', @@ -132,7 +74,7 @@ if (!fs.existsSync('tests/introspect/singlestore')) { }; const { statements, sqlStatements } = await introspectSingleStoreToFile( - client, + db, schema, 'generated-link-column-virtual', 'drizzle', @@ -150,12 +92,7 @@ test('Default value of character type column: char', async () => { }), }; - const { statements, sqlStatements } = await introspectSingleStoreToFile( - client, - schema, - 'default-value-char-column', - 'drizzle', - ); + const { statements, sqlStatements } = await pullDiff(db, schema, 'default-value-char-column'); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); @@ -169,12 +106,7 @@ test('Default value of character type column: varchar', async () => { }), }; - const { statements, sqlStatements } = await introspectSingleStoreToFile( - client, - schema, - 'default-value-varchar-column', - 'drizzle', - ); + const { statements, sqlStatements } = await pullDiff(db, schema, 'default-value-varchar-column'); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); @@ -193,7 +125,7 @@ test('Default value of character type column: varchar', async () => { }; const { statements, sqlStatements } = await introspectSingleStoreToFile( - client, + db, schema, 'view-1', 'drizzle', @@ -216,7 +148,7 @@ test('Default value of character type column: varchar', async () => { }; const { statements, sqlStatements } = await introspectSingleStoreToFile( - client, + db, schema, 'view-2', 'drizzle', @@ -235,12 +167,7 @@ test('handle float type', async () => { }), }; - const { statements, sqlStatements } = await introspectSingleStoreToFile( - client, - schema, - 'handle-float-type', - 'drizzle', - ); + const { statements, sqlStatements } = await pullDiff(db, schema, 'handle-float-type'); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); @@ -263,12 +190,7 @@ test('handle unsigned numerical types', async () => { }), }; - const { statements, sqlStatements } = await introspectSingleStoreToFile( - client, - schema, - 'handle-unsigned-numerical-types', - 'drizzle', - ); + const { statements, sqlStatements } = await pullDiff(db, schema, 'handle-unsigned-numerical-types'); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); diff --git a/drizzle-kit/tests/singlestore/push.test.ts b/drizzle-kit/tests/singlestore/push.test.ts new file mode 100644 index 0000000000..638799c43f --- /dev/null +++ b/drizzle-kit/tests/singlestore/push.test.ts @@ -0,0 +1,765 @@ +import chalk from 'chalk'; +import { + bigint, + binary, + char, + date, + decimal, + double, + float, + index, + int, + mediumint, + primaryKey, + singlestoreEnum, + singlestoreTable, + smallint, + text, + time, + timestamp, + tinyint, + varbinary, + varchar, + vector, + year, +} from 'drizzle-orm/singlestore-core'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { DialectSuite, run } from '../push/common'; +import { diffPush, prepareTestDatabase, TestDatabase } from './mocks'; + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = await prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +const singlestoreSuite: DialectSuite = { + allTypes: async function(context: any): Promise { + const schema1 = { + allBigInts: singlestoreTable('all_big_ints', { + simple: bigint('simple', { mode: 'number' }), + columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(), + columnDefault: bigint('column_default', { mode: 'number' }).default(12), + columnDefaultSql: bigint('column_default_sql', { + mode: 'number', + }).default(12), + }), + allBools: singlestoreTable('all_bools', { + simple: tinyint('simple'), + columnNotNull: tinyint('column_not_null').notNull(), + columnDefault: tinyint('column_default').default(1), + }), + allChars: singlestoreTable('all_chars', { + simple: char('simple', { length: 1 }), + columnNotNull: char('column_not_null', { length: 45 }).notNull(), + // columnDefault: char("column_default", { length: 1 }).default("h"), + columnDefaultSql: char('column_default_sql', { length: 1 }).default( + 'h', + ), + }), + // allDateTimes: singlestoreTable("all_date_times", { + // simple: datetime("simple", { mode: "string", fsp: 1 }), + // columnNotNull: datetime("column_not_null", { + // mode: "string", + // }).notNull(), + // columnDefault: datetime("column_default", { mode: "string" }).default( + // "2023-03-01 14:05:29" + // ), + // }), + allDates: singlestoreTable('all_dates', { + simple: date('simple', { mode: 'string' }), + column_not_null: date('column_not_null', { mode: 'string' }).notNull(), + column_default: date('column_default', { mode: 'string' }).default( + '2023-03-01', + ), + }), + allDecimals: singlestoreTable('all_decimals', { + simple: decimal('simple', { precision: 1, scale: 0 }), + columnNotNull: decimal('column_not_null', { + precision: 45, + scale: 3, + }).notNull(), + columnDefault: decimal('column_default', { + precision: 10, + scale: 0, + }).default('100'), + columnDefaultSql: decimal('column_default_sql', { + precision: 10, + scale: 0, + }).default('101'), + }), + + allDoubles: singlestoreTable('all_doubles', { + simple: double('simple'), + columnNotNull: double('column_not_null').notNull(), + columnDefault: double('column_default').default(100), + columnDefaultSql: double('column_default_sql').default(101), + }), + + allEnums: singlestoreTable('all_enums', { + simple: singlestoreEnum('simple', ['hi', 'hello']), + }), + + allEnums1: singlestoreTable('all_enums1', { + simple: singlestoreEnum('simple', ['hi', 'hello']).default('hi'), + }), + + allFloats: singlestoreTable('all_floats', { + columnNotNull: float('column_not_null').notNull(), + columnDefault: float('column_default').default(100), + columnDefaultSql: float('column_default_sql').default(101), + }), + + allInts: singlestoreTable('all_ints', { + simple: int('simple'), + columnNotNull: int('column_not_null').notNull(), + columnDefault: int('column_default').default(100), + columnDefaultSql: int('column_default_sql').default(101), + }), + + allIntsRef: singlestoreTable('all_ints_ref', { + simple: int('simple'), + columnNotNull: int('column_not_null').notNull(), + columnDefault: int('column_default').default(100), + columnDefaultSql: int('column_default_sql').default(101), + }), + + // allJsons: singlestoreTable("all_jsons", { + // columnDefaultObject: json("column_default_object") + // .default({ hello: "world world" }) + // .notNull(), + // columnDefaultArray: json("column_default_array").default({ + // hello: { "world world": ["foo", "bar"] }, + // foo: "bar", + // fe: 23, + // }), + // column: json("column"), + // }), + + allMInts: singlestoreTable('all_m_ints', { + simple: mediumint('simple'), + columnNotNull: mediumint('column_not_null').notNull(), + columnDefault: mediumint('column_default').default(100), + columnDefaultSql: mediumint('column_default_sql').default(101), + }), + + allReals: singlestoreTable('all_reals', { + simple: double('simple', { precision: 5, scale: 2 }), + columnNotNull: double('column_not_null').notNull(), + columnDefault: double('column_default').default(100), + columnDefaultSql: double('column_default_sql').default(101), + }), + + allSInts: singlestoreTable('all_s_ints', { + simple: smallint('simple'), + columnNotNull: smallint('column_not_null').notNull(), + columnDefault: smallint('column_default').default(100), + columnDefaultSql: smallint('column_default_sql').default(101), + }), + + // allSmallSerials: singlestoreTable("all_small_serials", { + // columnAll: serial("column_all").notNull(), + // }), + + allTInts: singlestoreTable('all_t_ints', { + simple: tinyint('simple'), + columnNotNull: tinyint('column_not_null').notNull(), + columnDefault: tinyint('column_default').default(10), + columnDefaultSql: tinyint('column_default_sql').default(11), + }), + + allTexts: singlestoreTable('all_texts', { + simple: text('simple'), + columnNotNull: text('column_not_null').notNull(), + columnDefault: text('column_default').default('hello'), + columnDefaultSql: text('column_default_sql').default('hello'), + }), + + allTimes: singlestoreTable('all_times', { + // simple: time("simple", { fsp: 1 }), + columnNotNull: time('column_not_null').notNull(), + columnDefault: time('column_default').default('22:12:12'), + }), + + allTimestamps: singlestoreTable('all_timestamps', { + // columnDateNow: timestamp("column_date_now", { + // fsp: 1, + // mode: "string", + // }).default(sql`(now())`), + columnAll: timestamp('column_all', { mode: 'string' }) + .default('2023-03-01 14:05:29') + .notNull(), + column: timestamp('column', { mode: 'string' }).default( + '2023-02-28 16:18:31', + ), + }), + + allVarChars: singlestoreTable('all_var_chars', { + simple: varchar('simple', { length: 100 }), + columnNotNull: varchar('column_not_null', { length: 45 }).notNull(), + columnDefault: varchar('column_default', { length: 100 }).default( + 'hello', + ), + columnDefaultSql: varchar('column_default_sql', { + length: 100, + }).default('hello'), + }), + + allVarbinaries: singlestoreTable('all_varbinaries', { + simple: varbinary('simple', { length: 100 }), + columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(), + columnDefault: varbinary('column_default', { length: 12 }), + }), + + allYears: singlestoreTable('all_years', { + simple: year('simple'), + columnNotNull: year('column_not_null').notNull(), + columnDefault: year('column_default').default(2022), + }), + + binafry: singlestoreTable('binary', { + simple: binary('simple', { length: 1 }), + columnNotNull: binary('column_not_null', { length: 1 }).notNull(), + columnDefault: binary('column_default', { length: 12 }), + }), + + allVectors: singlestoreTable('all_vectors', { + vectorSimple: vector('vector_simple', { dimensions: 1 }), + vectorElementType: vector('vector_element_type', { dimensions: 1, elementType: 'I8' }), + vectorNotNull: vector('vector_not_null', { dimensions: 1 }).notNull(), + vectorDefault: vector('vector_default', { dimensions: 1 }).default([1]), + }), + }; + + const { sqlStatements } = await diffPush({ db, init: schema1, destination: schema1 }); + expect(sqlStatements).toStrictEqual([]); + }, + addBasicIndexes: function(context?: any): Promise { + return {} as any; + }, + changeIndexFields: function(context?: any): Promise { + return {} as any; + }, + dropIndex: function(context?: any): Promise { + return {} as any; + }, + indexesToBeNotTriggered: function(context?: any): Promise { + return {} as any; + }, + indexesTestCase1: function(context?: any): Promise { + return {} as any; + }, + async case1() { + // TODO: implement if needed + expect(true).toBe(true); + }, + addNotNull: function(context?: any): Promise { + return {} as any; + }, + addNotNullWithDataNoRollback: function(context?: any): Promise { + return {} as any; + }, + addBasicSequences: function(context?: any): Promise { + return {} as any; + }, + addGeneratedColumn: async function(context: any): Promise { + return {} as any; + }, + addGeneratedToColumn: async function(context: any): Promise { + return {} as any; + }, + dropGeneratedConstraint: async function(context: any): Promise { + return {} as any; + }, + alterGeneratedConstraint: async function(context: any): Promise { + return {} as any; + }, + createTableWithGeneratedConstraint: function(context?: any): Promise { + return {} as any; + }, + createCompositePrimaryKey: async function(context: any): Promise { + const schema1 = {}; + + const schema2 = { + table: singlestoreTable('table', { + col1: int('col1').notNull(), + col2: int('col2').notNull(), + }, (t) => [primaryKey({ + columns: [t.col1, t.col2], + })]), + }; + + const { sqlStatements } = await diffPush({ db, init: schema1, destination: schema2 }); + + expect(sqlStatements).toStrictEqual([ + 'CREATE TABLE `table` (\n\t`col1` int NOT NULL,\n\t`col2` int NOT NULL,\n\tCONSTRAINT `table_col1_col2_pk` PRIMARY KEY(`col1`,`col2`)\n);\n', + ]); + }, + renameTableWithCompositePrimaryKey: async function(context?: any): Promise { + const productsCategoriesTable = (tableName: string) => { + return singlestoreTable(tableName, { + productId: varchar('product_id', { length: 10 }).notNull(), + categoryId: varchar('category_id', { length: 10 }).notNull(), + }, (t) => [primaryKey({ + columns: [t.productId, t.categoryId], + })]); + }; + + const schema1 = { + table: productsCategoriesTable('products_categories'), + }; + const schema2 = { + test: productsCategoriesTable('products_to_categories'), + }; + + const { sqlStatements } = await diffPush({ + db, + init: schema1, + destination: schema2, + renames: ['products_categories->products_to_categories'], + }); + + // It's not possible to create/alter/drop primary keys in SingleStore + expect(sqlStatements).toStrictEqual([ + 'ALTER TABLE `products_categories` RENAME TO `products_to_categories`;', + ]); + }, +}; + +run(singlestoreSuite); + +test('db has checks. Push with same names', async () => { + const schema1 = { + test: singlestoreTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }), + }; + const schema2 = { + test: singlestoreTable('test', { + id: int('id').primaryKey(), + values: int('values').default(1), + }), + }; + + const { sqlStatements } = await diffPush({ + db, + init: schema1, + destination: schema2, + }); + + expect(sqlStatements).toStrictEqual([]); +}); + +// TODO: Unskip this test when views are implemented +/* test.skip.skip('create view', async () => { + const table = singlestoreTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: singlestoreView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + definition: 'select `id` from `test`', + name: 'view', + type: 'singlestore_create_view', + replace: false, + sqlSecurity: 'definer', + withCheckOption: undefined, + algorithm: 'undefined', + }, + ]); + expect(sqlStatements).toStrictEqual([ + `CREATE ALGORITHM = undefined +SQL SECURITY definer +VIEW \`view\` AS (select \`id\` from \`test\`);`, + ]); + + await client.query(`DROP TABLE \`test\`;`); +}); */ + +// TODO: Unskip this test when views are implemented +/* test.skip('drop view', async () => { + const table = singlestoreTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: singlestoreView('view').as((qb) => qb.select().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements).toStrictEqual([ + { + name: 'view', + type: 'drop_view', + }, + ]); + expect(sqlStatements).toStrictEqual(['DROP VIEW `view`;']); + await client.query(`DROP TABLE \`test\`;`); + await client.query(`DROP VIEW \`view\`;`); +}); */ + +// TODO: Unskip this test when views are implemented +/* test.skip('alter view ".as"', async () => { + const table = singlestoreTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: singlestoreView('view').as((qb) => + qb + .select() + .from(table) + .where(sql`${table.id} = 1`) + ), + }; + + const schema2 = { + test: table, + view: singlestoreView('view').as((qb) => qb.select().from(table)), + }; + + const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + + await client.query(`DROP TABLE \`test\`;`); + await client.query(`DROP VIEW \`view\`;`); +}); */ + +// TODO: Unskip this test when views are implemented +/* test.skip('alter meta options with distinct in definition', async () => { + const table = singlestoreTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: singlestoreView('view') + .withCheckOption('cascaded') + .sqlSecurity('definer') + .algorithm('merge') + .as((qb) => + qb + .selectDistinct() + .from(table) + .where(sql`${table.id} = 1`) + ), + }; + + const schema2 = { + test: table, + view: singlestoreView('view') + .withCheckOption('cascaded') + .sqlSecurity('definer') + .algorithm('undefined') + .as((qb) => qb.selectDistinct().from(table)), + }; + + await expect( + diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + ), + ).rejects.toThrowError(); + + await client.query(`DROP TABLE \`test\`;`); +}); */ + +test('added column not null and without default to table with data', async (t) => { + const schema1 = { + companies: singlestoreTable('companies', { + id: int('id'), + name: text('name'), + }), + }; + + const schema2 = { + companies: singlestoreTable('companies', { + id: int('id'), + name: text('name'), + age: int('age').notNull(), + }), + }; + + const { sqlStatements } = await diffPush({ + db, + init: schema1, + destination: schema2, + after: [`INSERT INTO \`companies\` (\`name\`) VALUES ('drizzle'), ('turso');`], + }); + + expect(sqlStatements).toStrictEqual([ + `truncate table companies;`, + `ALTER TABLE \`companies\` ADD \`age\` int NOT NULL;`, + ]); +}); + +test('added column not null and without default to table without data', async (t) => { + const schema1 = { + companies: singlestoreTable('companies', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }), + }; + + const schema2 = { + companies: singlestoreTable('companies', { + id: int('id').primaryKey(), + name: text('name').notNull(), + age: int('age').notNull(), + }), + }; + + const { sqlStatements } = await diffPush({ db, init: schema1, destination: schema2 }); + + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \`companies\` ADD \`age\` int NOT NULL;`, + ]); +}); + +test('drop not null, add not null', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }), + posts: singlestoreTable( + 'posts', + { + id: int('id').primaryKey(), + name: text('name'), + userId: int('user_id'), + }, + ), + }; + + const schema2 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }), + posts: singlestoreTable( + 'posts', + { + id: int('id').primaryKey(), + name: text('name').notNull(), + userId: int('user_id'), + }, + ), + }; + const { sqlStatements } = await diffPush({ db, init: schema1, destination: schema2 }); + + expect(sqlStatements).toStrictEqual([ + `CREATE TABLE \`__new_posts\` ( +\t\`id\` int NOT NULL, +\t\`name\` text NOT NULL, +\t\`user_id\` int, +\tCONSTRAINT \`posts_id\` PRIMARY KEY(\`id\`) +);\n`, + `INSERT INTO \`__new_posts\`(\`id\`, \`name\`, \`user_id\`) SELECT \`id\`, \`name\`, \`user_id\` FROM \`posts\`;`, + `DROP TABLE \`posts\`;`, + `ALTER TABLE \`__new_posts\` RENAME TO \`posts\`;`, + + `CREATE TABLE \`__new_users\` ( +\t\`id\` int NOT NULL, +\t\`name\` text, +\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) +);\n`, + `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, + `DROP TABLE \`users\`;`, + `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, + ]); +}); + +test('drop table with data', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }), + posts: singlestoreTable( + 'posts', + { + id: int('id').primaryKey(), + name: text('name'), + userId: int('user_id'), + }, + ), + }; + + const schema2 = { + posts: singlestoreTable( + 'posts', + { + id: int('id').primaryKey(), + name: text('name'), + userId: int('user_id'), + }, + ), + }; + + const { sqlStatements, hints } = await diffPush({ + db, + init: schema1, + destination: schema2, + after: [`INSERT INTO \`users\` (\`id\`, \`name\`) VALUES (1, 'drizzle')`], + }); + + expect(sqlStatements).toStrictEqual([`DROP TABLE \`users\`;`]); + expect(hints).toStrictEqual([`· You're about to delete ${chalk.underline('users')} table with 1 items`]); +}); + +test('change data type. db has indexes. table does not have values', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: int('name').notNull(), + }, (table) => [index('index').on(table.name)]), + }; + + const schema2 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }, (table) => [index('index').on(table.name)]), + }; + + const { sqlStatements } = await diffPush({ + db, + init: schema1, + destination: schema2, + after: [`INSERT INTO users VALUES (1, 12)`], + }); + expect(sqlStatements).toStrictEqual([ + `CREATE TABLE \`__new_users\` ( +\t\`id\` int NOT NULL, +\t\`name\` text NOT NULL, +\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) +);\n`, + `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, + `DROP TABLE \`users\`;`, + `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, + `CREATE INDEX \`index\` ON \`users\` (\`name\`);`, + ]); +}); + +test('change data type. db has indexes. table has values', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: int('name'), + }, (table) => [index('index').on(table.name)]), + }; + + const schema2 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }, (table) => [index('index').on(table.name)]), + }; + + const { sqlStatements, hints } = await diffPush({ + db, + init: schema1, + destination: schema2, + after: [`INSERT INTO users VALUES (1, 12);`, `INSERT INTO users (id) VALUES (2);`], + }); + + expect(sqlStatements).toStrictEqual([ + `TRUNCATE TABLE \`users\`;`, + `CREATE TABLE \`__new_users\` ( +\t\`id\` int NOT NULL, +\t\`name\` text, +\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) +);\n`, + + `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, + `DROP TABLE \`users\`;`, + `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, + `CREATE INDEX \`index\` ON \`users\` (\`name\`);`, + ]); + expect(hints).toStrictEqual([ + `· You're about recreate ${chalk.underline('users')} table with data type changing for ${ + chalk.underline('name') + } column, which contains 1 items`, + ]); +}); + +test('add column. add default to column without not null', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name').default('drizzle'), + age: int('age'), + }), + }; + const { sqlStatements, hints } = await diffPush({ db, init: schema1, destination: schema2 }); + + expect(sqlStatements).toStrictEqual([ + `ALTER TABLE \`users\` MODIFY COLUMN \`name\` text DEFAULT 'drizzle';`, + `ALTER TABLE \`users\` ADD \`age\` int;`, + ]); +}); diff --git a/drizzle-kit/tests/singlestore-generated.test.ts b/drizzle-kit/tests/singlestore/singlestore-generated.test.ts similarity index 57% rename from drizzle-kit/tests/singlestore-generated.test.ts rename to drizzle-kit/tests/singlestore/singlestore-generated.test.ts index 8944f3b211..8250b68374 100644 --- a/drizzle-kit/tests/singlestore-generated.test.ts +++ b/drizzle-kit/tests/singlestore/singlestore-generated.test.ts @@ -1,7 +1,7 @@ import { SQL, sql } from 'drizzle-orm'; import { int, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import { expect, test } from 'vitest'; -import { diffTestSchemasSingleStore } from './schemaDiffer'; +import { diff } from './mocks'; test('generated as callback: add column with generated constraint', async () => { const from = { @@ -23,30 +23,8 @@ test('generated as callback: add column with generated constraint', async () => }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( - from, - to, - [], - ); + const { sqlStatements } = await diff(from, to, []); - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); @@ -74,30 +52,8 @@ test('generated as callback: add generated constraint to an exisiting column as }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( - from, - to, - [], - ); + const { sqlStatements } = await diff(from, to, []); - expect(statements).toStrictEqual([ - { - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'stored', - }, - columnAutoIncrement: false, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", ]); @@ -125,30 +81,8 @@ test('generated as callback: add generated constraint to an exisiting column as }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( - from, - to, - [], - ); + const { sqlStatements } = await diff(from, to, []); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", @@ -175,40 +109,8 @@ test('generated as callback: drop generated constraint as stored', async () => { generatedName1: text('gen_name'), }), }; + const { sqlStatements } = await diff(from, to, []); - const { statements, sqlStatements } = await diffTestSchemasSingleStore( - from, - to, - [], - ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'stored', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - type: 'alter_table_alter_column_drop_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); @@ -235,39 +137,7 @@ test('generated as callback: drop generated constraint as virtual', async () => }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( - from, - to, - [], - ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); + const { sqlStatements } = await diff(from, to, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', @@ -298,30 +168,8 @@ test('generated as callback: change generated constraint type from virtual to st }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( - from, - to, - [], - ); + const { sqlStatements } = await diff(from, to, []); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", @@ -350,30 +198,7 @@ test('generated as callback: change generated constraint type from stored to vir }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( - from, - to, - [], - ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); + const { sqlStatements } = await diff(from, to, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", @@ -402,30 +227,11 @@ test('generated as callback: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", @@ -454,30 +260,11 @@ test('generated as sql: add column with generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); @@ -505,30 +292,11 @@ test('generated as sql: add generated constraint to an exisiting column as store }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'stored', - }, - columnAutoIncrement: false, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", ]); @@ -556,30 +324,11 @@ test('generated as sql: add generated constraint to an exisiting column as virtu }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", @@ -607,39 +356,11 @@ test('generated as sql: drop generated constraint as stored', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'stored', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - type: 'alter_table_alter_column_drop_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); @@ -666,39 +387,11 @@ test('generated as sql: drop generated constraint as virtual', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', @@ -729,30 +422,11 @@ test('generated as sql: change generated constraint type from virtual to stored' }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", @@ -781,30 +455,11 @@ test('generated as sql: change generated constraint type from stored to virtual' }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", @@ -833,30 +488,11 @@ test('generated as sql: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", @@ -885,30 +521,11 @@ test('generated as string: add column with generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - schema: '', - tableName: 'users', - type: 'alter_table_add_column', - }, - ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); @@ -936,30 +553,11 @@ test('generated as string: add generated constraint to an exisiting column as st }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'stored', - }, - columnAutoIncrement: false, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", ]); @@ -987,30 +585,11 @@ test('generated as string: add generated constraint to an exisiting column as vi }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'to add'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", @@ -1038,39 +617,11 @@ test('generated as string: drop generated constraint as stored', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'stored', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - type: 'alter_table_alter_column_drop_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); @@ -1097,39 +648,11 @@ test('generated as string: drop generated constraint as virtual', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - oldColumn: { - autoincrement: false, - generated: { - as: "`users`.`name` || 'to delete'", - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - onUpdate: undefined, - primaryKey: false, - type: 'text', - }, - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', @@ -1159,30 +682,11 @@ test('generated as string: change generated constraint type from virtual to stor }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'stored', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", @@ -1209,30 +713,11 @@ test('generated as string: change generated constraint type from stored to virtu }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", @@ -1259,30 +744,11 @@ test('generated as string: change generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSingleStore( + const { sqlStatements } = await diff( from, to, [], ); - - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: "`users`.`name` || 'hello'", - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", diff --git a/drizzle-kit/tests/singlestore-schemas.test.ts b/drizzle-kit/tests/singlestore/singlestore-schemas.test.ts similarity index 74% rename from drizzle-kit/tests/singlestore-schemas.test.ts rename to drizzle-kit/tests/singlestore/singlestore-schemas.test.ts index db9fe04804..5ed140418a 100644 --- a/drizzle-kit/tests/singlestore-schemas.test.ts +++ b/drizzle-kit/tests/singlestore/singlestore-schemas.test.ts @@ -1,6 +1,6 @@ import { singlestoreSchema, singlestoreTable } from 'drizzle-orm/singlestore-core'; import { expect, test } from 'vitest'; -import { diffTestSchemasSingleStore } from './schemaDiffer'; +import { diff } from './mocks'; // We don't manage databases(schemas) in MySQL with Drizzle Kit test('add schema #1', async () => { @@ -8,7 +8,7 @@ test('add schema #1', async () => { devSchema: singlestoreSchema('dev'), }; - const { statements } = await diffTestSchemasSingleStore({}, to, []); + const { statements } = await diff({}, to, []); expect(statements.length).toBe(0); }); @@ -22,7 +22,7 @@ test('add schema #2', async () => { devSchema2: singlestoreSchema('dev2'), }; - const { statements } = await diffTestSchemasSingleStore(from, to, []); + const { statements } = await diff(from, to, []); expect(statements.length).toBe(0); }); @@ -32,7 +32,7 @@ test('delete schema #1', async () => { devSchema: singlestoreSchema('dev'), }; - const { statements } = await diffTestSchemasSingleStore(from, {}, []); + const { statements } = await diff(from, {}, []); expect(statements.length).toBe(0); }); @@ -46,7 +46,7 @@ test('delete schema #2', async () => { devSchema: singlestoreSchema('dev'), }; - const { statements } = await diffTestSchemasSingleStore(from, to, []); + const { statements } = await diff(from, to, []); expect(statements.length).toBe(0); }); @@ -59,7 +59,7 @@ test('rename schema #1', async () => { devSchema2: singlestoreSchema('dev2'), }; - const { statements } = await diffTestSchemasSingleStore(from, to, ['dev->dev2']); + const { statements } = await diff(from, to, ['dev->dev2']); expect(statements.length).toBe(0); }); @@ -74,7 +74,7 @@ test('rename schema #2', async () => { devSchema2: singlestoreSchema('dev2'), }; - const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + const { statements } = await diff(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); @@ -87,7 +87,7 @@ test('add table to schema #1', async () => { users: dev.table('users', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + const { statements } = await diff(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); @@ -100,7 +100,7 @@ test('add table to schema #2', async () => { users: dev.table('users', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + const { statements } = await diff(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); @@ -114,7 +114,7 @@ test('add table to schema #3', async () => { users: singlestoreTable('users', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + const { statements } = await diff(from, to, ['dev1->dev2']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ @@ -139,7 +139,7 @@ test('remove table from schema #1', async () => { dev, }; - const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + const { statements } = await diff(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); @@ -149,7 +149,7 @@ test('remove table from schema #2', async () => { const from = { dev, users: dev.table('users', {}) }; const to = {}; - const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); + const { statements } = await diff(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); diff --git a/drizzle-kit/tests/singlestore.test.ts b/drizzle-kit/tests/singlestore/singlestore.test.ts similarity index 57% rename from drizzle-kit/tests/singlestore.test.ts rename to drizzle-kit/tests/singlestore/singlestore.test.ts index dca99ad2d5..82ee9ade5e 100644 --- a/drizzle-kit/tests/singlestore.test.ts +++ b/drizzle-kit/tests/singlestore/singlestore.test.ts @@ -11,29 +11,16 @@ import { uniqueIndex, } from 'drizzle-orm/singlestore-core'; import { expect, test } from 'vitest'; -import { diffTestSchemasSingleStore } from './schemaDiffer'; +import { diff } from './mocks'; test('add table #1', async () => { const to = { users: singlestoreTable('users', {}), }; - const { statements } = await diffTestSchemasSingleStore({}, to, []); + const { sqlStatements } = await diff({}, to, []); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - compositePKs: [], - internals: { - tables: {}, - indexes: {}, - }, - uniqueConstraints: [], - compositePkName: '', - }); + expect(sqlStatements).toStrictEqual(['']); }); test('add table #2', async () => { @@ -43,74 +30,23 @@ test('add table #2', async () => { }), }; - const { statements } = await diffTestSchemasSingleStore({}, to, []); + const { sqlStatements } = await diff({}, to, []); - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [ - { - name: 'id', - notNull: true, - primaryKey: false, - type: 'serial', - autoincrement: true, - }, - ], - compositePKs: ['users_id;id'], - compositePkName: 'users_id', - uniqueConstraints: [], - internals: { - tables: {}, - indexes: {}, - }, - }); + expect(sqlStatements).toStrictEqual(['']); }); test('add table #3', async () => { const to = { - users: singlestoreTable( - 'users', - { - id: serial('id'), - }, - (t) => { - return { - pk: primaryKey({ - name: 'users_pk', - columns: [t.id], - }), - }; - }, - ), + users: singlestoreTable('users', { + id: serial('id'), + }, (t) => [primaryKey({ + name: 'users_pk', + columns: [t.id], + })]), }; - const { statements } = await diffTestSchemasSingleStore({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [ - { - name: 'id', - notNull: true, - primaryKey: false, - type: 'serial', - autoincrement: true, - }, - ], - compositePKs: ['users_pk;id'], - uniqueConstraints: [], - compositePkName: 'users_pk', - internals: { - tables: {}, - indexes: {}, - }, - }); + const { sqlStatements } = await diff({}, to, []); + expect(sqlStatements).toStrictEqual(['']); }); test('add table #4', async () => { @@ -119,35 +55,8 @@ test('add table #4', async () => { posts: singlestoreTable('posts', {}), }; - const { statements } = await diffTestSchemasSingleStore({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - internals: { - tables: {}, - indexes: {}, - }, - compositePKs: [], - uniqueConstraints: [], - compositePkName: '', - }); - expect(statements[1]).toStrictEqual({ - type: 'create_table', - tableName: 'posts', - schema: undefined, - columns: [], - compositePKs: [], - internals: { - tables: {}, - indexes: {}, - }, - uniqueConstraints: [], - compositePkName: '', - }); + const { sqlStatements } = await diff({}, to, []); + expect(sqlStatements).toStrictEqual(['']); }); test('add table #5', async () => { @@ -161,9 +70,8 @@ test('add table #5', async () => { users: schema.table('users', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, []); - - expect(statements.length).toBe(0); + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([]); }); test('add table #6', async () => { @@ -175,28 +83,8 @@ test('add table #6', async () => { users2: singlestoreTable('users2', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users2', - schema: undefined, - columns: [], - internals: { - tables: {}, - indexes: {}, - }, - compositePKs: [], - uniqueConstraints: [], - compositePkName: '', - }); - expect(statements[1]).toStrictEqual({ - policies: [], - type: 'drop_table', - tableName: 'users1', - schema: undefined, - }); + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual(['']); }); test('add table #7', async () => { @@ -209,31 +97,11 @@ test('add table #7', async () => { users2: singlestoreTable('users2', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ - 'public.users1->public.users2', + const { sqlStatements } = await diff(from, to, [ + 'users1->users2', ]); - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'rename_table', - tableNameFrom: 'users1', - tableNameTo: 'users2', - fromSchema: undefined, - toSchema: undefined, - }); - expect(statements[1]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - compositePKs: [], - uniqueConstraints: [], - internals: { - tables: {}, - indexes: {}, - }, - compositePkName: '', - }); + expect(sqlStatements).toStrictEqual(['']); }); test('add schema + table #1', async () => { @@ -244,9 +112,8 @@ test('add schema + table #1', async () => { users: schema.table('users', {}), }; - const { statements } = await diffTestSchemasSingleStore({}, to, []); - - expect(statements.length).toBe(0); + const { sqlStatements } = await diff({}, to, []); + expect(sqlStatements).toStrictEqual([]); }); test('change schema with tables #1', async () => { @@ -261,11 +128,10 @@ test('change schema with tables #1', async () => { users: schema2.table('users', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ + const { sqlStatements } = await diff(from, to, [ 'folder->folder2', ]); - - expect(statements.length).toBe(0); + expect(sqlStatements).toStrictEqual([]); }); test('change table schema #1', async () => { @@ -279,17 +145,10 @@ test('change table schema #1', async () => { users: schema.table('users', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ - 'public.users->folder.users', + const { sqlStatements } = await diff(from, to, [ + 'users->folder.users', ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - policies: [], - type: 'drop_table', - tableName: 'users', - schema: undefined, - }); + expect(sqlStatements).toStrictEqual(['']); }); test('change table schema #2', async () => { @@ -303,24 +162,10 @@ test('change table schema #2', async () => { users: singlestoreTable('users', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ - 'folder.users->public.users', + const { sqlStatements } = await diff(from, to, [ + 'folder.users->users', ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_table', - tableName: 'users', - schema: undefined, - columns: [], - uniqueConstraints: [], - compositePkName: '', - compositePKs: [], - internals: { - tables: {}, - indexes: {}, - }, - }); + expect(sqlStatements).toStrictEqual(['']); }); test('change table schema #3', async () => { @@ -337,11 +182,11 @@ test('change table schema #3', async () => { users: schema2.table('users', {}), }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ + const { sqlStatements } = await diff(from, to, [ 'folder1.users->folder2.users', ]); - expect(statements.length).toBe(0); + expect(sqlStatements).toStrictEqual(['']); }); test('change table schema #4', async () => { @@ -357,11 +202,11 @@ test('change table schema #4', async () => { users: schema2.table('users', {}), // move table }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ + const { sqlStatements } = await diff(from, to, [ 'folder1.users->folder2.users', ]); - expect(statements.length).toBe(0); + expect(sqlStatements).toStrictEqual(['']); }); test('change table schema #5', async () => { @@ -376,11 +221,11 @@ test('change table schema #5', async () => { users: schema2.table('users', {}), // move table }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ + const { sqlStatements } = await diff(from, to, [ 'folder1.users->folder2.users', ]); - expect(statements.length).toBe(0); + expect(sqlStatements).toStrictEqual(['']); }); test('change table schema #5', async () => { @@ -397,11 +242,11 @@ test('change table schema #5', async () => { users: schema2.table('users2', {}), // rename and move table }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ + const { sqlStatements } = await diff(from, to, [ 'folder1.users->folder2.users2', ]); - expect(statements.length).toBe(0); + expect(sqlStatements).toStrictEqual(['']); }); test('change table schema #6', async () => { @@ -416,12 +261,12 @@ test('change table schema #6', async () => { users: schema2.table('users2', {}), // rename table }; - const { statements } = await diffTestSchemasSingleStore(from, to, [ + const { sqlStatements } = await diff(from, to, [ 'folder1->folder2', 'folder2.users->folder2.users2', ]); - expect(statements.length).toBe(0); + expect(sqlStatements).toStrictEqual(['']); }); test('add table #10', async () => { @@ -431,7 +276,7 @@ test('add table #10', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + const { sqlStatements } = await diff({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT '{}'\n);\n", @@ -445,7 +290,7 @@ test('add table #11', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + const { sqlStatements } = await diff({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT '[]'\n);\n", @@ -459,7 +304,7 @@ test('add table #12', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); + const { sqlStatements } = await diff({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT '[1,2,3]'\n);\n", @@ -473,11 +318,10 @@ test('add table #13', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff({}, to, []); + expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `table` (\n\t`json` json DEFAULT \'{"key":"value"}\'\n);\n', - ); + ]); }); test('add table #14', async () => { @@ -490,11 +334,10 @@ test('add table #14', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff({}, to, []); + expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `table` (\n\t`json` json DEFAULT \'{"key":"value","arr":[1,2,3]}\'\n);\n', - ); + ]); }); // TODO: add bson type tests @@ -516,11 +359,7 @@ test('drop index', async () => { { name: text('name'), }, - (t) => { - return { - idx: index('name_idx').on(t.name), - }; - }, + (t) => [index('name_idx').on(t.name)], ), }; @@ -530,9 +369,10 @@ test('drop index', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe('DROP INDEX `name_idx` ON `table`;'); + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ + 'DROP INDEX `name_idx` ON `table`;', + ]); }); test('add table with indexes', async () => { @@ -546,28 +386,27 @@ test('add table with indexes', async () => { name: text('name'), email: text('email'), }, - (t) => ({ - uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), - indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), - indexExprMultiple: index('indexExprMultiple').on( + (t) => [ + uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), + index('indexExpr').on(sql`(lower(${t.email}))`), + index('indexExprMultiple').on( sql`(lower(${t.email}))`, sql`(lower(${t.email}))`, ), - uniqueCol: uniqueIndex('uniqueCol').on(t.email), - indexCol: index('indexCol').on(t.email), - indexColMultiple: index('indexColMultiple').on(t.email, t.email), + uniqueIndex('uniqueCol').on(t.email), + index('indexCol').on(t.email), + index('indexColMultiple').on(t.email, t.email), - indexColExpr: index('indexColExpr').on( + index('indexColExpr').on( sql`(lower(${t.email}))`, t.email, ), - }), + ], ), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(6); + const { sqlStatements } = await diff(from, to, []); expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`users\` (\n\t\`id\` serial AUTO_INCREMENT NOT NULL,\n\t\`name\` text,\n\t\`email\` text,\n\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`),\n\tCONSTRAINT \`uniqueExpr\` UNIQUE((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE(\`email\`) ); @@ -593,11 +432,10 @@ test('rename table', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table->public.table1`]); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, [`table->table1`]); + expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `table` RENAME TO `table1`;', - ); + ]); }); test('rename column', async () => { @@ -613,11 +451,10 @@ test('rename column', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table.json->public.table.json1`]); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, [`table.json->table.json1`]); + expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `table` CHANGE `json` `json1`;', - ); + ]); }); test('change data type', async () => { @@ -635,23 +472,16 @@ test('change data type', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`__new_table\` ( \t\`id\` int, \t\`age\` int );\n`, - ); - expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', - ); - expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', - ); - expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', - ); + ]); }); test('drop not null', async () => { @@ -669,23 +499,16 @@ test('drop not null', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`__new_table\` ( \t\`id\` int, \t\`age\` int );\n`, - ); - expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', - ); - expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', - ); - expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', - ); + ]); }); test('set not null', async () => { @@ -703,23 +526,16 @@ test('set not null', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`__new_table\` ( \t\`id\` int NOT NULL, \t\`age\` int );\n`, - ); - expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', - ); - expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', - ); - expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', - ); + ]); }); test('set default with not null column', async () => { @@ -737,23 +553,16 @@ test('set default with not null column', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`__new_table\` ( \t\`id\` int NOT NULL DEFAULT 1, \t\`age\` int );\n`, - ); - expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', - ); - expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', - ); - expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', - ); + ]); }); test('drop default with not null column', async () => { @@ -771,23 +580,16 @@ test('drop default with not null column', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`__new_table\` ( \t\`id\` int NOT NULL, \t\`age\` int );\n`, - ); - expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', - ); - expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', - ); - expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', - ); + ]); }); test('set default', async () => { @@ -805,11 +607,10 @@ test('set default', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `table` MODIFY COLUMN `id` int DEFAULT 1;', - ); + ]); }); test('drop default', async () => { @@ -827,11 +628,10 @@ test('drop default', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `table` MODIFY COLUMN `id` int;', - ); + ]); }); test('set pk', async () => { @@ -849,24 +649,17 @@ test('set pk', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`__new_table\` ( \t\`id\` int NOT NULL, \t\`age\` int, \tCONSTRAINT \`table_id\` PRIMARY KEY(\`id\`) );\n`, - ); - expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', - ); - expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', - ); - expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', - ); + ]); }); test('drop pk', async () => { @@ -884,23 +677,16 @@ test('drop pk', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); - expect(sqlStatements.length).toBe(4); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, []); + expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`__new_table\` ( \t\`id\` int, \t\`age\` int );\n`, - ); - expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', - ); - expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', - ); - expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', - ); + ]); }); test('set not null + rename column on table with indexes', async () => { @@ -918,26 +704,17 @@ test('set not null + rename column on table with indexes', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table.id->public.table.id3`]); - expect(sqlStatements.length).toBe(5); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, [`table.id->table.id3`]); + expect(sqlStatements).toStrictEqual([ 'ALTER TABLE \`table\` CHANGE `id` `id3`;', - ); - expect(sqlStatements[1]).toBe( `CREATE TABLE \`__new_table\` ( -\t\`id3\` int NOT NULL DEFAULT 1, -\t\`age\` int -);\n`, - ); - expect(sqlStatements[2]).toBe( + \t\`id3\` int NOT NULL DEFAULT 1, + \t\`age\` int + );\n`, 'INSERT INTO `__new_table`(`id3`, `age`) SELECT `id3`, `age` FROM `table`;', - ); - expect(sqlStatements[3]).toBe( 'DROP TABLE `table`;', - ); - expect(sqlStatements[4]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', - ); + ]); }); test('set not null + rename table on table with indexes', async () => { @@ -955,24 +732,15 @@ test('set not null + rename table on table with indexes', async () => { }), }; - const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table->public.table1`]); - expect(sqlStatements.length).toBe(5); - expect(sqlStatements[0]).toBe( + const { sqlStatements } = await diff(from, to, [`table->table1`]); + expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `table` RENAME TO `table1`;', - ); - expect(sqlStatements[1]).toBe( `CREATE TABLE \`__new_table1\` ( \t\`id\` int NOT NULL DEFAULT 1, \t\`age\` int );\n`, - ); - expect(sqlStatements[2]).toBe( 'INSERT INTO `__new_table1`(\`id\`, \`age\`) SELECT \`id\`, \`age\` FROM `table1`;', - ); - expect(sqlStatements[3]).toBe( 'DROP TABLE `table1`;', - ); - expect(sqlStatements[4]).toBe( 'ALTER TABLE `__new_table1` RENAME TO `table1`;', - ); + ]); }); diff --git a/drizzle-kit/tests/sqlite-checks.test.ts b/drizzle-kit/tests/sqlite-checks.test.ts deleted file mode 100644 index d1824e441b..0000000000 --- a/drizzle-kit/tests/sqlite-checks.test.ts +++ /dev/null @@ -1,308 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { check, int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasSqlite } from './schemaDiffer'; - -test('create table with check', async (t) => { - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'integer', - notNull: true, - primaryKey: true, - autoincrement: false, - }, - { - name: 'age', - type: 'integer', - notNull: false, - primaryKey: false, - autoincrement: false, - }, - ], - compositePKs: [], - checkConstraints: ['some_check_name;"users"."age" > 21'], - referenceData: [], - uniqueConstraints: [], - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer, -\tCONSTRAINT "some_check_name" CHECK("users"."age" > 21) -);\n`); -}); - -test('add check contraint to existing table', async (t) => { - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const from = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }), - }; - - const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: ['some_check_name;"users"."age" > 21'], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer, -\tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 21) -);\n`); - expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('drop check contraint to existing table', async (t) => { - const from = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }), - }; - - const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer -);\n`); - expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('rename check constraint', async (t) => { - const from = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('new_some_check_name', sql`${table.age} > 21`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [`new_some_check_name;"users"."age" > 21`], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer, -\tCONSTRAINT "new_some_check_name" CHECK("__new_users"."age" > 21) -);\n`); - expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('rename check constraint', async (t) => { - const from = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 21`), - })), - }; - - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - }, (table) => ({ - checkConstraint: check('some_check_name', sql`${table.age} > 10`), - })), - }; - - const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [`some_check_name;"users"."age" > 10`], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`age\` integer, -\tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 10) -);\n`); - expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('create checks with same names', async (t) => { - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey(), - age: int('age'), - name: text('name'), - }, (table) => ({ - checkConstraint1: check('some_check_name', sql`${table.age} > 21`), - checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), - })), - }; - - await expect(diffTestSchemasSqlite({}, to, [])).rejects.toThrowError(); -}); diff --git a/drizzle-kit/tests/sqlite-columns.test.ts b/drizzle-kit/tests/sqlite-columns.test.ts deleted file mode 100644 index 0cb34c220c..0000000000 --- a/drizzle-kit/tests/sqlite-columns.test.ts +++ /dev/null @@ -1,1049 +0,0 @@ -import { - AnySQLiteColumn, - foreignKey, - index, - int, - integer, - primaryKey, - sqliteTable, - text, -} from 'drizzle-orm/sqlite-core'; -import { JsonCreateIndexStatement, JsonRecreateTableStatement } from 'src/jsonStatements'; -import { expect, test } from 'vitest'; -import { diffTestSchemasSqlite } from './schemaDiffer'; - -test('create table with id', async (t) => { - const schema = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const { statements } = await diffTestSchemasSqlite({}, schema, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'integer', - primaryKey: true, - notNull: true, - autoincrement: true, - }, - ], - uniqueConstraints: [], - referenceData: [], - compositePKs: [], - checkConstraints: [], - }); -}); - -test('add columns #1', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name').notNull(), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }); -}); - -test('add columns #2', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - email: text('email'), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, // TODO: add column has autoincrement??? - }, - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'email', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }); -}); - -test('add columns #3', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name1: text('name1').default('name'), - name2: text('name2').notNull(), - name3: text('name3').default('name').notNull(), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(3); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'name1', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, // TODO: add column has autoincrement??? - default: "'name'", - }, - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'name2', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, // TODO: add column has autoincrement??? - }, - }); - expect(statements[2]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'name3', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, // TODO: add column has autoincrement??? - default: "'name'", - }, - }); -}); - -test('add columns #4', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name', { enum: ['one', 'two'] }), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }); -}); - -test('add columns #5', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), - }); - - const schema2 = { - users, - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - // TODO: Fix here - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: 'users_report_to_users_id_fk;users;report_to;users;id;no action;no action', - column: { - name: 'report_to', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }); -}); - -test('add columns #6', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name'), - email: text('email').unique().notNull(), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: integer('id').primaryKey({ autoIncrement: true }), - name: text('name'), - email: text('email').unique().notNull(), - password: text('password').notNull(), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'password', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }); -}); - -test('add index #1', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), - }), - }; - - const users = sqliteTable( - 'users', - { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), - }, - (t) => { - return { - reporteeIdx: index('reportee_idx').on(t.reporteeId), - }; - }, - ); - - const schema2 = { - users, - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'create_index', - tableName: 'users', - internal: { - indexes: {}, - }, - schema: '', - data: 'reportee_idx;report_to;false;', - }); -}); - -test('add foreign key #1', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('report_to'), - }), - }; - - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), - }); - - const schema2 = { - users, - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual( - { - type: 'recreate_table', - columns: [{ - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, { - autoincrement: false, - generated: undefined, - name: 'report_to', - notNull: false, - primaryKey: false, - type: 'integer', - }], - compositePKs: [], - referenceData: [{ - columnsFrom: ['report_to'], - columnsTo: ['id'], - name: 'users_report_to_users_id_fk', - tableFrom: 'users', - tableTo: 'users', - onDelete: 'no action', - onUpdate: 'no action', - }], - tableName: 'users', - uniqueConstraints: [], - checkConstraints: [], - } as JsonRecreateTableStatement, - ); -}); - -test('add foreign key #2', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('report_to'), - }), - }; - - const schema2 = { - users: sqliteTable( - 'users', - { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('report_to'), - }, - (t) => { - return { - reporteeFk: foreignKey({ - columns: [t.reporteeId], - foreignColumns: [t.id], - name: 'reportee_fk', - }), - }; - }, - ), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: true, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, { - autoincrement: false, - generated: undefined, - name: 'report_to', - notNull: false, - primaryKey: false, - type: 'integer', - }], - compositePKs: [], - referenceData: [{ - columnsFrom: ['report_to'], - columnsTo: ['id'], - name: 'reportee_fk', - tableFrom: 'users', - tableTo: 'users', - onDelete: 'no action', - onUpdate: 'no action', - }], - tableName: 'users', - uniqueConstraints: [], - checkConstraints: [], - } as JsonRecreateTableStatement); -}); - -test('alter column change name #1', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name1'), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ - 'public.users.name->public.users.name1', - ]); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_rename_column', - tableName: 'users', - schema: '', - oldColumnName: 'name', - newColumnName: 'name1', - }); -}); - -test('alter column change name #2', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name1'), - email: text('email'), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ - 'public.users.name->public.users.name1', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_rename_column', - tableName: 'users', - schema: '', - oldColumnName: 'name', - newColumnName: 'name1', - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'email', - notNull: false, - primaryKey: false, - type: 'text', - autoincrement: false, - }, - }); -}); - -test('alter column change name #3', async (t) => { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - email: text('email'), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name1'), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ - 'public.users.name->public.users.name1', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'alter_table_rename_column', - tableName: 'users', - schema: '', - oldColumnName: 'name', - newColumnName: 'name1', - }); - - expect(statements[1]).toStrictEqual({ - type: 'alter_table_drop_column', - tableName: 'users', - schema: '', - columnName: 'email', - }); -}); - -test('alter table add composite pk', async (t) => { - const schema1 = { - table: sqliteTable('table', { - id1: integer('id1'), - id2: integer('id2'), - }), - }; - - const schema2 = { - table: sqliteTable( - 'table', - { - id1: integer('id1'), - id2: integer('id2'), - }, - (t) => { - return { - pk: primaryKey({ columns: [t.id1, t.id2] }), - }; - }, - ), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: false, - generated: undefined, - name: 'id1', - notNull: false, - primaryKey: false, - type: 'integer', - }, { - autoincrement: false, - generated: undefined, - name: 'id2', - notNull: false, - primaryKey: false, - type: 'integer', - }], - compositePKs: [['id1', 'id2']], - referenceData: [], - tableName: 'table', - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('alter column drop not null', async (t) => { - const from = { - users: sqliteTable('table', { - name: text('name').notNull(), - }), - }; - - const to = { - users: sqliteTable('table', { - name: text('name'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - from, - to, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }], - compositePKs: [], - referenceData: [], - tableName: 'table', - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('alter column add not null', async (t) => { - const from = { - users: sqliteTable('table', { - name: text('name'), - }), - }; - - const to = { - users: sqliteTable('table', { - name: text('name').notNull(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - from, - to, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: false, - generated: undefined, - name: 'name', - notNull: true, - primaryKey: false, - type: 'text', - }], - compositePKs: [], - referenceData: [], - tableName: 'table', - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('alter column add default', async (t) => { - const from = { - users: sqliteTable('table', { - name: text('name'), - }), - }; - - const to = { - users: sqliteTable('table', { - name: text('name').default('dan'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - from, - to, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - default: "'dan'", - }], - compositePKs: [], - referenceData: [], - tableName: 'table', - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('alter column drop default', async (t) => { - const from = { - users: sqliteTable('table', { - name: text('name').default('dan'), - }), - }; - - const to = { - users: sqliteTable('table', { - name: text('name'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - from, - to, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }], - compositePKs: [], - referenceData: [], - tableName: 'table', - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('alter column add default not null', async (t) => { - const from = { - users: sqliteTable('table', { - name: text('name'), - }), - }; - - const to = { - users: sqliteTable('table', { - name: text('name').notNull().default('dan'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - from, - to, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: false, - generated: undefined, - name: 'name', - notNull: true, - primaryKey: false, - type: 'text', - default: "'dan'", - }], - compositePKs: [], - referenceData: [], - tableName: 'table', - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('alter column add default not null with indexes', async (t) => { - const from = { - users: sqliteTable('table', { - name: text('name'), - }, (table) => ({ - someIndex: index('index_name').on(table.name), - })), - }; - - const to = { - users: sqliteTable('table', { - name: text('name').notNull().default('dan'), - }, (table) => ({ - someIndex: index('index_name').on(table.name), - })), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - from, - to, - [], - ); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: false, - generated: undefined, - name: 'name', - notNull: true, - primaryKey: false, - type: 'text', - default: "'dan'", - }], - compositePKs: [], - referenceData: [], - tableName: 'table', - uniqueConstraints: [], - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - data: 'index_name;name;false;', - schema: '', - tableName: 'table', - type: 'create_index', - internal: undefined, - }); - expect(sqlStatements.length).toBe(7); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_table\` ( -\t\`name\` text DEFAULT 'dan' NOT NULL -);\n`); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_table\`("name") SELECT "name" FROM \`table\`;`, - ); - expect(sqlStatements[3]).toBe(`DROP TABLE \`table\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_table\` RENAME TO \`table\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); - expect(sqlStatements[6]).toBe(`CREATE INDEX \`index_name\` ON \`table\` (\`name\`);`); -}); - -test('alter column drop default not null', async (t) => { - const from = { - users: sqliteTable('table', { - name: text('name').notNull().default('dan'), - }), - }; - - const to = { - users: sqliteTable('table', { - name: text('name'), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - from, - to, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'recreate_table', - columns: [{ - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }], - compositePKs: [], - referenceData: [], - tableName: 'table', - uniqueConstraints: [], - checkConstraints: [], - }); - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_table\` ( -\t\`name\` text -);\n`); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_table\`("name") SELECT "name" FROM \`table\`;`, - ); - expect(sqlStatements[3]).toBe(`DROP TABLE \`table\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_table\` RENAME TO \`table\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('alter column drop generated', async (t) => { - const from = { - users: sqliteTable('table', { - id: int('id').primaryKey().notNull(), - name: text('name').generatedAlwaysAs('drizzle is the best').notNull(), - }), - }; - - const to = { - users: sqliteTable('table', { - id: int('id').primaryKey().notNull(), - name: text('name').notNull(), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - from, - to, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'table', - type: 'alter_table_alter_column_drop_generated', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`ALTER TABLE \`table\` DROP COLUMN \`name\`;`); - expect(sqlStatements[1]).toBe(`ALTER TABLE \`table\` ADD \`name\` text NOT NULL;`); -}); - -test('recreate table with nested references', async (t) => { - let users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - age: integer('age'), - }); - let subscriptions = sqliteTable('subscriptions', { - id: int('id').primaryKey({ autoIncrement: true }), - userId: integer('user_id').references(() => users.id), - customerId: text('customer_id'), - }); - const schema1 = { - users: users, - subscriptions: subscriptions, - subscriptionMetadata: sqliteTable('subscriptions_metadata', { - id: int('id').primaryKey({ autoIncrement: true }), - subscriptionId: text('subscription_id').references(() => subscriptions.id), - }), - }; - - users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: false }), - name: text('name'), - age: integer('age'), - }); - const schema2 = { - users: users, - subscriptions: subscriptions, - subscriptionMetadata: sqliteTable('subscriptions_metadata', { - id: int('id').primaryKey({ autoIncrement: true }), - subscriptionId: text('subscription_id').references(() => subscriptions.id), - }), - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite( - schema1, - schema2, - [], - ); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - columns: [ - { - autoincrement: false, - generated: undefined, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - generated: undefined, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: undefined, - name: 'age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }); - - expect(sqlStatements.length).toBe(6); - expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); - expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( -\t\`id\` integer PRIMARY KEY NOT NULL, -\t\`name\` text, -\t\`age\` integer -);\n`); - expect(sqlStatements[2]).toBe( - `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`, - ); - expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); - expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); - expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); -}); - -test('text default values escape single quotes', async (t) => { - const schema1 = { - table: sqliteTable('table', { - id: integer('id').primaryKey(), - }), - }; - - const schem2 = { - table: sqliteTable('table', { - id: integer('id').primaryKey(), - text: text('text').default("escape's quotes"), - }), - }; - - const { sqlStatements } = await diffTestSchemasSqlite(schema1, schem2, []); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toStrictEqual( - "ALTER TABLE `table` ADD `text` text DEFAULT 'escape''s quotes';", - ); -}); diff --git a/drizzle-kit/tests/sqlite-tables.test.ts b/drizzle-kit/tests/sqlite-tables.test.ts deleted file mode 100644 index 651c3633c4..0000000000 --- a/drizzle-kit/tests/sqlite-tables.test.ts +++ /dev/null @@ -1,637 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { - AnySQLiteColumn, - foreignKey, - index, - int, - primaryKey, - sqliteTable, - text, - unique, - uniqueIndex, -} from 'drizzle-orm/sqlite-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasSqlite } from './schemaDiffer'; - -test('add table #1', async () => { - const to = { - users: sqliteTable('users', {}), - }; - - const { statements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); -}); - -test('add table #2', async () => { - const to = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const { statements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [ - { - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - autoincrement: true, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }); -}); - -test('add table #3', async () => { - const to = { - users: sqliteTable( - 'users', - { - id: int('id'), - }, - (t) => { - return { - pk: primaryKey({ - name: 'users_pk', - columns: [t.id], - }), - }; - }, - ), - }; - - const { statements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [ - { - name: 'id', - notNull: false, - primaryKey: true, - type: 'integer', - autoincrement: false, - }, - ], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); -}); - -test('add table #4', async () => { - const to = { - users: sqliteTable('users', {}), - posts: sqliteTable('posts', {}), - }; - - const { statements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'posts', - columns: [], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); -}); - -test('add table #5', async () => { - // no schemas in sqlite -}); - -test('add table #6', async () => { - const from = { - users1: sqliteTable('users1', {}), - }; - - const to = { - users2: sqliteTable('users2', {}), - }; - - const { statements } = await diffTestSchemasSqlite(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users2', - columns: [], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'drop_table', - tableName: 'users1', - schema: undefined, - policies: [], - }); -}); - -test('add table #7', async () => { - const from = { - users1: sqliteTable('users1', {}), - }; - - const to = { - users: sqliteTable('users', {}), - users2: sqliteTable('users2', {}), - }; - - const { statements } = await diffTestSchemasSqlite(from, to, [ - 'public.users1->public.users2', - ]); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'rename_table', - tableNameFrom: 'users1', - tableNameTo: 'users2', - fromSchema: undefined, - toSchema: undefined, - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); -}); - -test('add table #8', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('reportee_id').references((): AnySQLiteColumn => users.id), - }); - - const to = { - users, - }; - - const { statements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [ - { - autoincrement: true, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'reportee_id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - uniqueConstraints: [], - checkConstraints: [], - referenceData: [ - { - columnsFrom: ['reportee_id'], - columnsTo: ['id'], - name: 'users_reportee_id_users_id_fk', - onDelete: 'no action', - onUpdate: 'no action', - tableFrom: 'users', - tableTo: 'users', - }, - ], - }); -}); - -test('add table #9', async () => { - const to = { - users: sqliteTable( - 'users', - { - id: int('id').primaryKey({ autoIncrement: true }), - reporteeId: int('reportee_id'), - }, - (t) => { - return { - reporteeIdx: index('reportee_idx').on(t.reporteeId), - }; - }, - ), - }; - - const { statements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [ - { - autoincrement: true, - name: 'id', - notNull: true, - primaryKey: true, - type: 'integer', - }, - { - autoincrement: false, - name: 'reportee_id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); - - expect(statements[1]).toStrictEqual({ - type: 'create_index', - tableName: 'users', - internal: { - indexes: {}, - }, - schema: undefined, - data: 'reportee_idx;reportee_id;false;', - }); -}); - -test('add table #10', async () => { - const to = { - users: sqliteTable('table', { - json: text('json', { mode: 'json' }).default({}), - }), - }; - - const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - "CREATE TABLE `table` (\n\t`json` text DEFAULT '{}'\n);\n", - ); -}); - -test('add table #11', async () => { - const to = { - users: sqliteTable('table', { - json: text('json', { mode: 'json' }).default([]), - }), - }; - - const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - "CREATE TABLE `table` (\n\t`json` text DEFAULT '[]'\n);\n", - ); -}); - -test('add table #12', async () => { - const to = { - users: sqliteTable('table', { - json: text('json', { mode: 'json' }).default([1, 2, 3]), - }), - }; - - const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - "CREATE TABLE `table` (\n\t`json` text DEFAULT '[1,2,3]'\n);\n", - ); -}); - -test('add table #13', async () => { - const to = { - users: sqliteTable('table', { - json: text('json', { mode: 'json' }).default({ key: 'value' }), - }), - }; - - const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - 'CREATE TABLE `table` (\n\t`json` text DEFAULT \'{"key":"value"}\'\n);\n', - ); -}); - -test('add table #14', async () => { - const to = { - users: sqliteTable('table', { - json: text('json', { mode: 'json' }).default({ - key: 'value', - arr: [1, 2, 3], - }), - }), - }; - - const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - 'CREATE TABLE `table` (\n\t`json` text DEFAULT \'{"key":"value","arr":[1,2,3]}\'\n);\n', - ); -}); - -test('add table with indexes', async () => { - const from = {}; - - const to = { - users: sqliteTable( - 'users', - { - id: int('id').primaryKey(), - name: text('name'), - email: text('email'), - }, - (t) => ({ - uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), - indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), - indexExprMultiple: index('indexExprMultiple').on( - sql`(lower(${t.email}))`, - sql`(lower(${t.email}))`, - ), - - uniqueCol: uniqueIndex('uniqueCol').on(t.email), - indexCol: index('indexCol').on(t.email), - indexColMultiple: index('indexColMultiple').on(t.email, t.email), - - indexColExpr: index('indexColExpr').on( - sql`(lower(${t.email}))`, - t.email, - ), - }), - ), - }; - - const { sqlStatements } = await diffTestSchemasSqlite(from, to, []); - expect(sqlStatements.length).toBe(8); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `users` (\n\t`id` integer PRIMARY KEY NOT NULL,\n\t`name` text,\n\t`email` text\n);\n', - 'CREATE UNIQUE INDEX `uniqueExpr` ON `users` ((lower("email")));', - 'CREATE INDEX `indexExpr` ON `users` ((lower("email")));', - 'CREATE INDEX `indexExprMultiple` ON `users` ((lower("email")),(lower("email")));', - 'CREATE UNIQUE INDEX `uniqueCol` ON `users` (`email`);', - 'CREATE INDEX `indexCol` ON `users` (`email`);', - 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', - 'CREATE INDEX `indexColExpr` ON `users` ((lower("email")),`email`);', - ]); -}); - -test('composite primary key', async () => { - const from = {}; - const to = { - table: sqliteTable('works_to_creators', { - workId: int('work_id').notNull(), - creatorId: int('creator_id').notNull(), - classification: text('classification').notNull(), - }, (t) => ({ - pk: primaryKey({ - columns: [t.workId, t.creatorId, t.classification], - }), - })), - }; - - const { sqlStatements } = await diffTestSchemasSqlite(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `works_to_creators` (\n\t`work_id` integer NOT NULL,\n\t`creator_id` integer NOT NULL,\n\t`classification` text NOT NULL,\n\tPRIMARY KEY(`work_id`, `creator_id`, `classification`)\n);\n', - ]); -}); - -test('add column before creating unique constraint', async () => { - const from = { - table: sqliteTable('table', { - id: int('id').primaryKey(), - }), - }; - const to = { - table: sqliteTable('table', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }, (t) => ({ - uq: unique('uq').on(t.name), - })), - }; - - const { sqlStatements } = await diffTestSchemasSqlite(from, to, []); - - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `table` ADD `name` text NOT NULL;', - 'CREATE UNIQUE INDEX `uq` ON `table` (`name`);', - ]); -}); - -test('optional db aliases (snake case)', async () => { - const from = {}; - - const t1 = sqliteTable( - 't1', - { - t1Id1: int().notNull().primaryKey(), - t1Col2: int().notNull(), - t1Col3: int().notNull(), - t2Ref: int().notNull().references(() => t2.t2Id), - t1Uni: int().notNull(), - t1UniIdx: int().notNull(), - t1Idx: int().notNull(), - }, - (table) => ({ - uni: unique('t1_uni').on(table.t1Uni), - uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx), - idx: index('t1_idx').on(table.t1Idx), - fk: foreignKey({ - columns: [table.t1Col2, table.t1Col3], - foreignColumns: [t3.t3Id1, t3.t3Id2], - }), - }), - ); - - const t2 = sqliteTable( - 't2', - { - t2Id: int().primaryKey({ autoIncrement: true }), - }, - ); - - const t3 = sqliteTable( - 't3', - { - t3Id1: int(), - t3Id2: int(), - }, - (table) => ({ - pk: primaryKey({ - columns: [table.t3Id1, table.t3Id2], - }), - }), - ); - - const to = { - t1, - t2, - t3, - }; - - const { sqlStatements } = await diffTestSchemasSqlite(from, to, [], false, 'snake_case'); - - const st1 = `CREATE TABLE \`t1\` ( - \`t1_id1\` integer PRIMARY KEY NOT NULL, - \`t1_col2\` integer NOT NULL, - \`t1_col3\` integer NOT NULL, - \`t2_ref\` integer NOT NULL, - \`t1_uni\` integer NOT NULL, - \`t1_uni_idx\` integer NOT NULL, - \`t1_idx\` integer NOT NULL, - FOREIGN KEY (\`t2_ref\`) REFERENCES \`t2\`(\`t2_id\`) ON UPDATE no action ON DELETE no action, - FOREIGN KEY (\`t1_col2\`,\`t1_col3\`) REFERENCES \`t3\`(\`t3_id1\`,\`t3_id2\`) ON UPDATE no action ON DELETE no action -); -`; - - const st2 = `CREATE UNIQUE INDEX \`t1_uni_idx\` ON \`t1\` (\`t1_uni_idx\`);`; - - const st3 = `CREATE INDEX \`t1_idx\` ON \`t1\` (\`t1_idx\`);`; - - const st4 = `CREATE UNIQUE INDEX \`t1_uni\` ON \`t1\` (\`t1_uni\`);`; - - const st5 = `CREATE TABLE \`t2\` ( - \`t2_id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL -); -`; - - const st6 = `CREATE TABLE \`t3\` ( - \`t3_id1\` integer, - \`t3_id2\` integer, - PRIMARY KEY(\`t3_id1\`, \`t3_id2\`) -); -`; - - expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]); -}); - -test('optional db aliases (camel case)', async () => { - const from = {}; - - const t1 = sqliteTable( - 't1', - { - t1_id1: int().notNull().primaryKey(), - t1_col2: int().notNull(), - t1_col3: int().notNull(), - t2_ref: int().notNull().references(() => t2.t2_id), - t1_uni: int().notNull(), - t1_uni_idx: int().notNull(), - t1_idx: int().notNull(), - }, - (table) => ({ - uni: unique('t1Uni').on(table.t1_uni), - uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx), - idx: index('t1Idx').on(table.t1_idx), - fk: foreignKey({ - columns: [table.t1_col2, table.t1_col3], - foreignColumns: [t3.t3_id1, t3.t3_id2], - }), - }), - ); - - const t2 = sqliteTable( - 't2', - { - t2_id: int().primaryKey({ autoIncrement: true }), - }, - ); - - const t3 = sqliteTable( - 't3', - { - t3_id1: int(), - t3_id2: int(), - }, - (table) => ({ - pk: primaryKey({ - columns: [table.t3_id1, table.t3_id2], - }), - }), - ); - - const to = { - t1, - t2, - t3, - }; - - const { sqlStatements } = await diffTestSchemasSqlite(from, to, [], false, 'camelCase'); - - const st1 = `CREATE TABLE \`t1\` ( - \`t1Id1\` integer PRIMARY KEY NOT NULL, - \`t1Col2\` integer NOT NULL, - \`t1Col3\` integer NOT NULL, - \`t2Ref\` integer NOT NULL, - \`t1Uni\` integer NOT NULL, - \`t1UniIdx\` integer NOT NULL, - \`t1Idx\` integer NOT NULL, - FOREIGN KEY (\`t2Ref\`) REFERENCES \`t2\`(\`t2Id\`) ON UPDATE no action ON DELETE no action, - FOREIGN KEY (\`t1Col2\`,\`t1Col3\`) REFERENCES \`t3\`(\`t3Id1\`,\`t3Id2\`) ON UPDATE no action ON DELETE no action -); -`; - - const st2 = `CREATE UNIQUE INDEX \`t1UniIdx\` ON \`t1\` (\`t1UniIdx\`);`; - - const st3 = `CREATE INDEX \`t1Idx\` ON \`t1\` (\`t1Idx\`);`; - - const st4 = `CREATE UNIQUE INDEX \`t1Uni\` ON \`t1\` (\`t1Uni\`);`; - - const st5 = `CREATE TABLE \`t2\` ( - \`t2Id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL -); -`; - - const st6 = `CREATE TABLE \`t3\` ( - \`t3Id1\` integer, - \`t3Id2\` integer, - PRIMARY KEY(\`t3Id1\`, \`t3Id2\`) -); -`; - - expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]); -}); diff --git a/drizzle-kit/tests/sqlite-views.test.ts b/drizzle-kit/tests/sqlite-views.test.ts deleted file mode 100644 index 8021ba37ef..0000000000 --- a/drizzle-kit/tests/sqlite-views.test.ts +++ /dev/null @@ -1,218 +0,0 @@ -import { sql } from 'drizzle-orm'; -import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasSqlite } from './schemaDiffer'; - -test('create view', async () => { - const users = sqliteTable('users', { id: int('id').default(1) }); - const view = sqliteView('view').as((qb) => qb.select().from(users)); - const to = { - users: users, - testView: view, - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_create_table', - tableName: 'users', - columns: [{ - autoincrement: false, - default: 1, - name: 'id', - type: 'integer', - primaryKey: false, - notNull: false, - }], - compositePKs: [], - uniqueConstraints: [], - referenceData: [], - checkConstraints: [], - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_create_view', - name: 'view', - definition: 'select "id" from "users"', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( -\t\`id\` integer DEFAULT 1 -);\n`); - expect(sqlStatements[1]).toBe(`CREATE VIEW \`view\` AS select "id" from "users";`); -}); - -test('drop view', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), - }; - const to = { - users, - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - name: 'view', - type: 'drop_view', - }); - - expect(sqlStatements.length).toBe(1); - expect(sqlStatements[0]).toBe( - `DROP VIEW \`view\`;`, - ); -}); - -test('alter view', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), - }; - const to = { - users, - testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users WHERE users.id = 1`), - }; - const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - name: 'view', - type: 'drop_view', - }); - expect(statements[1]).toStrictEqual({ - name: 'view', - type: 'sqlite_create_view', - definition: 'SELECT * FROM users WHERE users.id = 1', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe( - `DROP VIEW \`view\`;`, - ); - expect(sqlStatements[1]).toBe( - `CREATE VIEW \`view\` AS SELECT * FROM users WHERE users.id = 1;`, - ); -}); - -test('create view with existing flag', async () => { - const view = sqliteView('view', {}).existing(); - const to = { - testView: view, - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite({}, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('drop view with existing flag', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).existing(), - }; - const to = { - users, - }; - - const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('rename view with existing flag', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).existing(), - }; - const to = { - users, - testView: sqliteView('new_view', { id: int('id') }).existing(), - }; - const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); - - expect(statements.length).toBe(0); - expect(sqlStatements.length).toBe(0); -}); - -test('rename view and drop existing flag', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).existing(), - }; - const to = { - users, - testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users`), - }; - const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - name: 'view', - type: 'drop_view', - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_create_view', - name: 'new_view', - definition: 'SELECT * FROM users', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe(`DROP VIEW \`view\`;`); - expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users;`); -}); - -test('rename view and alter ".as"', async () => { - const users = sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }); - - const from = { - users: users, - testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), - }; - const to = { - users, - testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users WHERE 1=1`), - }; - const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); - - expect(statements.length).toBe(2); - expect(statements[0]).toStrictEqual({ - name: 'view', - type: 'drop_view', - }); - expect(statements[1]).toStrictEqual({ - type: 'sqlite_create_view', - name: 'new_view', - definition: 'SELECT * FROM users WHERE 1=1', - }); - - expect(sqlStatements.length).toBe(2); - expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); - expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users WHERE 1=1;`); -}); diff --git a/drizzle-kit/tests/sqlite/grammar.test.ts b/drizzle-kit/tests/sqlite/grammar.test.ts new file mode 100644 index 0000000000..7fa1566a98 --- /dev/null +++ b/drizzle-kit/tests/sqlite/grammar.test.ts @@ -0,0 +1,163 @@ +import { parseSqliteDdl, parseViewSQL } from 'src/dialects/sqlite/grammar'; +import { afterAll, beforeAll, beforeEach, describe, expect, test } from 'vitest'; +import { prepareTestDatabase, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(() => { + _ = prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('view definition', () => { + parseViewSQL('CREATE VIEW current_cycle AS\nSELECT\n* from users;'); +}); + +describe('parse ddl', (t) => { + test('all uniques', async () => { + const ddl = 'CREATE TABLE \`users\` (' + '\n' + + '\`column\` text,' + '\n' + + '\`column1\` text,' + '\n' + + '\`column2\` text,' + '\n' + + '\`column3\` text,' + '\n' + + '\`column4\` text UNIQUE,' + '\n' + + '\`column5\` text CONSTRAINT [hey] UNIQUE,' + '\n' + + '\`column6\` text,' + '\n' + + 'CONSTRAINT [unique_name] UNIQUE(\`column\`),' + '\n' + + 'CONSTRAINT unique_name1 UNIQUE(\`column1\`),' + '\n' + + 'CONSTRAINT "unique_name2" UNIQUE(\`column2\`),' + '\n' + + 'CONSTRAINT \`unique_name3\` UNIQUE(\`column3\`)' + '\n' + + ')'; + + await db.run(ddl); + + expect(parseSqliteDdl(ddl)).toStrictEqual({ + uniques: [ + { name: 'unique_name', columns: ['column'] }, + { name: 'unique_name1', columns: ['column1'] }, + { name: 'unique_name2', columns: ['column2'] }, + { name: 'unique_name3', columns: ['column3'] }, + { name: null, columns: ['column4'] }, + { name: 'hey', columns: ['column5'] }, + ], + pk: { name: null, columns: [] }, + }); + }); + + test('corner case uniques', async () => { + const ddl = 'CREATE TABLE \`users\` (' + '\n' + + '\`column\` text,' + '\n' + + '\`column1\` text,' + '\n' + + '\`column2\` text,' + '\n' + + '\`column3\` text,' + + '\`column4\` \ntext UNIQUE,' + '\n' + + '\`column5\` text \nCONSTRAINT [hey] \tUNIQUE\n\t,' + '\n' + + '\`column6\` text \nCONSTRAINT "hey" \tUNIQUE\n\t,' + '\n' + + '\`column7\` text \nCONSTRAINT \`hey\` \tUNIQUE\n\t,' + '\n' + + '\`column8\` text \nCONSTRAINT hey \tUNIQUE\n\t,' + '\n' + + '\`column9\` text,' + '\n' + + 'CONSTRAINT\n\t [unique_name] UNIQUE\n(\`column\`),' + + 'CONSTRAINT unique_name1 UNIQUE(\`column1\`),' + '\n' + + 'CONSTRAINT "unique_name2"\n UNIQUE(\`column2\`),' + '\n' + + 'CONSTRAINT \`unique_name3\` UNIQUE(\`column3\`)' + '\n' + + ')'; + + await db.run(ddl); + + expect(parseSqliteDdl(ddl)).toStrictEqual({ + uniques: [ + { name: 'unique_name', columns: ['column'] }, + { name: 'unique_name1', columns: ['column1'] }, + { name: 'unique_name2', columns: ['column2'] }, + { name: 'unique_name3', columns: ['column3'] }, + { name: null, columns: ['column4'] }, + { name: 'hey', columns: ['column5'] }, + { name: 'hey', columns: ['column6'] }, + { name: 'hey', columns: ['column7'] }, + { name: 'hey', columns: ['column8'] }, + ], + pk: { name: null, columns: [] }, + }); + }); + + test('pk #1', () => { + const ddl = 'CREATE TABLE \`users\` (' + '\n' + + '\`column\` text' + '\n' + + 'CONSTRAINT [pk] PRIMARY KEY(\`column\`)' + '\n' + + ')'; + + expect(parseSqliteDdl(ddl)).toStrictEqual({ + uniques: [], + pk: { name: 'pk', columns: ['column'] }, + }); + }); + test('pk #2', () => { + const ddl = 'CREATE TABLE \`users\` (' + '\n' + + '\`column\` text' + '\n' + + 'CONSTRAINT pk PRIMARY KEY(\`column\`)' + '\n' + + ')'; + + expect(parseSqliteDdl(ddl)).toStrictEqual({ + uniques: [], + pk: { name: 'pk', columns: ['column'] }, + }); + }); + test('pk #3', () => { + const ddl = 'CREATE TABLE \`users\` (' + '\n' + + '\`column\` text' + '\n' + + 'CONSTRAINT "pk" PRIMARY KEY(\`column\`)' + '\n' + + ')'; + + expect(parseSqliteDdl(ddl)).toStrictEqual({ + uniques: [], + pk: { name: 'pk', columns: ['column'] }, + }); + }); + test('pk #4', () => { + const ddl = 'CREATE TABLE \`users\` (' + '\n' + + '\`column\` text' + '\n' + + 'CONSTRAINT `pk` PRIMARY KEY(\`column\`)' + '\n' + + ')'; + + expect(parseSqliteDdl(ddl)).toStrictEqual({ + uniques: [], + pk: { name: 'pk', columns: ['column'] }, + }); + }); + test('pk #5', () => { + const ddl = 'CREATE TABLE \`users\` (' + '\n' + + '\`column\` text PRIMARY KEY' + + ')'; + + expect(parseSqliteDdl(ddl)).toStrictEqual({ + uniques: [], + pk: { + name: null, + columns: [`column`], + }, + }); + }); + test('pk #6', () => { + const ddl = 'CREATE TABLE \`users\` (' + '\n' + + '\`column\` text CONSTRAINT "pk" PRIMARY KEY' + + ')'; + + expect(parseSqliteDdl(ddl)).toStrictEqual({ + uniques: [], + pk: { + name: 'pk', + columns: [`column`], + }, + }); + }); +}); diff --git a/drizzle-kit/tests/sqlite/mocks.ts b/drizzle-kit/tests/sqlite/mocks.ts new file mode 100644 index 0000000000..66a6bf03f6 --- /dev/null +++ b/drizzle-kit/tests/sqlite/mocks.ts @@ -0,0 +1,344 @@ +import type { Database } from 'better-sqlite3'; +import BetterSqlite3 from 'better-sqlite3'; +import { is } from 'drizzle-orm'; +import { int, SQLiteColumnBuilder, SQLiteTable, sqliteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; +import { existsSync, mkdirSync, rmSync, writeFileSync } from 'fs'; +import { introspect } from 'src/cli/commands/pull-sqlite'; +import { suggestions } from 'src/cli/commands/push-sqlite'; +import { CasingType } from 'src/cli/validations/common'; +import { EmptyProgressView } from 'src/cli/views'; +import { hash } from 'src/dialects/common'; +import { createDDL, interimToDDL, SQLiteDDL } from 'src/dialects/sqlite/ddl'; +import { ddlDiff, ddlDiffDry } from 'src/dialects/sqlite/diff'; +import { defaultFromColumn, fromDrizzleSchema, prepareFromSchemaFiles } from 'src/dialects/sqlite/drizzle'; +import { fromDatabaseForDrizzle } from 'src/dialects/sqlite/introspect'; +import { ddlToTypeScript } from 'src/dialects/sqlite/typescript'; +import { SQLiteDB } from 'src/utils'; +import { mockResolver } from 'src/utils/mocks'; +import { tsc } from 'tests/utils'; +import 'zx/globals'; + +mkdirSync('tests/sqlite/tmp/', { recursive: true }); + +export type SqliteSchema = Record | SQLiteView>; + +export const drizzleToDDL = (schema: SqliteSchema, casing?: CasingType) => { + const tables = Object.values(schema).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; + const views = Object.values(schema).filter((it) => is(it, SQLiteView)) as SQLiteView[]; + + return interimToDDL(fromDrizzleSchema(tables, views, casing)); +}; + +export const diff = async ( + left: SqliteSchema | SQLiteDDL, + right: SqliteSchema | SQLiteDDL, + renamesArr: string[], + casing?: CasingType | undefined, +) => { + const { ddl: ddl1, errors: err1 } = 'entities' in left && '_' in left + ? { ddl: left as SQLiteDDL, errors: [] } + : drizzleToDDL(left, casing); + const { ddl: ddl2, errors: err2 } = 'entities' in right && '_' in right + ? { ddl: right as SQLiteDDL, errors: [] } + : drizzleToDDL(right, casing); + + if (err1.length > 0 || err2.length > 0) { + console.log('-----'); + console.log(err1.map((it) => it.type).join('\n')); + console.log('-----'); + console.log(err2.map((it) => it.type).join('\n')); + console.log('-----'); + } + + const renames = new Set(renamesArr); + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + 'default', + ); + return { sqlStatements, statements, err1, err2, next: ddl2 }; +}; + +export const dbFrom = (client: Database) => { + return { + query: async (sql: string, params: any[] = []) => { + return client.prepare(sql).bind(params).all() as T[]; + }, + run: async (query: string) => { + client.prepare(query).run(); + }, + }; +}; + +export const diffAfterPull = async ( + client: Database, + initSchema: SqliteSchema, + testName: string, + casing?: CasingType | undefined, +) => { + const db = dbFrom(client); + + const { ddl: initDDL, errors: e1 } = drizzleToDDL(initSchema, casing); + const { sqlStatements: inits } = await ddlDiffDry(createDDL(), initDDL, 'push'); + for (const st of inits) { + client.exec(st); + } + + const path = `tests/sqlite/tmp/${testName}.ts`; + + const schema = await fromDatabaseForDrizzle(db); + const { ddl: ddl2, errors: err1 } = interimToDDL(schema); + const file = ddlToTypeScript(ddl2, 'camel', schema.viewsToColumns, 'sqlite'); + + writeFileSync(path, file.file); + await tsc(file.file); + + const res = await prepareFromSchemaFiles([path]); + const { ddl: ddl1, errors: err2 } = interimToDDL(fromDrizzleSchema(res.tables, res.views, casing)); + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(new Set()), + mockResolver(new Set()), + 'push', + ); + + rmSync(path); + + return { sqlStatements, statements, initDDL, ddlAfterPull: ddl1, resultDdl: ddl2 }; +}; + +export const push = async (config: { + db: SQLiteDB; + to: SqliteSchema | SQLiteDDL; + renames?: string[]; + casing?: CasingType; + force?: boolean; + expectError?: boolean; + log?: 'statements'; +}) => { + const { db, to, expectError, force, log } = config; + const casing = config.casing ?? 'camelCase'; + + const { ddl: ddl1, errors: err1, viewColumns } = await introspect(db, () => true, new EmptyProgressView()); + const { ddl: ddl2, errors: err2 } = 'entities' in to && '_' in to + ? { ddl: to as SQLiteDDL, errors: [] } + : drizzleToDDL(to, casing); + + if (err2.length > 0) { + for (const e of err2) { + console.error(`err2: ${JSON.stringify(e)}`); + } + throw new Error(); + } + + if (err1.length > 0) { + for (const e of err1) { + console.error(`err3: ${JSON.stringify(e)}`); + } + throw new Error(); + } + + // TODO: handle errors + const renames = new Set(config.renames ?? []); + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + 'push', + ); + + const hints = await suggestions(db, statements); + + if (force) { + for (const st of hints) { + if (!st.statement) continue; + await db.run(st.statement); + } + } + + let error: Error | null = null; + for (const sql of sqlStatements) { + if (log === 'statements') console.log(sql); + try { + await db.run(sql); + } catch (e) { + if (!expectError) throw e; + error = e as Error; + break; + } + } + + // subsequent push + { + const { ddl: ddl1, errors, viewColumns } = await introspect(db, () => true, new EmptyProgressView()); + + const { sqlStatements, statements } = await ddlDiff( + ddl1, + ddl2, + mockResolver(renames), + mockResolver(renames), + 'push', + ); + if (sqlStatements.length > 0) { + console.error('---- subsequent push is not empty ----'); + console.log(sqlStatements.join('\n')); + throw new Error(); + } + } + + return { sqlStatements, statements, hints, error, next: ddl2 }; +}; + +export const diffDefault = async ( + kit: TestDatabase, + builder: T, + expectedDefault: string, + pre: SqliteSchema | null = null, +) => { + await kit.clear(); + + const config = (builder as any).config; + const def = config['default']; + const column = sqliteTable('table', { column: builder }).column; + const type = column.getSQLType(); + const columnDefault = defaultFromColumn(column, 'camelCase'); + const defaultSql = columnDefault ?? ''; + + const res = [] as string[]; + if (defaultSql !== expectedDefault) { + res.push(`Unexpected sql: \n${defaultSql}\n${expectedDefault}`); + } + + const init = { + ...pre, + table: sqliteTable('table', { column: builder }), + }; + + const { db, clear } = kit; + if (pre) await push({ db, to: pre }); + const { sqlStatements: st1 } = await push({ db, to: init }); + const { sqlStatements: st2 } = await push({ db, to: init }); + + const expectedInit = `CREATE TABLE \`table\` (\n\t\`column\` ${type} DEFAULT ${expectedDefault}\n);\n`; + if (st1.length !== 1 || st1[0] !== expectedInit) res.push(`Unexpected init:\n${st1}\n\n${expectedInit}`); + if (st2.length > 0) res.push(`Unexpected subsequent init:\n${st2.join('\n')}`); + + // introspect to schema + const schema = await fromDatabaseForDrizzle(db); + const { ddl: ddl1, errors: e1 } = interimToDDL(schema); + + const file = ddlToTypeScript(ddl1, 'camel', schema.viewsToColumns, 'sqlite'); + const path = `tests/sqlite/tmp/temp-${hash(String(Math.random()))}.ts`; + + if (existsSync(path)) rmSync(path); + writeFileSync(path, file.file); + await tsc(file.file); + + const response = await prepareFromSchemaFiles([path]); + const sch = fromDrizzleSchema(response.tables, response.views, 'camelCase'); + const { ddl: ddl2, errors: e3 } = interimToDDL(sch); + + const { sqlStatements: afterFileSqlStatements } = await ddlDiffDry(ddl1, ddl2, 'push'); + if (afterFileSqlStatements.length === 0) { + rmSync(path); + } else { + console.log(afterFileSqlStatements); + console.log(`./${path}`); + } + + await clear(); + + config.hasDefault = false; + config.default = undefined; + const schema1 = { + ...pre, + table: sqliteTable('table', { column: builder }), + }; + + config.hasDefault = true; + config.default = def; + const schema2 = { + ...pre, + table: sqliteTable('table', { column: builder }), + }; + + if (pre) await push({ db, to: pre }); + await push({ db, to: schema1 }); + const { sqlStatements: st3 } = await push({ db, to: schema2 }); + + const expectedAlter = + `CREATE TABLE \`__new_table\` (\n\t\`column\` ${column.getSQLType()} DEFAULT ${expectedDefault}\n);\n`; + if (st3.length !== 6 || st3[1] !== expectedAlter) { + res.push(`Unexpected default alter:\n${st3.join('\n')}\n\n${expectedAlter}`); + } + + await clear(); + + const schema3 = { + ...pre, + table: sqliteTable('table', { id: int() }), + }; + + const schema4 = { + ...pre, + table: sqliteTable('table', { id: int(), column: builder }), + }; + + if (pre) await push({ db, to: pre }); + await push({ db, to: schema3 }); + const { sqlStatements: st4 } = await push({ db, to: schema4 }); + + const expectedAddColumn = `ALTER TABLE \`table\` ADD \`column\` ${type} DEFAULT ${expectedDefault};`; + if (st4.length !== 1 || st4[0] !== expectedAddColumn) { + res.push(`Unexpected add column:\n${st4[0]}\n\n${expectedAddColumn}`); + } + + return res; +}; + +export type TestDatabase = { + db: SQLiteDB; + close: () => Promise; + clear: () => Promise; +}; + +export const prepareTestDatabase = () => { + let client = new BetterSqlite3(':memory:'); + + const db = { + query: async (sql: string, params?: any[]) => { + try { + const stmt = client.prepare(sql); + const res = stmt.all(...(params ?? [])) as any; + return res; + } catch (error) { + const newError = new Error(`query error: ${sql}\n\n${(error as Error).message}`); + throw newError; + } + }, + run: async (sql: string) => { + try { + const stmt = client.prepare(sql); + stmt.run(); + return; + } catch (error) { + const newError = new Error(`query error: ${sql}\n\n${(error as Error).message}`); + throw newError; + } + }, + }; + const close = async () => { + client.close(); + }; + const clear = async () => { + client.close(); + client = new BetterSqlite3(':memory:'); + }; + return { db, close, clear }; +}; diff --git a/drizzle-kit/tests/sqlite/pull.test.ts b/drizzle-kit/tests/sqlite/pull.test.ts new file mode 100644 index 0000000000..03b6e91ed7 --- /dev/null +++ b/drizzle-kit/tests/sqlite/pull.test.ts @@ -0,0 +1,183 @@ +import Database from 'better-sqlite3'; +import { SQL, sql } from 'drizzle-orm'; +import { check, int, sqliteTable, sqliteView, text } from 'drizzle-orm/sqlite-core'; +import * as fs from 'fs'; +import { interimToDDL } from 'src/dialects/sqlite/ddl'; +import { fromDatabaseForDrizzle } from 'src/dialects/sqlite/introspect'; +import { expect, test } from 'vitest'; +import { dbFrom, diffAfterPull, push } from './mocks'; + +fs.mkdirSync('tests/sqlite/tmp', { recursive: true }); + +test('generated always column: link to another column', async () => { + const sqlite = new Database(':memory:'); + + const schema = { + users: sqliteTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs((): SQL => sql`\`email\``), + }), + }; + + const { statements, sqlStatements } = await diffAfterPull(sqlite, schema, 'generated-link-column'); + + expect(sqlStatements).toStrictEqual([]); +}); + +test('generated always column virtual: link to another column', async () => { + const sqlite = new Database(':memory:'); + + const schema = { + users: sqliteTable('users', { + id: int('id'), + email: text('email'), + generatedEmail: text('generatedEmail').generatedAlwaysAs((): SQL => sql`\`email\``, { mode: 'virtual' }), + }), + }; + + const { statements, sqlStatements } = await diffAfterPull(sqlite, schema, 'generated-link-column-virtual'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('complex generated always', async () => { + const sqlite = new Database(':memory:'); + + const generatedExpression = `trim( + coalesce(\`first_name\`, '') || ' ' || coalesce(\`last_name\`, '') || + (CASE WHEN nullif(trim(coalesce(\`suffix\`, '')), '') IS NOT NULL THEN ' ' || trim(coalesce(\`suffix\`, '')) ELSE '' END) + )`; + + const schema = { + users: sqliteTable('users', { + id: int('id'), + firstName: text('first_name'), + lastName: text('last_name'), + suffix: text('suffix'), + fullName: text('full_name').generatedAlwaysAs((): SQL => sql.raw(generatedExpression), { mode: 'virtual' }), + }), + }; + + const { statements, sqlStatements, initDDL, resultDdl } = await diffAfterPull( + sqlite, + schema, + 'complex generated always', + ); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); + expect( + initDDL.columns.one({ name: 'full_name' })?.generated, + ).toEqual({ + as: `(${generatedExpression})`, + type: 'virtual', + }); + expect( + resultDdl.columns.one({ name: 'full_name' })?.generated, + ).toEqual({ + as: `(${generatedExpression})`, + type: 'virtual', + }); +}); + +test('instrospect strings with single quotes', async () => { + const sqlite = new Database(':memory:'); + + const schema = { + columns: sqliteTable('columns', { + text: text('text').default('escape\'s quotes " '), + }), + }; + + const { statements, sqlStatements } = await diffAfterPull(sqlite, schema, 'introspect-strings-with-single-quotes'); + + expect(sqlStatements).toStrictEqual([]); +}); + +test('introspect checks', async () => { + const sqlite = new Database(':memory:'); + + const initSchema = { + users: sqliteTable( + 'users', + { + id: int('id'), + name: text('name'), + age: int('age'), + }, + ( + table, + ) => [check('some_check1', sql`${table.age} > 21`), check('some_check2', sql`${table.age} IN (21, 22, 23)`)], + ), + }; + + const db = dbFrom(sqlite); + await push({ + db, + to: initSchema, + }); + + const schema = await fromDatabaseForDrizzle(db); + const { ddl, errors } = interimToDDL(schema); + + expect(errors.length).toBe(0); + expect(ddl.checks.list().length).toBe(2); + expect(ddl.checks.list()[0].name).toBe('some_check1'); + expect(ddl.checks.list()[0].value).toBe('"age" > 21'); + expect(ddl.checks.list()[1].name).toBe('some_check2'); + expect(ddl.checks.list()[1].value).toBe('"age" IN (21, 22, 23)'); +}); + +test('view #1', async () => { + const sqlite = new Database(':memory:'); + + const users = sqliteTable('users', { id: int('id') }); + const testView = sqliteView('some_view', { id: int('id') }).as(sql`SELECT * FROM ${users}`); + // view with \n newlines + const testView2 = sqliteView('some_view2', { id: int('id') }).as( + sql`SELECT\n*\nFROM\n${users}`, + ); + const testView3 = sqliteView('some_view3', { id: int('id') }).as( + sql`WITH temp as (SELECT 1) SELECT\n*\nFROM\n${users}`, + ); + + const schema = { + users: users, + testView, + testView2, + testView3, + }; + + const { statements, sqlStatements } = await diffAfterPull(sqlite, schema, 'view-1'); + + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); + +test('broken view', async () => { + const sqlite = new Database(':memory:'); + + const users = sqliteTable('users', { id: int('id') }); + const testView1 = sqliteView('some_view1', { id: int('id') }).as(sql`SELECT id FROM ${users}`); + const testView2 = sqliteView('some_view2', { id: int('id'), name: text('name') }).as( + sql`SELECT id, name FROM ${users}`, + ); + + const schema = { + users: users, + testView1, + testView2, + }; + + const { statements, sqlStatements, resultDdl } = await diffAfterPull(sqlite, schema, 'broken-view'); + + expect( + resultDdl.views.one({ + name: 'some_view2', + })?.error, + ).toBeTypeOf('string'); + expect(statements.length).toBe(0); + expect(sqlStatements.length).toBe(0); +}); diff --git a/drizzle-kit/tests/sqlite/sqlite-checks.test.ts b/drizzle-kit/tests/sqlite/sqlite-checks.test.ts new file mode 100644 index 0000000000..adc91c82d4 --- /dev/null +++ b/drizzle-kit/tests/sqlite/sqlite-checks.test.ts @@ -0,0 +1,250 @@ +import { sql } from 'drizzle-orm'; +import { check, int, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(() => { + _ = prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create table with check', async (t) => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`age` integer,\n' + + '\tCONSTRAINT "some_check_name" CHECK("age" > 21)\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add check contraint to existing table', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }), + }; + + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`age` integer,\n' + + '\tCONSTRAINT "some_check_name" CHECK("age" > 21)\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `age`) SELECT `id`, `age` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop check constraint to existing table', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n\t`id` integer PRIMARY KEY,\n\t`age` integer\n);\n', + 'INSERT INTO `__new_users`(`id`, `age`) SELECT `id`, `age` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename check constraint', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('new_some_check_name', sql`${table.age} > 21`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`age` integer,\n' + + '\tCONSTRAINT "new_some_check_name" CHECK("age" > 21)\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `age`) SELECT `id`, `age` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('change check constraint value', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 21`)]), + }; + + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey(), + age: int('age'), + }, (table) => [check('some_check_name', sql`${table.age} > 10`)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`age` integer,\n' + + '\tCONSTRAINT "some_check_name" CHECK("age" > 10)\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `age`) SELECT `id`, `age` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create checks with same names', async (t) => { + const to = { + users: sqliteTable( + 'users', + { + id: int('id').primaryKey(), + age: int('age'), + name: text('name'), + }, + ( + table, + ) => [check('some_check_name', sql`${table.age} > 21`), check('some_check_name', sql`${table.name} != 'Alex'`)], + ), + }; + + const { err2 } = await diff({}, to, []); + + // TODO revise: push does not return any errors. should I use push here? + // const {} = await push({ db, to }); + + expect(err2).toStrictEqual([{ name: 'some_check_name', type: 'conflict_check' }]); +}); + +test('db has checks. Push with same names', async () => { + // TODO: revise: it seems to me that this test is the same as one above, but they expect different results + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }, (table) => [check('some_check', sql`${table.age} > 21`)]), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }, (table) => [check('some_check', sql`${table.age} > 22`)]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`name` text,\n' + + '\t`age` integer,\n' + + '\tCONSTRAINT "some_check" CHECK("age" > 22)\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`, `age`) SELECT `id`, `name`, `age` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + const hints0: string[] = []; + expect(phints).toStrictEqual(hints0); +}); diff --git a/drizzle-kit/tests/sqlite/sqlite-columns.test.ts b/drizzle-kit/tests/sqlite/sqlite-columns.test.ts new file mode 100644 index 0000000000..4d80f69b00 --- /dev/null +++ b/drizzle-kit/tests/sqlite/sqlite-columns.test.ts @@ -0,0 +1,1547 @@ +import { sql } from 'drizzle-orm'; +import { + AnySQLiteColumn, + blob, + foreignKey, + getTableConfig, + index, + int, + integer, + numeric, + primaryKey, + real, + sqliteTable, + text, + uniqueIndex, +} from 'drizzle-orm/sqlite-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(() => { + _ = prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create table with id', async (t) => { + const schema = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const { sqlStatements: st } = await diff({}, schema, []); + + const { sqlStatements: pst } = await push({ db, to: schema }); + + const st0: string[] = [`CREATE TABLE \`users\` (\n\t\`id\` integer PRIMARY KEY AUTOINCREMENT\n);\n`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #1', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [`ALTER TABLE \`users\` ADD \`name\` text NOT NULL;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #2', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE `users` ADD `name` text;', + 'ALTER TABLE `users` ADD `email` text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #3', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name1: text('name1').default('name'), + name2: text('name2').notNull(), + name3: text('name3').default('name').notNull(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + "ALTER TABLE `users` ADD `name1` text DEFAULT 'name';", + 'ALTER TABLE `users` ADD `name2` text NOT NULL;', + "ALTER TABLE `users` ADD `name3` text DEFAULT 'name' NOT NULL;", + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #4', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name', { enum: ['one', 'two'] }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = ['ALTER TABLE `users` ADD `name` text;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #5', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), + }); + + const schema2 = { + users, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'ALTER TABLE `users` ADD `report_to` integer REFERENCES users(id);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #6', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email').unique().notNull(), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email').unique().notNull(), + password: text('password').notNull(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = ['ALTER TABLE `users` ADD `password` text NOT NULL;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('added column not null and without default to table with data', async (t) => { + const schema1 = { + companies: sqliteTable('companies', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + }), + }; + + const schema2 = { + companies: sqliteTable('companies', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + age: integer('age').notNull(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + await db.run(`INSERT INTO \`companies\` ("name") VALUES ('drizzle');`); + await db.run(`INSERT INTO \`companies\` ("name") VALUES ('turso');`); + + // TODO: reivise + const { sqlStatements: pst, hints: phints, error } = await push({ + db, + to: schema2, + expectError: true, + force: true, + }); + + const st0: string[] = [`ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(phints[0].statement).toStrictEqual('DELETE FROM "companies" where true;'); + expect(error).toBeNull(); + + // TODO: check truncations +}); + +test('added column not null and without default to table without data', async (t) => { + const schema1 = { + companies: sqliteTable('companies', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + }), + }; + + const schema2 = { + companies: sqliteTable('companies', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + age: integer('age').notNull(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [`ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + const hints0: string[] = []; + expect(phints).toStrictEqual(hints0); +}); + +test('add generated stored column', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + generatedName: text('gen_name').generatedAlwaysAs(sql`123`, { mode: 'stored' }), + }), + }; + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`gen_name` text GENERATED ALWAYS AS (123) STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`) SELECT `id` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add generated virtual column', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + generatedName: text('gen_name').generatedAlwaysAs(sql`123`, { mode: 'virtual' }), + }), + }; + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (123) VIRTUAL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column make generated', async (t) => { + const from = { + users: sqliteTable('users', { + id: int('id'), + generatedName: text('gen_name'), + }), + }; + const to = { + users: sqliteTable('users', { + id: int('id'), + generatedName: text('gen_name').generatedAlwaysAs(sql`123`, { mode: 'stored' }), + }), + }; + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`gen_name` text GENERATED ALWAYS AS (123) STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`) SELECT `id` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add columns #6', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email').unique().notNull(), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email').unique().notNull(), + password: text('password').notNull(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = ['ALTER TABLE `users` ADD `password` text NOT NULL;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop column', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = ['ALTER TABLE `users` DROP COLUMN `name`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: integer().primaryKey({ autoIncrement: true }), + name: text(), + email: text(), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: integer().primaryKey({ autoIncrement: true }), + name: text(), + email: text('email2'), + }), + }; + + const renames = ['users.email->users.email2']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = ['ALTER TABLE `users` RENAME COLUMN `email` TO `email2`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column and change data type', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + age: integer('age'), + }), + }; + + const renames = ['users.name->users.age']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER TABLE `users` RENAME COLUMN `name` TO `age`;', + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`age` integer\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `age`) SELECT `id`, `age` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + const hints0: string[] = []; + expect(phints).toStrictEqual(hints0); +}); + +test('add index #1', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), + }), + }; + + const users = sqliteTable( + 'users', + { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), + }, + (t) => [ + index('reportee_idx').on(t.reporteeId), + ], + ); + + const schema2 = { + users, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + // await push({ db, to: schema1 }); + // const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = ['CREATE INDEX `reportee_idx` ON `users` (`report_to`);']; + expect(st).toStrictEqual(st0); + // expect(pst).toStrictEqual(st0); +}); + +test('dropped, added unique index', async (t) => { + const users = sqliteTable('users', { + id: integer('id').primaryKey().notNull(), + name: text('name').notNull(), + email: text('email'), + textJson: text('text_json', { mode: 'json' }), + blobJon: blob('blob_json', { mode: 'json' }), + blobBigInt: blob('blob_bigint', { mode: 'bigint' }), + numeric: numeric('numeric'), + createdAt: integer('created_at', { mode: 'timestamp' }), + createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), + real: real('real'), + text: text('text', { length: 255 }), + role: text('role', { enum: ['admin', 'user'] }).default('user'), + isConfirmed: integer('is_confirmed', { mode: 'boolean' }), + }); + + const schema1 = { + users, + customers: sqliteTable('customers', { + id: integer('id').primaryKey(), + address: text('address').notNull().unique(), + isConfirmed: integer('is_confirmed', { mode: 'boolean' }), + registrationDate: integer('registration_date', { mode: 'timestamp_ms' }).notNull().$defaultFn(() => new Date()), + userId: integer('user_id').notNull(), + }, (table) => [uniqueIndex('customers_address_unique').on(table.address)]), + + posts: sqliteTable('posts', { + id: integer('id').primaryKey(), + content: text('content'), + authorId: integer('author_id'), + }), + }; + + const schema2 = { + users, + customers: sqliteTable('customers', { + id: integer('id').primaryKey(), + address: text('address').notNull(), + isConfirmed: integer('is_confirmed', { mode: 'boolean' }), + registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) + .notNull() + .$defaultFn(() => new Date()), + userId: integer('user_id').notNull(), + }, (table) => [ + uniqueIndex('customers_is_confirmed_unique').on( + table.isConfirmed, + ), + ]), + + posts: sqliteTable('posts', { + id: integer('id').primaryKey(), + content: text('content'), + authorId: integer('author_id'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + `DROP INDEX IF EXISTS \`customers_address_unique\`;`, + `CREATE UNIQUE INDEX \`customers_is_confirmed_unique\` ON \`customers\` (\`is_confirmed\`);`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + const hints0: string[] = []; + expect(phints).toStrictEqual(hints0); +}); + +test('drop autoincrement. drop column with data', async (t) => { + const schema1 = { + companies: sqliteTable('companies', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const schema2 = { + companies: sqliteTable('companies', { + id: integer('id').primaryKey({ autoIncrement: false }), + }), + }; + + const table = getTableConfig(schema1.companies); + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + await db.run( + `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`, + ); + await db.run( + `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`, + ); + + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_companies` (\n\t`id` integer PRIMARY KEY\n);\n', + 'INSERT INTO `__new_companies`(`id`) SELECT `id` FROM `companies`;', + 'DROP TABLE `companies`;', + 'ALTER TABLE `__new_companies` RENAME TO `companies`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(phints[0].hint).toStrictEqual("· You're about to drop 'name' column(s) in a non-empty 'companies' table"); +}); + +test('drop autoincrement. drop column with data with pragma off', async (t) => { + await db.run('PRAGMA foreign_keys=OFF;'); + + const users = sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + }); + const schema1 = { + companies: sqliteTable('companies', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name'), + user_id: integer('user_id').references(() => users.id), + }), + }; + + const schema2 = { + companies: sqliteTable('companies', { + id: integer('id').primaryKey({ autoIncrement: false }), + user_id: integer('user_id').references(() => users.id), + }), + }; + + const table = getTableConfig(schema1.companies); + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + await db.run( + `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`, + ); + await db.run( + `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`, + ); + + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_companies` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`user_id` integer,\n' + + '\tCONSTRAINT `fk_companies_user_id_users_id_fk` FOREIGN KEY (`user_id`) REFERENCES `users`(`id`)\n' + + ');\n', + 'INSERT INTO `__new_companies`(`id`, `user_id`) SELECT `id`, `user_id` FROM `companies`;', + 'DROP TABLE `companies`;', + 'ALTER TABLE `__new_companies` RENAME TO `companies`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(phints[0].hint).toStrictEqual("· You're about to drop 'name' column(s) in a non-empty 'companies' table"); +}); + +test('change autoincrement. other table references current', async (t) => { + const companies1 = sqliteTable('companies', { + id: integer('id').primaryKey({ autoIncrement: true }), + }); + const companies2 = sqliteTable('companies', { + id: integer('id').primaryKey({ autoIncrement: false }), + }); + + const users1 = sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name').unique(), + companyId: text('company_id').references(() => companies1.id), + }); + + const users2 = sqliteTable('users', { + id: integer('id').primaryKey({ autoIncrement: true }), + name: text('name').unique(), + companyId: text('company_id').references(() => companies2.id), + }); + + const schema1 = { + companies: companies1, + users: users1, + }; + + const schema2 = { + companies: companies2, + users: users2, + }; + + const { name: usersTableName } = getTableConfig(users1); + const { name: companiesTableName } = getTableConfig(companies1); + const seedStatements = [ + `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('drizzle');`, + `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('turso');`, + `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES ('1');`, + `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES ('2');`, + ]; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + for (const seedSt of seedStatements) { + await db.run(seedSt); + } + + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + `PRAGMA foreign_keys=OFF;`, + `CREATE TABLE \`__new_companies\` ( +\t\`id\` integer PRIMARY KEY +);\n`, + `INSERT INTO \`__new_companies\`(\`id\`) SELECT \`id\` FROM \`companies\`;`, + `DROP TABLE \`companies\`;`, + `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, + `PRAGMA foreign_keys=ON;`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + const hints0: string[] = []; + expect(phints).toStrictEqual(hints0); +}); + +test('create composite primary key', async (t) => { + const schema1 = {}; + + const schema2 = { + table: sqliteTable('table', { + col1: integer('col1').notNull(), + col2: integer('col2').notNull(), + }, (t) => [primaryKey({ + columns: [t.col1, t.col2], + })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'CREATE TABLE `table` (\n\t`col1` integer NOT NULL,\n\t`col2` integer NOT NULL,\n\tCONSTRAINT \`table_pk\` PRIMARY KEY(`col1`, `col2`)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + const hints0: string[] = []; + expect(phints).toStrictEqual(hints0); +}); + +test('add foreign key #1', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to'), + }), + }; + + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), + }); + + const schema2 = { + users, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`report_to` integer,\n' + + '\tCONSTRAINT `fk_users_report_to_users_id_fk` FOREIGN KEY (`report_to`) REFERENCES `users`(`id`)\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `report_to`) SELECT `id`, `report_to` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add foreign key #2', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to'), + }), + }; + + const schema2 = { + users: sqliteTable( + 'users', + { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('report_to'), + }, + (t) => [foreignKey({ + columns: [t.reporteeId], + foreignColumns: [t.id], + name: 'reportee_fk', + })], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`report_to` integer,\n' + + '\tCONSTRAINT `reportee_fk` FOREIGN KEY (`report_to`) REFERENCES `users`(`id`)\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `report_to`) SELECT `id`, `report_to` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column rename #1', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name1'), + }), + }; + + const renames = ['users.name->users.name1']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = ['ALTER TABLE `users` RENAME COLUMN `name` TO `name1`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column rename #2', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name1'), + email: text('email'), + }), + }; + + const renames = ['users.name->users.name1']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER TABLE `users` RENAME COLUMN `name` TO `name1`;', + 'ALTER TABLE `users` ADD `email` text;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column rename #3', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name1'), + }), + }; + + const renames = ['users.name->users.name1']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER TABLE `users` RENAME COLUMN `name` TO `name1`;', + 'ALTER TABLE `users` DROP COLUMN `email`;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column rename #4', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + email: text('email'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name2'), + email: text('email2'), + }), + }; + + const renames = [ + 'users.name->users.name2', + 'users.email->users.email2', + ]; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER TABLE `users` RENAME COLUMN `name` TO `name2`;', + 'ALTER TABLE `users` RENAME COLUMN `email` TO `email2`;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename column in composite pk', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int(), + id2: int(), + name: text('name'), + }, (t) => [primaryKey({ columns: [t.id, t.id2] })]), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int(), + id3: int(), + name: text('name'), + }, (t) => [primaryKey({ columns: [t.id, t.id3] })]), + }; + + const renames = ['users.id2->users.id3']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = ['ALTER TABLE `users` RENAME COLUMN `id2` TO `id3`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column rename + alter type', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: int('name1'), + }), + }; + + const renames = ['users.name->users.name1']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER TABLE `users` RENAME COLUMN `name` TO `name1`;', + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`name1` integer\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name1`) SELECT `id`, `name1` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter table add composite pk', async (t) => { + const schema1 = { + table: sqliteTable('table', { + id1: integer('id1'), + id2: integer('id2'), + }), + }; + + const schema2 = { + table: sqliteTable( + 'table', + { + id1: integer('id1'), + id2: integer('id2'), + }, + (t) => [primaryKey({ columns: [t.id1, t.id2] })], + ), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_table` (\n' + + '\t`id1` integer,\n' + + '\t`id2` integer,\n' + + '\tCONSTRAINT \`table_pk\` PRIMARY KEY(`id1`, `id2`)\n' + + ');\n', + 'INSERT INTO `__new_table`(`id1`, `id2`) SELECT `id1`, `id2` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column drop not null', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name').notNull(), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_table` (\n\t`name` text\n);\n', + 'INSERT INTO `__new_table`(`name`) SELECT `name` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column add not null', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name').notNull(), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_table` (\n\t`name` text NOT NULL\n);\n', + 'INSERT INTO `__new_table`(`name`) SELECT `name` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column add default', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name').default('dan'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + "CREATE TABLE `__new_table` (\n\t`name` text DEFAULT 'dan'\n);\n", + 'INSERT INTO `__new_table`(`name`) SELECT `name` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column drop default', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name').default('dan'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_table` (\n\t`name` text\n);\n', + 'INSERT INTO `__new_table`(`name`) SELECT `name` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column add default not null', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name').notNull().default('dan'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + "CREATE TABLE `__new_table` (\n\t`name` text DEFAULT 'dan' NOT NULL\n);\n", + 'INSERT INTO `__new_table`(`name`) SELECT `name` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column add default not null with indexes', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name'), + }, (table) => [index('index_name').on(table.name)]), + }; + + const to = { + users: sqliteTable('table', { + name: text('name').notNull().default('dan'), + }, (table) => [index('index_name').on(table.name)]), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + "CREATE TABLE `__new_table` (\n\t`name` text DEFAULT 'dan' NOT NULL\n);\n", + 'INSERT INTO `__new_table`(`name`) SELECT `name` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + 'CREATE INDEX `index_name` ON `table` (`name`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column add default not null with indexes #2', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name').notNull().default('dan'), + }, (table) => [index('index_name').on(table.name)]), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + "CREATE TABLE `__new_table` (\n\t`name` text DEFAULT 'dan' NOT NULL\n);\n", + 'INSERT INTO `__new_table`(`name`) SELECT `name` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + 'CREATE INDEX `index_name` ON `table` (`name`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column drop default not null', async (t) => { + const from = { + users: sqliteTable('table', { + name: text('name').notNull().default('dan'), + }), + }; + + const to = { + users: sqliteTable('table', { + name: text('name'), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_table` (\n\t`name` text\n);\n', + 'INSERT INTO `__new_table`(`name`) SELECT `name` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column drop generated', async (t) => { + const from = { + users: sqliteTable('table', { + id: int('id').primaryKey().notNull(), + name: text('name').generatedAlwaysAs("'drizzle is the best'").notNull(), + }), + }; + + const to = { + users: sqliteTable('table', { + id: int('id').primaryKey().notNull(), + name: text('name').notNull(), + }), + }; + + const { sqlStatements: st } = await diff( + from, + to, + [], + ); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` DROP COLUMN `name`;', + 'ALTER TABLE `table` ADD `name` text NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('alter column drop not null, add not null', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + }), + posts: sqliteTable('posts', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + userId: int('user_id'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + }), + posts: sqliteTable('posts', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + userId: int('user_id'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`name` text\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`) SELECT `id`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_posts` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`name` text NOT NULL,\n' + + '\t`user_id` integer\n' + + ');\n', + 'INSERT INTO `__new_posts`(`id`, `name`, `user_id`) SELECT `id`, `name`, `user_id` FROM `posts`;', + 'DROP TABLE `posts`;', + 'ALTER TABLE `__new_posts` RENAME TO `posts`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + const hints0: string[] = []; + expect(phints).toStrictEqual(hints0); +}); + +test('recreate table with nested references', async (t) => { + const users1 = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + age: integer('age'), + }); + + const subscriptions1 = sqliteTable('subscriptions', { + id: int('id').primaryKey({ autoIncrement: true }), + userId: integer('user_id').references(() => users1.id), + customerId: text('customer_id'), + }); + + const schema1 = { + users: users1, + subscriptions: subscriptions1, + subscriptionMetadata: sqliteTable('subscriptions_metadata', { + id: int('id').primaryKey({ autoIncrement: true }), + subscriptionId: text('subscription_id').references(() => subscriptions1.id), + }), + }; + + const users2 = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }); + + const subscriptions2 = sqliteTable('subscriptions', { + id: int('id').primaryKey({ autoIncrement: true }), + userId: integer('user_id').references(() => users2.id), + customerId: text('customer_id'), + }); + + const schema2 = { + users: users2, + subscriptions: subscriptions2, + subscriptionMetadata: sqliteTable('subscriptions_metadata', { + id: int('id').primaryKey({ autoIncrement: true }), + subscriptionId: text('subscription_id').references(() => subscriptions2.id), + }), + }; + + const { sqlStatements: st } = await diff( + schema1, + schema2, + [], + ); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`name` text,\n' + + '\t`age` integer\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`, `age`) SELECT `id`, `name`, `age` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('text default values escape single quotes', async (t) => { + const schema1 = { + table: sqliteTable('table', { + id: integer('id').primaryKey(), + }), + }; + + const schema2 = { + table: sqliteTable('table', { + id: integer('id').primaryKey(), + text: text('text').default("escape's quotes"), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst } = await push({ db, to: schema2 }); + + const st0: string[] = ["ALTER TABLE `table` ADD `text` text DEFAULT 'escape''s quotes';"]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); diff --git a/drizzle-kit/tests/sqlite/sqlite-constraints.test.ts b/drizzle-kit/tests/sqlite/sqlite-constraints.test.ts new file mode 100644 index 0000000000..801c59de9e --- /dev/null +++ b/drizzle-kit/tests/sqlite/sqlite-constraints.test.ts @@ -0,0 +1,1742 @@ +import { AnySQLiteColumn, foreignKey, int, primaryKey, sqliteTable, text, unique } from 'drizzle-orm/sqlite-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, drizzleToDDL, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(() => { + _ = prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('unique #1. add unique. inline param without name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text UNIQUE +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #1_0. drop table with unique', async () => { + const from = { + users: sqliteTable('users', { + name: text().unique(), + }), + }; + const to = {}; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + const st0 = ['DROP TABLE `users`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #1_1. drop column with unique', async () => { + const from = { + users: sqliteTable('users', { + id: int(), + name: text().unique(), + }), + }; + const to = { + users: sqliteTable('users', { + id: int(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`id\` integer +);\n`, + 'INSERT INTO `__new_users`(`id`) SELECT `id` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #2. no changes unique. inline param without name', async () => { + const from = { + users: sqliteTable('users', { + name: text().unique(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('unique #3. add unique. inline param with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text().unique('unique_name'), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text CONSTRAINT \`unique_name\` UNIQUE +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #4. add unique. 3rd param with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text CONSTRAINT \`unique_name\` UNIQUE +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #5. add unique. 3rd param without name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [unique().on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text UNIQUE +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #6. no changes unique. 3rd param without name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }, (t) => [unique().on(t.name)]), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [unique().on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('unique #7.no changes unique. 3rd param with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + name2: text(), + }, (t) => [unique('unique_name').on(t.name, t.name2)]), + }; + const to = { + users: sqliteTable('users', { + name: text(), + name2: text(), + }, (t) => [unique('unique_name').on(t.name, t.name2)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('unique #8. rename unique. 3rd param with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }, (t) => [unique('unique_name').on(t.name)]), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [], + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text CONSTRAINT \`unique_name2\` UNIQUE +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique #9. rename unique. 3rd without + with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }, (t) => [unique().on(t.name)]), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [unique('unique_name2').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [], + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text CONSTRAINT \`unique_name2\` UNIQUE +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('unique multistep #1', async () => { + const sch1 = { + users: sqliteTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = ['CREATE TABLE `users` (\n\t`name` text UNIQUE\n);\n']; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: sqliteTable('users2', { + name: text('name2').unique(), + }), + }; + + const renames = ['users->users2', 'users2.name->users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'ALTER TABLE `users2` RENAME COLUMN `name` TO `name2`;', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3, next: pn3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + expect(n3.uniques.list()).toStrictEqual([{ + columns: ['name2'], + nameExplicit: false, + name: 'users_name_unique', + entityType: 'uniques', + table: 'users2', + }]); + expect(pn3.uniques.list()).toStrictEqual([{ + columns: ['name2'], + nameExplicit: false, + name: 'users2_name2_unique', + entityType: 'uniques', + table: 'users2', + }]); + + const sch3 = { + users: sqliteTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e3 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`name2\` text +); +`, + 'INSERT INTO `__new_users2`(`name2`) SELECT `name2` FROM `users2`;', + 'DROP TABLE `users2`;', + 'ALTER TABLE `__new_users2` RENAME TO `users2`;', + 'PRAGMA foreign_keys=ON;', + ]; + + expect(st4).toStrictEqual(e3); + expect(pst4).toStrictEqual(e3); +}); + +test('unique multistep #2', async () => { + const sch1 = { + users: sqliteTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + const e1 = ['CREATE TABLE `users` (\n\t`name` text UNIQUE\n);\n']; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: sqliteTable('users2', { + name: text('name2').unique(), + }), + }; + + const r1 = [ + 'users->users2', + 'users2.name->users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, r1); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames: r1 }); + + const e2 = [ + 'ALTER TABLE \`users\` RENAME TO \`users2\`;', + 'ALTER TABLE \`users2\` RENAME COLUMN \`name\` TO \`name2\`;', + ]; + expect(pst2).toStrictEqual(e2); + expect(st2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: sqliteTable('users2', { + name: text('name2'), + }, (t) => [unique().on(t.name)]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4, next: pn4 } = await push({ db, to: sch3 }); + expect(st4).toStrictEqual([]); + expect(pst4).toStrictEqual([]); + expect(n4.uniques.list()).toStrictEqual([{ + columns: [ + 'name2', + ], + entityType: 'uniques', + name: 'users_name_unique', + nameExplicit: false, + table: 'users2', + }]); + expect(pn4.uniques.list()).toStrictEqual([{ + columns: [ + 'name2', + ], + entityType: 'uniques', + name: 'users2_name2_unique', + nameExplicit: false, + table: 'users2', + }]); +}); + +test('unique multistep #3', async () => { + const sch1 = { + users: sqliteTable('users', { + name: text().unique(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`name` text UNIQUE\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`name` text UNIQUE\n);\n', + ]); + + const sch2 = { + users: sqliteTable('users2', { + name: text('name2').unique(), + }), + }; + + const renames = ['users->users2', 'users2.name->users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'ALTER TABLE `users2` RENAME COLUMN `name` TO `name2`;', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: sqliteTable('users2', { + name: text('name2'), + }, (t) => [unique('name_unique').on(t.name)]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e4 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`name2\` text CONSTRAINT \`name_unique\` UNIQUE +); +`, + 'INSERT INTO `__new_users2`(`name2`) SELECT `name2` FROM `users2`;', + 'DROP TABLE `users2`;', + 'ALTER TABLE `__new_users2` RENAME TO `users2`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: sqliteTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + const e5 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`name2\` text +); +`, + 'INSERT INTO `__new_users2`(`name2`) SELECT `name2` FROM `users2`;', + 'DROP TABLE `users2`;', + 'ALTER TABLE `__new_users2` RENAME TO `users2`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st5).toStrictEqual(e5); + expect(pst5).toStrictEqual(e5); +}); + +test('pk #1. add pk. inline param without name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text PRIMARY KEY +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #1_0. drop table with pk', async () => { + const from = { + users: sqliteTable('users', { + name: text().primaryKey(), + }), + }; + const to = {}; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + const st0 = [ + 'DROP TABLE `users`;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #1_0. drop column with pk', async () => { + const from = { + users: sqliteTable('users', { + id: int(), + name: text().primaryKey(), + }), + }; + const to = { + users: sqliteTable('users', { + id: int(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`id\` integer +);\n`, + 'INSERT INTO `__new_users`(`id`) SELECT `id` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #1_2. add pk', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text PRIMARY KEY +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #1_3. add pk', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [primaryKey({ name: 'test_pk', columns: [t.name] })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text, + CONSTRAINT \`test_pk\` PRIMARY KEY(\`name\`) +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #2. no changes pk. inline param without name', async () => { + const from = { + users: sqliteTable('users', { + name: text().primaryKey(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #3. add pk. inline param with autoincrement', async () => { + const from = { + users: sqliteTable('users', { + name: int(), + }), + }; + const to = { + users: sqliteTable('users', { + name: int().primaryKey({ autoIncrement: true, onConflict: 'replace' }), + }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` integer PRIMARY KEY AUTOINCREMENT +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #4. add pk. 3rd param with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [primaryKey({ name: 'unique_name', columns: [t.name] })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text, + CONSTRAINT \`unique_name\` PRIMARY KEY(\`name\`) +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #5. add pk. 3rd param without name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text PRIMARY KEY +);\n`, + 'INSERT INTO `__new_users`(`name`) SELECT `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #6. no changes pk. 3rd param without name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + const to = { + users: sqliteTable('users', { + name: text(), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #7.no changes pk. 3rd param with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + name2: text(), + }, (t) => [primaryKey({ name: 'pk_name', columns: [t.name] })]), + }; + const to = { + users: sqliteTable('users', { + name: text(), + name2: text(), + }, (t) => [primaryKey({ name: 'pk_name', columns: [t.name] })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('pk #8. rename pk. 3rd param with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + name2: text(), + }, (t) => [primaryKey({ name: 'pk_name', columns: [t.name, t.name2] })]), + }; + const to = { + users: sqliteTable('users', { + name: text(), + name2: text(), + }, (t) => [primaryKey({ name: 'pk_name_new', columns: [t.name, t.name2] })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [], + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text, + \`name2\` text, + CONSTRAINT \`pk_name_new\` PRIMARY KEY(\`name\`, \`name2\`) +);\n`, + 'INSERT INTO `__new_users`(`name`, `name2`) SELECT `name`, `name2` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk #9. rename pk. 3rd without + with name', async () => { + const from = { + users: sqliteTable('users', { + name: text(), + name2: text(), + }, (t) => [primaryKey({ columns: [t.name, t.name2] })]), + }; + const to = { + users: sqliteTable('users', { + name: text(), + name2: text(), + }, (t) => [primaryKey({ name: 'pk_name', columns: [t.name, t.name2] })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ + db, + to, + renames: [], + }); + + const st0 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( + \`name\` text, + \`name2\` text, + CONSTRAINT \`pk_name\` PRIMARY KEY(\`name\`, \`name2\`) +);\n`, + 'INSERT INTO `__new_users`(`name`, `name2`) SELECT `name`, `name2` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('pk multistep #1', async () => { + const sch1 = { + users: sqliteTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = ['CREATE TABLE `users` (\n\t`name` text PRIMARY KEY\n);\n']; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: sqliteTable('users2', { + name: text('name2').primaryKey(), + }), + }; + + const renames = ['users->users2', 'users2.name->users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'ALTER TABLE `users2` RENAME COLUMN `name` TO `name2`;', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3, next: pn3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + expect(n3.pks.list()).toStrictEqual([{ + columns: ['name2'], + nameExplicit: false, + name: 'users_pk', + entityType: 'pks', + table: 'users2', + }]); + expect(pn3.pks.list()).toStrictEqual([{ + columns: ['name2'], + nameExplicit: false, + name: 'users2_pk', + entityType: 'pks', + table: 'users2', + }]); + + const sch3 = { + users: sqliteTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e3 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`name2\` text +); +`, + 'INSERT INTO `__new_users2`(`name2`) SELECT `name2` FROM `users2`;', + 'DROP TABLE `users2`;', + 'ALTER TABLE `__new_users2` RENAME TO `users2`;', + 'PRAGMA foreign_keys=ON;', + ]; + + expect(st4).toStrictEqual(e3); + expect(pst4).toStrictEqual(e3); +}); + +test('pk multistep #2', async () => { + const sch1 = { + users: sqliteTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + const e1 = ['CREATE TABLE `users` (\n\t`name` text PRIMARY KEY\n);\n']; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const sch2 = { + users: sqliteTable('users2', { + name: text('name2').primaryKey(), + }), + }; + + const r1 = [ + 'users->users2', + 'users2.name->users2.name2', + ]; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, r1); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames: r1 }); + + const e2 = [ + 'ALTER TABLE \`users\` RENAME TO \`users2\`;', + 'ALTER TABLE \`users2\` RENAME COLUMN \`name\` TO \`name2\`;', + ]; + expect(pst2).toStrictEqual(e2); + expect(st2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: sqliteTable('users2', { + name: text('name2'), + }, (t) => [primaryKey({ columns: [t.name] })]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4, next: pn4 } = await push({ db, to: sch3 }); + expect(st4).toStrictEqual([]); + expect(pst4).toStrictEqual([]); + expect(n4.pks.list()).toStrictEqual([{ + columns: [ + 'name2', + ], + entityType: 'pks', + name: 'users_pk', + nameExplicit: false, + table: 'users2', + }]); + expect(pn4.pks.list()).toStrictEqual([{ + columns: [ + 'name2', + ], + entityType: 'pks', + name: 'users2_pk', + nameExplicit: false, + table: 'users2', + }]); +}); + +test('pk multistep #3', async () => { + const sch1 = { + users: sqliteTable('users', { + name: text().primaryKey(), + }), + }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + expect(st1).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`name` text PRIMARY KEY\n);\n', + ]); + expect(pst1).toStrictEqual([ + 'CREATE TABLE `users` (\n\t`name` text PRIMARY KEY\n);\n', + ]); + + const sch2 = { + users: sqliteTable('users2', { + name: text('name2').primaryKey(), + }), + }; + + const renames = ['users->users2', 'users2.name->users2.name2']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'ALTER TABLE `users2` RENAME COLUMN `name` TO `name2`;', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const sch3 = { + users: sqliteTable('users2', { + name: text('name2'), + }, (t) => [primaryKey({ name: 'name_pk', columns: [t.name] })]), + }; + + const { sqlStatements: st4, next: n4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + + const e4 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`name2\` text, + CONSTRAINT \`name_pk\` PRIMARY KEY(\`name2\`) +); +`, + 'INSERT INTO `__new_users2`(`name2`) SELECT `name2` FROM `users2`;', + 'DROP TABLE `users2`;', + 'ALTER TABLE `__new_users2` RENAME TO `users2`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); + + const sch4 = { + users: sqliteTable('users2', { + name: text('name2'), + }), + }; + + const { sqlStatements: st5 } = await diff(n4, sch4, []); + const { sqlStatements: pst5 } = await push({ db, to: sch4 }); + const e5 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`name2\` text +); +`, + 'INSERT INTO `__new_users2`(`name2`) SELECT `name2` FROM `users2`;', + 'DROP TABLE `users2`;', + 'ALTER TABLE `__new_users2` RENAME TO `users2`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st5).toStrictEqual(e5); + expect(pst5).toStrictEqual(e5); +}); + +test('fk #0', async () => { + const users = sqliteTable('users', { + id: int().references((): AnySQLiteColumn => users.id2), + id2: int(), + }); + + const to = { + users, + }; + + const { sqlStatements } = await diff({}, to, []); + // const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE \`users\` (\n\t\`id\` integer,\n\t\`id2\` integer,\n\tCONSTRAINT \`fk_users_id_users_id2_fk\` FOREIGN KEY (\`id\`) REFERENCES \`users\`(\`id2\`)\n);\n`, + ]; + expect(sqlStatements).toStrictEqual(e); + // expect(pst).toStrictEqual(e); +}); + +test('fk #1', async () => { + const users = sqliteTable('users', { + id: int().primaryKey(), + }); + const posts = sqliteTable('posts', { + id: int().primaryKey(), + authorId: int().references(() => users.id), + }); + + const to = { + posts, + users, + }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE \`posts\` (\n\t\`id\` integer PRIMARY KEY,\n\t\`authorId\` integer,\n\tCONSTRAINT \`fk_posts_authorId_users_id_fk\` FOREIGN KEY (\`authorId\`) REFERENCES \`users\`(\`id\`)\n);\n`, + `CREATE TABLE \`users\` (\n\t\`id\` integer PRIMARY KEY\n);\n`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #2', async () => { + const users = sqliteTable('users', { + id: int().primaryKey(), + id2: int().references((): AnySQLiteColumn => users.id), + }); + + const to = { users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE \`users\` (\n\t\`id\` integer PRIMARY KEY,\n\t\`id2\` integer,\n\tCONSTRAINT \`fk_users_id2_users_id_fk\` FOREIGN KEY (\`id2\`) REFERENCES \`users\`(\`id\`)\n);\n`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #3', async () => { + const posts = sqliteTable('posts', { + id: int(), + }); + const users = sqliteTable('users', { + id: int().primaryKey(), + id2: int(), + }, (t) => [foreignKey({ + name: 'fk_name', + columns: [t.id2], + foreignColumns: [posts.id], + })]); + + const to = { posts, users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE \`posts\` (\n\t\`id\` integer\n);\n`, + `CREATE TABLE \`users\` (\n\t\`id\` integer PRIMARY KEY,\n\t\`id2\` integer,\n\tCONSTRAINT \`fk_name\` FOREIGN KEY (\`id2\`) REFERENCES \`posts\`(\`id\`)\n);\n`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #4', async () => { + const posts = sqliteTable('posts', { + id: int(), + }); + const users = sqliteTable('users', { + id: int().primaryKey(), + id2: int(), + }, (t) => [foreignKey({ + columns: [t.id2], + foreignColumns: [posts.id], + })]); + + const to = { posts, users }; + + const { sqlStatements } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + `CREATE TABLE \`posts\` (\n\t\`id\` integer\n);\n`, + `CREATE TABLE \`users\` (\n\t\`id\` integer PRIMARY KEY,\n\t\`id2\` integer,\n\tCONSTRAINT \`fk_users_id2_posts_id_fk\` FOREIGN KEY (\`id2\`) REFERENCES \`posts\`(\`id\`)\n);\n`, + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #5', async () => { + const users = sqliteTable('users', { + id: int().primaryKey(), + id2: int().references((): AnySQLiteColumn => users.id), + }); + + const users2 = sqliteTable('users2', { + id: int('id3').primaryKey(), + id2: int().references((): AnySQLiteColumn => users2.id), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['users->users2', 'users2.id->users2.id3']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE \`users\` RENAME TO \`users2\`;', + 'ALTER TABLE \`users2\` RENAME COLUMN \`id\` TO \`id3\`;', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #6', async () => { + const users = sqliteTable('users', { + id1: int().primaryKey(), + id2: int().references((): AnySQLiteColumn => users.id1), + }); + + const users2 = sqliteTable('users', { + id1: int().primaryKey(), + id2: int(), + }, (t) => [foreignKey({ name: 'id2_id1_fk', columns: [t.id2], foreignColumns: [t.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['users.users_id2_users_id1_fkey->users.id2_id1_fk']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( +\t\`id1\` integer PRIMARY KEY, +\t\`id2\` integer, +\tCONSTRAINT \`id2_id1_fk\` FOREIGN KEY (\`id2\`) REFERENCES \`users\`(\`id1\`) +);\n`, + 'INSERT INTO \`__new_users\`(\`id1\`, \`id2\`) SELECT \`id1\`, \`id2\` FROM \`users\`;', + 'DROP TABLE \`users\`;', + 'ALTER TABLE \`__new_users\` RENAME TO \`users\`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #8', async () => { + const users = sqliteTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int().references((): AnySQLiteColumn => users.id1), + }); + + const users2 = sqliteTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int().references((): AnySQLiteColumn => users.id2), + }); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const e = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( +\t\`id1\` integer PRIMARY KEY, +\t\`id2\` integer UNIQUE, +\t\`id3\` integer, +\tCONSTRAINT \`fk_users_id3_users_id2_fk\` FOREIGN KEY (\`id3\`) REFERENCES \`users\`(\`id2\`) +);\n`, + 'INSERT INTO \`__new_users\`(\`id1\`, \`id2\`, \`id3\`) SELECT \`id1\`, \`id2\`, \`id3\` FROM \`users\`;', + 'DROP TABLE \`users\`;', + 'ALTER TABLE \`__new_users\` RENAME TO \`users\`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #9', async () => { + const users = sqliteTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int(), + }, (t) => [foreignKey({ name: 'fk1', columns: [t.id3], foreignColumns: [t.id1] })]); + + const users2 = sqliteTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int(), + }, (t) => [foreignKey({ name: 'fk1', columns: [t.id3], foreignColumns: [t.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('fk #10', async () => { + const users = sqliteTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int(), + }, (t) => [foreignKey({ columns: [t.id3], foreignColumns: [t.id1] })]); + + const users2 = sqliteTable('users', { + id1: int().primaryKey(), + id2: int().unique(), + id3: int(), + }, (t) => [foreignKey({ columns: [t.id3], foreignColumns: [t.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const { sqlStatements } = await diff(from, to, []); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + expect(sqlStatements).toStrictEqual([]); + expect(pst).toStrictEqual([]); +}); + +test('fk #11', async () => { + const users = sqliteTable('users', { + id1: int().primaryKey(), + }); + + const users2 = sqliteTable('users2', { + id1: int().primaryKey(), + id2: int().references((): AnySQLiteColumn => users2.id1), + }); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['users->users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'ALTER TABLE `users2` ADD `id2` integer REFERENCES users2(id1);', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #12', async () => { + const users = sqliteTable('users', { + id1: int().primaryKey(), + }); + + const users2 = sqliteTable('users2', { + id1: int().primaryKey(), + id2: int(), + }, (t) => [foreignKey({ columns: [t.id2], foreignColumns: [users.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['users->users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'ALTER TABLE `users2` ADD `id2` integer REFERENCES users2(id1);', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #13', async () => { + const users = sqliteTable('users', { + id1: int().primaryKey(), + }); + + const users2 = sqliteTable('users2', { + id1: int().primaryKey(), + id2: int(), + }, (t) => [foreignKey({ name: 'hey_fk', columns: [t.id2], foreignColumns: [users.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['users->users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'ALTER TABLE `users2` ADD `id2` integer CONSTRAINT \`hey_fk\` REFERENCES users2(id1);', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #14', async () => { + const users = sqliteTable('users', { + id1: int(), + id2: int(), + }); + + const users2 = sqliteTable('users2', { + id1: int(), + id2: int(), + }, (t) => [foreignKey({ name: 'hey_fk', columns: [t.id2, t.id1], foreignColumns: [users.id1, users.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['users->users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`id1\` integer, + \`id2\` integer, + CONSTRAINT \`hey_fk\` FOREIGN KEY (\`id2\`,\`id1\`) REFERENCES \`users2\`(\`id1\`,\`id1\`) +);\n`, + 'INSERT INTO `__new_users2`(`id1`, `id2`) SELECT `id1`, `id2` FROM `users2`;', + 'DROP TABLE `users2`;', + 'ALTER TABLE `__new_users2` RENAME TO `users2`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk #15', async () => { + const users = sqliteTable('users', { + id1: int(), + id2: int(), + }); + + const users2 = sqliteTable('users2', { + id1: int(), + id2: int(), + }, (t) => [foreignKey({ columns: [t.id2, t.id1], foreignColumns: [users.id1, users.id1] })]); + + const from = { users }; + const to = { users: users2 }; + + const renames = ['users->users2']; + const { sqlStatements } = await diff(from, to, renames); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const e = [ + 'ALTER TABLE `users` RENAME TO `users2`;', + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`id1\` integer, + \`id2\` integer, + CONSTRAINT \`fk_users2_id2_id1_users_id1_id1_fk\` FOREIGN KEY (\`id2\`,\`id1\`) REFERENCES \`users2\`(\`id1\`,\`id1\`) +);\n`, + 'INSERT INTO `__new_users2`(`id1`, `id2`) SELECT `id1`, `id2` FROM `users2`;', + 'DROP TABLE `users2`;', + 'ALTER TABLE `__new_users2` RENAME TO `users2`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(sqlStatements).toStrictEqual(e); + expect(pst).toStrictEqual(e); +}); + +test('fk multistep #1', async () => { + const users = sqliteTable('users', { + id: int().primaryKey(), + id2: int().references((): AnySQLiteColumn => users.id), + }); + + const users2 = sqliteTable('users2', { + id: int('id3').primaryKey(), + id2: int().references((): AnySQLiteColumn => users2.id), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE \`users\` (\n\t\`id\` integer PRIMARY KEY,\n\t\`id2\` integer,' + '\n' + + '\tCONSTRAINT \`fk_users_id2_users_id_fk\` FOREIGN KEY (\`id2\`) REFERENCES \`users\`(\`id\`)\n);\n', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const renames = ['users->users2', 'users2.id->users2.id3']; + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, renames); + const { sqlStatements: pst2 } = await push({ db, to: sch2, renames }); + + const e2 = [ + 'ALTER TABLE \`users\` RENAME TO \`users2\`;', + 'ALTER TABLE \`users2\` RENAME COLUMN \`id\` TO \`id3\`;', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3, next: n3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); + + const users3 = sqliteTable('users2', { + id: int('id3').primaryKey(), + id2: int(), + }); + const sch3 = { users: users3 }; + + const { sqlStatements: st4 } = await diff(n3, sch3, []); + const { sqlStatements: pst4 } = await push({ db, to: sch3 }); + const e4 = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users2\` ( + \`id3\` integer PRIMARY KEY, + \`id2\` integer +);\n`, + 'INSERT INTO \`__new_users2\`(\`id3\`, \`id2\`) SELECT \`id3\`, \`id2\` FROM \`users2\`;', + 'DROP TABLE \`users2\`;', + 'ALTER TABLE \`__new_users2\` RENAME TO \`users2\`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st4).toStrictEqual(e4); + expect(pst4).toStrictEqual(e4); +}); + +test('fk multistep #2', async () => { + const users = sqliteTable('users', { + id: int().primaryKey(), + id2: int().references((): AnySQLiteColumn => users.id), + }); + + const users2 = sqliteTable('users2', { + id: int('id3').primaryKey(), + id2: int().references((): AnySQLiteColumn => users2.id), + }); + + const sch1 = { users }; + const sch2 = { users: users2 }; + + const { sqlStatements: st1, next: n1 } = await diff({}, sch1, []); + const { sqlStatements: pst1 } = await push({ db, to: sch1 }); + + const e1 = [ + 'CREATE TABLE \`users\` (\n\t\`id\` integer PRIMARY KEY,\n\t\`id2\` integer,' + + '\n\tCONSTRAINT \`fk_users_id2_users_id_fk\` FOREIGN KEY (\`id2\`) REFERENCES \`users\`(\`id\`)\n);\n', + ]; + expect(st1).toStrictEqual(e1); + expect(pst1).toStrictEqual(e1); + + const { sqlStatements: st2, next: n2 } = await diff(n1, sch2, []); + const { sqlStatements: pst2 } = await push({ db, to: sch2 }); + + const e2 = [ + 'CREATE TABLE \`users2\` (\n\t\`id3\` integer PRIMARY KEY,\n\t\`id2\` integer,' + + '\n\tCONSTRAINT \`fk_users2_id2_users2_id3_fk\` FOREIGN KEY (\`id2\`) REFERENCES \`users2\`(\`id3\`)\n);\n', + 'DROP TABLE \`users\`;', + ]; + expect(st2).toStrictEqual(e2); + expect(pst2).toStrictEqual(e2); + + const { sqlStatements: st3 } = await diff(n2, sch2, []); + const { sqlStatements: pst3 } = await push({ db, to: sch2 }); + + expect(st3).toStrictEqual([]); + expect(pst3).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/sqlite/sqlite-defaults.test.ts b/drizzle-kit/tests/sqlite/sqlite-defaults.test.ts new file mode 100644 index 0000000000..a7ac68a30a --- /dev/null +++ b/drizzle-kit/tests/sqlite/sqlite-defaults.test.ts @@ -0,0 +1,176 @@ +import { sql } from 'drizzle-orm'; +import { blob, integer, numeric, real, text } from 'drizzle-orm/sqlite-core'; +import { DB } from 'src/utils'; +import { afterAll, beforeAll, expect, test } from 'vitest'; +import { diffDefault, prepareTestDatabase, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} + +let _: TestDatabase; +let db: DB; + +beforeAll(async () => { + _ = prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +test('integer', async () => { + const res1 = await diffDefault(_, integer({ mode: 'number' }).default(10), '10'); + const res2 = await diffDefault(_, integer({ mode: 'number' }).default(0), '0'); + const res3 = await diffDefault(_, integer({ mode: 'number' }).default(-10), '-10'); + const res4 = await diffDefault(_, integer({ mode: 'number' }).default(1e4), '10000'); + const res5 = await diffDefault(_, integer({ mode: 'number' }).default(-1e4), '-10000'); + + const res6 = await diffDefault(_, integer({ mode: 'boolean' }).default(true), '1'); + const res7 = await diffDefault(_, integer({ mode: 'boolean' }).default(false), '0'); + + const date = new Date('2025-05-23T12:53:53.115Z'); + const res8 = await diffDefault(_, integer({ mode: 'timestamp' }).default(date), `1748004833`); + const res9 = await diffDefault(_, integer({ mode: 'timestamp_ms' }).default(date), `${date.getTime()}`); + // const res10 = await diffDefault( + // _, + // integer({ mode: 'timestamp_ms' }).defaultNow(), + // `(cast((julianday('now') - 2440587.5)*86400000 as integer))`, + // ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); +}); + +test('text', async () => { + const res1 = await diffDefault(_, text().default('text'), `'text'`); + const res2 = await diffDefault(_, text().default("text'text"), `'text''text'`); + // raw default sql for the line below: ('text''\text"') + const res3 = await diffDefault(_, text().default('text\'\\text"'), `'text''\\\\text"'`); + const res4 = await diffDefault(_, text({ enum: ['one', 'two', 'three'] }).default('one'), `'one'`); + const res5 = await diffDefault(_, text().default(sql`CURRENT_TIME`), 'CURRENT_TIME'); + const res6 = await diffDefault(_, text().default(sql`CURRENT_DATE`), 'CURRENT_DATE'); + const res7 = await diffDefault(_, text().default(sql`CURRENT_TIMESTAMP`), 'CURRENT_TIMESTAMP'); + const res8 = await diffDefault(_, text({ mode: 'json' }).default({ key: 'value' }), `'{"key":"value"}'`); + const res9 = await diffDefault( + _, + text({ mode: 'json' }).default({ key: 9223372036854775807n }), + `'{"key":9223372036854775807}'`, + ); + const res10 = await diffDefault( + _, + text({ mode: 'json' }).default(sql`'{"key":9223372036854775807}'`), + `'{"key":9223372036854775807}'`, + ); + const res11 = await diffDefault( + _, + text({ mode: 'json' }).default([9223372036854775807n, 9223372036854775806n]), + `'[9223372036854775807,9223372036854775806]'`, + ); + const res12 = await diffDefault( + _, + text({ mode: 'json' }).default({ key: 'value\\\'"' }), + `'{"key":"value\\\\''\\""}'`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); +}); + +test('real', async () => { + const res1 = await diffDefault(_, real().default(1000.123), '1000.123'); + expect.soft(res1).toStrictEqual([]); +}); + +test('numeric', async () => { + const res1 = await diffDefault(_, numeric().default('10.123'), "'10.123'"); + const res2 = await diffDefault(_, numeric({ mode: 'bigint' }).default(9223372036854775807n), "'9223372036854775807'"); + const res3 = await diffDefault(_, numeric({ mode: 'number' }).default(9007199254740991), '9007199254740991'); + const res4 = await diffDefault( + _, + numeric({ mode: 'string' }).default('9223372036854775807'), + "'9223372036854775807'", + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); +}); + +test('blob', async () => { + const res1 = await diffDefault(_, blob({ mode: 'buffer' }).default(Buffer.from('text')), `X'74657874'`); + const res2 = await diffDefault( + _, + blob({ mode: 'buffer' }).default(Buffer.from("text'text")), + `X'746578742774657874'`, + ); + // raw default sql for the line below: ('text''\text"') + const res3 = await diffDefault( + _, + blob({ mode: 'buffer' }).default(Buffer.from('text\'\\text"')), + `X'74657874275C7465787422'`, + ); + + const res4 = await diffDefault(_, blob({ mode: 'bigint' }).default(9223372036854775807n), "'9223372036854775807'"); + + const res5 = await diffDefault(_, blob({ mode: 'json' }).default(9223372036854775807n), "'9223372036854775807'"); + const res6 = await diffDefault(_, blob({ mode: 'json' }).default({}), `'{}'`); + const res7 = await diffDefault(_, blob({ mode: 'json' }).default([]), `'[]'`); + const res8 = await diffDefault(_, blob({ mode: 'json' }).default([1, 2, 3]), `'[1,2,3]'`); + const res9 = await diffDefault(_, blob({ mode: 'json' }).default({ key: 'value' }), `'{"key":"value"}'`); + // raw default sql for the line below: '{"key":"val'\ue"}' + const res10 = await diffDefault(_, blob({ mode: 'json' }).default({ key: "val'\\ue" }), `'{"key":"val''\\\\ue"}'`); + + const res11 = await diffDefault( + _, + blob({ mode: 'json' }).default({ key: 9223372036854775807n }), + `'{"key":9223372036854775807}'`, + ); + const res12 = await diffDefault( + _, + blob({ mode: 'json' }).default(sql`'{"key":9223372036854775807}'`), + `'{"key":9223372036854775807}'`, + ); + const res13 = await diffDefault( + _, + blob({ mode: 'json' }).default([9223372036854775807n, 9223372036854775806n]), + `'[9223372036854775807,9223372036854775806]'`, + ); + const res14 = await diffDefault( + _, + blob({ mode: 'json' }).default({ key: 'value\\\'"' }), + `'{"key":"value\\\\''\\""}'`, + ); + + expect.soft(res1).toStrictEqual([]); + expect.soft(res2).toStrictEqual([]); + expect.soft(res3).toStrictEqual([]); + expect.soft(res4).toStrictEqual([]); + expect.soft(res5).toStrictEqual([]); + expect.soft(res6).toStrictEqual([]); + expect.soft(res7).toStrictEqual([]); + expect.soft(res8).toStrictEqual([]); + expect.soft(res9).toStrictEqual([]); + expect.soft(res10).toStrictEqual([]); + expect.soft(res11).toStrictEqual([]); + expect.soft(res12).toStrictEqual([]); + expect.soft(res13).toStrictEqual([]); + expect.soft(res14).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/sqlite-generated.test.ts b/drizzle-kit/tests/sqlite/sqlite-generated.test.ts similarity index 51% rename from drizzle-kit/tests/sqlite-generated.test.ts rename to drizzle-kit/tests/sqlite/sqlite-generated.test.ts index 2d3ceed978..bb7f1a7a40 100644 --- a/drizzle-kit/tests/sqlite-generated.test.ts +++ b/drizzle-kit/tests/sqlite/sqlite-generated.test.ts @@ -1,3 +1,8 @@ +import { SQL, sql } from 'drizzle-orm'; +import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + // 1. add stored column to existing table - not supported + // 2. add virtual column to existing table - supported + // 3. create table with stored/virtual columns(pg, mysql, sqlite) @@ -6,10 +11,22 @@ // 6. drop stored/virtual expression -> supported with drop+add column // 7. alter generated expession -> stored not supported, virtual supported -import { SQL, sql } from 'drizzle-orm'; -import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; -import { expect, test } from 'vitest'; -import { diffTestSchemasSqlite } from './schemaDiffer'; +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(() => { + _ = prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); // should generate 0 statements + warning/error in console test('generated as callback: add column with stored generated constraint', async () => { @@ -32,14 +49,30 @@ test('generated as callback: add column with stored generated constraint', async }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`id2` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `id2`, `name`) SELECT `id`, `id2`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: add column with virtual generated constraint', async () => { @@ -62,48 +95,34 @@ test('generated as callback: add column with virtual generated constraint', asyn }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: '("name" || \'hello\')', - type: 'virtual', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - referenceData: undefined, - tableName: 'users', - type: 'sqlite_alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: add generated constraint to an exisiting column as stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; + const to = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() @@ -113,14 +132,29 @@ test('generated as callback: add generated constraint to an exisiting column as }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'to add\') STORED NOT NULL\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`) SELECT `id`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { @@ -145,34 +179,21 @@ test('generated as callback: add generated constraint to an exisiting column as }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("name" || \'to add\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'to add\') VIRTUAL NOT NULL;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: drop generated constraint as stored', async () => { @@ -196,31 +217,21 @@ test('generated as callback: drop generated constraint as stored', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: drop generated constraint as virtual', async () => { @@ -244,31 +255,21 @@ test('generated as callback: drop generated constraint as virtual', async () => }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); // no way to do it @@ -276,7 +277,6 @@ test('generated as callback: change generated constraint type from virtual to st const from = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, @@ -287,7 +287,6 @@ test('generated as callback: change generated constraint type from virtual to st const to = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, @@ -296,14 +295,29 @@ test('generated as callback: change generated constraint type from virtual to st }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`) SELECT `id`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: change generated constraint type from stored to virtual', async () => { @@ -330,34 +344,21 @@ test('generated as callback: change generated constraint type from stored to vir }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("name" || \'hello\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); // not supported @@ -385,14 +386,30 @@ test('generated as callback: change stored generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`id2` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `id2`, `name`) SELECT `id`, `id2`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: change virtual generated constraint', async () => { @@ -417,34 +434,21 @@ test('generated as callback: change virtual generated constraint', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("name" || \'hello\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: add table with column with stored generated constraint', async () => { @@ -461,59 +465,20 @@ test('generated as callback: add table with column with stored generated constra }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columns: [ - { - autoincrement: false, - name: 'id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'id2', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: { - as: '("name" || \'hello\')', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'sqlite_create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n);\n', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as callback: add table with column with virtual generated constraint', async () => { @@ -530,59 +495,20 @@ test('generated as callback: add table with column with virtual generated constr }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columns: [ - { - autoincrement: false, - name: 'id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'id2', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: { - as: '("name" || \'hello\')', - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'sqlite_create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL\n);\n', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); // --- @@ -601,20 +527,36 @@ test('generated as sql: add column with stored generated constraint', async () = id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || \'hello\' || 'hello'`, + sql`"name" || \'hello\' || 'hello'`, { mode: 'stored' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`id2` integer,\n' + + '\t`name` text,\n' + + "\t`gen_name` text GENERATED ALWAYS AS (\"name\" || 'hello' || 'hello') STORED\n" + + ');\n', + 'INSERT INTO `__new_users`(`id`, `id2`, `name`) SELECT `id`, `id2`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: add column with virtual generated constraint', async () => { @@ -631,39 +573,26 @@ test('generated as sql: add column with virtual generated constraint', async () id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || \'hello\'`, + sql`"name" || \'hello\'`, { mode: 'virtual' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: '("users"."name" || \'hello\')', - type: 'virtual', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - referenceData: undefined, - tableName: 'users', - type: 'sqlite_alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', - ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: add generated constraint to an exisiting column as stored', async () => { @@ -682,20 +611,36 @@ test('generated as sql: add generated constraint to an exisiting column as store name: text('name'), generatedName: text('gen_name') .notNull() - .generatedAlwaysAs(sql`"users"."name" || 'to add'`, { + .generatedAlwaysAs(sql`"name" || 'to add'`, { mode: 'stored', }), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`id2` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'to add\') STORED NOT NULL\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `id2`, `name`) SELECT `id`, `id2`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { @@ -714,40 +659,27 @@ test('generated as sql: add generated constraint to an exisiting column as virtu name: text('name'), generatedName: text('gen_name') .notNull() - .generatedAlwaysAs(sql`"users"."name" || 'to add'`, { + .generatedAlwaysAs(sql`"name" || 'to add'`, { mode: 'virtual', }), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("users"."name" || \'to add\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', - 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'to add\') VIRTUAL NOT NULL;', - ]); + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'to add\') VIRTUAL NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: drop generated constraint as stored', async () => { @@ -757,7 +689,7 @@ test('generated as sql: drop generated constraint as stored', async () => { id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || 'to delete'`, + sql`"name" || 'to delete'`, { mode: 'stored' }, ), }), @@ -771,31 +703,21 @@ test('generated as sql: drop generated constraint as stored', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: drop generated constraint as virtual', async () => { @@ -805,7 +727,7 @@ test('generated as sql: drop generated constraint as virtual', async () => { id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || 'to delete'`, + sql`"name" || 'to delete'`, { mode: 'virtual' }, ), }), @@ -819,31 +741,21 @@ test('generated as sql: drop generated constraint as virtual', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); // no way to do it @@ -851,9 +763,8 @@ test('generated as sql: change generated constraint type from virtual to stored' const from = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { + generatedName: text('gen_name').generatedAlwaysAs(sql`"name"`, { mode: 'virtual', }), }), @@ -861,23 +772,37 @@ test('generated as sql: change generated constraint type from virtual to stored' const to = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || 'hello'`, + sql`"name" || 'hello'`, { mode: 'stored' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`) SELECT `id`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: change generated constraint type from stored to virtual', async () => { @@ -886,7 +811,7 @@ test('generated as sql: change generated constraint type from stored to virtual' id: int('id'), id2: int('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { + generatedName: text('gen_name').generatedAlwaysAs(sql`"name"`, { mode: 'stored', }), }), @@ -897,40 +822,27 @@ test('generated as sql: change generated constraint type from stored to virtual' id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || 'hello'`, + sql`"name" || 'hello'`, { mode: 'virtual' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("users"."name" || \'hello\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', - 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', - ]); + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); // not supported @@ -940,7 +852,7 @@ test('generated as sql: change stored generated constraint', async () => { id: int('id'), id2: int('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { + generatedName: text('gen_name').generatedAlwaysAs(sql`"name"`, { mode: 'stored', }), }), @@ -951,20 +863,36 @@ test('generated as sql: change stored generated constraint', async () => { id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || 'hello'`, + sql`"name" || 'hello'`, { mode: 'stored' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`id2` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `id2`, `name`) SELECT `id`, `id2`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: change virtual generated constraint', async () => { @@ -973,7 +901,7 @@ test('generated as sql: change virtual generated constraint', async () => { id: int('id'), id2: int('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`), + generatedName: text('gen_name').generatedAlwaysAs(sql`"name"`), }), }; const to = { @@ -982,39 +910,26 @@ test('generated as sql: change virtual generated constraint', async () => { id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || 'hello'`, + sql`"name" || 'hello'`, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("users"."name" || \'hello\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', - 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', - ]); + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: add table with column with stored generated constraint', async () => { @@ -1025,65 +940,26 @@ test('generated as sql: add table with column with stored generated constraint', id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || 'hello'`, + sql`"name" || 'hello'`, { mode: 'stored' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columns: [ - { - autoincrement: false, - name: 'id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'id2', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: { - as: '("users"."name" || \'hello\')', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'sqlite_create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', - ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as sql: add table with column with virtual generated constraint', async () => { @@ -1094,65 +970,26 @@ test('generated as sql: add table with column with virtual generated constraint' id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - sql`"users"."name" || 'hello'`, + sql`"name" || 'hello'`, { mode: 'virtual' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columns: [ - { - autoincrement: false, - name: 'id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'id2', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: { - as: '("users"."name" || \'hello\')', - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'sqlite_create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL\n);\n', - ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); // --- @@ -1161,30 +998,43 @@ test('generated as string: add column with stored generated constraint', async ( const from = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), }), }; const to = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || \'hello\'`, + `"name" || \'hello\'`, { mode: 'stored' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`) SELECT `id`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: add column with virtual generated constraint', async () => { @@ -1201,39 +1051,26 @@ test('generated as string: add column with virtual generated constraint', async id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || \'hello\'`, + `"name" || \'hello\'`, { mode: 'virtual' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - column: { - generated: { - as: '("users"."name" || \'hello\')', - type: 'virtual', - }, - autoincrement: false, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - referenceData: undefined, - tableName: 'users', - type: 'sqlite_alter_table_add_column', - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', - ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: add generated constraint to an exisiting column as stored', async () => { @@ -1252,20 +1089,36 @@ test('generated as string: add generated constraint to an exisiting column as st name: text('name'), generatedName: text('gen_name') .notNull() - .generatedAlwaysAs(`"users"."name" || 'to add'`, { + .generatedAlwaysAs(`"name" || 'to add'`, { mode: 'stored', }), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`id2` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'to add\') STORED NOT NULL\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `id2`, `name`) SELECT `id`, `id2`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: add generated constraint to an exisiting column as virtual', async () => { @@ -1284,40 +1137,27 @@ test('generated as string: add generated constraint to an exisiting column as vi name: text('name'), generatedName: text('gen_name') .notNull() - .generatedAlwaysAs(`"users"."name" || 'to add'`, { + .generatedAlwaysAs(`"name" || 'to add'`, { mode: 'virtual', }), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("users"."name" || \'to add\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: true, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_set_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', - 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'to add\') VIRTUAL NOT NULL;', - ]); + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'to add\') VIRTUAL NOT NULL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: drop generated constraint as stored', async () => { @@ -1327,7 +1167,7 @@ test('generated as string: drop generated constraint as stored', async () => { id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || 'to delete'`, + `"name" || 'to delete'`, { mode: 'stored' }, ), }), @@ -1341,31 +1181,21 @@ test('generated as string: drop generated constraint as stored', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: drop generated constraint as virtual', async () => { @@ -1375,7 +1205,7 @@ test('generated as string: drop generated constraint as virtual', async () => { id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || 'to delete'`, + `"name" || 'to delete'`, { mode: 'virtual' }, ), }), @@ -1389,31 +1219,21 @@ test('generated as string: drop generated constraint as virtual', async () => { }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: undefined, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_drop_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', - ]); + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); // no way to do it @@ -1423,7 +1243,7 @@ test('generated as string: change generated constraint type from virtual to stor id: int('id'), id2: int('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { + generatedName: text('gen_name').generatedAlwaysAs(`"name"`, { mode: 'virtual', }), }), @@ -1434,20 +1254,36 @@ test('generated as string: change generated constraint type from virtual to stor id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || 'hello'`, + `"name" || 'hello'`, { mode: 'stored' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`id2` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `id2`, `name`) SELECT `id`, `id2`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: change generated constraint type from stored to virtual', async () => { @@ -1456,7 +1292,7 @@ test('generated as string: change generated constraint type from stored to virtu id: int('id'), id2: int('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { + generatedName: text('gen_name').generatedAlwaysAs(`"name"`, { mode: 'stored', }), }), @@ -1467,40 +1303,27 @@ test('generated as string: change generated constraint type from stored to virtu id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || 'hello'`, + `"name" || 'hello'`, { mode: 'virtual' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("users"."name" || \'hello\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', - 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', - ]); + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); // not supported @@ -1508,9 +1331,8 @@ test('generated as string: change stored generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { + generatedName: text('gen_name').generatedAlwaysAs(`"name"`, { mode: 'stored', }), }), @@ -1518,23 +1340,37 @@ test('generated as string: change stored generated constraint', async () => { const to = { users: sqliteTable('users', { id: int('id'), - id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || 'hello'`, + `"name" || 'hello'`, { mode: 'stored' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([]); - expect(sqlStatements).toStrictEqual([]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer,\n' + + '\t`name` text,\n' + + '\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`) SELECT `id`, `name` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: change virtual generated constraint', async () => { @@ -1543,7 +1379,7 @@ test('generated as string: change virtual generated constraint', async () => { id: int('id'), id2: int('id2'), name: text('name'), - generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`), + generatedName: text('gen_name').generatedAlwaysAs(`"name"`), }), }; const to = { @@ -1552,39 +1388,26 @@ test('generated as string: change virtual generated constraint', async () => { id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || 'hello'`, + `"name" || 'hello'`, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columnAutoIncrement: false, - columnDefault: undefined, - columnGenerated: { - as: '("users"."name" || \'hello\')', - type: 'virtual', - }, - columnName: 'gen_name', - columnNotNull: false, - columnOnUpdate: undefined, - columnPk: false, - newDataType: 'text', - schema: '', - tableName: 'users', - type: 'alter_table_alter_column_alter_generated', - }, - ]); - expect(sqlStatements).toStrictEqual([ + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', - 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', - ]); + 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: add table with column with stored generated constraint', async () => { @@ -1595,65 +1418,26 @@ test('generated as string: add table with column with stored generated constrain id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || 'hello'`, + `"name" || 'hello'`, { mode: 'stored' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columns: [ - { - autoincrement: false, - name: 'id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'id2', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: { - as: '("users"."name" || \'hello\')', - type: 'stored', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'sqlite_create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', - ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); test('generated as string: add table with column with virtual generated constraint', async () => { @@ -1664,63 +1448,24 @@ test('generated as string: add table with column with virtual generated constrai id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( - `"users"."name" || 'hello'`, + `"name" || 'hello'`, { mode: 'virtual' }, ), }), }; - const { statements, sqlStatements } = await diffTestSchemasSqlite( + const { sqlStatements: st } = await diff( from, to, [], ); - expect(statements).toStrictEqual([ - { - columns: [ - { - autoincrement: false, - name: 'id', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'id2', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'name', - notNull: false, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - generated: { - as: '("users"."name" || \'hello\')', - type: 'virtual', - }, - name: 'gen_name', - notNull: false, - primaryKey: false, - type: 'text', - }, - ], - compositePKs: [], - referenceData: [], - tableName: 'users', - type: 'sqlite_create_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]); - expect(sqlStatements).toStrictEqual([ - 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL\n);\n', - ]); + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); }); diff --git a/drizzle-kit/tests/sqlite/sqlite-tables.test.ts b/drizzle-kit/tests/sqlite/sqlite-tables.test.ts new file mode 100644 index 0000000000..9c2d311d45 --- /dev/null +++ b/drizzle-kit/tests/sqlite/sqlite-tables.test.ts @@ -0,0 +1,963 @@ +import { sql } from 'drizzle-orm'; +import { + AnySQLiteColumn, + blob, + foreignKey, + index, + int, + integer, + numeric, + primaryKey, + real, + sqliteTable, + text, + unique, + uniqueIndex, +} from 'drizzle-orm/sqlite-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(() => { + _ = prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('add table #1', async () => { + const to = { + users: sqliteTable('users', { id: int() }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['CREATE TABLE `users` (\n\t`id` integer\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #2', async () => { + const to = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` integer PRIMARY KEY AUTOINCREMENT\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #3', async () => { + const to = { + users: sqliteTable('users', { + id: int('id'), + }, (t) => [primaryKey({ + name: 'users_pk', + columns: [t.id], + })]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` integer,\n\tCONSTRAINT \`users_pk\` PRIMARY KEY(\`id\`)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #4', async () => { + const to = { + users: sqliteTable('users', { id: int() }), + posts: sqliteTable('posts', { id: int() }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` integer\n);\n', + 'CREATE TABLE `posts` (\n\t`id` integer\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #5', async () => { + const to = { + users: sqliteTable('users', { + id1: integer(), + id2: integer(), + }, (t) => [ + primaryKey({ columns: [t.id1, t.id2] }), + ]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n' + + '\t`id1` integer,\n' + + '\t`id2` integer,\n' + + '\tCONSTRAINT \`users_pk\` PRIMARY KEY(`id1`, `id2`)\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #6', async () => { + const from = { + users1: sqliteTable('users1', { id: int() }), + }; + + const to = { + users2: sqliteTable('users2', { id: int() }), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['CREATE TABLE `users2` (\n\t`id` integer\n);\n', 'DROP TABLE `users1`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #7', async () => { + const from = { + users1: sqliteTable('users1', { id: int() }), + }; + + const to = { + users: sqliteTable('users', { id: int() }), + users2: sqliteTable('users2', { id: int() }), + }; + + const renames = ['users1->users2']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['CREATE TABLE `users` (\n\t`id` integer\n);\n', 'ALTER TABLE `users1` RENAME TO `users2`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #8', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('reportee_id').references((): AnySQLiteColumn => users.id), + }); + + const to = { + users, + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`reportee_id` integer,\n' + + '\tCONSTRAINT `fk_users_reportee_id_users_id_fk` FOREIGN KEY (`reportee_id`) REFERENCES `users`(`id`)\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #9', async () => { + const to = { + users: sqliteTable( + 'users', + { + id: int('id').primaryKey({ autoIncrement: true }), + reporteeId: int('reportee_id'), + }, + (t) => [index('reportee_idx').on(t.reporteeId)], + ), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`reportee_id` integer\n' + + ');\n', + 'CREATE INDEX `reportee_idx` ON `users` (`reportee_id`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #10', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default({}), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ["CREATE TABLE `table` (\n\t`json` text DEFAULT '{}'\n);\n"]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #11', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default([]), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ["CREATE TABLE `table` (\n\t`json` text DEFAULT '[]'\n);\n"]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #12', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default([1, 2, 3]), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ["CREATE TABLE `table` (\n\t`json` text DEFAULT '[1,2,3]'\n);\n"]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #13', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default({ key: 'value' }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['CREATE TABLE `table` (\n\t`json` text DEFAULT \'{"key":"value"}\'\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table #14', async () => { + const to = { + users: sqliteTable('table', { + json: text('json', { mode: 'json' }).default({ + key: 'value', + arr: [1, 2, 3], + }), + }), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['CREATE TABLE `table` (\n\t`json` text DEFAULT \'{"key":"value","arr":[1,2,3]}\'\n);\n']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table #1', async () => { + const from = { + users: sqliteTable('table', { + id: integer(), + }), + }; + const to = { + users: sqliteTable('table1', { + id: integer(), + }), + }; + + const renames = ['table->table1']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['ALTER TABLE `table` RENAME TO `table1`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table #2', async () => { + const profiles = sqliteTable('profiles', { + id: integer().primaryKey({ autoIncrement: true }), + }); + + const from = { + profiles, + users: sqliteTable( + 'table', + { + id: integer().primaryKey({ autoIncrement: true }), + profileId: integer(), + }, + (t) => [foreignKey({ + name: 'table_profileId', + columns: [t.id], + foreignColumns: [profiles.id], + })], + ), + }; + + const to = { + profiles, + users: sqliteTable( + 'table1', + { + id: integer().primaryKey({ autoIncrement: true }), + profileId: integer(), + }, + (t) => [foreignKey({ + name: 'table_profileId', + columns: [t.id], + foreignColumns: [profiles.id], + })], + ), + }; + + const renames = ['table->table1']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['ALTER TABLE `table` RENAME TO `table1`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename table #3', async () => { + const profiles = sqliteTable('profiles', { + id: integer().primaryKey({ autoIncrement: true }), + }); + + const from = { + profiles, + users: sqliteTable('table', { + id: integer().primaryKey({ autoIncrement: true }), + profileId: integer().references(() => profiles.id), + }), + }; + + const to = { + profiles, + users: sqliteTable('table1', { + id: integer().primaryKey({ autoIncrement: true }), + profileId: integer().references(() => profiles.id), + }), + }; + + const renames = ['table->table1']; + const { sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = ['ALTER TABLE `table` RENAME TO `table1`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add table with indexes', async () => { + const from = {}; + + const to = { + users: sqliteTable( + 'users', + { + id: int('id').primaryKey(), + name: text('name'), + email: text('email'), + }, + (t) => [ + uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), + index('indexExpr').on(sql`(lower(${t.email}))`), + index('indexExprMultiple').on( + sql`(lower(${t.email}))`, + sql`(lower(${t.email}))`, + ), + + uniqueIndex('uniqueCol').on(t.email), + index('indexCol').on(t.email), + index('indexColMultiple').on(t.email, t.email), + + index('indexColExpr').on( + sql`(lower(${t.email}))`, + t.email, + ), + ], + ), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `users` (\n\t`id` integer PRIMARY KEY,\n\t`name` text,\n\t`email` text\n);\n', + 'CREATE UNIQUE INDEX `uniqueExpr` ON `users` ((lower("email")));', + 'CREATE INDEX `indexExpr` ON `users` ((lower("email")));', + 'CREATE INDEX `indexExprMultiple` ON `users` ((lower("email")),(lower("email")));', + 'CREATE UNIQUE INDEX `uniqueCol` ON `users` (`email`);', + 'CREATE INDEX `indexCol` ON `users` (`email`);', + 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', + 'CREATE INDEX `indexColExpr` ON `users` ((lower("email")),`email`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('composite primary key', async () => { + const from = {}; + const to = { + table: sqliteTable('works_to_creators', { + workId: int('work_id').notNull(), + creatorId: int('creator_id').notNull(), + classification: text('classification').notNull(), + }, (t) => [primaryKey({ + columns: [t.workId, t.creatorId, t.classification], + })]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `works_to_creators` (\n\t`work_id` integer NOT NULL,\n\t`creator_id` integer NOT NULL,\n\t`classification` text NOT NULL,\n\tCONSTRAINT \`works_to_creators_pk\` PRIMARY KEY(`work_id`, `creator_id`, `classification`)\n);\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('add column before creating unique constraint', async () => { + const from = { + table: sqliteTable('table', { + id: int('id').primaryKey(), + }), + }; + const to = { + table: sqliteTable('table', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }, (t) => [unique('uq').on(t.name)]), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'ALTER TABLE `table` ADD `name` text NOT NULL;', + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_table` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`name` text NOT NULL CONSTRAINT \`uq\` UNIQUE\n' + + ');\n', + 'INSERT INTO `__new_table`(`id`) SELECT `id` FROM `table`;', + 'DROP TABLE `table`;', + 'ALTER TABLE `__new_table` RENAME TO `table`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create table with unique in third param and in column config', async () => { + const to = { + table: sqliteTable('table', { + id: int('id').unique(), + name: text('name').notNull(), + }, (t) => [unique('uq').on(t.name)]), + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'CREATE TABLE `table` (\n' + + '\t`id` integer UNIQUE,\n' + + '\t`name` text NOT NULL CONSTRAINT \`uq\` UNIQUE\n' + + ');\n', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('optional db aliases (snake case)', async () => { + const from = {}; + + const t1 = sqliteTable( + 't1', + { + t1Id1: int().notNull().primaryKey(), + t1Col2: int().notNull(), + t1Col3: int().notNull(), + t2Ref: int().notNull().references(() => t2.t2Id), + t1Uni: int().notNull(), + t1UniIdx: int().notNull(), + t1Idx: int().notNull(), + }, + (table) => [ + unique('t1_uni').on(table.t1Uni), + uniqueIndex('t1_uni_idx').on(table.t1UniIdx), + index('t1_idx').on(table.t1Idx), + foreignKey({ + columns: [table.t1Col2, table.t1Col3], + foreignColumns: [t3.t3Id1, t3.t3Id2], + }), + ], + ); + + const t2 = sqliteTable( + 't2', + { + t2Id: int().primaryKey({ autoIncrement: true }), + }, + ); + + const t3 = sqliteTable( + 't3', + { + t3Id1: int(), + t3Id2: int(), + }, + (table) => [primaryKey({ + columns: [table.t3Id1, table.t3Id2], + })], + ); + + const to = { + t1, + t2, + t3, + }; + + const casing = 'snake_case'; + const { sqlStatements: st } = await diff(from, to, [], casing); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, casing }); + + const st0: string[] = [ + 'CREATE TABLE `t1` (\n' + + '\t`t1_id1` integer PRIMARY KEY,\n' + + '\t`t1_col2` integer NOT NULL,\n' + + '\t`t1_col3` integer NOT NULL,\n' + + '\t`t2_ref` integer NOT NULL,\n' + + '\t`t1_uni` integer NOT NULL CONSTRAINT \`t1_uni\` UNIQUE,\n' + + '\t`t1_uni_idx` integer NOT NULL,\n' + + '\t`t1_idx` integer NOT NULL,\n' + + '\tCONSTRAINT `fk_t1_t2_ref_t2_t2_id_fk` FOREIGN KEY (`t2_ref`) REFERENCES `t2`(`t2_id`),\n' + + '\tCONSTRAINT `fk_t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fk` FOREIGN KEY (`t1_col2`,`t1_col3`) REFERENCES `t3`(`t3_id1`,`t3_id2`)\n' + + ');\n', + 'CREATE TABLE `t2` (\n\t`t2_id` integer PRIMARY KEY AUTOINCREMENT\n);\n', + 'CREATE TABLE `t3` (\n' + + '\t`t3_id1` integer,\n' + + '\t`t3_id2` integer,\n' + + '\tCONSTRAINT \`t3_pk\` PRIMARY KEY(`t3_id1`, `t3_id2`)\n' + + ');\n', + 'CREATE UNIQUE INDEX `t1_uni_idx` ON `t1` (`t1_uni_idx`);', + 'CREATE INDEX `t1_idx` ON `t1` (`t1_idx`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('optional db aliases (camel case)', async () => { + const from = {}; + + const t1 = sqliteTable( + 't1', + { + t1_id1: int().notNull().primaryKey(), + t1_col2: int().notNull(), + t1_col3: int().notNull(), + t2_ref: int().notNull().references(() => t2.t2_id), + t1_uni: int().notNull(), + t1_uni_idx: int().notNull(), + t1_idx: int().notNull(), + }, + (table) => [ + unique('t1Uni').on(table.t1_uni), + uniqueIndex('t1UniIdx').on(table.t1_uni_idx), + index('t1Idx').on(table.t1_idx), + foreignKey({ + columns: [table.t1_col2, table.t1_col3], + foreignColumns: [t3.t3_id1, t3.t3_id2], + }), + ], + ); + + const t2 = sqliteTable( + 't2', + { + t2_id: int().primaryKey({ autoIncrement: true }), + }, + ); + + const t3 = sqliteTable( + 't3', + { + t3_id1: int(), + t3_id2: int(), + }, + (table) => [primaryKey({ + columns: [table.t3_id1, table.t3_id2], + })], + ); + + const to = { + t1, + t2, + t3, + }; + + const casing = 'camelCase'; + const { sqlStatements: st } = await diff(from, to, [], casing); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, casing }); + + const st0: string[] = [ + 'CREATE TABLE `t1` (\n' + + '\t`t1Id1` integer PRIMARY KEY,\n' + + '\t`t1Col2` integer NOT NULL,\n' + + '\t`t1Col3` integer NOT NULL,\n' + + '\t`t2Ref` integer NOT NULL,\n' + + '\t`t1Uni` integer NOT NULL CONSTRAINT `t1Uni` UNIQUE,\n' + + '\t`t1UniIdx` integer NOT NULL,\n' + + '\t`t1Idx` integer NOT NULL,\n' + + '\tCONSTRAINT `fk_t1_t2Ref_t2_t2Id_fk` FOREIGN KEY (`t2Ref`) REFERENCES `t2`(`t2Id`),\n' + + '\tCONSTRAINT `fk_t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fk` FOREIGN KEY (`t1Col2`,`t1Col3`) REFERENCES `t3`(`t3Id1`,`t3Id2`)\n' + + ');\n', + 'CREATE TABLE `t2` (\n\t`t2Id` integer PRIMARY KEY AUTOINCREMENT\n);\n', + 'CREATE TABLE `t3` (\n' + + '\t`t3Id1` integer,\n' + + '\t`t3Id2` integer,\n' + + '\tCONSTRAINT `t3_pk` PRIMARY KEY(`t3Id1`, `t3Id2`)\n' + + ');\n', + 'CREATE UNIQUE INDEX `t1UniIdx` ON `t1` (`t1UniIdx`);', + 'CREATE INDEX `t1Idx` ON `t1` (`t1Idx`);', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('nothing changed in schema', async (t) => { + const users = sqliteTable('users', { + id: integer('id').primaryKey().notNull(), + name: text('name').notNull(), + email: text('email'), + textJson: text('text_json', { mode: 'json' }), + blobJon: blob('blob_json', { mode: 'json' }), + blobBigInt: blob('blob_bigint', { mode: 'bigint' }), + numeric: numeric('numeric'), + createdAt: integer('created_at', { mode: 'timestamp' }), + createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), + real: real('real'), + text: text('text', { length: 255 }), + role: text('role', { enum: ['admin', 'user'] }).default('user'), + isConfirmed: integer('is_confirmed', { + mode: 'boolean', + }), + }); + + const schema1 = { + users, + customers: sqliteTable('customers', { + id: integer('id').primaryKey(), + address: text('address').notNull(), + isConfirmed: integer('is_confirmed', { mode: 'boolean' }), + registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) + .notNull() + .$defaultFn(() => new Date()), + userId: integer('user_id') + .references(() => users.id) + .notNull(), + }), + + posts: sqliteTable('posts', { + id: integer('id').primaryKey(), + content: text('content'), + authorId: integer('author_id'), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema1, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema1 }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); + expect(phints).toStrictEqual([]); +}); + +test('create table with custom name references', async (t) => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name').notNull(), + }); + + const schema1 = { + users, + posts: sqliteTable('posts', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + userId: int('user_id'), + }, (t) => [foreignKey({ + columns: [t.id], + foreignColumns: [users.id], + name: 'custom_name_fk', + })]), + }; + + const schema2 = { + users, + posts: sqliteTable('posts', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + userId: int('user_id'), + }, (t) => [foreignKey({ + columns: [t.id], + foreignColumns: [users.id], + name: 'custom_name_fk', + })]), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + expect(st).toStrictEqual([]); + expect(pst).toStrictEqual([]); + expect(phints).toStrictEqual([]); +}); + +test('rename table and change data type', async (t) => { + const schema1 = { + users: sqliteTable('old_users', { + id: int('id').primaryKey({ autoIncrement: true }), + age: text('age'), + }), + }; + + const schema2 = { + users: sqliteTable('new_users', { + id: int('id').primaryKey({ autoIncrement: true }), + age: integer('age'), + }), + }; + + const renames = ['old_users->new_users']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER TABLE `old_users` RENAME TO `new_users`;', + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_new_users` (\n' + + '\t`id` integer PRIMARY KEY AUTOINCREMENT,\n' + + '\t`age` integer\n' + + ');\n', + 'INSERT INTO `__new_new_users`(`id`, `age`) SELECT `id`, `age` FROM `new_users`;', + 'DROP TABLE `new_users`;', + 'ALTER TABLE `__new_new_users` RENAME TO `new_users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(phints).toStrictEqual([]); +}); + +test('recreate table with nested references', async (t) => { + let users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + age: integer('age'), + }); + let subscriptions = sqliteTable('subscriptions', { + id: int('id').primaryKey({ autoIncrement: true }), + userId: integer('user_id').references(() => users.id), + customerId: text('customer_id'), + }); + const schema1 = { + users: users, + subscriptions: subscriptions, + subscriptionMetadata: sqliteTable('subscriptions_metadata', { + id: int('id').primaryKey({ autoIncrement: true }), + subscriptionId: text('subscription_id').references( + () => subscriptions.id, + ), + }), + }; + + users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + }); + const schema2 = { + users: users, + subscriptions: subscriptions, + subscriptionMetadata: sqliteTable('subscriptions_metadata', { + id: int('id').primaryKey({ autoIncrement: true }), + subscriptionId: text('subscription_id').references( + () => subscriptions.id, + ), + }), + }; + + const renames = ['users.name->users.age']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'PRAGMA foreign_keys=OFF;', + `CREATE TABLE \`__new_users\` ( +\t\`id\` integer PRIMARY KEY, +\t\`name\` text, +\t\`age\` integer +);\n`, + `INSERT INTO \`__new_users\`(\`id\`, \`name\`, \`age\`) SELECT \`id\`, \`name\`, \`age\` FROM \`users\`;`, + `DROP TABLE \`users\`;`, + `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(phints).toStrictEqual([]); +}); + +test('recreate table with added column not null and without default with data', async (t) => { + const schema1 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + name: text('name'), + age: integer('age'), + }), + }; + + const schema2 = { + users: sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: false }), + name: text('name'), + age: integer('age'), + newColumn: text('new_column').notNull(), + }), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + await db.run(`INSERT INTO \`users\` ("name", "age") VALUES ('drizzle', 12)`); + await db.run(`INSERT INTO \`users\` ("name", "age") VALUES ('turso', 12)`); + + const { sqlStatements: pst, hints: phints, error } = await push({ + db, + to: schema2, + expectError: true, + force: true, + }); + + const st0: string[] = [ + 'ALTER TABLE `users` ADD `new_column` text NOT NULL;', + 'PRAGMA foreign_keys=OFF;', + 'CREATE TABLE `__new_users` (\n' + + '\t`id` integer PRIMARY KEY,\n' + + '\t`name` text,\n' + + '\t`age` integer,\n' + + '\t`new_column` text NOT NULL\n' + + ');\n', + 'INSERT INTO `__new_users`(`id`, `name`, `age`) SELECT `id`, `name`, `age` FROM `users`;', + 'DROP TABLE `users`;', + 'ALTER TABLE `__new_users` RENAME TO `users`;', + 'PRAGMA foreign_keys=ON;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(phints[0].statement).toStrictEqual('DELETE FROM "users" where true;'); + expect(error).toBeNull(); +}); + +test('rename table with composite primary key', async () => { + const productsCategoriesTable = (tableName: string) => { + return sqliteTable(tableName, { + productId: text('product_id').notNull(), + categoryId: text('category_id').notNull(), + }, (t) => [primaryKey({ + columns: [t.productId, t.categoryId], + })]); + }; + + const schema1 = { + table: productsCategoriesTable('products_categories'), + }; + const schema2 = { + test: productsCategoriesTable('products_to_categories'), + }; + + const renames = ['products_categories->products_to_categories']; + const { sqlStatements: st } = await diff(schema1, schema2, renames); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2, renames }); + + const st0: string[] = [ + 'ALTER TABLE `products_categories` RENAME TO `products_to_categories`;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(phints).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/sqlite/sqlite-views.test.ts b/drizzle-kit/tests/sqlite/sqlite-views.test.ts new file mode 100644 index 0000000000..9165cfbfd8 --- /dev/null +++ b/drizzle-kit/tests/sqlite/sqlite-views.test.ts @@ -0,0 +1,288 @@ +import { sql } from 'drizzle-orm'; +import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { diff, prepareTestDatabase, push, TestDatabase } from './mocks'; + +// @vitest-environment-options {"max-concurrency":1} +let _: TestDatabase; +let db: TestDatabase['db']; + +beforeAll(() => { + _ = prepareTestDatabase(); + db = _.db; +}); + +afterAll(async () => { + await _.close(); +}); + +beforeEach(async () => { + await _.clear(); +}); + +test('create view', async () => { + const users = sqliteTable('users', { id: int('id').default(1) }); + const view = sqliteView('view').as((qb) => qb.select().from(users)); + const to = { + users: users, + testView: view, + }; + + const { sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + `CREATE TABLE \`users\` (\n\t\`id\` integer DEFAULT 1\n);\n`, + `CREATE VIEW \`view\` AS select "id" from "users";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view #1', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [`DROP VIEW \`view\`;`]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('drop view #2', async () => { + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const schema2 = { + test: table, + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = ['DROP VIEW \`view\`;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(phints).toStrictEqual([]); +}); + +test('alter view ".as" #1', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users WHERE users.id = 1`), + }; + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'DROP VIEW `view`;', + 'CREATE VIEW `view` AS SELECT * FROM users WHERE users.id = 1;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual([]); // ignore AS sql for 'push' +}); + +test('alter view ".as" #2', async () => { + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), + }; + + const schema2 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = ['DROP VIEW `view`;', 'CREATE VIEW `view` AS select "id" from "test";']; + expect.soft(st).toStrictEqual(st0); + expect.soft(pst).toStrictEqual([]); // ignore sql change for push + + expect(phints).toStrictEqual([]); +}); + +test('create view with existing flag', async () => { + const view = sqliteView('view', {}).existing(); + const to = { + testView: view, + }; + + const { statements, sqlStatements: st } = await diff({}, to, []); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(statements.length).toBe(0); +}); + +test('drop view with existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + }; + + const { statements, sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(statements.length).toBe(0); +}); + +test('rename view with existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + testView: sqliteView('new_view', { id: int('id') }).existing(), + }; + + const renames = ['view->new_view']; + const { statements, sqlStatements: st } = await diff(from, to, renames); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to, renames }); + + const st0: string[] = []; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + + expect(statements.length).toBe(0); +}); + +test('rename view and drop existing flag', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).existing(), + }; + const to = { + users, + testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = ['CREATE VIEW `new_view` AS SELECT * FROM users;']; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('rename view and alter ".as"', async () => { + const users = sqliteTable('users', { + id: int('id').primaryKey({ autoIncrement: true }), + }); + + const from = { + users: users, + testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), + }; + const to = { + users, + testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users WHERE 1=1`), + }; + + const { sqlStatements: st } = await diff(from, to, []); + + await push({ db, to: from }); + const { sqlStatements: pst } = await push({ db, to }); + + const st0: string[] = [ + 'DROP VIEW `view`;', + 'CREATE VIEW `new_view` AS SELECT * FROM users WHERE 1=1;', + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); +}); + +test('create view', async () => { + const table = sqliteTable('test', { + id: int('id').primaryKey(), + }); + + const schema1 = { + test: table, + }; + + const schema2 = { + test: table, + view: sqliteView('view').as((qb) => qb.select().from(table)), + }; + + const { sqlStatements: st } = await diff(schema1, schema2, []); + + await push({ db, to: schema1 }); + const { sqlStatements: pst, hints: phints } = await push({ db, to: schema2 }); + + const st0: string[] = [ + `CREATE VIEW \`view\` AS select "id" from "test";`, + ]; + expect(st).toStrictEqual(st0); + expect(pst).toStrictEqual(st0); + expect(phints).toStrictEqual([]); +}); diff --git a/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts b/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts deleted file mode 100644 index 449b61c6cf..0000000000 --- a/drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts +++ /dev/null @@ -1,1812 +0,0 @@ -import { JsonAddColumnStatement, JsonSqliteAddColumnStatement, JsonStatement } from 'src/jsonStatements'; -import { SQLiteSchemaSquashed } from 'src/serializer/sqliteSchema'; -import { SQLiteAlterTableAddColumnConvertor } from 'src/sqlgenerator'; -import { libSQLCombineStatements } from 'src/statementCombiner'; -import { expect, test } from 'vitest'; - -/** - * ! before: - * - * user: { - * id INT; - * first_name INT; - * iq INT; - * PRIMARY KEY (id, iq) - * INDEXES: { - * UNIQUE id; - * } - * } - * - * ! after: - * - * new_user: { - * id INT; - * first_name INT; - * iq INT; - * PRIMARY KEY (id, iq) - * INDEXES: {} - * } - * - * rename table and drop unique index - * expect to get "rename_table" statement and then "recreate_table" - */ -test(`rename table and drop index`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'rename_table', - fromSchema: '', - toSchema: '', - tableNameFrom: 'user', - tableNameTo: 'new_user', - }, - { - type: 'drop_index', - tableName: 'new_user', - data: 'user_first_name_unique;first_name;true;', - schema: '', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - user: { - name: 'user', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - first_name: { - name: 'first_name', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - iq: { - name: 'iq', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: { - user_first_name_unique: 'user_first_name_unique;first_name;true;', - }, - foreignKeys: {}, - compositePrimaryKeys: { - user_id_iq_pk: 'id,iq', - }, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - new_user: { - name: 'new_user', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - first_name: { - name: 'first_name', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - iq: { - name: 'iq', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: { - new_user_id_iq_pk: 'id,iq', - }, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'rename_table', - fromSchema: '', - toSchema: '', - tableNameFrom: 'user', - tableNameTo: 'new_user', - }, - { - type: 'drop_index', - tableName: 'new_user', - data: 'user_first_name_unique;first_name;true;', - schema: '', - }, - ]; - expect(libSQLCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -/** - * ! before: - * - * autoincrement1: { - * id INT PRIMARY KEY; - * } - * - * autoincrement2: { - * id INT PRIMARY KEY AUTOINCREMENT; - * } - * - * dropNotNull: { - * id INT NOT NULL; - * } - * - * ! after: - * - * autoincrement1: { - * id INT PRIMARY KEY AUTOINCREMENT; - * } - * - * autoincrement2: { - * id INT PRI { - const statements: JsonStatement[] = [ - { - type: 'alter_table_alter_column_set_autoincrement', - tableName: 'autoincrement1', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: true, - columnPk: true, - } as unknown as JsonStatement, - { - type: 'alter_table_alter_column_drop_autoincrement', - tableName: 'autoincrement2', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: true, - } as unknown as JsonStatement, - { - type: 'alter_table_alter_column_drop_notnull', - tableName: 'dropNotNull', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - autoincrement1: { - name: 'autoincrement1', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - autoincrement2: { - name: 'autoincrement2', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: false, - autoincrement: true, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - dropNotNull: { - name: 'dropNotNull', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - autoincrement1: { - name: 'autoincrement1', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: true, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - autoincrement2: { - name: 'autoincrement2', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - dropNotNull: { - name: 'dropNotNull', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'recreate_table', - tableName: 'autoincrement1', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: true, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - { - type: 'recreate_table', - tableName: 'autoincrement2', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - { - type: 'alter_table_alter_column_drop_notnull', - tableName: 'dropNotNull', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }, - ]; - expect(libSQLCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -/** - * ! before: - * - * pk1: { - * id INT; - * } - * - * pk2: { - * id INT PRIMARY KEY; - * } - * - * ref_table: { - * id INT; - * } - * - * create_reference: { - * id INT; - * } - * - * ! after: - * - * pk1: { - * id INT PRIMARY KEY; - * } - * - * pk2: { - * id INT; - * } - * - * ref_table: { - * id INT; - * } - * - * create_reference: { - * id INT -> ref_table INT; - * } - * - * drop primary key for pk2 - * set primary key for pk1 - * "create_reference" reference on "ref_table" - * - * expect to: - * - "recreate_table" statement for pk1 - * - "recreate_table" statement for pk2 - * - "create_reference" statement for create_reference - */ -test(`drop and set primary key. create reference`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_alter_column_set_pk', - tableName: 'pk1', - schema: '', - columnName: 'id', - }, - { - type: 'alter_table_alter_column_set_notnull', - tableName: 'pk1', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: true, - } as unknown as JsonStatement, - { - type: 'alter_table_alter_column_drop_pk', - tableName: 'pk2', - columnName: 'id', - schema: '', - }, - { - type: 'alter_table_alter_column_drop_notnull', - tableName: 'pk2', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - { - type: 'create_reference', - tableName: 'create_reference', - data: 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'int', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - create_reference: { - name: 'create_reference', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - pk1: { - name: 'pk1', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - pk2: { - name: 'pk2', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - ref_table: { - name: 'ref_table', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - create_reference: { - name: 'create_reference', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - create_reference_id_ref_table_id_fk: - 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - pk1: { - name: 'pk1', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - pk2: { - name: 'pk2', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - ref_table: { - name: 'ref_table', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'recreate_table', - tableName: 'pk1', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - { - type: 'recreate_table', - tableName: 'pk2', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - { - type: 'create_reference', - tableName: 'create_reference', - data: 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'int', - }, - ]; - expect(libSQLCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -/** - * ! before: - * - * fk1: { - * fk_id INT; - * fk_id1 INT; - * } - * - * fk2: { - * fk2_id INT; -> composite reference on ref_table id INT - * fk2_id1 INT; -> composite reference on ref_table id1 INT - * } - * - * ref_table: { - * id INT; - * id1 INT; - * } - * - * ! after: - * - * fk1: { - * fk_id INT; -> composite reference on ref_table id INT - * fk_id1 INT; -> composite reference on ref_table id1 INT - * } - * - * fk2: { - * fk2_id INT; - * fk2_id1 INT; - * } - * - * ref_table: { - * id INT; - * id1 INT; - * } - * - * set multi column reference for fk1 - * drop multi column reference for fk2 - * - * expect to: - * - "recreate_table" statement for fk1 - * - "recreate_table" statement for fk2 - */ -test(`set and drop multiple columns reference`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'delete_reference', - tableName: 'fk1', - data: 'fk1_fk_id_fk_id1_ref_table_id_id1_fk;fk1;fk_id,fk_id1;ref_table;id,id1;no action;no action', - schema: '', - isMulticolumn: true, - }, - { - type: 'create_reference', - tableName: 'fk2', - data: 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk;fk2;fk2_id,fk2_id1;ref_table;id,id1;no action;no action', - schema: '', - isMulticolumn: true, - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - fk1: { - name: 'fk1', - columns: { - fk_id: { - name: 'fk_id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - fk_id1: { - name: 'fk_id1', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - fk1_fk_id_fk_id1_ref_table_id_id1_fk: - 'fk1_fk_id_fk_id1_ref_table_id_id1_fk;fk1;fk_id,fk_id1;ref_table;id,id1;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - fk2: { - name: 'fk2', - columns: { - fk2_id: { - name: 'fk2_id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - fk2_id1: { - name: 'fk2_id1', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - ref_table: { - name: 'ref_table', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - id1: { - name: 'id1', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - fk1: { - name: 'fk1', - columns: { - fk_id: { - name: 'fk_id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - fk_id1: { - name: 'fk_id1', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - fk2: { - name: 'fk2', - columns: { - fk2_id: { - name: 'fk2_id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - fk2_id1: { - name: 'fk2_id1', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - fk2_fk2_id_fk2_id1_ref_table_id_id1_fk: - 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk;fk2;fk2_id,fk2_id1;ref_table;id,id1;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - ref_table: { - name: 'ref_table', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - id1: { - name: 'id1', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'recreate_table', - tableName: 'fk1', - columns: [ - { - name: 'fk_id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'fk_id1', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - { - type: 'recreate_table', - tableName: 'fk2', - columns: [ - { - name: 'fk2_id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'fk2_id1', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [ - { - name: 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk', - tableFrom: 'fk2', - tableTo: 'ref_table', - columnsFrom: ['fk2_id', 'fk2_id1'], - columnsTo: ['id', 'id1'], - onDelete: 'no action', - onUpdate: 'no action', - }, - ], - uniqueConstraints: [], - checkConstraints: [], - }, - ]; - expect(libSQLCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -/** - * ! before: - * - * pk: { - * pk TEXT PRIMARY KEY; - * } - * - * simple: { - * simple TEXT; - * } - * - * unique: { - * unique INT UNIQUE; - * } - * - * ! after: - * - * pk: { - * pk INT PRIMARY KEY; - * } - * - * simple: { - * simple INT; - * } - * - * unique: { - * unique TEXT UNIQUE; - * } - * - * set new type for primary key column - * set new type for unique column - * set new type for column without pk or unique - * - * expect to: - * - "recreate_table" statement for pk - * - "recreate_table" statement for unique - * - "alter_table_alter_column_set_type" statement for simple - * - "create_index" statement for unique - */ -test(`set new type for primary key, unique and normal column`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_alter_column_set_type', - tableName: 'pk', - columnName: 'pk', - newDataType: 'int', - oldDataType: 'text', - schema: '', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: true, - } as unknown as JsonStatement, - { - type: 'alter_table_alter_column_set_type', - tableName: 'simple', - columnName: 'simple', - newDataType: 'int', - oldDataType: 'text', - schema: '', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - { - type: 'alter_table_alter_column_set_type', - tableName: 'unique', - columnName: 'unique', - newDataType: 'text', - oldDataType: 'int', - schema: '', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - pk: { - name: 'pk', - columns: { - pk: { - name: 'pk', - type: 'text', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - simple: { - name: 'simple', - columns: { - simple: { - name: 'simple', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - unique: { - name: 'unique', - columns: { - unique: { - name: 'unique', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: { - unique_unique_unique: 'unique_unique_unique;unique;true;', - }, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - pk: { - name: 'pk', - columns: { - pk: { - name: 'pk', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - simple: { - name: 'simple', - columns: { - simple: { - name: 'simple', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - unique: { - name: 'unique', - columns: { - unique: { - name: 'unique', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: { - unique_unique_unique: 'unique_unique_unique;unique;true;', - }, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'recreate_table', - tableName: 'pk', - columns: [ - { - name: 'pk', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - { - type: 'alter_table_alter_column_set_type', - tableName: 'simple', - columnName: 'simple', - newDataType: 'int', - oldDataType: 'text', - schema: '', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }, - { - type: 'alter_table_alter_column_set_type', - tableName: 'unique', - columnName: 'unique', - newDataType: 'text', - oldDataType: 'int', - schema: '', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - }, - ]; - expect(libSQLCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`add columns. set fk`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: undefined, - }, - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: undefined, - }, - { - type: 'create_reference', - tableName: 'ref', - data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'integer', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test1: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: undefined, - }, - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: undefined, - }, - { - type: 'create_reference', - tableName: 'ref', - data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'integer', - }, - ]; - expect(libSQLCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`add column and fk`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - { - type: 'create_reference', - tableName: 'ref', - data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'integer', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test1: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test1: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - ]; - expect(libSQLCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`add column and fk`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - { - type: 'create_reference', - tableName: 'ref', - data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'integer', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test1: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test1: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - ]; - expect(libSQLCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); diff --git a/drizzle-kit/tests/statements-combiner/singlestore-statements-combiner.test.ts b/drizzle-kit/tests/statements-combiner/singlestore-statements-combiner.test.ts deleted file mode 100644 index 0ba6cf2782..0000000000 --- a/drizzle-kit/tests/statements-combiner/singlestore-statements-combiner.test.ts +++ /dev/null @@ -1,882 +0,0 @@ -import { JsonStatement } from 'src/jsonStatements'; -import { SingleStoreSchemaSquashed } from 'src/serializer/singlestoreSchema'; -import { singleStoreCombineStatements } from 'src/statementCombiner'; -import { expect, test } from 'vitest'; - -test(`change column data type`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_rename_column', - tableName: 'user', - oldColumnName: 'lastName', - newColumnName: 'lastName123', - schema: '', - }, - { - type: 'alter_table_alter_column_set_type', - tableName: 'user', - columnName: 'lastName123', - newDataType: 'int', - oldDataType: 'text', - schema: '', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - columnIsUnique: false, - } as unknown as JsonStatement, - ]; - const json1: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - user: { - name: 'user', - columns: { - firstName: { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - lastName: { - name: 'lastName', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - user: { - name: 'user', - columns: { - firstName: { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - lastName: { - name: 'lastName123', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - - const newJsonStatements = [ - { - type: 'alter_table_rename_column', - tableName: 'user', - oldColumnName: 'lastName', - newColumnName: 'lastName123', - schema: '', - }, - { - type: 'singlestore_recreate_table', - tableName: 'user', - columns: [ - { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - { - name: 'lastName123', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'test', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - uniqueConstraints: [], - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`set autoincrement`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_alter_column_set_autoincrement', - tableName: 'users', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: true, - columnPk: false, - } as unknown as JsonStatement, - ]; - - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - users: { - name: 'users', - columns: { - new_id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: true, - }, - name: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - email: { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - const newJsonStatements = [ - { - type: 'singlestore_recreate_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: true, - }, - { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - uniqueConstraints: [], - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`drop autoincrement`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_alter_column_drop_autoincrement', - tableName: 'users', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: true, - columnPk: false, - } as unknown as JsonStatement, - ]; - - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - users: { - name: 'users', - columns: { - new_id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - name: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - email: { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - const newJsonStatements = [ - { - type: 'singlestore_recreate_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - uniqueConstraints: [], - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`drop autoincrement`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_alter_column_drop_autoincrement', - tableName: 'users', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: true, - columnPk: false, - } as unknown as JsonStatement, - ]; - - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - users: { - name: 'users', - columns: { - new_id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - name: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - email: { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - const newJsonStatements = [ - { - type: 'singlestore_recreate_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - uniqueConstraints: [], - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`set not null`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_alter_column_set_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - ]; - - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - users: { - name: 'users', - columns: { - new_id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - name: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - email: { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - const newJsonStatements = [ - { - type: 'singlestore_recreate_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - { - name: 'name', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - uniqueConstraints: [], - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`drop not null`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_alter_column_drop_notnull', - tableName: 'users', - columnName: 'name', - schema: '', - newDataType: 'text', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - ]; - - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - users: { - name: 'users', - columns: { - new_id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - name: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - email: { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - const newJsonStatements = [ - { - type: 'singlestore_recreate_table', - tableName: 'users', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - { - name: 'name', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - { - name: 'email', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - uniqueConstraints: [], - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`renamed column and droped column "test"`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_rename_column', - tableName: 'user', - oldColumnName: 'lastName', - newColumnName: 'lastName123', - schema: '', - }, - { - type: 'alter_table_drop_column', - tableName: 'user', - columnName: 'test', - schema: '', - }, - ]; - const json1: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - user: { - name: 'user', - columns: { - firstName: { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - lastName: { - name: 'lastName', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - user: { - name: 'user', - columns: { - firstName: { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - lastName: { - name: 'lastName123', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - - const newJsonStatements: JsonStatement[] = [ - { - type: 'alter_table_rename_column', - tableName: 'user', - oldColumnName: 'lastName', - newColumnName: 'lastName123', - schema: '', - }, - { - type: 'alter_table_drop_column', - tableName: 'user', - columnName: 'test', - schema: '', - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`droped column that is part of composite pk`, async (t) => { - const statements: JsonStatement[] = [ - { type: 'delete_composite_pk', tableName: 'user', data: 'id,iq' }, - { - type: 'alter_table_alter_column_set_pk', - tableName: 'user', - schema: '', - columnName: 'id', - }, - { - type: 'alter_table_drop_column', - tableName: 'user', - columnName: 'iq', - schema: '', - }, - ]; - const json1: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - user: { - name: 'user', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - first_nam: { - name: 'first_nam', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - iq: { - name: 'iq', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: { - user_id_iq_pk: 'id,iq', - }, - uniqueConstraints: {}, - }, - }, - }; - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - user: { - name: 'user', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: false, - autoincrement: false, - }, - first_nam: { - name: 'first_name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - - const newJsonStatements: JsonStatement[] = [ - { - type: 'singlestore_recreate_table', - tableName: 'user', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: true, - notNull: false, - autoincrement: false, - }, - { - name: 'first_name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - uniqueConstraints: [], - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`add column with pk`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_add_column', - tableName: 'table', - column: { - name: 'test', - type: 'integer', - primaryKey: true, - notNull: false, - autoincrement: false, - }, - schema: '', - }, - ]; - const json2: SingleStoreSchemaSquashed = { - version: '1', - dialect: 'singlestore', - tables: { - table: { - name: 'table', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - }, - }, - }; - - const newJsonStatements = [ - { - columns: [ - { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - tableName: 'table', - type: 'singlestore_recreate_table', - uniqueConstraints: [], - }, - ]; - expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); diff --git a/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts b/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts deleted file mode 100644 index 20f953da62..0000000000 --- a/drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts +++ /dev/null @@ -1,1211 +0,0 @@ -import { JsonStatement } from 'src/jsonStatements'; -import { SQLiteSchemaSquashed } from 'src/serializer/sqliteSchema'; -import { sqliteCombineStatements } from 'src/statementCombiner'; -import { expect, test } from 'vitest'; - -test(`renamed column and altered this column type`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_rename_column', - tableName: 'user', - oldColumnName: 'lastName', - newColumnName: 'lastName123', - schema: '', - }, - { - type: 'alter_table_alter_column_set_type', - tableName: 'user', - columnName: 'lastName123', - newDataType: 'int', - oldDataType: 'text', - schema: '', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - columnIsUnique: false, - } as unknown as JsonStatement, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - user: { - name: 'user', - columns: { - firstName: { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - lastName: { - name: 'lastName', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - user: { - name: 'user', - columns: { - firstName: { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - lastName: { - name: 'lastName123', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'recreate_table', - tableName: 'user', - columns: [ - { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - { - name: 'lastName123', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'test', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - ]; - expect(sqliteCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`renamed column and droped column "test"`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_rename_column', - tableName: 'user', - oldColumnName: 'lastName', - newColumnName: 'lastName123', - schema: '', - }, - { - type: 'alter_table_drop_column', - tableName: 'user', - columnName: 'test', - schema: '', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - user: { - name: 'user', - columns: { - firstName: { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - lastName: { - name: 'lastName', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - user: { - name: 'user', - columns: { - firstName: { - name: 'firstName', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - lastName: { - name: 'lastName123', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements: JsonStatement[] = [ - { - type: 'alter_table_rename_column', - tableName: 'user', - oldColumnName: 'lastName', - newColumnName: 'lastName123', - schema: '', - }, - { - type: 'alter_table_drop_column', - tableName: 'user', - columnName: 'test', - schema: '', - }, - ]; - expect(sqliteCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`droped column that is part of composite pk`, async (t) => { - const statements: JsonStatement[] = [ - { type: 'delete_composite_pk', tableName: 'user', data: 'id,iq' }, - { - type: 'alter_table_alter_column_set_pk', - tableName: 'user', - schema: '', - columnName: 'id', - }, - { - type: 'alter_table_drop_column', - tableName: 'user', - columnName: 'iq', - schema: '', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - user: { - name: 'user', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - first_nam: { - name: 'first_nam', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - iq: { - name: 'iq', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: { - user_id_iq_pk: 'id,iq', - }, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - user: { - name: 'user', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: false, - autoincrement: false, - }, - first_nam: { - name: 'first_nam', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements: JsonStatement[] = [ - { - type: 'recreate_table', - tableName: 'user', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: true, - notNull: false, - autoincrement: false, - }, - { - name: 'first_nam', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - ]; - expect(sqliteCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`drop column "ref"."name", rename column "ref"."age". dropped primary key "user"."id". Set not null to "user"."iq"`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'alter_table_rename_column', - tableName: 'ref', - oldColumnName: 'age', - newColumnName: 'age1', - schema: '', - }, - { - type: 'alter_table_alter_column_drop_pk', - tableName: 'user', - columnName: 'id', - schema: '', - }, - { - type: 'alter_table_alter_column_drop_autoincrement', - tableName: 'user', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - { - type: 'alter_table_alter_column_drop_notnull', - tableName: 'user', - columnName: 'id', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: false, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - { - type: 'alter_table_alter_column_set_notnull', - tableName: 'user', - columnName: 'iq', - schema: '', - newDataType: 'int', - columnDefault: undefined, - columnOnUpdate: undefined, - columnNotNull: true, - columnAutoIncrement: false, - columnPk: false, - } as unknown as JsonStatement, - { - type: 'alter_table_drop_column', - tableName: 'ref', - columnName: 'text', - schema: '', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: true, - }, - user_iq: { - name: 'user_iq', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - name: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - age: { - name: 'age', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_user_iq_user_iq_fk: 'ref_user_iq_user_iq_fk;ref;user_iq;user;iq;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: true, - }, - first_name: { - name: 'first_name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - iq: { - name: 'iq', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - user_iq: { - name: 'user_iq', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - age1: { - name: 'age1', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_user_iq_user_iq_fk: 'ref_user_iq_user_iq_fk;ref;user_iq;user;iq;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id: { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - first_name: { - name: 'first_name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - iq: { - name: 'iq', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements: JsonStatement[] = [ - { - type: 'alter_table_rename_column', - tableName: 'ref', - oldColumnName: 'age', - newColumnName: 'age1', - schema: '', - }, - { - type: 'alter_table_drop_column', - tableName: 'ref', - columnName: 'text', - schema: '', - }, - { - type: 'recreate_table', - tableName: 'user', - columns: [ - { - name: 'id', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - { - name: 'first_name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'iq', - type: 'int', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [], - uniqueConstraints: [], - checkConstraints: [], - }, - ]; - - expect(sqliteCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`create reference on exising column (table includes unique index). expect to recreate column and recreate index`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'create_reference', - tableName: 'unique', - data: 'unique_ref_pk_pk_pk_fk;unique;ref_pk;pk;pk;no action;no action', - schema: '', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - pk: { - name: 'pk', - columns: { - pk: { - name: 'pk', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - unique: { - name: 'unique', - columns: { - unique: { - name: 'unique', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ref_pk: { - name: 'ref_pk', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: { - unique_unique_unique: 'unique_unique_unique;unique;true;', - }, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - pk: { - name: 'pk', - columns: { - pk: { - name: 'pk', - type: 'int', - primaryKey: true, - notNull: true, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - unique: { - name: 'unique', - columns: { - unique: { - name: 'unique', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ref_pk: { - name: 'ref_pk', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: { - unique_unique_unique: 'unique_unique_unique;unique;true;', - }, - foreignKeys: { - unique_ref_pk_pk_pk_fk: 'unique_ref_pk_pk_pk_fk;unique;ref_pk;pk;pk;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements: JsonStatement[] = [ - { - type: 'recreate_table', - tableName: 'unique', - columns: [ - { - name: 'unique', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - { - name: 'ref_pk', - type: 'int', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - ], - compositePKs: [], - referenceData: [ - { - name: 'unique_ref_pk_pk_pk_fk', - tableFrom: 'unique', - tableTo: 'pk', - columnsFrom: ['ref_pk'], - columnsTo: ['pk'], - onDelete: 'no action', - onUpdate: 'no action', - }, - ], - uniqueConstraints: [], - checkConstraints: [], - }, - { - data: 'unique_unique_unique;unique;true;', - internal: undefined, - schema: '', - tableName: 'unique', - type: 'create_index', - }, - ]; - - expect(sqliteCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`add columns. set fk`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: undefined, - }, - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: undefined, - }, - { - type: 'create_reference', - tableName: 'ref', - data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'integer', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test1: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - columns: [ - { - autoincrement: false, - name: 'id1', - notNull: true, - primaryKey: false, - type: 'text', - }, - { - autoincrement: false, - name: 'new_age', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'test', - notNull: false, - primaryKey: false, - type: 'integer', - }, - { - autoincrement: false, - name: 'test1', - notNull: false, - primaryKey: false, - type: 'integer', - }, - ], - compositePKs: [], - referenceData: [ - { - columnsFrom: [ - 'new_age', - ], - columnsTo: [ - 'new_age', - ], - name: 'ref_new_age_user_new_age_fk', - onDelete: 'no action', - onUpdate: 'no action', - tableFrom: 'ref', - tableTo: 'user', - }, - ], - tableName: 'ref', - type: 'recreate_table', - uniqueConstraints: [], - checkConstraints: [], - }, - ]; - expect(sqliteCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); - -test(`add column and fk`, async (t) => { - const statements: JsonStatement[] = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - { - type: 'create_reference', - tableName: 'ref', - data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - schema: '', - columnNotNull: false, - columnDefault: undefined, - columnType: 'integer', - }, - ]; - const json1: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test1: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - const json2: SQLiteSchemaSquashed = { - version: '6', - dialect: 'sqlite', - tables: { - ref: { - name: 'ref', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test: { - name: 'test', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - test1: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: { - ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', - }, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - user: { - name: 'user', - columns: { - id1: { - name: 'id1', - type: 'text', - primaryKey: false, - notNull: true, - autoincrement: false, - }, - new_age: { - name: 'new_age', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }, - indexes: {}, - foreignKeys: {}, - compositePrimaryKeys: {}, - uniqueConstraints: {}, - checkConstraints: {}, - }, - }, - enums: {}, - views: {}, - }; - - const newJsonStatements = [ - { - type: 'sqlite_alter_table_add_column', - tableName: 'ref', - column: { - name: 'test1', - type: 'integer', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', - }, - ]; - expect(sqliteCombineStatements(statements, json2)).toStrictEqual( - newJsonStatements, - ); -}); diff --git a/drizzle-kit/tests/test/sqlite.test.ts b/drizzle-kit/tests/test/sqlite.test.ts deleted file mode 100644 index 9a00e8def3..0000000000 --- a/drizzle-kit/tests/test/sqlite.test.ts +++ /dev/null @@ -1,39 +0,0 @@ -import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; -import { diffTestSchemasSqlite } from 'tests/schemaDiffer'; -import { expect } from 'vitest'; -import { DialectSuite, run } from '../common'; - -const sqliteSuite: DialectSuite = { - async columns1() { - const schema1 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - }), - }; - - const schema2 = { - users: sqliteTable('users', { - id: int('id').primaryKey({ autoIncrement: true }), - name: text('name'), - }), - }; - - const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); - - expect(statements.length).toBe(1); - expect(statements[0]).toStrictEqual({ - type: 'sqlite_alter_table_add_column', - tableName: 'users', - referenceData: undefined, - column: { - name: 'name', - type: 'text', - primaryKey: false, - notNull: false, - autoincrement: false, - }, - }); - }, -}; - -run(sqliteSuite); diff --git a/drizzle-kit/tests/testsinglestore.ts b/drizzle-kit/tests/testsinglestore.ts deleted file mode 100644 index 1dc97d9c32..0000000000 --- a/drizzle-kit/tests/testsinglestore.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { index, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; -import { diffTestSchemasSingleStore } from './schemaDiffer'; - -const from = { - users: singlestoreTable( - 'table', - { - name: text('name'), - }, - (t) => { - return { - idx: index('name_idx').on(t.name), - }; - }, - ), -}; - -const to = { - users: singlestoreTable('table', { - name: text('name'), - }), -}; - -diffTestSchemasSingleStore(from, to, []).then((res) => { - const { statements, sqlStatements } = res; - - console.log(statements); - console.log(sqlStatements); -}); diff --git a/drizzle-kit/tests/utils.ts b/drizzle-kit/tests/utils.ts new file mode 100644 index 0000000000..c150dd0e4b --- /dev/null +++ b/drizzle-kit/tests/utils.ts @@ -0,0 +1,101 @@ +export const measure = (prom: Promise, label: string): Promise => { + return new Promise(async (res, rej) => { // oxlint-disable-line no-async-promise-executor + console.time(label); + try { + const result = await prom; + console.timeEnd(label); + res(result); + } catch (e) { + console.timeEnd(label); + rej(e); + } + }); +}; + +import { Table } from 'drizzle-orm'; +import * as ts from 'typescript'; + +const options = { + noEmit: true, + skipLibCheck: true, + target: ts.ScriptTarget.ES2020, + module: ts.ModuleKind.NodeNext, + moduleResolution: ts.ModuleResolutionKind.NodeNext, +}; + +type VFile = { text: string; version: number }; + +export function makeTSC2(options: ts.CompilerOptions, fileName = 'temp.ts') { + const files = new Map(); + const sys = ts.sys; // fall back to real FS for libs, node_modules, etc. + + const ensure = (fn: string) => { + if (!files.has(fn)) files.set(fn, { text: '', version: 0 }); + return files.get(fn)!; + }; + ensure(fileName); + + const host: ts.LanguageServiceHost = { + getCompilationSettings: () => options, + getScriptFileNames: () => Array.from(files.keys()), + getScriptVersion: (fn) => (files.get(fn)?.version ?? 0).toString(), + getScriptSnapshot: (fn) => { + const mem = files.get(fn); + if (mem) return ts.ScriptSnapshot.fromString(mem.text); + // Defer to real FS for everything else + if (sys.fileExists(fn)) return ts.ScriptSnapshot.fromString(sys.readFile(fn)!); + return; + }, + getCurrentDirectory: () => sys.getCurrentDirectory(), + getDefaultLibFileName: (opts) => ts.getDefaultLibFilePath(opts), + fileExists: sys.fileExists, + readFile: sys.readFile, + readDirectory: sys.readDirectory, + directoryExists: sys.directoryExists?.bind(sys), + getDirectories: sys.getDirectories?.bind(sys), + useCaseSensitiveFileNames: () => sys.useCaseSensitiveFileNames, + }; + + const registry = ts.createDocumentRegistry(); + const service = ts.createLanguageService(host, registry); + + const formatHost: ts.FormatDiagnosticsHost = { + getCurrentDirectory: host.getCurrentDirectory, + getCanonicalFileName: (f) => host.useCaseSensitiveFileNames?.() ? f : f.toLowerCase(), + getNewLine: () => sys.newLine, + }; + + async function tsc2(content: string, fn: string = fileName): Promise { + const f = ensure(fn); + f.text = content; + f.version++; + + // Ask LS for diagnostics (incremental & fast) + const syntactic = service.getSyntacticDiagnostics(fn); + const semantic = service.getSemanticDiagnostics(fn); + const optionsDiag = service.getCompilerOptionsDiagnostics(); + + const diags = [...optionsDiag, ...syntactic, ...semantic]; + if (diags.length) { + const message = ts.formatDiagnostics(diags, formatHost); + console.log(content); + console.log(); + console.error(message); + throw new Error(message); + } + } + + return { tsc2, service, update: tsc2 }; +} + +export const tsc = makeTSC2(options).tsc2; + +// export const tsc = async (path: string) => { +// const typeCheckResult = +// await $`bun tsc --noEmit --skipLibCheck --target ES2020 --module NodeNext --moduleResolution NodeNext ${path}` +// // .quiet() +// .nothrow(); +// if (typeCheckResult.exitCode !== 0) { +// throw new Error(typeCheckResult.stderr || typeCheckResult.stdout); +// } +// }; diff --git a/drizzle-kit/tsconfig.build.json b/drizzle-kit/tsconfig.build.json index b57ab6b000..cc8525a68b 100644 --- a/drizzle-kit/tsconfig.build.json +++ b/drizzle-kit/tsconfig.build.json @@ -1,4 +1,5 @@ { "extends": "./tsconfig.json", - "include": ["src"] + "include": ["src"], + "exclude": ["tests", "node_modules"] } diff --git a/drizzle-kit/tsconfig.json b/drizzle-kit/tsconfig.json index 814139e470..7d92e52d5f 100644 --- a/drizzle-kit/tsconfig.json +++ b/drizzle-kit/tsconfig.json @@ -1,8 +1,10 @@ { "compilerOptions": { - "target": "es2021", - "lib": ["es2021"], + "target": "ESNext", + "lib": ["ESNext"], "types": ["node"], + "module": "preserve", + "moduleResolution": "bundler", "strictNullChecks": true, "strictFunctionTypes": false, "allowJs": true, @@ -12,8 +14,6 @@ "strict": true, "noImplicitOverride": true, "forceConsistentCasingInFileNames": true, - "module": "CommonJS", - "moduleResolution": "node", "resolveJsonModule": true, "noErrorTruncation": true, "isolatedModules": true, diff --git a/drizzle-kit/tsconfig.typetest.json b/drizzle-kit/tsconfig.typetest.json new file mode 100644 index 0000000000..c62fe6a3b5 --- /dev/null +++ b/drizzle-kit/tsconfig.typetest.json @@ -0,0 +1,28 @@ +{ + "compilerOptions": { + "target": "es2022", + "module": "esnext", + "moduleResolution": "node", + "lib": ["es2022"], + "types": ["node"], + "strictNullChecks": true, + "strictFunctionTypes": false, + "allowJs": true, + "skipLibCheck": true, + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "noImplicitOverride": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "noErrorTruncation": true, + "isolatedModules": true, + "sourceMap": true, + "baseUrl": ".", + "outDir": "dist", + "noEmit": true, + "typeRoots": ["node_modules/@types", "src/@types"] + }, + "include": ["dev", "tests", "drizzle.config.ts", "test.ts"], + "exclude": ["node_modules", "tests/**/tmp"] +} diff --git a/drizzle-kit/vitest.config.ts b/drizzle-kit/vitest.config.ts index fd728eb116..fca1016769 100644 --- a/drizzle-kit/vitest.config.ts +++ b/drizzle-kit/vitest.config.ts @@ -14,20 +14,23 @@ export default defineConfig({ // This one was excluded because we need to modify an API for SingleStore-generated columns. // It’s in the backlog. - exclude: ['tests/**/singlestore-generated.test.ts'], + exclude: [ + // 'tests/mssql/**/*.test.ts', + // 'tests/cockroach/**/*.test.ts', + 'tests/**/singlestore-generated.test.ts', + 'tests/singlestore/**/*.test.ts', + 'tests/gel/**/*.test.ts', + // 'tests/cockroach/', + 'tests/postgres/commutativity.test.ts', + 'tests/postgres/commutativity.integration.test.ts', + ], typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 100000, - isolate: true, - poolOptions: { - threads: { - singleThread: true, - }, - }, - maxWorkers: 1, + maxConcurrency: 5, fileParallelism: false, }, plugins: [tsconfigPaths()], diff --git a/drizzle-orm/package.json b/drizzle-orm/package.json index 5b9f0c65cf..076867b420 100644 --- a/drizzle-orm/package.json +++ b/drizzle-orm/package.json @@ -1,15 +1,17 @@ { "name": "drizzle-orm", - "version": "1.0.0-beta.1", + "version": "1.0.0-beta.2", "description": "Drizzle ORM package for SQL databases", "type": "module", "scripts": { "p": "prisma generate --schema src/prisma/schema.prisma", "build": "pnpm p && scripts/build.ts", + "build:artifact": "pnpm build", "b": "pnpm build", "test:types": "cd type-tests && tsc", "test": "vitest run", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", + "pack:artifact": "pnpm run pack", "publish": "npm publish package.tgz" }, "main": "./index.cjs", @@ -61,6 +63,7 @@ "@tursodatabase/database-common": ">=0.2.1", "@tursodatabase/database-wasm": ">=0.2.1", "@types/better-sqlite3": "*", + "@types/mssql": "^9.1.4", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", @@ -70,6 +73,7 @@ "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", + "mssql": "^11.0.1", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", @@ -173,7 +177,7 @@ }, "devDependencies": { "@arktype/attest": "^0.46.0", - "@aws-sdk/client-rds-data": "^3.549.0", + "@aws-sdk/client-rds-data": "^3.914.0", "@cloudflare/workers-types": "^4.20251004.0", "@electric-sql/pglite": "^0.2.12", "@libsql/client": "^0.10.0", @@ -214,8 +218,6 @@ "ts-morph": "^25.0.1", "tslib": "^2.5.2", "tsx": "^3.12.7", - "vite-tsconfig-paths": "^4.3.2", - "vitest": "^3.1.3", "zod": "^3.20.2", "zx": "^7.2.2" } diff --git a/drizzle-orm/src/_relations.ts b/drizzle-orm/src/_relations.ts index adaee80765..4b1f9f704e 100644 --- a/drizzle-orm/src/_relations.ts +++ b/drizzle-orm/src/_relations.ts @@ -474,20 +474,20 @@ export function extractTablesRelationalConfig< const relations: Record = value.config( configHelpers(value.table), ); - let primaryKey: AnyColumn[] | undefined; + // let primaryKey: AnyColumn[] | undefined; for (const [relationName, relation] of Object.entries(relations)) { if (tableName) { const tableConfig = tablesConfig[tableName]!; tableConfig.relations[relationName] = relation; - if (primaryKey) { - tableConfig.primaryKey.push(...primaryKey); - } + // if (primaryKey) { + // tableConfig.primaryKey.push(...primaryKey); + // } } else { if (!(dbName in relationsBuffer)) { relationsBuffer[dbName] = { relations: {}, - primaryKey, + // primaryKey, }; } relationsBuffer[dbName]!.relations[relationName] = relation; @@ -723,3 +723,68 @@ export function mapRelationalRow( return result; } + +export function mapRelationalRowFromObj( + tablesConfig: TablesRelationalConfig, + tableConfig: TableRelationalConfig, + row: unknown[], + buildQueryResultSelection: BuildRelationalQueryResult['selection'], + mapColumnValue: (value: unknown) => unknown = (value) => value, +): Record { + const result: Record = {}; + + for ( + const [ + selectionItemIndex, + selectionItem, + ] of buildQueryResultSelection.entries() + ) { + if (selectionItem.isJson) { + const relation = tableConfig.relations[selectionItem.tsKey]!; + const isOne = is(relation, One); + const rawSubRows = row[selectionItemIndex] as unknown[] | null | [null] | string; + + let subRows = rawSubRows as unknown[] | null; + if (subRows || Array.isArray(subRows)) { + subRows = (typeof rawSubRows === 'string' ? JSON.parse(rawSubRows) : rawSubRows) as unknown[]; + + subRows = isOne + ? subRows.flatMap((r) => Array.isArray(r) ? r : Object.values(r as any)) + : subRows.map((r) => Array.isArray(r) ? r : Object.values(r as any)); + } + + result[selectionItem.tsKey] = isOne + ? subRows + && mapRelationalRowFromObj( + tablesConfig, + tablesConfig[selectionItem.relationTableTsKey!]!, + subRows, + selectionItem.selection, + mapColumnValue, + ) + : ((subRows ?? []) as unknown[][]).map((subRow) => + mapRelationalRowFromObj( + tablesConfig, + tablesConfig[selectionItem.relationTableTsKey!]!, + subRow, + selectionItem.selection, + mapColumnValue, + ) + ); + } else { + const value = mapColumnValue(row[selectionItemIndex]); + const field = selectionItem.field!; + let decoder; + if (is(field, Column)) { + decoder = field; + } else if (is(field, SQL)) { + decoder = field.decoder; + } else { + decoder = field.sql.decoder; + } + result[selectionItem.tsKey] = value === null ? null : decoder.mapFromDriverValue(value); + } + } + + return result; +} diff --git a/drizzle-orm/src/alias.ts b/drizzle-orm/src/alias.ts index 56da971196..0c5781b4e8 100644 --- a/drizzle-orm/src/alias.ts +++ b/drizzle-orm/src/alias.ts @@ -1,4 +1,5 @@ import type * as V1 from './_relations.ts'; +import { OriginalColumn } from './column-common.ts'; import type { AnyColumn } from './column.ts'; import { Column } from './column.ts'; import { entityKind, is } from './entity.ts'; @@ -7,16 +8,20 @@ import { SQL, sql } from './sql/sql.ts'; import { Table } from './table.ts'; import { ViewBaseConfig } from './view-common.ts'; -export class ColumnAliasProxyHandler implements ProxyHandler { - static readonly [entityKind]: string = 'ColumnAliasProxyHandler'; +export class ColumnTableAliasProxyHandler implements ProxyHandler { + static readonly [entityKind]: string = 'ColumnTableAliasProxyHandler'; - constructor(private table: Table | View) {} + constructor(private table: Table | View, private ignoreColumnAlias?: boolean) {} get(columnObj: TColumn, prop: string | symbol): any { if (prop === 'table') { return this.table; } + if (prop === 'isAlias' && this.ignoreColumnAlias) { + return false; + } + return columnObj[prop as keyof TColumn]; } } @@ -24,7 +29,7 @@ export class ColumnAliasProxyHandler implements ProxyHan export class TableAliasProxyHandler implements ProxyHandler { static readonly [entityKind]: string = 'TableAliasProxyHandler'; - constructor(private alias: string, private replaceOriginalName: boolean) {} + constructor(private alias: string, private replaceOriginalName: boolean, private ignoreColumnAlias?: boolean) {} get(target: T, prop: string | symbol): any { if (prop === Table.Symbol.IsAlias) { @@ -58,7 +63,7 @@ export class TableAliasProxyHandler implements ProxyHand Object.keys(columns).map((key) => { proxiedColumns[key] = new Proxy( columns[key]!, - new ColumnAliasProxyHandler(new Proxy(target, this)), + new ColumnTableAliasProxyHandler(new Proxy(target, this), this.ignoreColumnAlias), ); }); @@ -67,13 +72,42 @@ export class TableAliasProxyHandler implements ProxyHand const value = target[prop as keyof typeof target]; if (is(value, Column)) { - return new Proxy(value as AnyColumn, new ColumnAliasProxyHandler(new Proxy(target, this))); + return new Proxy( + value as AnyColumn, + new ColumnTableAliasProxyHandler(new Proxy(target, this), this.ignoreColumnAlias), + ); } return value; } } +export class ColumnAliasProxyHandler implements ProxyHandler { + static readonly [entityKind]: string = 'ColumnAliasProxyHandler'; + + constructor(private alias: string) {} + + get(target: T, prop: keyof Column): any { + if (prop === 'isAlias') { + return true; + } + + if (prop === 'name') { + return this.alias; + } + + if (prop === 'keyAsName') { + return false; + } + + if (prop === OriginalColumn) { + return () => target; + } + + return target[prop]; + } +} + export class RelationTableAliasProxyHandler implements ProxyHandler { static readonly [entityKind]: string = 'RelationTableAliasProxyHandler'; @@ -89,7 +123,11 @@ export class RelationTableAliasProxyHandler implements Pr } export function aliasedTable(table: T, tableAlias: string): T { - return new Proxy(table, new TableAliasProxyHandler(tableAlias, false)); + return new Proxy(table, new TableAliasProxyHandler(tableAlias, false, false)); +} + +export function aliasedColumn(column: T, alias: string): T { + return new Proxy(column, new ColumnAliasProxyHandler(alias)); } export function aliasedRelation(relation: T, tableAlias: string): T { @@ -99,7 +137,10 @@ export function aliasedRelation(relation: T, tableAlias: export function aliasedTableColumn(column: T, tableAlias: string): T { return new Proxy( column, - new ColumnAliasProxyHandler(new Proxy(column.table, new TableAliasProxyHandler(tableAlias, false))), + new ColumnTableAliasProxyHandler( + new Proxy(column.table, new TableAliasProxyHandler(tableAlias, false, false)), + false, + ), ); } @@ -121,3 +162,12 @@ export function mapColumnsInSQLToAlias(query: SQL, alias: string): SQL { return c; })); } + +// Defined separately from the Column class to resolve circular dependency +Column.prototype.as = function(alias: string): Column { + return aliasedColumn(this, alias); +}; + +export function getOriginalColumnFromAlias(column: T): T { + return column[OriginalColumn](); +} diff --git a/drizzle-orm/src/aws-data-api/common/index.ts b/drizzle-orm/src/aws-data-api/common/index.ts index cbc65cd6c2..fb15fc102b 100644 --- a/drizzle-orm/src/aws-data-api/common/index.ts +++ b/drizzle-orm/src/aws-data-api/common/index.ts @@ -1,7 +1,16 @@ import type { Field } from '@aws-sdk/client-rds-data'; -import { TypeHint } from '@aws-sdk/client-rds-data'; +import type { TypeHint } from '@aws-sdk/client-rds-data'; import type { QueryTypingsValue } from '~/sql/sql.ts'; +export const typeHint: { [K in TypeHint]: K } = { + DATE: 'DATE', + DECIMAL: 'DECIMAL', + JSON: 'JSON', + TIME: 'TIME', + TIMESTAMP: 'TIMESTAMP', + UUID: 'UUID', +}; + export function getValueFromDataApi(field: Field) { if (field.stringValue !== undefined) { return field.stringValue; @@ -41,17 +50,17 @@ export function getValueFromDataApi(field: Field) { export function typingsToAwsTypeHint(typings?: QueryTypingsValue): TypeHint | undefined { if (typings === 'date') { - return TypeHint.DATE; + return typeHint.DATE; } else if (typings === 'decimal') { - return TypeHint.DECIMAL; + return typeHint.DECIMAL; } else if (typings === 'json') { - return TypeHint.JSON; + return typeHint.JSON; } else if (typings === 'time') { - return TypeHint.TIME; + return typeHint.TIME; } else if (typings === 'timestamp') { - return TypeHint.TIMESTAMP; + return typeHint.TIMESTAMP; } else if (typings === 'uuid') { - return TypeHint.UUID; + return typeHint.UUID; } else { return undefined; } @@ -67,11 +76,11 @@ export function toValueParam(value: any, typings?: QueryTypingsValue): { value: response.value = { isNull: true }; } else if (typeof value === 'string') { switch (response.typeHint) { - case TypeHint.DATE: { + case typeHint.DATE: { response.value = { stringValue: value.split('T')[0]! }; break; } - case TypeHint.TIMESTAMP: { + case typeHint.TIMESTAMP: { response.value = { stringValue: value.replace('T', ' ').replace('Z', '') }; break; } @@ -86,7 +95,7 @@ export function toValueParam(value: any, typings?: QueryTypingsValue): { value: response.value = { doubleValue: value }; } else if (typeof value === 'boolean') { response.value = { booleanValue: value }; - } else if (value instanceof Date) { // eslint-disable-line no-instanceof/no-instanceof + } else if (value instanceof Date) { // oxlint-disable-line drizzle-internal/no-instanceof // TODO: check if this clause is needed? Seems like date value always comes as string response.value = { stringValue: value.toISOString().replace('T', ' ').replace('Z', '') }; } else { diff --git a/drizzle-orm/src/aws-data-api/pg/driver.ts b/drizzle-orm/src/aws-data-api/pg/driver.ts index defe9def6c..292d55b4dd 100644 --- a/drizzle-orm/src/aws-data-api/pg/driver.ts +++ b/drizzle-orm/src/aws-data-api/pg/driver.ts @@ -142,9 +142,6 @@ export function drizzle< TClient extends AwsDataApiClient = RDSDataClient, >( ...params: [ - TClient, - DrizzleAwsDataApiPgConfig, - ] | [ ( | ( & DrizzleConfig @@ -163,11 +160,6 @@ export function drizzle< ): AwsDataApiPgDatabase & { $client: TClient; } { - // eslint-disable-next-line no-instanceof/no-instanceof - if (params[0] instanceof RDSDataClient || params[0].constructor.name !== 'Object') { - return construct(params[0] as TClient, params[1] as DrizzleAwsDataApiPgConfig) as any; - } - if ((params[0] as { client?: TClient }).client) { const { client, ...drizzleConfig } = params[0] as { client: TClient; diff --git a/drizzle-orm/src/better-sqlite3/driver.ts b/drizzle-orm/src/better-sqlite3/driver.ts index fa36e85a9d..80d14b42cb 100644 --- a/drizzle-orm/src/better-sqlite3/driver.ts +++ b/drizzle-orm/src/better-sqlite3/driver.ts @@ -5,7 +5,7 @@ import { DefaultLogger } from '~/logger.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { BetterSQLiteSession } from './session.ts'; export type DrizzleBetterSQLite3DatabaseConfig = @@ -83,10 +83,10 @@ export function drizzle< ...params: | [] | [ - Database | string, + string, ] | [ - Database | string, + string, DrizzleConfig, ] | [ @@ -108,30 +108,26 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { - connection?: DrizzleBetterSQLite3DatabaseConfig; - client?: Database; - } - & DrizzleConfig; - - if (client) return construct(client, drizzleConfig) as any; + const { connection, client, ...drizzleConfig } = params[0] as + & { + connection?: DrizzleBetterSQLite3DatabaseConfig; + client?: Database; + } + & DrizzleConfig; - if (typeof connection === 'object') { - const { source, ...options } = connection; + if (client) return construct(client, drizzleConfig) as any; - const instance = new Client(source, options); + if (typeof connection === 'object') { + const { source, ...options } = connection; - return construct(instance, drizzleConfig) as any; - } - - const instance = new Client(connection); + const instance = new Client(source, options); return construct(instance, drizzleConfig) as any; } - return construct(params[0] as Database, params[1] as DrizzleConfig | undefined) as any; + const instance = new Client(connection); + + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/bun-sql/driver.ts b/drizzle-orm/src/bun-sql/driver.ts index 022ea62d46..8197e73ea3 100644 --- a/drizzle-orm/src/bun-sql/driver.ts +++ b/drizzle-orm/src/bun-sql/driver.ts @@ -11,9 +11,9 @@ export function drizzle< TClient extends SQL = SQL, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -47,9 +47,9 @@ export namespace drizzle { TClient extends SQL = SQL, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -84,9 +84,9 @@ export namespace drizzle { TClient extends SQL = SQL, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -121,9 +121,9 @@ export namespace drizzle { TClient extends SQL = SQL, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, BunMySqlDrizzleConfig, ] | [ ( diff --git a/drizzle-orm/src/bun-sql/mysql/driver.ts b/drizzle-orm/src/bun-sql/mysql/driver.ts index 24bed16cca..b7e7c8e9bf 100644 --- a/drizzle-orm/src/bun-sql/mysql/driver.ts +++ b/drizzle-orm/src/bun-sql/mysql/driver.ts @@ -9,7 +9,7 @@ import { MySqlDatabase } from '~/mysql-core/db.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { Mode } from '~/mysql-core/session.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { BunMySqlPreparedQueryHKT, BunMySqlQueryResultHKT } from './session.ts'; import { BunMySqlSession } from './session.ts'; @@ -87,9 +87,9 @@ export function drizzle< TClient extends SQL = SQL, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, BunMySqlDrizzleConfig, ] | [ ( @@ -110,26 +110,22 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as { - connection?: { url?: string } & SQL.Options; - client?: TClient; - } & BunMySqlDrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as { + connection?: { url?: string } & SQL.Options; + client?: TClient; + } & BunMySqlDrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - if (typeof connection === 'object' && connection.url !== undefined) { - const { url, ...config } = connection; + if (typeof connection === 'object' && connection.url !== undefined) { + const { url, ...config } = connection; - const instance = new SQL({ url, ...config }); - return construct(instance, drizzleConfig) as any; - } - - const instance = new SQL(connection); + const instance = new SQL({ url, ...config }); return construct(instance, drizzleConfig) as any; } - return construct(params[0] as TClient, params[1] as BunMySqlDrizzleConfig | undefined) as any; + const instance = new SQL(connection); + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/bun-sql/postgres/driver.ts b/drizzle-orm/src/bun-sql/postgres/driver.ts index 2ee923730e..bc49fdef7e 100644 --- a/drizzle-orm/src/bun-sql/postgres/driver.ts +++ b/drizzle-orm/src/bun-sql/postgres/driver.ts @@ -7,7 +7,7 @@ import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { BunSQLQueryResultHKT } from './session.ts'; import { BunSQLSession } from './session.ts'; @@ -69,9 +69,9 @@ export function drizzle< TClient extends SQL = SQL, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -92,26 +92,22 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as { - connection?: { url?: string } & SQL.Options; - client?: TClient; - } & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as { + connection?: { url?: string } & SQL.Options; + client?: TClient; + } & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - if (typeof connection === 'object' && connection.url !== undefined) { - const { url, ...config } = connection; + if (typeof connection === 'object' && connection.url !== undefined) { + const { url, ...config } = connection; - const instance = new SQL({ url, ...config }); - return construct(instance, drizzleConfig) as any; - } - - const instance = new SQL(connection); + const instance = new SQL({ url, ...config }); return construct(instance, drizzleConfig) as any; } - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + const instance = new SQL(connection); + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/bun-sql/sqlite/driver.ts b/drizzle-orm/src/bun-sql/sqlite/driver.ts index 4753ceead2..321475c1f5 100644 --- a/drizzle-orm/src/bun-sql/sqlite/driver.ts +++ b/drizzle-orm/src/bun-sql/sqlite/driver.ts @@ -7,7 +7,7 @@ import { DefaultLogger } from '~/logger.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { BunSQLiteRunResult } from './session.ts'; import { BunSQLiteSession } from './session.ts'; @@ -69,9 +69,9 @@ export function drizzle< TClient extends SQL = SQL, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -92,26 +92,22 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as { - connection?: { url?: string } & SQL.Options; - client?: TClient; - } & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as { + connection?: { url?: string } & SQL.Options; + client?: TClient; + } & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - if (typeof connection === 'object' && connection.url !== undefined) { - const { url, ...config } = connection; + if (typeof connection === 'object' && connection.url !== undefined) { + const { url, ...config } = connection; - const instance = new SQL({ url, ...config }); - return construct(instance, drizzleConfig) as any; - } - - const instance = new SQL(connection); + const instance = new SQL({ url, ...config }); return construct(instance, drizzleConfig) as any; } - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + const instance = new SQL(connection); + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/bun-sql/sqlite/session.ts b/drizzle-orm/src/bun-sql/sqlite/session.ts index 18987cc583..9e1588bb7e 100644 --- a/drizzle-orm/src/bun-sql/sqlite/session.ts +++ b/drizzle-orm/src/bun-sql/sqlite/session.ts @@ -206,8 +206,6 @@ export class BunSQLitePreparedQuery< private isRqbV2Query?: TIsRqbV2, ) { super('async', executeMethod, query, cache, queryMetadata, cacheConfig); - this.customResultMapper = customResultMapper; - this.fields = fields; } async run(placeholderValues: Record = {}): Promise { diff --git a/drizzle-orm/src/bun-sqlite/driver.ts b/drizzle-orm/src/bun-sqlite/driver.ts index 4989e65654..01c084c490 100644 --- a/drizzle-orm/src/bun-sqlite/driver.ts +++ b/drizzle-orm/src/bun-sqlite/driver.ts @@ -7,7 +7,7 @@ import { DefaultLogger } from '~/logger.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { SQLiteBunSession } from './session.ts'; export class SQLiteBunDatabase< @@ -104,10 +104,10 @@ export function drizzle< ...params: | [] | [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ @@ -129,32 +129,28 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & ({ - connection?: DrizzleBunSqliteDatabaseConfig | string; - client?: TClient; - }) - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & ({ + connection?: DrizzleBunSqliteDatabaseConfig | string; + client?: TClient; + }) + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - if (typeof connection === 'object') { - const { source, ...opts } = connection; + if (typeof connection === 'object') { + const { source, ...opts } = connection; - const options = Object.values(opts).filter((v) => v !== undefined).length ? opts : undefined; + const options = Object.values(opts).filter((v) => v !== undefined).length ? opts : undefined; - const instance = new Database(source, options); - - return construct(instance, drizzleConfig) as any; - } - - const instance = new Database(connection); + const instance = new Database(source, options); return construct(instance, drizzleConfig) as any; } - return construct(params[0] as Database, params[1] as DrizzleConfig | undefined) as any; + const instance = new Database(connection); + + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/cockroach-core/alias.ts b/drizzle-orm/src/cockroach-core/alias.ts new file mode 100644 index 0000000000..1bf0d9a30d --- /dev/null +++ b/drizzle-orm/src/cockroach-core/alias.ts @@ -0,0 +1,11 @@ +import { TableAliasProxyHandler } from '~/alias.ts'; +import type { BuildAliasTable } from './query-builders/select.types.ts'; +import type { CockroachTable } from './table.ts'; +import type { CockroachViewBase } from './view-base.ts'; + +export function alias( + table: TTable, + alias: TAlias, +): BuildAliasTable { + return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; +} diff --git a/drizzle-orm/src/cockroach-core/checks.ts b/drizzle-orm/src/cockroach-core/checks.ts new file mode 100644 index 0000000000..0077960896 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/checks.ts @@ -0,0 +1,32 @@ +import { entityKind } from '~/entity.ts'; +import type { SQL } from '~/sql/index.ts'; +import type { CockroachTable } from './table.ts'; + +export class CheckBuilder { + static readonly [entityKind]: string = 'CockroachCheckBuilder'; + + protected brand!: 'CockroachConstraintBuilder'; + + constructor(public name: string, public value: SQL) {} + + /** @internal */ + build(table: CockroachTable): Check { + return new Check(table, this); + } +} + +export class Check { + static readonly [entityKind]: string = 'CockroachCheck'; + + readonly name: string; + readonly value: SQL; + + constructor(public table: CockroachTable, builder: CheckBuilder) { + this.name = builder.name; + this.value = builder.value; + } +} + +export function check(name: string, value: SQL): CheckBuilder { + return new CheckBuilder(name, value); +} diff --git a/drizzle-orm/src/cockroach-core/columns/all.ts b/drizzle-orm/src/cockroach-core/columns/all.ts new file mode 100644 index 0000000000..f7ff121ec0 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/all.ts @@ -0,0 +1,56 @@ +import { bigint, int8 } from './bigint.ts'; +import { bit } from './bit.ts'; +import { bool } from './bool.ts'; +import { char } from './char.ts'; +import { customType } from './custom.ts'; +import { date } from './date.ts'; +import { decimal, numeric } from './decimal.ts'; +import { doublePrecision, float } from './float.ts'; +import { geometry } from './geometry.ts'; +import { inet } from './inet.ts'; +import { int4 } from './integer.ts'; +import { interval } from './interval.ts'; +import { jsonb } from './jsonb.ts'; +import { real } from './real.ts'; +import { int2, smallint } from './smallint.ts'; +import { string, text } from './string.ts'; +import { time } from './time.ts'; +import { timestamp } from './timestamp.ts'; +import { uuid } from './uuid.ts'; +import { varbit } from './varbit.ts'; +import { varchar } from './varchar.ts'; +import { vector } from './vector.ts'; + +export function getCockroachColumnBuilders() { + return { + bigint, + bool, + char, + customType, + date, + doublePrecision, + inet, + int4, + int2, + int8, + interval, + jsonb, + numeric, + decimal, + geometry, + real, + smallint, + text, + time, + timestamp, + uuid, + varchar, + bit, + vector, + float, + string, + varbit, + }; +} + +export type CockroachColumnsBuilders = ReturnType; diff --git a/drizzle-orm/src/cockroach-core/columns/bigint.ts b/drizzle-orm/src/cockroach-core/columns/bigint.ts new file mode 100644 index 0000000000..810ad9aaf0 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/bigint.ts @@ -0,0 +1,111 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn } from './common.ts'; +import { CockroachIntColumnBaseBuilder } from './int.common.ts'; + +export class CockroachBigInt53Builder extends CockroachIntColumnBaseBuilder<{ + dataType: 'number int53'; + data: number; + driverParam: number | string; +}> { + static override readonly [entityKind]: string = 'CockroachBigInt53Builder'; + + constructor(name: string) { + super(name, 'number int53', 'CockroachBigInt53'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachBigInt53( + table, + this.config, + ); + } +} + +export class CockroachBigInt53> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachBigInt53'; + + getSQLType(): string { + return 'int8'; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'number') { + return value; + } + return Number(value); + } +} + +export class CockroachBigInt64Builder extends CockroachIntColumnBaseBuilder<{ + dataType: 'bigint int64'; + data: bigint; + driverParam: string; +}> { + static override readonly [entityKind]: string = 'CockroachBigInt64Builder'; + + constructor(name: string) { + super(name, 'bigint int64', 'CockroachBigInt64'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachBigInt64( + table, + this.config, + ); + } +} + +export class CockroachBigInt64> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachBigInt64'; + + getSQLType(): string { + return 'int8'; + } + + // eslint-disable-next-line unicorn/prefer-native-coercion-functions + override mapFromDriverValue(value: string): bigint { + return BigInt(value); + } +} + +export interface CockroachBigIntConfig { + mode: T; +} + +export function bigint( + config: CockroachBigIntConfig, +): TMode extends 'number' ? CockroachBigInt53Builder : CockroachBigInt64Builder; +export function bigint( + name: string, + config: CockroachBigIntConfig, +): TMode extends 'number' ? CockroachBigInt53Builder : CockroachBigInt64Builder; +export function bigint(a: string | CockroachBigIntConfig, b?: CockroachBigIntConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config.mode === 'number') { + return new CockroachBigInt53Builder(name); + } + return new CockroachBigInt64Builder(name); +} +export function int8( + config: CockroachBigIntConfig, +): TMode extends 'number' ? CockroachBigInt53Builder : CockroachBigInt64Builder; +export function int8( + name: string, + config: CockroachBigIntConfig, +): TMode extends 'number' ? CockroachBigInt53Builder : CockroachBigInt64Builder; +export function int8(a: string | CockroachBigIntConfig, b?: CockroachBigIntConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config.mode === 'number') { + return new CockroachBigInt53Builder(name); + } + return new CockroachBigInt64Builder(name); +} diff --git a/drizzle-orm/src/cockroach-core/columns/bit.ts b/drizzle-orm/src/cockroach-core/columns/bit.ts new file mode 100644 index 0000000000..e54d7927d5 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/bit.ts @@ -0,0 +1,54 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachBitBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'string binary'; + data: string; + driverParam: string; +}, { length: number | undefined; setLength: boolean; isLengthExact: true }> { + static override readonly [entityKind]: string = 'CockroachBitBuilder'; + + constructor(name: string, config: CockroachBitConfig) { + super(name, 'string binary', 'CockroachBit'); + this.config.length = config.length ?? 1; + this.config.setLength = config.length !== undefined; + this.config.isLengthExact = true; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachBit( + table, + this.config, + ); + } +} + +export class CockroachBit & { length?: number }> + extends CockroachColumn +{ + static override readonly [entityKind]: string = 'CockroachBit'; + + getSQLType(): string { + return this.config.setLength ? `bit(${this.length})` : 'bit'; + } +} + +export interface CockroachBitConfig { + length?: number | undefined; +} + +export function bit(config?: CockroachBitConfig): CockroachBitBuilder; +export function bit( + name: string, + config?: CockroachBitConfig, +): CockroachBitBuilder; +export function bit(a?: string | CockroachBitConfig, b: CockroachBitConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachBitBuilder(name, config); +} diff --git a/drizzle-orm/src/cockroach-core/columns/bool.ts b/drizzle-orm/src/cockroach-core/columns/bool.ts new file mode 100644 index 0000000000..5c32997076 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/bool.ts @@ -0,0 +1,40 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachBooleanBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'boolean'; + data: boolean; + driverParam: boolean; +}> { + static override readonly [entityKind]: string = 'CockroachBooleanBuilder'; + + constructor(name: string) { + super(name, 'boolean', 'CockroachBoolean'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachBoolean( + table, + this.config, + ); + } +} + +export class CockroachBoolean> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachBoolean'; + + getSQLType(): string { + return 'bool'; + } +} + +export function bool(name?: string) { + return new CockroachBooleanBuilder(name ?? ''); +} + +export const boolean = bool; diff --git a/drizzle-orm/src/cockroach-core/columns/char.ts b/drizzle-orm/src/cockroach-core/columns/char.ts new file mode 100644 index 0000000000..3a60766c2a --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/char.ts @@ -0,0 +1,70 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachCharBuilder< + TEnum extends [string, ...string[]], +> extends CockroachColumnWithArrayBuilder< + { + dataType: Equal extends true ? 'string' : 'string enum'; + data: TEnum[number]; + enumValues: TEnum; + driverParam: string; + }, + { enumValues: TEnum | undefined; length: number; setLength: boolean } +> { + static override readonly [entityKind]: string = 'CockroachCharBuilder'; + + constructor(name: string, config: CockroachCharConfig) { + super(name, config.enum?.length ? 'string enum' : 'string', 'CockroachChar'); + this.config.enumValues = config.enum; + this.config.length = config.length ?? 1; + this.config.setLength = config.length !== undefined; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachChar( + table, + this.config, + ); + } +} + +export class CockroachChar> + extends CockroachColumn +{ + static override readonly [entityKind]: string = 'CockroachChar'; + + override readonly enumValues = this.config.enumValues; + + getSQLType(): string { + return this.config.setLength ? `char(${this.length})` : `char`; + } +} + +export interface CockroachCharConfig< + TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, +> { + enum?: TEnum; + length?: number | undefined; +} + +export function char>( + config?: CockroachCharConfig>, +): CockroachCharBuilder>; +export function char< + U extends string, + T extends Readonly<[U, ...U[]]>, +>( + name: string, + config?: CockroachCharConfig>, +): CockroachCharBuilder>; +export function char(a?: string | CockroachCharConfig, b: CockroachCharConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachCharBuilder(name, config as any); +} diff --git a/drizzle-orm/src/cockroach-core/columns/common.ts b/drizzle-orm/src/cockroach-core/columns/common.ts new file mode 100644 index 0000000000..0593a17175 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/common.ts @@ -0,0 +1,310 @@ +import type { + ColumnBuilderBaseConfig, + ColumnBuilderRuntimeConfig, + ColumnType, + HasGenerated, +} from '~/column-builder.ts'; +import { ColumnBuilder } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { Update } from '~/utils.ts'; + +import type { ForeignKey, UpdateDeleteAction } from '~/cockroach-core/foreign-keys.ts'; +import { ForeignKeyBuilder } from '~/cockroach-core/foreign-keys.ts'; +import type { AnyCockroachTable, CockroachTable } from '~/cockroach-core/table.ts'; +import type { SQL } from '~/sql/sql.ts'; +import { iife } from '~/tracing-utils.ts'; +import { makeCockroachArray, parseCockroachArray } from '../utils/array.ts'; + +export type CockroachColumns = Record>; + +export interface ReferenceConfig { + ref: () => CockroachColumn; + config: { + name?: string; + onUpdate?: UpdateDeleteAction; + onDelete?: UpdateDeleteAction; + }; +} +export abstract class CockroachColumnBuilder< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, +> extends ColumnBuilder { + private foreignKeyConfigs: ReferenceConfig[] = []; + + static override readonly [entityKind]: string = 'CockroachColumnBuilder'; + + references( + ref: ReferenceConfig['ref'], + config: ReferenceConfig['config'] = {}, + ): this { + this.foreignKeyConfigs.push({ ref, config }); + return this; + } + + unique( + name?: string, + ): this { + this.config.isUnique = true; + this.config.uniqueName = name; + return this; + } + + generatedAlwaysAs(as: SQL | T['data'] | (() => SQL)): HasGenerated { + this.config.generated = { + as, + type: 'always', + mode: 'stored', + }; + return this as HasGenerated; + } + + /** @internal */ + buildForeignKeys(column: CockroachColumn, table: CockroachTable): ForeignKey[] { + return this.foreignKeyConfigs.map(({ ref, config }) => { + return iife( + (ref, config) => { + const builder = new ForeignKeyBuilder(() => { + const foreignColumn = ref(); + return { name: config.name, columns: [column], foreignColumns: [foreignColumn] }; + }); + if (config.onUpdate) { + builder.onUpdate(config.onUpdate); + } + if (config.onDelete) { + builder.onDelete(config.onDelete); + } + return builder.build(table); + }, + ref, + config, + ); + }); + } + + /** @internal */ + abstract build(table: CockroachTable): CockroachColumn; + + /** @internal */ + buildExtraConfigColumn( + table: AnyCockroachTable<{ name: TTableName }>, + ): ExtraConfigColumn { + return new ExtraConfigColumn(table, this.config); + } +} + +export abstract class CockroachColumnWithArrayBuilder< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, +> extends CockroachColumnBuilder { + static override readonly [entityKind]: string = 'CockroachColumnWithArrayBuilder'; + array(size?: TSize): Omit< + CockroachArrayBuilder< + & { + name: string; + dataType: 'array basecolumn'; + data: T['data'][]; + driverParam: T['driverParam'][] | string; + baseBuilder: T; + } + & (T extends { notNull: true } ? { notNull: true } : {}) + & (T extends { hasDefault: true } ? { hasDefault: true } : {}), + T + >, + 'array' + > { + return new CockroachArrayBuilder( + this.config.name, + this as CockroachColumnWithArrayBuilder, + size as any, + ) as any; // size as any + } +} + +// To understand how to use `CockroachColumn` and `AnyCockroachColumn`, see `Column` and `AnyColumn` documentation. +export abstract class CockroachColumn< + T extends ColumnBaseConfig = ColumnBaseConfig, + TRuntimeConfig extends object = {}, +> extends Column { + static override readonly [entityKind]: string = 'CockroachColumn'; + + /** @internal */ + override readonly table: CockroachTable; + + constructor( + table: CockroachTable, + config: ColumnBuilderRuntimeConfig & TRuntimeConfig, + ) { + super(table, config); + this.table = table; + } + + /** @internal */ + override shouldDisableInsert(): boolean { + return (this.config.generatedIdentity !== undefined && this.config.generatedIdentity.type === 'always') + || (this.config.generated !== undefined && this.config.generated.type !== 'byDefault'); + } +} + +export type IndexedExtraConfigType = { order?: 'asc' | 'desc' }; + +export class ExtraConfigColumn< + T extends ColumnBaseConfig = ColumnBaseConfig, +> extends CockroachColumn { + static override readonly [entityKind]: string = 'ExtraConfigColumn'; + + override getSQLType(): string { + return this.getSQLType(); + } + + indexConfig: IndexedExtraConfigType = { + order: this.config.order ?? 'asc', + }; + defaultConfig: IndexedExtraConfigType = { + order: 'asc', + }; + + asc(): Omit { + this.indexConfig.order = 'asc'; + return this; + } + + desc(): Omit { + this.indexConfig.order = 'desc'; + return this; + } +} + +export class IndexedColumn { + static readonly [entityKind]: string = 'IndexedColumn'; + constructor( + name: string | undefined, + keyAsName: boolean, + type: string, + indexConfig: IndexedExtraConfigType, + ) { + this.name = name; + this.keyAsName = keyAsName; + this.type = type; + this.indexConfig = indexConfig; + } + + name: string | undefined; + keyAsName: boolean; + type: string; + indexConfig: IndexedExtraConfigType; +} + +export type AnyCockroachColumn> = {}> = CockroachColumn< + Required, TPartial>> +>; + +export type CockroachArrayColumnBuilderBaseConfig = ColumnBuilderBaseConfig<'array basecolumn'> & { + baseBuilder: ColumnBuilderBaseConfig; +}; + +export class CockroachArrayBuilder< + T extends CockroachArrayColumnBuilderBaseConfig, + TBase extends ColumnBuilderBaseConfig | CockroachArrayColumnBuilderBaseConfig, +> extends CockroachColumnWithArrayBuilder< + T & { + baseBuilder: TBase extends CockroachArrayColumnBuilderBaseConfig ? CockroachArrayBuilder< + TBase, + TBase extends { baseBuilder: infer TBaseBuilder extends ColumnBuilderBaseConfig } ? TBaseBuilder + : never + > + : CockroachColumnWithArrayBuilder; + }, + { + baseBuilder: TBase extends CockroachArrayColumnBuilderBaseConfig ? CockroachArrayBuilder< + TBase, + TBase extends { baseBuilder: infer TBaseBuilder extends ColumnBuilderBaseConfig } ? TBaseBuilder + : never + > + : CockroachColumnWithArrayBuilder; + length: number | undefined; + } +> { + static override readonly [entityKind]: string = 'CockroachArrayBuilder'; + + constructor( + name: string, + baseBuilder: CockroachArrayBuilder['config']['baseBuilder'], + length: number | undefined, + ) { + super(name, 'array basecolumn', 'CockroachArray'); + this.config.baseBuilder = baseBuilder; + this.config.length = length; + } + + /** @internal */ + override build(table: CockroachTable) { + const baseColumn: any = this.config.baseBuilder.build(table); + return new CockroachArray( + table, + this.config as any, + baseColumn, + ); + } +} + +export class CockroachArray< + T extends ColumnBaseConfig<'array basecolumn'> & { + length: number | undefined; + baseBuilder: ColumnBuilderBaseConfig; + }, + TBase extends ColumnBuilderBaseConfig, +> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachArray'; + + constructor( + table: CockroachTable, + config: CockroachArrayBuilder['config'], + readonly baseColumn: CockroachColumn, + readonly range?: [number | undefined, number | undefined], + ) { + super(table, config); + } + + getSQLType(): string { + return `${this.baseColumn.getSQLType()}[${typeof this.length === 'number' ? this.length : ''}]`; + } + + override mapFromDriverValue(value: unknown[] | string): T['data'] { + if (typeof value === 'string') { + value = parseCockroachArray(value); + } + return value.map((v) => this.baseColumn.mapFromDriverValue(v)); + } + + // Needed for arrays of custom types + mapFromJsonValue(value: unknown[] | string): T['data'] { + if (typeof value === 'string') { + // Thank you node-postgres for not parsing enum arrays + value = parseCockroachArray(value); + } + + const base = this.baseColumn; + + return 'mapFromJsonValue' in base + ? value.map((v) => (<(value: unknown) => unknown> base.mapFromJsonValue)(v)) + : value.map((v) => base.mapFromDriverValue(v)); + } + + override mapToDriverValue(value: unknown[], isNestedArray = false): unknown[] | string { + const a = value.map((v) => + v === null + ? null + : is(this.baseColumn, CockroachArray) + ? this.baseColumn.mapToDriverValue(v as unknown[], true) + : this.baseColumn.mapToDriverValue(v) + ); + if (isNestedArray) return a; + return makeCockroachArray(a); + } +} diff --git a/drizzle-orm/src/cockroach-core/columns/custom.ts b/drizzle-orm/src/cockroach-core/columns/custom.ts new file mode 100644 index 0000000000..b3b9c2b343 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/custom.ts @@ -0,0 +1,369 @@ +import type { AnyCockroachTable, CockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBuilderBaseConfig } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { SQL, SQLGenerator } from '~/sql/sql.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export type ConvertCustomConfig> = + & { + dataType: 'custom'; + data: T['data']; + driverParam: T['driverData']; + } + & (T['notNull'] extends true ? { notNull: true } : {}) + & (T['default'] extends true ? { hasDefault: true } : {}); + +export interface CockroachCustomColumnInnerConfig { + customTypeValues: CustomTypeValues; +} + +export class CockroachCustomColumnBuilder> + extends CockroachColumnWithArrayBuilder< + T, + { + fieldConfig: CustomTypeValues['config']; + customTypeParams: CustomTypeParams; + } + > +{ + static override readonly [entityKind]: string = 'CockroachCustomColumnBuilder'; + + constructor( + name: string, + fieldConfig: CustomTypeValues['config'], + customTypeParams: CustomTypeParams, + ) { + super(name, 'custom', 'CockroachCustomColumn'); + this.config.fieldConfig = fieldConfig; + this.config.customTypeParams = customTypeParams; + } + + /** @internal */ + build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachCustomColumn( + table, + this.config, + ); + } +} + +export class CockroachCustomColumn> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachCustomColumn'; + + private sqlName: string; + private mapTo?: (value: T['data']) => T['driverParam']; + private mapFrom?: (value: T['driverParam']) => T['data']; + private mapJson?: (value: unknown) => T['data']; + private forJsonSelect?: (identifier: SQL, sql: SQLGenerator, arrayDimensions?: number) => SQL; + + constructor( + table: CockroachTable, + config: CockroachCustomColumnBuilder['config'], + ) { + super(table, config); + this.sqlName = config.customTypeParams.dataType(config.fieldConfig); + this.mapTo = config.customTypeParams.toDriver; + this.mapFrom = config.customTypeParams.fromDriver; + this.mapJson = config.customTypeParams.fromJson; + this.forJsonSelect = config.customTypeParams.forJsonSelect; + } + + getSQLType(): string { + return this.sqlName; + } + + override mapFromDriverValue(value: T['driverParam']): T['data'] { + return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; + } + + mapFromJsonValue(value: unknown): T['data'] { + return typeof this.mapJson === 'function' ? this.mapJson(value) : this.mapFromDriverValue(value) as T['data']; + } + + jsonSelectIdentifier(identifier: SQL, sql: SQLGenerator, arrayDimensions?: number): SQL { + if (typeof this.forJsonSelect === 'function') return this.forJsonSelect(identifier, sql, arrayDimensions); + + const rawType = this.getSQLType().toLowerCase(); + const parenPos = rawType.indexOf('('); + const type = (parenPos + 1) ? rawType.slice(0, parenPos) : rawType; + + switch (type) { + case 'geometry': + case 'timestamp': + case 'decimal': + case 'int8': { + const arrVal = '[]'.repeat(arrayDimensions ?? 0); + + return sql`${identifier}::text${sql.raw(arrVal).if(arrayDimensions)}`; + } + default: { + return identifier; + } + } + } + + override mapToDriverValue(value: T['data']): T['driverParam'] { + return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; + } +} + +export type CustomTypeValues = { + /** + * Required type for custom column, that will infer proper type model + * + * Examples: + * + * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` + * + * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` + */ + data: unknown; + + /** + * Type helper, that represents what type database driver is accepting for specific database data type + */ + driverData?: unknown; + + /** + * Type helper, that represents what type database driver is returning for specific database data type + * + * Needed only in case driver's output and input for type differ + * + * Defaults to {@link driverData} + */ + driverOutput?: unknown; + + /** + * Type helper, that represents what type field returns after being aggregated to JSON + */ + jsonData?: unknown; + + /** + * What config type should be used for {@link CustomTypeParams} `dataType` generation + */ + config?: Record; + + /** + * Whether the config argument should be required or not + * @default false + */ + configRequired?: boolean; + + /** + * If your custom data type should be notNull by default you can use `notNull: true` + * + * @example + * const customSerial = customType<{ data: number, notNull: true, default: true }>({ + * dataType() { + * return 'serial'; + * }, + * }); + */ + notNull?: boolean; + + /** + * If your custom data type has default you can use `default: true` + * + * @example + * const customSerial = customType<{ data: number, notNull: true, default: true }>({ + * dataType() { + * return 'serial'; + * }, + * }); + */ + default?: boolean; +}; + +export interface CustomTypeParams { + /** + * Database data type string representation, that is used for migrations + * @example + * ``` + * `jsonb`, `text` + * ``` + * + * If database data type needs additional params you can use them from `config` param + * @example + * ``` + * `varchar(256)`, `numeric(2,3)` + * ``` + * + * To make `config` be of specific type please use config generic in {@link CustomTypeValues} + * + * @example + * Usage example + * ``` + * dataType() { + * return 'boolean'; + * }, + * ``` + * Or + * ``` + * dataType(config) { + * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; + * } + * ``` + */ + dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; + + /** + * Optional mapping function, that is used to transform inputs from desired to be used in code format to one suitable for driver + * @example + * For example, when using jsonb we need to map JS/TS object to string before writing to database + * ``` + * toDriver(value: TData): string { + * return JSON.stringify(value); + * } + * ``` + */ + toDriver?: (value: T['data']) => T['driverData'] | SQL; + + /** + * Optional mapping function, that is used for transforming data returned by driver to desired column's output format + * @example + * For example, when using timestamp we need to map string Date representation to JS Date + * ``` + * fromDriver(value: string): Date { + * return new Date(value); + * } + * ``` + * + * It'll cause the returned data to change from: + * ``` + * { + * customField: "2025-04-07T03:25:16.635Z"; + * } + * ``` + * to: + * ``` + * { + * customField: new Date("2025-04-07T03:25:16.635Z"); + * } + * ``` + */ + fromDriver?: (value: 'driverOutput' extends keyof T ? T['driverOutput'] : T['driverData']) => T['data']; + + /** + * Optional mapping function, that is used for transforming data returned by transofmed to JSON in database data to desired format + * + * Used by [relational queries](https://orm.drizzle.team/docs/rqb-v2) + * + * Defaults to {@link fromDriver} function + * @example + * For example, when querying bigint column via [RQB](https://orm.drizzle.team/docs/rqb-v2) or [JSON functions](https://orm.drizzle.team/docs/json-functions), the result field will be returned as it's string representation, as opposed to bigint from regular query + * To handle that, we need a separate function to handle such field's mapping: + * ``` + * fromJson(value: string): bigint { + * return BigInt(value); + * }, + * ``` + * + * It'll cause the returned data to change from: + * ``` + * { + * customField: "5044565289845416380"; + * } + * ``` + * to: + * ``` + * { + * customField: 5044565289845416380n; + * } + * ``` + */ + fromJson?: (value: T['jsonData']) => T['data']; + + /** + * Optional selection modifier function, that is used for modifying selection of column inside [JSON functions](https://orm.drizzle.team/docs/json-functions) + * + * Additional mapping that could be required for such scenarios can be handled using {@link fromJson} function + * + * Used by [relational queries](https://orm.drizzle.team/docs/rqb-v2) + * + * Following types are being casted to text by default: `bytea`, `geometry`, `timestamp`, `numeric`, `bigint` + * @example + * For example, when using bigint we need to cast field to text to preserve data integrity + * ``` + * forJsonSelect(identifier: SQL, sql: SQLGenerator, arrayDimensions?: number): SQL { + * return sql`${identifier}::text` + * }, + * ``` + * + * This will change query from: + * ``` + * SELECT + * row_to_json("t".*) + * FROM + * ( + * SELECT + * "table"."custom_bigint" AS "bigint" + * FROM + * "table" + * ) AS "t" + * ``` + * to: + * ``` + * SELECT + * row_to_json("t".*) + * FROM + * ( + * SELECT + * "table"."custom_bigint"::text AS "bigint" + * FROM + * "table" + * ) AS "t" + * ``` + * + * Returned by query object will change from: + * ``` + * { + * bigint: 5044565289845416000; // Partial data loss due to direct conversion to JSON format + * } + * ``` + * to: + * ``` + * { + * bigint: "5044565289845416380"; // Data is preserved due to conversion of field to text before JSON-ification + * } + * ``` + */ + forJsonSelect?: (identifier: SQL, sql: SQLGenerator, arrayDimensions?: number) => SQL; +} + +/** + * Custom cockroach database data type generator + */ +export function customType( + customTypeParams: CustomTypeParams, +): Equal extends true ? { + & T['config']>( + fieldConfig: TConfig, + ): CockroachCustomColumnBuilder>; + ( + dbName: string, + fieldConfig: T['config'], + ): CockroachCustomColumnBuilder>; + } + : { + (): CockroachCustomColumnBuilder>; + & T['config']>( + fieldConfig?: TConfig, + ): CockroachCustomColumnBuilder>; + ( + dbName: string, + fieldConfig?: T['config'], + ): CockroachCustomColumnBuilder>; + } +{ + return ( + a?: string | T['config'], + b?: T['config'], + ): CockroachCustomColumnBuilder> => { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachCustomColumnBuilder(name, config, customTypeParams); + }; +} diff --git a/drizzle-orm/src/cockroach-core/columns/date.common.ts b/drizzle-orm/src/cockroach-core/columns/date.common.ts new file mode 100644 index 0000000000..f3043c2b45 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/date.common.ts @@ -0,0 +1,15 @@ +import type { ColumnBuilderBaseConfig, ColumnType } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import { sql } from '~/sql/sql.ts'; +import { CockroachColumnWithArrayBuilder } from './common.ts'; + +export abstract class CockroachDateColumnBaseBuilder< + T extends ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, +> extends CockroachColumnWithArrayBuilder { + static override readonly [entityKind]: string = 'CockroachDateColumnBaseBuilder'; + + defaultNow() { + return this.default(sql`now()`); + } +} diff --git a/drizzle-orm/src/cockroach-core/columns/date.ts b/drizzle-orm/src/cockroach-core/columns/date.ts new file mode 100644 index 0000000000..dcbc54d46d --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/date.ts @@ -0,0 +1,94 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn } from './common.ts'; +import { CockroachDateColumnBaseBuilder } from './date.common.ts'; + +export class CockroachDateBuilder extends CockroachDateColumnBaseBuilder<{ + dataType: 'object date'; + data: Date; + driverParam: string; +}> { + static override readonly [entityKind]: string = 'CockroachDateBuilder'; + + constructor(name: string) { + super(name, 'object date', 'CockroachDate'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachDate( + table, + this.config, + ); + } +} + +export class CockroachDate> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachDate'; + + getSQLType(): string { + return 'date'; + } + + override mapFromDriverValue(value: string): Date { + return new Date(value); + } + + override mapToDriverValue(value: Date): string { + return value.toISOString(); + } +} + +export class CockroachDateStringBuilder extends CockroachDateColumnBaseBuilder<{ + dataType: 'string date'; + data: string; + driverParam: string; +}> { + static override readonly [entityKind]: string = 'CockroachDateStringBuilder'; + + constructor(name: string) { + super(name, 'string date', 'CockroachDateString'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachDateString( + table, + this.config, + ); + } +} + +export class CockroachDateString> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachDateString'; + + getSQLType(): string { + return 'date'; + } +} + +export interface CockroachDateConfig { + mode: T; +} + +export function date( + config?: CockroachDateConfig, +): Equal extends true ? CockroachDateBuilder : CockroachDateStringBuilder; +export function date( + name: string, + config?: CockroachDateConfig, +): Equal extends true ? CockroachDateBuilder + : CockroachDateStringBuilder; +export function date(a?: string | CockroachDateConfig, b?: CockroachDateConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'date') { + return new CockroachDateBuilder(name); + } + return new CockroachDateStringBuilder(name); +} diff --git a/drizzle-orm/src/cockroach-core/columns/decimal.ts b/drizzle-orm/src/cockroach-core/columns/decimal.ts new file mode 100644 index 0000000000..b1bbfc098d --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/decimal.ts @@ -0,0 +1,219 @@ +import type { AnyCockroachTable, CockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachDecimalBuilder extends CockroachColumnWithArrayBuilder< + { + dataType: 'string numeric'; + data: string; + driverParam: string; + }, + { + precision: number | undefined; + scale: number | undefined; + } +> { + static override readonly [entityKind]: string = 'CockroachDecimalBuilder'; + + constructor(name: string, precision?: number, scale?: number) { + super(name, 'string numeric', 'CockroachDecimal'); + this.config.precision = precision; + this.config.scale = scale; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachDecimal( + table, + this.config, + ); + } +} + +export class CockroachDecimal> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachDecimal'; + + readonly precision: number | undefined; + readonly scale: number | undefined; + + constructor(table: CockroachTable, config: CockroachDecimalBuilder['config']) { + super(table, config); + this.precision = config.precision; + this.scale = config.scale; + } + + override mapFromDriverValue(value: unknown): string { + if (typeof value === 'string') return value; + + return String(value); + } + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `decimal(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'decimal'; + } else { + return `decimal(${this.precision})`; + } + } +} + +export class CockroachDecimalNumberBuilder extends CockroachColumnWithArrayBuilder< + { + dataType: 'number'; + data: number; + driverParam: string; + }, + { + precision: number | undefined; + scale: number | undefined; + } +> { + static override readonly [entityKind]: string = 'CockroachDecimalNumberBuilder'; + + constructor(name: string, precision?: number, scale?: number) { + super(name, 'number', 'CockroachDecimalNumber'); + this.config.precision = precision; + this.config.scale = scale; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachDecimalNumber( + table, + this.config, + ); + } +} + +export class CockroachDecimalNumber> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachDecimalNumber'; + + readonly precision: number | undefined; + readonly scale: number | undefined; + + constructor( + table: CockroachTable, + config: CockroachDecimalNumberBuilder['config'], + ) { + super(table, config); + this.precision = config.precision; + this.scale = config.scale; + } + + override mapFromDriverValue(value: unknown): number { + if (typeof value === 'number') return value; + + return Number(value); + } + + override mapToDriverValue = String; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `decimal(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'decimal'; + } else { + return `decimal(${this.precision})`; + } + } +} + +export class CockroachDecimalBigIntBuilder extends CockroachColumnWithArrayBuilder< + { + dataType: 'bigint int64'; + data: bigint; + driverParam: string; + }, + { + precision: number | undefined; + scale: number | undefined; + } +> { + static override readonly [entityKind]: string = 'CockroachDecimalBigIntBuilder'; + + constructor(name: string, precision?: number, scale?: number) { + super(name, 'bigint int64', 'CockroachDecimalBigInt'); + this.config.precision = precision; + this.config.scale = scale; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachDecimalBigInt( + table, + this.config, + ); + } +} + +export class CockroachDecimalBigInt> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachDecimalBigInt'; + + readonly precision: number | undefined; + readonly scale: number | undefined; + + constructor( + table: CockroachTable, + config: CockroachDecimalBigIntBuilder['config'], + ) { + super(table, config); + this.precision = config.precision; + this.scale = config.scale; + } + + override mapFromDriverValue = BigInt; + + override mapToDriverValue = String; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `decimal(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'decimal'; + } else { + return `decimal(${this.precision})`; + } + } +} + +export type CockroachDecimalConfig< + T extends 'string' | 'number' | 'bigint' = 'string' | 'number' | 'bigint', +> = + | { precision: number; scale?: number; mode?: T } + | { precision?: number; scale: number; mode?: T } + | { precision?: number; scale?: number; mode: T }; + +export function decimal( + config?: CockroachDecimalConfig, +): Equal extends true ? CockroachDecimalNumberBuilder + : Equal extends true ? CockroachDecimalBigIntBuilder + : CockroachDecimalBuilder; +export function decimal( + name: string, + config?: CockroachDecimalConfig, +): Equal extends true ? CockroachDecimalNumberBuilder + : Equal extends true ? CockroachDecimalBigIntBuilder + : CockroachDecimalBuilder; +export function decimal(a?: string | CockroachDecimalConfig, b?: CockroachDecimalConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + const mode = config?.mode; + return mode === 'number' + ? new CockroachDecimalNumberBuilder(name, config?.precision, config?.scale) + : mode === 'bigint' + ? new CockroachDecimalBigIntBuilder(name, config?.precision, config?.scale) + : new CockroachDecimalBuilder(name, config?.precision, config?.scale); +} + +// numeric is alias for decimal +export const numeric = decimal; diff --git a/drizzle-orm/src/cockroach-core/columns/enum.ts b/drizzle-orm/src/cockroach-core/columns/enum.ts new file mode 100644 index 0000000000..99e628419b --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/enum.ts @@ -0,0 +1,184 @@ +import type { AnyCockroachTable, CockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { NonArray, Writable } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +// Enum as ts enum +export interface CockroachEnumObject { + (name?: string): CockroachEnumObjectColumnBuilder; + + readonly enumName: string; + readonly enumValues: string[]; + readonly schema: string | undefined; + /** @internal */ + [isCockroachEnumSym]: true; +} + +export class CockroachEnumObjectColumnBuilder extends CockroachColumnWithArrayBuilder< + { + dataType: 'string enum'; + data: TValues[keyof TValues]; + enumValues: string[]; + driverParam: string; + }, + { enum: CockroachEnumObject } +> { + static override readonly [entityKind]: string = 'CockroachEnumObjectColumnBuilder'; + + constructor(name: string, enumInstance: CockroachEnumObject) { + super(name, 'string enum', 'CockroachEnumObjectColumn'); + this.config.enum = enumInstance; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachEnumObjectColumn( + table, + this.config, + ); + } +} + +export class CockroachEnumObjectColumn< + T extends ColumnBaseConfig<'string enum'> & { enumValues: object }, +> extends CockroachColumn }> { + static override readonly [entityKind]: string = 'CockroachEnumObjectColumn'; + + readonly enum; + override readonly enumValues = this.config.enum.enumValues; + + constructor( + table: CockroachTable, + config: CockroachEnumObjectColumnBuilder['config'], + ) { + super(table, config); + this.enum = config.enum; + } + + getSQLType(): string { + return this.enum.enumName; + } +} + +// Enum as string union +const isCockroachEnumSym = Symbol.for('drizzle:isCockroachEnum'); +export interface CockroachEnum { + (name?: string): CockroachEnumColumnBuilder; + + readonly enumName: string; + readonly enumValues: TValues; + readonly schema: string | undefined; + /** @internal */ + [isCockroachEnumSym]: true; +} + +export function isCockroachEnum(obj: unknown): obj is CockroachEnum<[string, ...string[]]> { + return !!obj && typeof obj === 'function' && isCockroachEnumSym in obj && obj[isCockroachEnumSym] === true; +} + +export class CockroachEnumColumnBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'string'; + data: TValues[number]; + enumValues: TValues; + driverParam: string; +}, { enum: CockroachEnum }> { + static override readonly [entityKind]: string = 'CockroachEnumColumnBuilder'; + + constructor(name: string, enumInstance: CockroachEnum) { + super(name, 'string enum', 'CockroachEnumColumn'); + this.config.enum = enumInstance; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachEnumColumn( + table, + this.config, + ); + } +} + +export class CockroachEnumColumn< + T extends ColumnBaseConfig<'string enum'> & { enumValues: [string, ...string[]] }, +> extends CockroachColumn }> { + static override readonly [entityKind]: string = 'CockroachEnumColumn'; + + readonly enum = this.config.enum; + override readonly enumValues = this.config.enum.enumValues; + + constructor( + table: CockroachTable, + config: CockroachEnumColumnBuilder['config'], + ) { + super(table, config); + this.enum = config.enum; + } + + getSQLType(): string { + return this.enum.enumName; + } +} + +export function cockroachEnum>( + enumName: string, + values: T | Writable, +): CockroachEnum>; + +export function cockroachEnum>( + enumName: string, + enumObj: NonArray, +): CockroachEnumObject; + +export function cockroachEnum( + enumName: any, + input: any, +): any { + return Array.isArray(input) + ? cockroachEnumWithSchema(enumName, [...input] as [string, ...string[]], undefined) + : cockroachEnumObjectWithSchema(enumName, input, undefined); +} + +/** @internal */ +export function cockroachEnumWithSchema>( + enumName: string, + values: T | Writable, + schema?: string, +): CockroachEnum> { + const enumInstance: CockroachEnum> = Object.assign( + (name?: string): CockroachEnumColumnBuilder> => + new CockroachEnumColumnBuilder(name ?? '', enumInstance), + { + enumName, + enumValues: values, + schema, + [isCockroachEnumSym]: true, + } as const, + ); + + return enumInstance; +} + +/** @internal */ +export function cockroachEnumObjectWithSchema( + enumName: string, + values: T, + schema?: string, +): CockroachEnumObject { + const enumInstance: CockroachEnumObject = Object.assign( + (name?: string): CockroachEnumObjectColumnBuilder => + new CockroachEnumObjectColumnBuilder(name ?? '', enumInstance), + { + enumName, + enumValues: Object.values(values), + schema, + [isCockroachEnumSym]: true, + } as const, + ); + + return enumInstance; +} diff --git a/drizzle-orm/src/cockroach-core/columns/float.ts b/drizzle-orm/src/cockroach-core/columns/float.ts new file mode 100644 index 0000000000..59f9eebcfd --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/float.ts @@ -0,0 +1,49 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachFloatBuilder extends CockroachColumnWithArrayBuilder< + { + dataType: 'number double'; + data: number; + driverParam: string | number; + } +> { + static override readonly [entityKind]: string = 'CockroachFloatBuilder'; + + constructor(name: string) { + super(name, 'number double', 'CockroachFloat'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachFloat( + table, + this.config, + ); + } +} + +export class CockroachFloat> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachFloat'; + + getSQLType(): string { + return 'float'; + } + + override mapFromDriverValue(value: string | number): number { + if (typeof value === 'string') { + return Number.parseFloat(value); + } + return value; + } +} +export function float(name?: string) { + return new CockroachFloatBuilder(name ?? ''); +} + +// double precision is alias for float +export const doublePrecision = float; diff --git a/drizzle-orm/src/cockroach-core/columns/geometry.ts b/drizzle-orm/src/cockroach-core/columns/geometry.ts new file mode 100644 index 0000000000..c702f9b8ba --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/geometry.ts @@ -0,0 +1,114 @@ +import type { CockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; +import { parseEWKB } from './utils.ts'; + +export class CockroachGeometryBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'array geometry'; + data: [number, number]; + driverParam: string; +}, { srid: number | undefined }> { + static override readonly [entityKind]: string = 'CockroachGeometryBuilder'; + + constructor(name: string, srid?: number) { + super(name, 'array geometry', 'CockroachGeometry'); + this.config.srid = srid; + } + + /** @internal */ + override build(table: CockroachTable) { + return new CockroachGeometry( + table, + this.config as any, + ); + } +} + +export class CockroachGeometry> + extends CockroachColumn +{ + static override readonly [entityKind]: string = 'CockroachGeometry'; + + readonly srid = this.config.srid; + readonly mode = 'tuple'; + + getSQLType(): string { + return `geometry(point${this.srid === undefined ? '' : `,${this.srid}`})`; + } + + override mapFromDriverValue(value: string | [number, number]): [number, number] { + if (typeof value !== 'string') return value as [number, number]; + + return parseEWKB(value).point; + } + + override mapToDriverValue(value: [number, number]): string { + return `point(${value[0]} ${value[1]})`; + } +} + +export class CockroachGeometryObjectBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'object geometry'; + data: { x: number; y: number }; + driverParam: string; +}, { srid?: number }> { + static override readonly [entityKind]: string = 'CockroachGeometryObjectBuilder'; + + constructor(name: string, srid: number | undefined) { + super(name, 'object geometry', 'CockroachGeometryObject'); + this.config.srid = srid; + } + + /** @internal */ + override build(table: CockroachTable) { + return new CockroachGeometryObject( + table, + this.config as any, + ); + } +} + +export class CockroachGeometryObject> + extends CockroachColumn +{ + static override readonly [entityKind]: string = 'CockroachGeometryObject'; + + readonly srid = this.config.srid; + readonly mode = 'object'; + + getSQLType(): string { + return `geometry(point${this.srid === undefined ? '' : `,${this.srid}`})`; + } + + override mapFromDriverValue(value: string): { x: number; y: number } { + const parsed = parseEWKB(value); + return { x: parsed.point[0], y: parsed.point[1] }; + } + + override mapToDriverValue(value: { x: number; y: number }): string { + return `point(${value.x} ${value.y})`; + } +} + +export interface CockroachGeometryConfig { + mode?: T; + type?: 'point' | (string & {}); + srid?: number; +} + +export function geometry( + config?: CockroachGeometryConfig, +): Equal extends true ? CockroachGeometryObjectBuilder : CockroachGeometryBuilder; +export function geometry( + name: string, + config?: CockroachGeometryConfig, +): Equal extends true ? CockroachGeometryObjectBuilder : CockroachGeometryBuilder; +export function geometry(a?: string | CockroachGeometryConfig, b?: CockroachGeometryConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (!config?.mode || config.mode === 'tuple') { + return new CockroachGeometryBuilder(name, config?.srid); + } + return new CockroachGeometryObjectBuilder(name, config?.srid); +} diff --git a/drizzle-orm/src/cockroach-core/columns/index.ts b/drizzle-orm/src/cockroach-core/columns/index.ts new file mode 100644 index 0000000000..0076d5bf1e --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/index.ts @@ -0,0 +1,25 @@ +export * from './bigint.ts'; +export * from './bit.ts'; +export * from './bool.ts'; +export * from './char.ts'; +export * from './common.ts'; +export * from './custom.ts'; +export * from './date.ts'; +export * from './decimal.ts'; +export * from './enum.ts'; +export * from './float.ts'; +export * from './geometry.ts'; +export * from './inet.ts'; +export * from './int.common.ts'; +export * from './integer.ts'; +export * from './interval.ts'; +export * from './jsonb.ts'; +export * from './real.ts'; +export * from './smallint.ts'; +export * from './string.ts'; +export * from './time.ts'; +export * from './timestamp.ts'; +export * from './uuid.ts'; +export * from './varbit.ts'; +export * from './varchar.ts'; +export * from './vector.ts'; diff --git a/drizzle-orm/src/cockroach-core/columns/inet.ts b/drizzle-orm/src/cockroach-core/columns/inet.ts new file mode 100644 index 0000000000..11c1c8ddb7 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/inet.ts @@ -0,0 +1,38 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyCockroachTable } from '../table.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachInetBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'string inet'; + data: string; + driverParam: string; +}> { + static override readonly [entityKind]: string = 'CockroachInetBuilder'; + + constructor(name: string) { + super(name, 'string inet', 'CockroachInet'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachInet( + table, + this.config, + ); + } +} + +export class CockroachInet> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachInet'; + + getSQLType(): string { + return 'inet'; + } +} + +export function inet(name?: string) { + return new CockroachInetBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/cockroach-core/columns/int.common.ts b/drizzle-orm/src/cockroach-core/columns/int.common.ts new file mode 100644 index 0000000000..e8e08ddc8f --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/int.common.ts @@ -0,0 +1,49 @@ +import type { ColumnBuilderBaseConfig, ColumnType, GeneratedIdentityConfig, IsIdentity } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import type { CockroachSequenceOptions } from '../sequence.ts'; +import { CockroachColumnWithArrayBuilder } from './common.ts'; + +export abstract class CockroachIntColumnBaseBuilder< + T extends ColumnBuilderBaseConfig, +> extends CockroachColumnWithArrayBuilder< + T, + { generatedIdentity: GeneratedIdentityConfig } +> { + static override readonly [entityKind]: string = 'CockroachIntColumnBaseBuilder'; + + generatedAlwaysAsIdentity( + sequence?: CockroachSequenceOptions, + ): IsIdentity { + this.config.generatedIdentity = sequence + ? { + type: 'always', + sequenceOptions: sequence, + } + : { + type: 'always', + }; + + this.config.hasDefault = true; + this.config.notNull = true; + + return this as IsIdentity; + } + + generatedByDefaultAsIdentity( + sequence?: CockroachSequenceOptions, + ): IsIdentity { + this.config.generatedIdentity = sequence + ? { + type: 'byDefault', + sequenceOptions: sequence, + } + : { + type: 'byDefault', + }; + + this.config.hasDefault = true; + this.config.notNull = true; + + return this as IsIdentity; + } +} diff --git a/drizzle-orm/src/cockroach-core/columns/integer.ts b/drizzle-orm/src/cockroach-core/columns/integer.ts new file mode 100644 index 0000000000..7b578d0d29 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/integer.ts @@ -0,0 +1,46 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyCockroachTable } from '../table.ts'; +import { CockroachColumn } from './common.ts'; +import { CockroachIntColumnBaseBuilder } from './int.common.ts'; + +export class CockroachIntegerBuilder extends CockroachIntColumnBaseBuilder<{ + dataType: 'number int32'; + data: number; + driverParam: number | string; +}> { + static override readonly [entityKind]: string = 'CockroachIntegerBuilder'; + + constructor(name: string) { + super(name, 'number int32', 'CockroachInteger'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachInteger( + table, + this.config, + ); + } +} + +export class CockroachInteger> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachInteger'; + + getSQLType(): string { + return 'int4'; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number.parseInt(value); + } + return value; + } +} + +export function int4(name?: string) { + return new CockroachIntegerBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/cockroach-core/columns/interval.ts b/drizzle-orm/src/cockroach-core/columns/interval.ts new file mode 100644 index 0000000000..bf97c75774 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/interval.ts @@ -0,0 +1,77 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; +import type { Precision } from './timestamp.ts'; + +export class CockroachIntervalBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'string interval'; + data: string; + driverParam: string; +}, { intervalConfig: IntervalConfig }> { + static override readonly [entityKind]: string = 'CockroachIntervalBuilder'; + + constructor( + name: string, + intervalConfig: IntervalConfig, + ) { + super(name, 'string interval', 'CockroachInterval'); + this.config.intervalConfig = intervalConfig; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachInterval( + table, + this.config, + ); + } +} + +export class CockroachInterval> + extends CockroachColumn +{ + static override readonly [entityKind]: string = 'CockroachInterval'; + + readonly fields: IntervalConfig['fields'] = this.config.intervalConfig.fields; + readonly precision: IntervalConfig['precision'] = this.config.intervalConfig.precision; + + getSQLType(): string { + const fields = this.fields ? ` ${this.fields}` : ''; + const precision = this.precision ? `(${this.precision})` : ''; + return `interval${fields}${precision}`; + } +} + +export interface IntervalConfig { + fields?: + | 'year' + | 'month' + | 'day' + | 'hour' + | 'minute' + | 'second' + | 'year to month' + | 'day to hour' + | 'day to minute' + | 'day to second' + | 'hour to minute' + | 'hour to second' + | 'minute to second'; + precision?: Precision; +} + +export function interval( + config?: IntervalConfig, +): CockroachIntervalBuilder; +export function interval( + name: string, + config?: IntervalConfig, +): CockroachIntervalBuilder; +export function interval(a?: string | IntervalConfig, b: IntervalConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachIntervalBuilder(name, config); +} diff --git a/drizzle-orm/src/cockroach-core/columns/jsonb.ts b/drizzle-orm/src/cockroach-core/columns/jsonb.ts new file mode 100644 index 0000000000..b17bad2cd3 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/jsonb.ts @@ -0,0 +1,57 @@ +import type { AnyCockroachTable, CockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { CockroachColumn, CockroachColumnBuilder } from './common.ts'; + +export class CockroachJsonbBuilder extends CockroachColumnBuilder<{ + dataType: 'object json'; + data: unknown; + driverParam: unknown; +}> { + static override readonly [entityKind]: string = 'CockroachJsonbBuilder'; + + constructor(name: string) { + super(name, 'object json', 'CockroachJsonb'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachJsonb( + table, + this.config, + ); + } +} + +export class CockroachJsonb> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachJsonb'; + + constructor(table: CockroachTable, config: CockroachJsonbBuilder['config']) { + super(table, config); + } + + getSQLType(): string { + return 'jsonb'; + } + + override mapToDriverValue(value: T['data']): string { + return JSON.stringify(value); + } + + override mapFromDriverValue(value: T['data'] | string): T['data'] { + if (typeof value === 'string') { + try { + return JSON.parse(value); + } catch { + return value as T['data']; + } + } + return value; + } +} + +export function jsonb(name?: string) { + return new CockroachJsonbBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/cockroach-core/columns/real.ts b/drizzle-orm/src/cockroach-core/columns/real.ts new file mode 100644 index 0000000000..cd1c82f83b --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/real.ts @@ -0,0 +1,53 @@ +import type { AnyCockroachTable, CockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachRealBuilder extends CockroachColumnWithArrayBuilder< + { + dataType: 'number float'; + data: number; + driverParam: string | number; + }, + { length: number | undefined } +> { + static override readonly [entityKind]: string = 'CockroachRealBuilder'; + + constructor(name: string, length?: number) { + super(name, 'number float', 'CockroachReal'); + this.config.length = length; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachReal( + table, + this.config, + ); + } +} + +export class CockroachReal> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachReal'; + + constructor(table: CockroachTable, config: CockroachRealBuilder['config']) { + super(table, config); + } + + getSQLType(): string { + return 'real'; + } + + override mapFromDriverValue = (value: string | number): number => { + if (typeof value === 'string') { + return Number.parseFloat(value); + } + return value; + }; +} + +export function real(name?: string) { + return new CockroachRealBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/cockroach-core/columns/smallint.ts b/drizzle-orm/src/cockroach-core/columns/smallint.ts new file mode 100644 index 0000000000..d1f93c98a1 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/smallint.ts @@ -0,0 +1,49 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { CockroachColumn } from './common.ts'; +import { CockroachIntColumnBaseBuilder } from './int.common.ts'; + +export class CockroachSmallIntBuilder extends CockroachIntColumnBaseBuilder<{ + dataType: 'number int16'; + data: number; + driverParam: number | string; +}> { + static override readonly [entityKind]: string = 'CockroachSmallIntBuilder'; + + constructor(name: string) { + super(name, 'number int16', 'CockroachSmallInt'); + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachSmallInt( + table, + this.config, + ); + } +} + +export class CockroachSmallInt> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachSmallInt'; + + getSQLType(): string { + return 'int2'; + } + + override mapFromDriverValue = (value: number | string): number => { + if (typeof value === 'string') { + return Number(value); + } + return value; + }; +} + +export function smallint(name?: string) { + return new CockroachSmallIntBuilder(name ?? ''); +} +export function int2(name?: string) { + return new CockroachSmallIntBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/cockroach-core/columns/string.ts b/drizzle-orm/src/cockroach-core/columns/string.ts new file mode 100644 index 0000000000..4f01bda072 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/string.ts @@ -0,0 +1,97 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachStringBuilder extends CockroachColumnWithArrayBuilder< + { + dataType: Equal extends true ? 'string' : 'string enum'; + data: TEnum[number]; + enumValues: TEnum; + driverParam: string; + }, + { enumValues: TEnum | undefined; length: number | undefined } +> { + static override readonly [entityKind]: string = 'CockroachStringBuilder'; + + constructor(name: string, config: CockroachStringConfig) { + super(name, config.enum?.length ? 'string enum' : 'string', 'CockroachString'); + this.config.enumValues = config.enum; + this.config.length = config.length; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachString( + table, + this.config, + ); + } +} + +export class CockroachString< + T extends ColumnBaseConfig<'string' | 'string enum'>, +> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachString'; + + override readonly enumValues = this.config.enumValues; + + getSQLType(): string { + return this.length === undefined ? `string` : `string(${this.length})`; + } +} + +export interface CockroachStringConfig< + TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, +> { + enum?: TEnum; + length?: number | undefined; +} + +export interface CockroachTextConfig< + TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, +> { + enum?: TEnum; +} + +export function string(): CockroachStringBuilder<[string, ...string[]]>; +export function string< + U extends string, + T extends Readonly<[U, ...U[]]>, +>( + config?: CockroachStringConfig>, +): CockroachStringBuilder>; +export function string< + U extends string, + T extends Readonly<[U, ...U[]]>, +>( + name: string, + config?: CockroachStringConfig>, +): CockroachStringBuilder>; +export function string(a?: string | CockroachStringConfig, b: CockroachStringConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachStringBuilder(name, config as any); +} + +// text is alias for string but without ability to add length +export function text(): CockroachStringBuilder<[string, ...string[]]>; +export function text< + U extends string, + T extends Readonly<[U, ...U[]]>, +>( + config?: CockroachTextConfig>, +): CockroachStringBuilder>; +export function text< + U extends string, + T extends Readonly<[U, ...U[]]>, +>( + name: string, + config?: CockroachTextConfig>, +): CockroachStringBuilder>; +export function text(a?: string | CockroachStringConfig, b: CockroachStringConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachStringBuilder(name, config as any); +} diff --git a/drizzle-orm/src/cockroach-core/columns/time.ts b/drizzle-orm/src/cockroach-core/columns/time.ts new file mode 100644 index 0000000000..057902de1f --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/time.ts @@ -0,0 +1,67 @@ +import type { AnyCockroachTable, CockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; +import type { Precision } from './timestamp.ts'; + +export class CockroachTimeBuilder extends CockroachColumnWithArrayBuilder< + { + dataType: 'string time'; + data: string; + driverParam: string; + }, + { withTimezone: boolean; precision: number | undefined } +> { + static override readonly [entityKind]: string = 'CockroachTimeBuilder'; + + constructor( + name: string, + readonly withTimezone: boolean, + readonly precision: number | undefined, + ) { + super(name, 'string time', 'CockroachTime'); + this.config.withTimezone = withTimezone; + this.config.precision = precision; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachTime( + table, + this.config, + ); + } +} + +export class CockroachTime> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachTime'; + + readonly withTimezone: boolean; + readonly precision: number | undefined; + + constructor(table: CockroachTable, config: CockroachTimeBuilder['config']) { + super(table, config); + this.withTimezone = config.withTimezone; + this.precision = config.precision; + } + + getSQLType(): string { + const precision = this.precision === undefined ? '' : `(${this.precision})`; + return `time${this.withTimezone ? 'tz' : ''}${precision}`; + } +} + +export interface TimeConfig { + precision?: Precision; + withTimezone?: boolean; +} + +export function time(config?: TimeConfig): CockroachTimeBuilder; +export function time(name: string, config?: TimeConfig): CockroachTimeBuilder; +export function time(a?: string | TimeConfig, b: TimeConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachTimeBuilder(name, config.withTimezone ?? false, config.precision); +} diff --git a/drizzle-orm/src/cockroach-core/columns/timestamp.ts b/drizzle-orm/src/cockroach-core/columns/timestamp.ts new file mode 100644 index 0000000000..4ed29a9e3a --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/timestamp.ts @@ -0,0 +1,132 @@ +import type { AnyCockroachTable, CockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn } from './common.ts'; +import { CockroachDateColumnBaseBuilder } from './date.common.ts'; + +export class CockroachTimestampBuilder extends CockroachDateColumnBaseBuilder< + { + dataType: 'object date'; + data: Date; + driverParam: string; + }, + { withTimezone: boolean; precision: number | undefined } +> { + static override readonly [entityKind]: string = 'CockroachTimestampBuilder'; + + constructor(name: string, withTimezone: boolean, precision: number | undefined) { + super(name, 'object date', 'CockroachTimestamp'); + this.config.withTimezone = withTimezone; + this.config.precision = precision; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachTimestamp( + table, + this.config, + ); + } +} + +export class CockroachTimestamp> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachTimestamp'; + + readonly withTimezone: boolean; + readonly precision: number | undefined; + + constructor(table: CockroachTable, config: CockroachTimestampBuilder['config']) { + super(table, config); + this.withTimezone = config.withTimezone; + this.precision = config.precision; + } + + getSQLType(): string { + const precision = this.precision === undefined ? '' : `(${this.precision})`; + return `timestamp${this.withTimezone ? 'tz' : ''}${precision}`; + } + + override mapFromDriverValue = (value: string): Date | null => { + return new Date(this.withTimezone ? value : value + '+0000'); + }; + + override mapToDriverValue = (value: Date): string => { + return value.toISOString(); + }; +} + +export class CockroachTimestampStringBuilder extends CockroachDateColumnBaseBuilder< + { + dataType: 'string timestamp'; + data: string; + driverParam: string; + }, + { withTimezone: boolean; precision: number | undefined } +> { + static override readonly [entityKind]: string = 'CockroachTimestampStringBuilder'; + + constructor(name: string, withTimezone: boolean, precision: number | undefined) { + super(name, 'string timestamp', 'CockroachTimestampString'); + this.config.withTimezone = withTimezone; + this.config.precision = precision; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachTimestampString( + table, + this.config, + ); + } +} + +export class CockroachTimestampString> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachTimestampString'; + + readonly withTimezone: boolean; + readonly precision: number | undefined; + + constructor( + table: CockroachTable, + config: CockroachTimestampStringBuilder['config'], + ) { + super(table, config); + this.withTimezone = config.withTimezone; + this.precision = config.precision; + } + + getSQLType(): string { + const precision = this.precision === undefined ? '' : `(${this.precision})`; + return `timestamp${this.withTimezone ? 'tz' : ''}${precision}`; + } +} + +export type Precision = 0 | 1 | 2 | 3 | 4 | 5 | 6; + +export interface CockroachTimestampConfig { + mode?: TMode; + precision?: Precision; + withTimezone?: boolean; +} + +export function timestamp( + config?: CockroachTimestampConfig, +): Equal extends true ? CockroachTimestampStringBuilder + : CockroachTimestampBuilder; +export function timestamp( + name: string, + config?: CockroachTimestampConfig, +): Equal extends true ? CockroachTimestampStringBuilder + : CockroachTimestampBuilder; +export function timestamp(a?: string | CockroachTimestampConfig, b: CockroachTimestampConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new CockroachTimestampStringBuilder(name, config.withTimezone ?? false, config.precision); + } + return new CockroachTimestampBuilder(name, config?.withTimezone ?? false, config?.precision); +} diff --git a/drizzle-orm/src/cockroach-core/columns/utils.ts b/drizzle-orm/src/cockroach-core/columns/utils.ts new file mode 100644 index 0000000000..18a48315bb --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/utils.ts @@ -0,0 +1,47 @@ +function hexToBytes(hex: string): Uint8Array { + const bytes: number[] = []; + for (let c = 0; c < hex.length; c += 2) { + bytes.push(Number.parseInt(hex.slice(c, c + 2), 16)); + } + return new Uint8Array(bytes); +} + +function bytesToFloat64(bytes: Uint8Array, offset: number): number { + const buffer = new ArrayBuffer(8); + const view = new DataView(buffer); + for (let i = 0; i < 8; i++) { + view.setUint8(i, bytes[offset + i]!); + } + return view.getFloat64(0, true); +} + +export function parseEWKB(hex: string): { srid: number | undefined; point: [number, number] } { + const bytes = hexToBytes(hex); + + let offset = 0; + + // Byte order: 1 is little-endian, 0 is big-endian + const byteOrder = bytes[offset]; + offset += 1; + + const view = new DataView(bytes.buffer); + const geomType = view.getUint32(offset, byteOrder === 1); + offset += 4; + + let srid: number | undefined; + if (geomType & 0x20000000) { // SRID flag + srid = view.getUint32(offset, byteOrder === 1); + offset += 4; + } + + if ((geomType & 0xFFFF) === 1) { + const x = bytesToFloat64(bytes, offset); + offset += 8; + const y = bytesToFloat64(bytes, offset); + offset += 8; + + return { srid, point: [x, y] }; + } + + throw new Error('Unsupported geometry type'); +} diff --git a/drizzle-orm/src/cockroach-core/columns/uuid.ts b/drizzle-orm/src/cockroach-core/columns/uuid.ts new file mode 100644 index 0000000000..c3a2db7cef --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/uuid.ts @@ -0,0 +1,46 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { sql } from '~/sql/sql.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachUUIDBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'string uuid'; + data: string; + driverParam: string; +}> { + static override readonly [entityKind]: string = 'CockroachUUIDBuilder'; + + constructor(name: string) { + super(name, 'string uuid', 'CockroachUUID'); + } + + /** + * Adds `default gen_random_uuid()` to the column definition. + */ + defaultRandom(): ReturnType { + return this.default(sql`gen_random_uuid()`) as ReturnType; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachUUID( + table, + this.config, + ); + } +} + +export class CockroachUUID> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachUUID'; + + getSQLType(): string { + return 'uuid'; + } +} + +export function uuid(name?: string) { + return new CockroachUUIDBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/cockroach-core/columns/varbit.ts b/drizzle-orm/src/cockroach-core/columns/varbit.ts new file mode 100644 index 0000000000..82e0c46db7 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/varbit.ts @@ -0,0 +1,54 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachVarbitBuilder extends CockroachColumnWithArrayBuilder<{ + dataType: 'string binary'; + data: string; + driverParam: string; +}, { length: number | undefined }> { + static override readonly [entityKind]: string = 'CockroachVarbitBuilder'; + + constructor(name: string, config: CockroachVarbitConfig) { + super(name, 'string binary', 'CockroachVarbit'); + this.config.length = config.length; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachVarbit( + table, + this.config, + ); + } +} + +export class CockroachVarbit> + extends CockroachColumn +{ + static override readonly [entityKind]: string = 'CockroachVarbit'; + + getSQLType(): string { + return this.length ? `varbit(${this.length})` : 'varbit'; + } +} + +export interface CockroachVarbitConfig { + length?: number | undefined; +} + +export function varbit( + config?: CockroachVarbitConfig, +): CockroachVarbitBuilder; +export function varbit( + name: string, + config?: CockroachVarbitConfig, +): CockroachVarbitBuilder; +export function varbit(a?: string | CockroachVarbitConfig, b: CockroachVarbitConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachVarbitBuilder(name, config); +} diff --git a/drizzle-orm/src/cockroach-core/columns/varchar.ts b/drizzle-orm/src/cockroach-core/columns/varchar.ts new file mode 100644 index 0000000000..1caf63c997 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/varchar.ts @@ -0,0 +1,72 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnWithArrayBuilder } from './common.ts'; + +export class CockroachVarcharBuilder< + TEnum extends [string, ...string[]], +> extends CockroachColumnWithArrayBuilder< + { + dataType: Equal extends true ? 'string' : 'string enum'; + data: TEnum[number]; + driverParam: string; + enumValues: TEnum; + }, + { length: number | undefined; enumValues: TEnum | undefined } +> { + static override readonly [entityKind]: string = 'CockroachVarcharBuilder'; + + constructor(name: string, config: CockroachVarcharConfig) { + super(name, config.enum?.length ? 'string enum' : 'string', 'CockroachVarchar'); + this.config.length = config.length; + this.config.enumValues = config.enum; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachVarchar( + table, + this.config, + ); + } +} + +export class CockroachVarchar< + T extends ColumnBaseConfig<'string' | 'string enum'>, +> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachVarchar'; + + override readonly enumValues = this.config.enumValues; + + getSQLType(): string { + return this.length === undefined ? `varchar` : `varchar(${this.length})`; + } +} + +export interface CockroachVarcharConfig< + TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, +> { + enum?: TEnum; + length?: number | undefined; +} + +export function varchar< + U extends string, + T extends Readonly<[U, ...U[]]>, +>( + config?: CockroachVarcharConfig>, +): CockroachVarcharBuilder>; +export function varchar< + U extends string, + T extends Readonly<[U, ...U[]]>, +>( + name: string, + config?: CockroachVarcharConfig>, +): CockroachVarcharBuilder>; +export function varchar(a?: string | CockroachVarcharConfig, b: CockroachVarcharConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachVarcharBuilder(name, config as any); +} diff --git a/drizzle-orm/src/cockroach-core/columns/vector.ts b/drizzle-orm/src/cockroach-core/columns/vector.ts new file mode 100644 index 0000000000..74f7dc6b87 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/columns/vector.ts @@ -0,0 +1,68 @@ +import type { AnyCockroachTable } from '~/cockroach-core/table.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { CockroachColumn, CockroachColumnBuilder } from './common.ts'; + +export class CockroachVectorBuilder extends CockroachColumnBuilder< + { + dataType: 'array vector'; + data: number[]; + driverParam: string; + }, + { length: number } +> { + static override readonly [entityKind]: string = 'CockroachVectorBuilder'; + + constructor(name: string, config: CockroachVectorConfig) { + super(name, 'array vector', 'CockroachVector'); + this.config.length = config.dimensions; + } + + /** @internal */ + override build( + table: AnyCockroachTable<{ name: TTableName }>, + ) { + return new CockroachVector( + table, + this.config, + ); + } +} + +export class CockroachVector< + T extends ColumnBaseConfig<'array vector'>, +> extends CockroachColumn { + static override readonly [entityKind]: string = 'CockroachVector'; + + getSQLType(): string { + return `vector(${this.config.length})`; + } + + override mapToDriverValue(value: unknown): unknown { + return JSON.stringify(value); + } + + override mapFromDriverValue(value: string): unknown { + return value + .slice(1, -1) + .split(',') + .map((v) => Number.parseFloat(v)); + } +} + +export interface CockroachVectorConfig { + dimensions: number; +} + +export function vector( + config: CockroachVectorConfig, +): CockroachVectorBuilder; +export function vector( + name: string, + config: CockroachVectorConfig, +): CockroachVectorBuilder; +export function vector(a: string | CockroachVectorConfig, b?: CockroachVectorConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + return new CockroachVectorBuilder(name, config); +} diff --git a/drizzle-orm/src/cockroach-core/db.ts b/drizzle-orm/src/cockroach-core/db.ts new file mode 100644 index 0000000000..8f4db841ee --- /dev/null +++ b/drizzle-orm/src/cockroach-core/db.ts @@ -0,0 +1,700 @@ +import type * as V1 from '~/_relations.ts'; +import type { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import { + CockroachDeleteBase, + CockroachInsertBuilder, + CockroachSelectBuilder, + CockroachUpdateBuilder, + QueryBuilder, +} from '~/cockroach-core/query-builders/index.ts'; +import type { + CockroachQueryResultHKT, + CockroachQueryResultKind, + CockroachSession, + CockroachTransaction, + CockroachTransactionConfig, + PreparedQueryConfig, +} from '~/cockroach-core/session.ts'; +import type { CockroachTable } from '~/cockroach-core/table.ts'; +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import { type ColumnsSelection, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; +import { WithSubquery } from '~/subquery.ts'; +import type { DrizzleTypeError, NeonAuthToken } from '~/utils.ts'; +import type { CockroachColumn } from './columns/index.ts'; +import { CockroachCountBuilder } from './query-builders/count.ts'; +import { RelationalQueryBuilder } from './query-builders/query.ts'; +import { CockroachRaw } from './query-builders/raw.ts'; +import { CockroachRefreshMaterializedView } from './query-builders/refresh-materialized-view.ts'; +import type { SelectedFields } from './query-builders/select.types.ts'; +import type { WithBuilder } from './subquery.ts'; +import type { CockroachViewBase } from './view-base.ts'; +import type { CockroachMaterializedView } from './view.ts'; + +export class CockroachDatabase< + TQueryResult extends CockroachQueryResultHKT, + TFullSchema extends Record = Record, + TSchema extends V1.TablesRelationalConfig = V1.ExtractTablesWithRelations, +> { + static readonly [entityKind]: string = 'CockroachDatabase'; + + declare readonly _: { + readonly schema: TSchema | undefined; + readonly fullSchema: TFullSchema; + readonly tableNamesMap: Record; + readonly session: CockroachSession; + }; + + _query: TFullSchema extends Record + ? DrizzleTypeError<'Seems like the schema generic is missing - did you forget to add it to your DB type?'> + : { + [K in keyof TSchema]: RelationalQueryBuilder; + }; + + constructor( + /** @internal */ + readonly dialect: CockroachDialect, + /** @internal */ + readonly session: CockroachSession, + schema: V1.RelationalSchemaConfig | undefined, + ) { + this._ = schema + ? { + schema: schema.schema, + fullSchema: schema.fullSchema as TFullSchema, + tableNamesMap: schema.tableNamesMap, + session, + } + : { + schema: undefined, + fullSchema: {} as TFullSchema, + tableNamesMap: {}, + session, + }; + this._query = {} as typeof this['_query']; + if (this._.schema) { + for (const [tableName, columns] of Object.entries(this._.schema)) { + (this._query as CockroachDatabase>['_query'])[tableName] = + new RelationalQueryBuilder( + schema!.fullSchema, + this._.schema, + this._.tableNamesMap, + schema!.fullSchema[tableName] as CockroachTable, + columns, + dialect, + session, + ); + } + } + } + + /** + * Creates a subquery that defines a temporary named result set as a CTE. + * + * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} + * + * @param alias The alias for the subquery. + * + * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries. + * + * @example + * + * ```ts + * // Create a subquery with alias 'sq' and use it in the select query + * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); + * + * const result = await db.with(sq).select().from(sq); + * ``` + * + * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: + * + * ```ts + * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query + * const sq = db.$with('sq').as(db.select({ + * name: sql`upper(${users.name})`.as('name'), + * }) + * .from(users)); + * + * const result = await db.with(sq).select({ name: sq.name }).from(sq); + * ``` + */ + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { + const self = this; + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder(self.dialect)); + } + + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ); + }; + return { as }; + }; + + $count( + source: CockroachTable | CockroachViewBase | SQL | SQLWrapper, + filters?: SQL, + ) { + return new CockroachCountBuilder({ source, filters, session: this.session }); + } + + /** + * Incorporates a previously defined CTE (using `$with`) into the main query. + * + * This method allows the main query to reference a temporary named result set. + * + * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} + * + * @param queries The CTEs to incorporate into the main query. + * + * @example + * + * ```ts + * // Define a subquery 'sq' as a CTE using $with + * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); + * + * // Incorporate the CTE 'sq' into the main query and select from it + * const result = await db.with(sq).select().from(sq); + * ``` + */ + with(...queries: WithSubquery[]) { + const self = this; + + /** + * Creates a select query. + * + * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select} + * + * @param fields The selection object. + * + * @example + * + * ```ts + * // Select all columns and all rows from the 'cars' table + * const allCars: Car[] = await db.select().from(cars); + * + * // Select specific columns and all rows from the 'cars' table + * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ + * id: cars.id, + * brand: cars.brand + * }) + * .from(cars); + * ``` + * + * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: + * + * ```ts + * // Select specific columns along with expression and all rows from the 'cars' table + * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ + * id: cars.id, + * lowerBrand: sql`lower(${cars.brand})`, + * }) + * .from(cars); + * ``` + */ + function select(): CockroachSelectBuilder; + function select(fields: TSelection): CockroachSelectBuilder; + function select( + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + }); + } + + /** + * Adds `distinct` expression to the select query. + * + * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param fields The selection object. + * + * @example + * ```ts + * // Select all unique rows from the 'cars' table + * await db.selectDistinct() + * .from(cars) + * .orderBy(cars.id, cars.brand, cars.color); + * + * // Select all unique brands from the 'cars' table + * await db.selectDistinct({ brand: cars.brand }) + * .from(cars) + * .orderBy(cars.brand); + * ``` + */ + function selectDistinct(): CockroachSelectBuilder; + function selectDistinct( + fields: TSelection, + ): CockroachSelectBuilder; + function selectDistinct( + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + distinct: true, + }); + } + + /** + * Adds `distinct on` expression to the select query. + * + * Calling this method will specify how the unique rows are determined. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param on The expression defining uniqueness. + * @param fields The selection object. + * + * @example + * ```ts + * // Select the first row for each unique brand from the 'cars' table + * await db.selectDistinctOn([cars.brand]) + * .from(cars) + * .orderBy(cars.brand); + * + * // Selects the first occurrence of each unique car brand along with its color from the 'cars' table + * await db.selectDistinctOn([cars.brand], { brand: cars.brand, color: cars.color }) + * .from(cars) + * .orderBy(cars.brand, cars.color); + * ``` + */ + function selectDistinctOn(on: (CockroachColumn | SQLWrapper)[]): CockroachSelectBuilder; + function selectDistinctOn( + on: (CockroachColumn | SQLWrapper)[], + fields: TSelection, + ): CockroachSelectBuilder; + function selectDistinctOn( + on: (CockroachColumn | SQLWrapper)[], + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + distinct: { on }, + }); + } + + /** + * Creates an update query. + * + * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. + * + * Use `.set()` method to specify which values to update. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param table The table to update. + * + * @example + * + * ```ts + * // Update all rows in the 'cars' table + * await db.update(cars).set({ color: 'red' }); + * + * // Update rows with filters and conditions + * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); + * + * // Update with returning clause + * const updatedCar: Car[] = await db.update(cars) + * .set({ color: 'red' }) + * .where(eq(cars.id, 1)) + * .returning(); + * ``` + */ + function update(table: TTable): CockroachUpdateBuilder { + return new CockroachUpdateBuilder(table, self.session, self.dialect, queries); + } + + /** + * Creates an insert query. + * + * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. + * + * See docs: {@link https://orm.drizzle.team/docs/insert} + * + * @param table The table to insert into. + * + * @example + * + * ```ts + * // Insert one row + * await db.insert(cars).values({ brand: 'BMW' }); + * + * // Insert multiple rows + * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); + * + * // Insert with returning clause + * const insertedCar: Car[] = await db.insert(cars) + * .values({ brand: 'BMW' }) + * .returning(); + * ``` + */ + function insert(table: TTable): CockroachInsertBuilder { + return new CockroachInsertBuilder(table, self.session, self.dialect, queries); + } + + /** + * Creates a delete query. + * + * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param table The table to delete from. + * + * @example + * + * ```ts + * // Delete all rows in the 'cars' table + * await db.delete(cars); + * + * // Delete rows with filters and conditions + * await db.delete(cars).where(eq(cars.color, 'green')); + * + * // Delete with returning clause + * const deletedCar: Car[] = await db.delete(cars) + * .where(eq(cars.id, 1)) + * .returning(); + * ``` + */ + function delete_(table: TTable): CockroachDeleteBase { + return new CockroachDeleteBase(table, self.session, self.dialect, queries); + } + + return { select, selectDistinct, selectDistinctOn, update, insert, delete: delete_ }; + } + + /** + * Creates a select query. + * + * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select} + * + * @param fields The selection object. + * + * @example + * + * ```ts + * // Select all columns and all rows from the 'cars' table + * const allCars: Car[] = await db.select().from(cars); + * + * // Select specific columns and all rows from the 'cars' table + * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ + * id: cars.id, + * brand: cars.brand + * }) + * .from(cars); + * ``` + * + * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: + * + * ```ts + * // Select specific columns along with expression and all rows from the 'cars' table + * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ + * id: cars.id, + * lowerBrand: sql`lower(${cars.brand})`, + * }) + * .from(cars); + * ``` + */ + select(): CockroachSelectBuilder; + select(fields: TSelection): CockroachSelectBuilder; + select(fields?: TSelection): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: this.session, + dialect: this.dialect, + }); + } + + /** + * Adds `distinct` expression to the select query. + * + * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param fields The selection object. + * + * @example + * ```ts + * // Select all unique rows from the 'cars' table + * await db.selectDistinct() + * .from(cars) + * .orderBy(cars.id, cars.brand, cars.color); + * + * // Select all unique brands from the 'cars' table + * await db.selectDistinct({ brand: cars.brand }) + * .from(cars) + * .orderBy(cars.brand); + * ``` + */ + selectDistinct(): CockroachSelectBuilder; + selectDistinct(fields: TSelection): CockroachSelectBuilder; + selectDistinct( + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: this.session, + dialect: this.dialect, + distinct: true, + }); + } + + /** + * Adds `distinct on` expression to the select query. + * + * Calling this method will specify how the unique rows are determined. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param on The expression defining uniqueness. + * @param fields The selection object. + * + * @example + * ```ts + * // Select the first row for each unique brand from the 'cars' table + * await db.selectDistinctOn([cars.brand]) + * .from(cars) + * .orderBy(cars.brand); + * + * // Selects the first occurrence of each unique car brand along with its color from the 'cars' table + * await db.selectDistinctOn([cars.brand], { brand: cars.brand, color: cars.color }) + * .from(cars) + * .orderBy(cars.brand, cars.color); + * ``` + */ + selectDistinctOn(on: (CockroachColumn | SQLWrapper)[]): CockroachSelectBuilder; + selectDistinctOn( + on: (CockroachColumn | SQLWrapper)[], + fields: TSelection, + ): CockroachSelectBuilder; + selectDistinctOn( + on: (CockroachColumn | SQLWrapper)[], + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: this.session, + dialect: this.dialect, + distinct: { on }, + }); + } + + /** + * Creates an update query. + * + * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. + * + * Use `.set()` method to specify which values to update. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param table The table to update. + * + * @example + * + * ```ts + * // Update all rows in the 'cars' table + * await db.update(cars).set({ color: 'red' }); + * + * // Update rows with filters and conditions + * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); + * + * // Update with returning clause + * const updatedCar: Car[] = await db.update(cars) + * .set({ color: 'red' }) + * .where(eq(cars.id, 1)) + * .returning(); + * ``` + */ + update(table: TTable): CockroachUpdateBuilder { + return new CockroachUpdateBuilder(table, this.session, this.dialect); + } + + /** + * Creates an insert query. + * + * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. + * + * See docs: {@link https://orm.drizzle.team/docs/insert} + * + * @param table The table to insert into. + * + * @example + * + * ```ts + * // Insert one row + * await db.insert(cars).values({ brand: 'BMW' }); + * + * // Insert multiple rows + * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); + * + * // Insert with returning clause + * const insertedCar: Car[] = await db.insert(cars) + * .values({ brand: 'BMW' }) + * .returning(); + * ``` + */ + insert(table: TTable): CockroachInsertBuilder { + return new CockroachInsertBuilder(table, this.session, this.dialect); + } + + /** + * Creates a delete query. + * + * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param table The table to delete from. + * + * @example + * + * ```ts + * // Delete all rows in the 'cars' table + * await db.delete(cars); + * + * // Delete rows with filters and conditions + * await db.delete(cars).where(eq(cars.color, 'green')); + * + * // Delete with returning clause + * const deletedCar: Car[] = await db.delete(cars) + * .where(eq(cars.id, 1)) + * .returning(); + * ``` + */ + delete(table: TTable): CockroachDeleteBase { + return new CockroachDeleteBase(table, this.session, this.dialect); + } + + refreshMaterializedView( + view: TView, + ): CockroachRefreshMaterializedView { + return new CockroachRefreshMaterializedView(view, this.session, this.dialect); + } + + protected authToken?: NeonAuthToken; + + execute = Record>( + query: SQLWrapper | string, + ): CockroachRaw> { + const sequel = typeof query === 'string' ? sql.raw(query) : query.getSQL(); + const builtQuery = this.dialect.sqlToQuery(sequel); + const prepared = this.session.prepareQuery< + PreparedQueryConfig & { execute: CockroachQueryResultKind } + >( + builtQuery, + undefined, + undefined, + false, + ); + return new CockroachRaw( + () => prepared.execute(undefined, this.authToken), + sequel, + builtQuery, + (result) => prepared.mapResult(result, true), + ); + } + + transaction( + transaction: (tx: CockroachTransaction) => Promise, + config?: CockroachTransactionConfig, + ): Promise { + return this.session.transaction(transaction, config); + } +} + +export type CockroachWithReplicas = Q & { $primary: Q }; + +export const withReplicas = < + HKT extends CockroachQueryResultHKT, + TFullSchema extends Record, + TSchema extends V1.TablesRelationalConfig, + Q extends CockroachDatabase< + HKT, + TFullSchema, + TSchema extends Record ? V1.ExtractTablesWithRelations : TSchema + >, +>( + primary: Q, + replicas: [Q, ...Q[]], + getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, +): CockroachWithReplicas => { + const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); + const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); + const selectDistinctOn: Q['selectDistinctOn'] = (...args: [any]) => getReplica(replicas).selectDistinctOn(...args); + const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); + const _with: Q['with'] = (...args: any) => getReplica(replicas).with(...args); + const $with: Q['$with'] = (arg: any) => getReplica(replicas).$with(arg) as any; + + const update: Q['update'] = (...args: [any]) => primary.update(...args); + const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); + const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); + const execute: Q['execute'] = (...args: [any]) => primary.execute(...args); + const transaction: Q['transaction'] = (...args: [any]) => primary.transaction(...args); + const refreshMaterializedView: Q['refreshMaterializedView'] = (...args: [any]) => + primary.refreshMaterializedView(...args); + + return { + ...primary, + update, + insert, + delete: $delete, + execute, + transaction, + refreshMaterializedView, + $primary: primary, + $replicas: replicas, + select, + selectDistinct, + selectDistinctOn, + $count, + $with, + with: _with, + get _query() { + return getReplica(replicas)._query; + }, + }; +}; diff --git a/drizzle-orm/src/cockroach-core/dialect.ts b/drizzle-orm/src/cockroach-core/dialect.ts new file mode 100644 index 0000000000..cbbe9f19d0 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/dialect.ts @@ -0,0 +1,1435 @@ +import * as V1 from '~/_relations.ts'; +import { + aliasedTable, + aliasedTableColumn, + getOriginalColumnFromAlias, + mapColumnsInAliasedSQLToAlias, + mapColumnsInSQLToAlias, +} from '~/alias.ts'; +import { CasingCache } from '~/casing.ts'; +import { CockroachColumn } from '~/cockroach-core/columns/index.ts'; +import type { + AnyCockroachSelectQueryBuilder, + CockroachDeleteConfig, + CockroachInsertConfig, + CockroachSelectJoinConfig, + CockroachUpdateConfig, +} from '~/cockroach-core/query-builders/index.ts'; +import type { CockroachSelectConfig, SelectedFieldsOrdered } from '~/cockroach-core/query-builders/select.types.ts'; +import { CockroachTable } from '~/cockroach-core/table.ts'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import { DrizzleError } from '~/errors.ts'; +import type { MigrationConfig, MigrationMeta, MigratorInitFailResponse } from '~/migrator.ts'; +import { and, eq, View } from '~/sql/index.ts'; +import { type Name, Param, type QueryWithTypings, SQL, sql, type SQLChunk } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { getTableName, getTableUniqueName, Table } from '~/table.ts'; +import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import type { CockroachSession } from './session.ts'; +import { CockroachViewBase } from './view-base.ts'; +import type { CockroachMaterializedView } from './view.ts'; + +export interface CockroachDialectConfig { + casing?: Casing; +} + +export class CockroachDialect { + static readonly [entityKind]: string = 'CockroachDialect'; + + /** @internal */ + readonly casing: CasingCache; + + constructor(config?: CockroachDialectConfig) { + this.casing = new CasingCache(config?.casing); + } + + async migrate( + migrations: MigrationMeta[], + session: CockroachSession, + config: string | MigrationConfig, + ): Promise { + const migrationsTable = typeof config === 'string' + ? '__drizzle_migrations' + : config.migrationsTable ?? '__drizzle_migrations'; + const migrationsSchema = typeof config === 'string' ? 'drizzle' : config.migrationsSchema ?? 'drizzle'; + const migrationTableCreate = sql` + CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)} ( + id INT GENERATED ALWAYS AS IDENTITY, + hash text NOT NULL, + created_at bigint + ) + `; + await session.execute(sql`CREATE SCHEMA IF NOT EXISTS ${sql.identifier(migrationsSchema)}`); + await session.execute(migrationTableCreate); + + const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( + sql`select id, hash, created_at from ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } order by created_at desc limit 1`, + ); + + if (typeof config === 'object' && config.init) { + if (dbMigrations.length) { + return { exitCode: 'databaseMigrations' as const }; + } + + if (migrations.length > 1) { + return { exitCode: 'localMigrations' as const }; + } + + const [migration] = migrations; + + if (!migration) return; + + await session.execute( + sql`insert into ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } ("hash", "created_at") values(${migration.hash}, ${migration.folderMillis})`, + ); + + return; + } + + const lastDbMigration = dbMigrations[0]; + await session.transaction(async (tx) => { + for await (const migration of migrations) { + if ( + !lastDbMigration + || Number(lastDbMigration.created_at) < migration.folderMillis + ) { + for (const stmt of migration.sql) { + await tx.execute(sql.raw(stmt)); + } + await tx.execute( + sql`insert into ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } ("hash", "created_at") values(${migration.hash}, ${migration.folderMillis})`, + ); + } + } + }); + } + + escapeName(name: string): string { + return `"${name}"`; + } + + escapeParam(num: number): string { + return `$${num + 1}`; + } + + escapeString(str: string): string { + return `'${str.replace(/'/g, "''")}'`; + } + + private buildWithCTE(queries: Subquery[] | undefined): SQL | undefined { + if (!queries?.length) return undefined; + + const withSqlChunks = [sql`with `]; + for (const [i, w] of queries.entries()) { + withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); + if (i < queries.length - 1) { + withSqlChunks.push(sql`, `); + } + } + withSqlChunks.push(sql` `); + return sql.join(withSqlChunks); + } + + buildDeleteQuery({ table, where, returning, withList }: CockroachDeleteConfig): SQL { + const withSql = this.buildWithCTE(withList); + + const returningSql = returning + ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` + : undefined; + + const whereSql = where ? sql` where ${where}` : undefined; + + return sql`${withSql}delete from ${table}${whereSql}${returningSql}`; + } + + buildUpdateSet(table: CockroachTable, set: UpdateSet): SQL { + const tableColumns = table[Table.Symbol.Columns]; + + const columnNames = Object.keys(tableColumns).filter((colName) => + set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined + ); + + const setSize = columnNames.length; + return sql.join(columnNames.flatMap((colName, i) => { + const col = tableColumns[colName]!; + + const onUpdateFnResult = col.onUpdateFn?.(); + const value = set[colName] ?? (is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col)); + const res = sql`${sql.identifier(this.casing.getColumnCasing(col))} = ${value}`; + + if (i < setSize - 1) { + return [res, sql.raw(', ')]; + } + return [res]; + })); + } + + buildUpdateQuery({ table, set, where, returning, withList, from, joins }: CockroachUpdateConfig): SQL { + const withSql = this.buildWithCTE(withList); + + const tableName = table[CockroachTable.Symbol.Name]; + const tableSchema = table[CockroachTable.Symbol.Schema]; + const origTableName = table[CockroachTable.Symbol.OriginalName]; + const alias = tableName === origTableName ? undefined : tableName; + const tableSql = sql`${tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined}${ + sql.identifier(origTableName) + }${alias && sql` ${sql.identifier(alias)}`}`; + + const setSql = this.buildUpdateSet(table, set); + + const fromSql = from && sql.join([sql.raw(' from '), this.buildFromTable(from)]); + + const joinsSql = this.buildJoins(joins); + + const returningSql = returning + ? sql` returning ${this.buildSelection(returning, { isSingleTable: !from })}` + : undefined; + + const whereSql = where ? sql` where ${where}` : undefined; + + return sql`${withSql}update ${tableSql} set ${setSql}${fromSql}${joinsSql}${whereSql}${returningSql}`; + } + + /** + * Builds selection SQL with provided fields/expressions + * + * Examples: + * + * `select from` + * + * `insert ... returning ` + * + * If `isSingleTable` is true, then columns won't be prefixed with table name + */ + private buildSelection( + fields: SelectedFieldsOrdered, + { isSingleTable = false }: { isSingleTable?: boolean } = {}, + ): SQL { + const columnsLen = fields.length; + + const chunks = fields + .flatMap(({ field }, i) => { + const chunk: SQLChunk[] = []; + + if (is(field, SQL.Aliased) && field.isSelectionField) { + chunk.push(sql.identifier(field.fieldAlias)); + } else if (is(field, SQL.Aliased) || is(field, SQL)) { + const query = is(field, SQL.Aliased) ? field.sql : field; + + if (isSingleTable) { + chunk.push( + new SQL( + query.queryChunks.map((c) => { + if (is(c, CockroachColumn)) { + return sql.identifier(this.casing.getColumnCasing(c)); + } + return c; + }), + ), + ); + } else { + chunk.push(query); + } + + if (is(field, SQL.Aliased)) { + chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); + } + } else if (is(field, Column)) { + if (isSingleTable) { + chunk.push( + field.isAlias + ? sql`${sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field)))} as ${field}` + : sql.identifier(this.casing.getColumnCasing(field)), + ); + } else { + chunk.push(field.isAlias ? sql`${getOriginalColumnFromAlias(field)} as ${field}` : field); + } + } else if (is(field, Subquery)) { + const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; + + if (entries.length === 1) { + const entry = entries[0]![1]; + + const fieldDecoder = is(entry, SQL) + ? entry.decoder + : is(entry, Column) + ? { mapFromDriverValue: (v: any) => entry.mapFromDriverValue(v) } + : entry.sql.decoder; + + if (fieldDecoder) { + field._.sql.decoder = fieldDecoder; + } + } + chunk.push(field); + } + + if (i < columnsLen - 1) { + chunk.push(sql`, `); + } + + return chunk; + }); + + return sql.join(chunks); + } + + private buildJoins(joins: CockroachSelectJoinConfig[] | undefined): SQL | undefined { + if (!joins || joins.length === 0) { + return undefined; + } + + const joinsArray: SQL[] = []; + + for (const [index, joinMeta] of joins.entries()) { + if (index === 0) { + joinsArray.push(sql` `); + } + const table = joinMeta.table; + const lateralSql = joinMeta.lateral ? sql` lateral` : undefined; + const onSql = joinMeta.on ? sql` on ${joinMeta.on}` : undefined; + + if (is(table, CockroachTable)) { + const tableName = table[CockroachTable.Symbol.Name]; + const tableSchema = table[CockroachTable.Symbol.Schema]; + const origTableName = table[CockroachTable.Symbol.OriginalName]; + const alias = tableName === origTableName ? undefined : joinMeta.alias; + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ + tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined + }${sql.identifier(origTableName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, + ); + } else if (is(table, View)) { + const viewName = table[ViewBaseConfig].name; + const viewSchema = table[ViewBaseConfig].schema; + const origViewName = table[ViewBaseConfig].originalName; + const alias = viewName === origViewName ? undefined : joinMeta.alias; + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ + viewSchema ? sql`${sql.identifier(viewSchema)}.` : undefined + }${sql.identifier(origViewName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, + ); + } else { + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${table}${onSql}`, + ); + } + if (index < joins.length - 1) { + joinsArray.push(sql` `); + } + } + + return sql.join(joinsArray); + } + + private buildFromTable( + table: SQL | Subquery | CockroachViewBase | CockroachTable | undefined, + ): SQL | Subquery | CockroachViewBase | CockroachTable | undefined { + if (is(table, Table) && table[Table.Symbol.IsAlias]) { + let fullName = sql`${sql.identifier(table[Table.Symbol.OriginalName])}`; + if (table[Table.Symbol.Schema]) { + fullName = sql`${sql.identifier(table[Table.Symbol.Schema]!)}.${fullName}`; + } + return sql`${fullName} ${sql.identifier(table[Table.Symbol.Name])}`; + } + + return table; + } + + buildSelectQuery( + { + withList, + fields, + fieldsFlat, + where, + having, + table, + joins, + orderBy, + groupBy, + limit, + offset, + lockingClause, + distinct, + setOperators, + }: CockroachSelectConfig, + ): SQL { + const fieldsList = fieldsFlat ?? orderSelectedFields(fields); + for (const f of fieldsList) { + if ( + is(f.field, Column) + && getTableName(f.field.table) + !== (is(table, Subquery) + ? table._.alias + : is(table, CockroachViewBase) + ? table[ViewBaseConfig].name + : is(table, SQL) + ? undefined + : getTableName(table)) + && !((table) => + joins?.some(({ alias }) => + alias === (table[Table.Symbol.IsAlias] ? getTableName(table) : table[Table.Symbol.BaseName]) + ))(f.field.table) + ) { + const tableName = getTableName(f.field.table); + throw new Error( + `Your "${ + f.path.join('->') + }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, + ); + } + } + + const isSingleTable = !joins || joins.length === 0; + + const withSql = this.buildWithCTE(withList); + + let distinctSql: SQL | undefined; + if (distinct) { + distinctSql = distinct === true ? sql` distinct` : sql` distinct on (${sql.join(distinct.on, sql`, `)})`; + } + + const selection = this.buildSelection(fieldsList, { isSingleTable }); + + const tableSql = this.buildFromTable(table); + + const joinsSql = this.buildJoins(joins); + + const whereSql = where ? sql` where ${where}` : undefined; + + const havingSql = having ? sql` having ${having}` : undefined; + + let orderBySql; + if (orderBy && orderBy.length > 0) { + orderBySql = sql` order by ${sql.join(orderBy, sql`, `)}`; + } + + let groupBySql; + if (groupBy && groupBy.length > 0) { + groupBySql = sql` group by ${sql.join(groupBy, sql`, `)}`; + } + + const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) + ? sql` limit ${limit}` + : undefined; + + const offsetSql = offset ? sql` offset ${offset}` : undefined; + + const lockingClauseSql = sql.empty(); + if (lockingClause) { + const clauseSql = sql` for ${sql.raw(lockingClause.strength)}`; + if (lockingClause.config.of) { + clauseSql.append( + sql` of ${ + sql.join( + Array.isArray(lockingClause.config.of) ? lockingClause.config.of : [lockingClause.config.of], + sql`, `, + ) + }`, + ); + } + if (lockingClause.config.noWait) { + clauseSql.append(sql` nowait`); + } else if (lockingClause.config.skipLocked) { + clauseSql.append(sql` skip locked`); + } + lockingClauseSql.append(clauseSql); + } + const finalQuery = + sql`${withSql}select${distinctSql} ${selection} from ${tableSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${limitSql}${offsetSql}${lockingClauseSql}`; + + if (setOperators.length > 0) { + return this.buildSetOperations(finalQuery, setOperators); + } + + return finalQuery; + } + + buildSetOperations(leftSelect: SQL, setOperators: CockroachSelectConfig['setOperators']): SQL { + const [setOperator, ...rest] = setOperators; + + if (!setOperator) { + throw new Error('Cannot pass undefined values to any set operator'); + } + + if (rest.length === 0) { + return this.buildSetOperationQuery({ leftSelect, setOperator }); + } + + // Some recursive magic here + return this.buildSetOperations( + this.buildSetOperationQuery({ leftSelect, setOperator }), + rest, + ); + } + + buildSetOperationQuery({ + leftSelect, + setOperator: { type, isAll, rightSelect, limit, orderBy, offset }, + }: { leftSelect: SQL; setOperator: CockroachSelectConfig['setOperators'][number] }): SQL { + const leftChunk = sql`(${leftSelect.getSQL()}) `; + const rightChunk = sql`(${rightSelect.getSQL()})`; + + let orderBySql; + if (orderBy && orderBy.length > 0) { + const orderByValues: (SQL | Name)[] = []; + + // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` + // which is invalid Sql syntax, Table from one of the SELECTs cannot be used in global ORDER clause + for (const singleOrderBy of orderBy) { + if (is(singleOrderBy, CockroachColumn)) { + orderByValues.push(sql.identifier(singleOrderBy.name)); + } else if (is(singleOrderBy, SQL)) { + for (let i = 0; i < singleOrderBy.queryChunks.length; i++) { + const chunk = singleOrderBy.queryChunks[i]; + + if (is(chunk, CockroachColumn)) { + singleOrderBy.queryChunks[i] = sql.identifier(chunk.name); + } + } + + orderByValues.push(sql`${singleOrderBy}`); + } else { + orderByValues.push(sql`${singleOrderBy}`); + } + } + + orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)} `; + } + + const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) + ? sql` limit ${limit}` + : undefined; + + const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); + + const offsetSql = offset ? sql` offset ${offset}` : undefined; + + return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${limitSql}${offsetSql}`; + } + + buildInsertQuery( + { table, values: valuesOrSelect, onConflict, returning, withList, select }: CockroachInsertConfig, + ): SQL { + const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; + const columns: Record = table[Table.Symbol.Columns]; + + const colEntries: [string, CockroachColumn][] = Object.entries(columns).filter(([_, col]) => + !col.shouldDisableInsert() + ); + + const insertOrder = colEntries.map( + ([, column]) => sql.identifier(this.casing.getColumnCasing(column)), + ); + + if (select) { + const select = valuesOrSelect as AnyCockroachSelectQueryBuilder | SQL; + + if (is(select, SQL)) { + valuesSqlList.push(select); + } else { + valuesSqlList.push(select.getSQL()); + } + } else { + const values = valuesOrSelect as Record[]; + valuesSqlList.push(sql.raw('values ')); + + for (const [valueIndex, value] of values.entries()) { + const valueList: (SQLChunk | SQL)[] = []; + for (const [fieldName, col] of colEntries) { + const colValue = value[fieldName]; + if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { + // eslint-disable-next-line unicorn/no-negated-condition + if (col.defaultFn !== undefined) { + const defaultFnResult = col.defaultFn(); + const defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); + valueList.push(defaultValue); + // eslint-disable-next-line unicorn/no-negated-condition + } else if (!col.default && col.onUpdateFn !== undefined) { + const onUpdateFnResult = col.onUpdateFn(); + const newValue = is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col); + valueList.push(newValue); + } else { + valueList.push(sql`default`); + } + } else { + valueList.push(colValue); + } + } + + valuesSqlList.push(valueList); + if (valueIndex < values.length - 1) { + valuesSqlList.push(sql`, `); + } + } + } + + const withSql = this.buildWithCTE(withList); + + const valuesSql = sql.join(valuesSqlList); + + const returningSql = returning + ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` + : undefined; + + const onConflictSql = onConflict ? sql` on conflict ${onConflict}` : undefined; + + return sql`${withSql}insert into ${table} ${insertOrder} ${valuesSql}${onConflictSql}${returningSql}`; + } + + buildRefreshMaterializedViewQuery( + { view, concurrently, withNoData }: { + view: CockroachMaterializedView; + concurrently?: boolean; + withNoData?: boolean; + }, + ): SQL { + const concurrentlySql = concurrently ? sql` concurrently` : undefined; + const withNoDataSql = withNoData ? sql` with no data` : undefined; + + return sql`refresh materialized view${concurrentlySql} ${view}${withNoDataSql}`; + } + + sqlToQuery(sql: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings { + return sql.toQuery({ + casing: this.casing, + escapeName: this.escapeName, + escapeParam: this.escapeParam, + escapeString: this.escapeString, + invokeSource, + }); + } + + // buildRelationalQueryWithPK({ + // fullSchema, + // schema, + // tableNamesMap, + // table, + // tableConfig, + // queryConfig: config, + // tableAlias, + // isRoot = false, + // joinOn, + // }: { + // fullSchema: Record; + // schema: TablesRelationalConfig; + // tableNamesMap: Record; + // table: CockroachTable; + // tableConfig: TableRelationalConfig; + // queryConfig: true | DBQueryConfig<'many', true>; + // tableAlias: string; + // isRoot?: boolean; + // joinOn?: SQL; + // }): BuildRelationalQueryResult { + // // For { "": true }, return a table with selection of all columns + // if (config === true) { + // const selectionEntries = Object.entries(tableConfig.columns); + // const selection: BuildRelationalQueryResult['selection'] = selectionEntries.map(( + // [key, value], + // ) => ({ + // dbKey: value.name, + // tsKey: key, + // field: value as CockroachColumn, + // relationTableTsKey: undefined, + // isJson: false, + // selection: [], + // })); + + // return { + // tableTsKey: tableConfig.tsName, + // sql: table, + // selection, + // }; + // } + + // // let selection: BuildRelationalQueryResult['selection'] = []; + // // let selectionForBuild = selection; + + // const aliasedColumns = Object.fromEntries( + // Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), + // ); + + // const aliasedRelations = Object.fromEntries( + // Object.entries(tableConfig.relations).map(([key, value]) => [key, aliasedRelation(value, tableAlias)]), + // ); + + // const aliasedFields = Object.assign({}, aliasedColumns, aliasedRelations); + + // let where, hasUserDefinedWhere; + // if (config.where) { + // const whereSql = typeof config.where === 'function' ? config.where(aliasedFields, operators) : config.where; + // where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); + // hasUserDefinedWhere = !!where; + // } + // where = and(joinOn, where); + + // // const fieldsSelection: { tsKey: string; value: CockroachColumn | SQL.Aliased; isExtra?: boolean }[] = []; + // let joins: Join[] = []; + // let selectedColumns: string[] = []; + + // // Figure out which columns to select + // if (config.columns) { + // let isIncludeMode = false; + + // for (const [field, value] of Object.entries(config.columns)) { + // if (value === undefined) { + // continue; + // } + + // if (field in tableConfig.columns) { + // if (!isIncludeMode && value === true) { + // isIncludeMode = true; + // } + // selectedColumns.push(field); + // } + // } + + // if (selectedColumns.length > 0) { + // selectedColumns = isIncludeMode + // ? selectedColumns.filter((c) => config.columns?.[c] === true) + // : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); + // } + // } else { + // // Select all columns if selection is not specified + // selectedColumns = Object.keys(tableConfig.columns); + // } + + // // for (const field of selectedColumns) { + // // const column = tableConfig.columns[field]! as CockroachColumn; + // // fieldsSelection.push({ tsKey: field, value: column }); + // // } + + // let initiallySelectedRelations: { + // tsKey: string; + // queryConfig: true | DBQueryConfig<'many', false>; + // relation: Relation; + // }[] = []; + + // // let selectedRelations: BuildRelationalQueryResult['selection'] = []; + + // // Figure out which relations to select + // if (config.with) { + // initiallySelectedRelations = Object.entries(config.with) + // .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) + // .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); + // } + + // const manyRelations = initiallySelectedRelations.filter((r) => + // is(r.relation, Many) + // && (schema[tableNamesMap[r.relation.referencedTable[Table.Symbol.Name]]!]?.primaryKey.length ?? 0) > 0 + // ); + // // If this is the last Many relation (or there are no Many relations), we are on the innermost subquery level + // const isInnermostQuery = manyRelations.length < 2; + + // const selectedExtras: { + // tsKey: string; + // value: SQL.Aliased; + // }[] = []; + + // // Figure out which extras to select + // if (isInnermostQuery && config.extras) { + // const extras = typeof config.extras === 'function' + // ? config.extras(aliasedFields, { sql }) + // : config.extras; + // for (const [tsKey, value] of Object.entries(extras)) { + // selectedExtras.push({ + // tsKey, + // value: mapColumnsInAliasedSQLToAlias(value, tableAlias), + // }); + // } + // } + + // // Transform `fieldsSelection` into `selection` + // // `fieldsSelection` shouldn't be used after this point + // // for (const { tsKey, value, isExtra } of fieldsSelection) { + // // selection.push({ + // // dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, + // // tsKey, + // // field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, + // // relationTableTsKey: undefined, + // // isJson: false, + // // isExtra, + // // selection: [], + // // }); + // // } + + // let orderByOrig = typeof config.orderBy === 'function' + // ? config.orderBy(aliasedFields, orderByOperators) + // : config.orderBy ?? []; + // if (!Array.isArray(orderByOrig)) { + // orderByOrig = [orderByOrig]; + // } + // const orderBy = orderByOrig.map((orderByValue) => { + // if (is(orderByValue, Column)) { + // return aliasedTableColumn(orderByValue, tableAlias) as CockroachColumn; + // } + // return mapColumnsInSQLToAlias(orderByValue, tableAlias); + // }); + + // const limit = isInnermostQuery ? config.limit : undefined; + // const offset = isInnermostQuery ? config.offset : undefined; + + // // For non-root queries without additional config except columns, return a table with selection + // if ( + // !isRoot + // && initiallySelectedRelations.length === 0 + // && selectedExtras.length === 0 + // && !where + // && orderBy.length === 0 + // && limit === undefined + // && offset === undefined + // ) { + // return { + // tableTsKey: tableConfig.tsName, + // sql: table, + // selection: selectedColumns.map((key) => ({ + // dbKey: tableConfig.columns[key]!.name, + // tsKey: key, + // field: tableConfig.columns[key] as CockroachColumn, + // relationTableTsKey: undefined, + // isJson: false, + // selection: [], + // })), + // }; + // } + + // const selectedRelationsWithoutPK: + + // // Process all relations without primary keys, because they need to be joined differently and will all be on the same query level + // for ( + // const { + // tsKey: selectedRelationTsKey, + // queryConfig: selectedRelationConfigValue, + // relation, + // } of initiallySelectedRelations + // ) { + // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); + // const relationTableName = relation.referencedTable[Table.Symbol.Name]; + // const relationTableTsName = tableNamesMap[relationTableName]!; + // const relationTable = schema[relationTableTsName]!; + + // if (relationTable.primaryKey.length > 0) { + // continue; + // } + + // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; + // const joinOn = and( + // ...normalizedRelation.fields.map((field, i) => + // eq( + // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), + // aliasedTableColumn(field, tableAlias), + // ) + // ), + // ); + // const builtRelation = this.buildRelationalQueryWithoutPK({ + // fullSchema, + // schema, + // tableNamesMap, + // table: fullSchema[relationTableTsName] as CockroachTable, + // tableConfig: schema[relationTableTsName]!, + // queryConfig: selectedRelationConfigValue, + // tableAlias: relationTableAlias, + // joinOn, + // nestedQueryRelation: relation, + // }); + // const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); + // joins.push({ + // on: sql`true`, + // table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), + // alias: relationTableAlias, + // joinType: 'left', + // lateral: true, + // }); + // selectedRelations.push({ + // dbKey: selectedRelationTsKey, + // tsKey: selectedRelationTsKey, + // field, + // relationTableTsKey: relationTableTsName, + // isJson: true, + // selection: builtRelation.selection, + // }); + // } + + // const oneRelations = initiallySelectedRelations.filter((r): r is typeof r & { relation: One } => + // is(r.relation, One) + // ); + + // // Process all One relations with PKs, because they can all be joined on the same level + // for ( + // const { + // tsKey: selectedRelationTsKey, + // queryConfig: selectedRelationConfigValue, + // relation, + // } of oneRelations + // ) { + // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); + // const relationTableName = relation.referencedTable[Table.Symbol.Name]; + // const relationTableTsName = tableNamesMap[relationTableName]!; + // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; + // const relationTable = schema[relationTableTsName]!; + + // if (relationTable.primaryKey.length === 0) { + // continue; + // } + + // const joinOn = and( + // ...normalizedRelation.fields.map((field, i) => + // eq( + // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), + // aliasedTableColumn(field, tableAlias), + // ) + // ), + // ); + // const builtRelation = this.buildRelationalQueryWithPK({ + // fullSchema, + // schema, + // tableNamesMap, + // table: fullSchema[relationTableTsName] as CockroachTable, + // tableConfig: schema[relationTableTsName]!, + // queryConfig: selectedRelationConfigValue, + // tableAlias: relationTableAlias, + // joinOn, + // }); + // const field = sql`case when ${sql.identifier(relationTableAlias)} is null then null else json_build_array(${ + // sql.join( + // builtRelation.selection.map(({ field }) => + // is(field, SQL.Aliased) + // ? sql`${sql.identifier(relationTableAlias)}.${sql.identifier(field.fieldAlias)}` + // : is(field, Column) + // ? aliasedTableColumn(field, relationTableAlias) + // : field + // ), + // sql`, `, + // ) + // }) end`.as(selectedRelationTsKey); + // const isLateralJoin = is(builtRelation.sql, SQL); + // joins.push({ + // on: isLateralJoin ? sql`true` : joinOn, + // table: is(builtRelation.sql, SQL) + // ? new Subquery(builtRelation.sql, {}, relationTableAlias) + // : aliasedTable(builtRelation.sql, relationTableAlias), + // alias: relationTableAlias, + // joinType: 'left', + // lateral: is(builtRelation.sql, SQL), + // }); + // selectedRelations.push({ + // dbKey: selectedRelationTsKey, + // tsKey: selectedRelationTsKey, + // field, + // relationTableTsKey: relationTableTsName, + // isJson: true, + // selection: builtRelation.selection, + // }); + // } + + // let distinct: CockroachSelectConfig['distinct']; + // let tableFrom: CockroachTable | Subquery = table; + + // // Process first Many relation - each one requires a nested subquery + // const manyRelation = manyRelations[0]; + // if (manyRelation) { + // const { + // tsKey: selectedRelationTsKey, + // queryConfig: selectedRelationQueryConfig, + // relation, + // } = manyRelation; + + // distinct = { + // on: tableConfig.primaryKey.map((c) => aliasedTableColumn(c as CockroachColumn, tableAlias)), + // }; + + // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); + // const relationTableName = relation.referencedTable[Table.Symbol.Name]; + // const relationTableTsName = tableNamesMap[relationTableName]!; + // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; + // const joinOn = and( + // ...normalizedRelation.fields.map((field, i) => + // eq( + // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), + // aliasedTableColumn(field, tableAlias), + // ) + // ), + // ); + + // const builtRelationJoin = this.buildRelationalQueryWithPK({ + // fullSchema, + // schema, + // tableNamesMap, + // table: fullSchema[relationTableTsName] as CockroachTable, + // tableConfig: schema[relationTableTsName]!, + // queryConfig: selectedRelationQueryConfig, + // tableAlias: relationTableAlias, + // joinOn, + // }); + + // const builtRelationSelectionField = sql`case when ${ + // sql.identifier(relationTableAlias) + // } is null then '[]' else json_agg(json_build_array(${ + // sql.join( + // builtRelationJoin.selection.map(({ field }) => + // is(field, SQL.Aliased) + // ? sql`${sql.identifier(relationTableAlias)}.${sql.identifier(field.fieldAlias)}` + // : is(field, Column) + // ? aliasedTableColumn(field, relationTableAlias) + // : field + // ), + // sql`, `, + // ) + // })) over (partition by ${sql.join(distinct.on, sql`, `)}) end`.as(selectedRelationTsKey); + // const isLateralJoin = is(builtRelationJoin.sql, SQL); + // joins.push({ + // on: isLateralJoin ? sql`true` : joinOn, + // table: isLateralJoin + // ? new Subquery(builtRelationJoin.sql as SQL, {}, relationTableAlias) + // : aliasedTable(builtRelationJoin.sql as CockroachTable, relationTableAlias), + // alias: relationTableAlias, + // joinType: 'left', + // lateral: isLateralJoin, + // }); + + // // Build the "from" subquery with the remaining Many relations + // const builtTableFrom = this.buildRelationalQueryWithPK({ + // fullSchema, + // schema, + // tableNamesMap, + // table, + // tableConfig, + // queryConfig: { + // ...config, + // where: undefined, + // orderBy: undefined, + // limit: undefined, + // offset: undefined, + // with: manyRelations.slice(1).reduce>( + // (result, { tsKey, queryConfig: configValue }) => { + // result[tsKey] = configValue; + // return result; + // }, + // {}, + // ), + // }, + // tableAlias, + // }); + + // selectedRelations.push({ + // dbKey: selectedRelationTsKey, + // tsKey: selectedRelationTsKey, + // field: builtRelationSelectionField, + // relationTableTsKey: relationTableTsName, + // isJson: true, + // selection: builtRelationJoin.selection, + // }); + + // // selection = builtTableFrom.selection.map((item) => + // // is(item.field, SQL.Aliased) + // // ? { ...item, field: sql`${sql.identifier(tableAlias)}.${sql.identifier(item.field.fieldAlias)}` } + // // : item + // // ); + // // selectionForBuild = [{ + // // dbKey: '*', + // // tsKey: '*', + // // field: sql`${sql.identifier(tableAlias)}.*`, + // // selection: [], + // // isJson: false, + // // relationTableTsKey: undefined, + // // }]; + // // const newSelectionItem: (typeof selection)[number] = { + // // dbKey: selectedRelationTsKey, + // // tsKey: selectedRelationTsKey, + // // field, + // // relationTableTsKey: relationTableTsName, + // // isJson: true, + // // selection: builtRelationJoin.selection, + // // }; + // // selection.push(newSelectionItem); + // // selectionForBuild.push(newSelectionItem); + + // tableFrom = is(builtTableFrom.sql, CockroachTable) + // ? builtTableFrom.sql + // : new Subquery(builtTableFrom.sql, {}, tableAlias); + // } + + // if (selectedColumns.length === 0 && selectedRelations.length === 0 && selectedExtras.length === 0) { + // throw new DrizzleError(`No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")`); + // } + + // let selection: BuildRelationalQueryResult['selection']; + + // function prepareSelectedColumns() { + // return selectedColumns.map((key) => ({ + // dbKey: tableConfig.columns[key]!.name, + // tsKey: key, + // field: tableConfig.columns[key] as CockroachColumn, + // relationTableTsKey: undefined, + // isJson: false, + // selection: [], + // })); + // } + + // function prepareSelectedExtras() { + // return selectedExtras.map((item) => ({ + // dbKey: item.value.fieldAlias, + // tsKey: item.tsKey, + // field: item.value, + // relationTableTsKey: undefined, + // isJson: false, + // selection: [], + // })); + // } + + // if (isRoot) { + // selection = [ + // ...prepareSelectedColumns(), + // ...prepareSelectedExtras(), + // ]; + // } + + // if (hasUserDefinedWhere || orderBy.length > 0) { + // tableFrom = new Subquery( + // this.buildSelectQuery({ + // table: is(tableFrom, CockroachTable) ? aliasedTable(tableFrom, tableAlias) : tableFrom, + // fields: {}, + // fieldsFlat: selectionForBuild.map(({ field }) => ({ + // path: [], + // field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + // })), + // joins, + // distinct, + // }), + // {}, + // tableAlias, + // ); + // selectionForBuild = selection.map((item) => + // is(item.field, SQL.Aliased) + // ? { ...item, field: sql`${sql.identifier(tableAlias)}.${sql.identifier(item.field.fieldAlias)}` } + // : item + // ); + // joins = []; + // distinct = undefined; + // } + + // const result = this.buildSelectQuery({ + // table: is(tableFrom, CockroachTable) ? aliasedTable(tableFrom, tableAlias) : tableFrom, + // fields: {}, + // fieldsFlat: selectionForBuild.map(({ field }) => ({ + // path: [], + // field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + // })), + // where, + // limit, + // offset, + // joins, + // orderBy, + // distinct, + // }); + + // return { + // tableTsKey: tableConfig.tsName, + // sql: result, + // selection, + // }; + // } + + buildRelationalQueryWithoutPK({ + fullSchema, + schema, + tableNamesMap, + table, + tableConfig, + queryConfig: config, + tableAlias, + nestedQueryRelation, + joinOn, + }: { + fullSchema: Record; + schema: V1.TablesRelationalConfig; + tableNamesMap: Record; + table: CockroachTable; + tableConfig: V1.TableRelationalConfig; + queryConfig: true | V1.DBQueryConfig<'many', true>; + tableAlias: string; + nestedQueryRelation?: V1.Relation; + joinOn?: SQL; + }): V1.BuildRelationalQueryResult { + let selection: V1.BuildRelationalQueryResult['selection'] = []; + let limit, offset, orderBy: NonNullable = [], where; + const joins: CockroachSelectJoinConfig[] = []; + + if (config === true) { + const selectionEntries = Object.entries(tableConfig.columns); + selection = selectionEntries.map(( + [key, value], + ) => ({ + dbKey: value.name, + tsKey: key, + field: aliasedTableColumn(value as CockroachColumn, tableAlias), + relationTableTsKey: undefined, + isJson: false, + selection: [], + })); + } else { + const aliasedColumns = Object.fromEntries( + Object.entries(tableConfig.columns).map(( + [key, value], + ) => [key, aliasedTableColumn(value, tableAlias)]), + ); + + if (config.where) { + const whereSql = typeof config.where === 'function' + ? config.where(aliasedColumns, V1.getOperators()) + : config.where; + where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); + } + + const fieldsSelection: { tsKey: string; value: CockroachColumn | SQL.Aliased }[] = []; + let selectedColumns: string[] = []; + + // Figure out which columns to select + if (config.columns) { + let isIncludeMode = false; + + for (const [field, value] of Object.entries(config.columns)) { + if (value === undefined) { + continue; + } + + if (field in tableConfig.columns) { + if (!isIncludeMode && value === true) { + isIncludeMode = true; + } + selectedColumns.push(field); + } + } + + if (selectedColumns.length > 0) { + selectedColumns = isIncludeMode + ? selectedColumns.filter((c) => config.columns?.[c] === true) + : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); + } + } else { + // Select all columns if selection is not specified + selectedColumns = Object.keys(tableConfig.columns); + } + + for (const field of selectedColumns) { + const column = tableConfig.columns[field]! as CockroachColumn; + fieldsSelection.push({ tsKey: field, value: column }); + } + + let selectedRelations: { + tsKey: string; + queryConfig: true | V1.DBQueryConfig<'many', false>; + relation: V1.Relation; + }[] = []; + + // Figure out which relations to select + if (config.with) { + selectedRelations = Object.entries(config.with) + .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) + .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); + } + + let extras; + + // Figure out which extras to select + if (config.extras) { + extras = typeof config.extras === 'function' + ? config.extras(aliasedColumns, { sql }) + : config.extras; + for (const [tsKey, value] of Object.entries(extras)) { + fieldsSelection.push({ + tsKey, + value: mapColumnsInAliasedSQLToAlias(value, tableAlias), + }); + } + } + + // Transform `fieldsSelection` into `selection` + // `fieldsSelection` shouldn't be used after this point + for (const { tsKey, value } of fieldsSelection) { + selection.push({ + dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, + tsKey, + field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, + relationTableTsKey: undefined, + isJson: false, + selection: [], + }); + } + + let orderByOrig = typeof config.orderBy === 'function' + ? config.orderBy(aliasedColumns, V1.getOrderByOperators()) + : config.orderBy ?? []; + if (!Array.isArray(orderByOrig)) { + orderByOrig = [orderByOrig]; + } + orderBy = orderByOrig.map((orderByValue) => { + if (is(orderByValue, Column)) { + return aliasedTableColumn(orderByValue, tableAlias) as CockroachColumn; + } + return mapColumnsInSQLToAlias(orderByValue, tableAlias); + }); + + limit = config.limit; + offset = config.offset; + + // Process all relations + for ( + const { + tsKey: selectedRelationTsKey, + queryConfig: selectedRelationConfigValue, + relation, + } of selectedRelations + ) { + const normalizedRelation = V1.normalizeRelation(schema, tableNamesMap, relation); + const relationTableName = getTableUniqueName(relation.referencedTable); + const relationTableTsName = tableNamesMap[relationTableName]!; + const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; + const joinOn = and( + ...normalizedRelation.fields.map((field, i) => + eq( + aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), + aliasedTableColumn(field, tableAlias), + ) + ), + ); + const builtRelation = this.buildRelationalQueryWithoutPK({ + fullSchema, + schema, + tableNamesMap, + table: fullSchema[relationTableTsName] as CockroachTable, + tableConfig: schema[relationTableTsName]!, + queryConfig: is(relation, V1.One) + ? (selectedRelationConfigValue === true + ? { limit: 1 } + : { ...selectedRelationConfigValue, limit: 1 }) + : selectedRelationConfigValue, + tableAlias: relationTableAlias, + joinOn, + nestedQueryRelation: relation, + }); + const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); + joins.push({ + on: sql`true`, + table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), + alias: relationTableAlias, + joinType: 'left', + lateral: true, + }); + selection.push({ + dbKey: selectedRelationTsKey, + tsKey: selectedRelationTsKey, + field, + relationTableTsKey: relationTableTsName, + isJson: true, + selection: builtRelation.selection, + }); + } + } + + if (selection.length === 0) { + throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")` }); + } + + let result; + + where = and(joinOn, where); + + if (nestedQueryRelation) { + let field = sql`json_build_array(${ + sql.join( + selection.map(({ field, tsKey, isJson }) => + isJson + ? sql`${sql.identifier(`${tableAlias}_${tsKey}`)}.${sql.identifier('data')}` + : is(field, SQL.Aliased) + ? field.sql + : field + ), + sql`, `, + ) + })`; + if (is(nestedQueryRelation, V1.Many)) { + field = sql`coalesce(json_agg(${field}${ + orderBy.length > 0 ? sql` order by ${sql.join(orderBy, sql`, `)}` : undefined + }), '[]'::json)`; + // orderBy = []; + } + const nestedSelection = [{ + dbKey: 'data', + tsKey: 'data', + field: field.as('data'), + isJson: true, + relationTableTsKey: tableConfig.tsName, + selection, + }]; + + const needsSubquery = limit !== undefined || offset !== undefined || orderBy.length > 0; + + if (needsSubquery) { + result = this.buildSelectQuery({ + table: aliasedTable(table, tableAlias), + fields: {}, + fieldsFlat: [{ + path: [], + field: sql.raw('*'), + }], + where, + limit, + offset, + orderBy, + setOperators: [], + }); + + where = undefined; + limit = undefined; + offset = undefined; + orderBy = []; + } else { + result = aliasedTable(table, tableAlias); + } + + result = this.buildSelectQuery({ + table: is(result, CockroachTable) ? result : new Subquery(result, {}, tableAlias), + fields: {}, + fieldsFlat: nestedSelection.map(({ field }) => ({ + path: [], + field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + })), + joins, + where, + limit, + offset, + orderBy, + setOperators: [], + }); + } else { + result = this.buildSelectQuery({ + table: aliasedTable(table, tableAlias), + fields: {}, + fieldsFlat: selection.map(({ field }) => ({ + path: [], + field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + })), + joins, + where, + limit, + offset, + orderBy, + setOperators: [], + }); + } + + return { + tableTsKey: tableConfig.tsName, + sql: result, + selection, + }; + } +} diff --git a/drizzle-orm/src/cockroach-core/expressions.ts b/drizzle-orm/src/cockroach-core/expressions.ts new file mode 100644 index 0000000000..3e3781eb48 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/expressions.ts @@ -0,0 +1,25 @@ +import type { CockroachColumn } from '~/cockroach-core/columns/index.ts'; +import { bindIfParam } from '~/sql/expressions/index.ts'; +import type { Placeholder, SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; +import { sql } from '~/sql/sql.ts'; + +export * from '~/sql/expressions/index.ts'; + +export function concat(column: CockroachColumn | SQL.Aliased, value: string | Placeholder | SQLWrapper): SQL { + return sql`${column} || ${bindIfParam(value, column)}`; +} + +export function substring( + column: CockroachColumn | SQL.Aliased, + { from, for: _for }: { from?: number | Placeholder | SQLWrapper; for?: number | Placeholder | SQLWrapper }, +): SQL { + const chunks: SQLChunk[] = [sql`substring(`, column]; + if (from !== undefined) { + chunks.push(sql` from `, bindIfParam(from, column)); + } + if (_for !== undefined) { + chunks.push(sql` for `, bindIfParam(_for, column)); + } + chunks.push(sql`)`); + return sql.join(chunks); +} diff --git a/drizzle-orm/src/cockroach-core/foreign-keys.ts b/drizzle-orm/src/cockroach-core/foreign-keys.ts new file mode 100644 index 0000000000..2d339da5fe --- /dev/null +++ b/drizzle-orm/src/cockroach-core/foreign-keys.ts @@ -0,0 +1,119 @@ +import { entityKind } from '~/entity.ts'; +import type { AnyCockroachColumn, CockroachColumn } from './columns/index.ts'; +import type { CockroachTable } from './table.ts'; + +export type UpdateDeleteAction = 'cascade' | 'restrict' | 'no action' | 'set null' | 'set default'; + +export type Reference = () => { + readonly name?: string; + readonly columns: CockroachColumn[]; + readonly foreignTable: CockroachTable; + readonly foreignColumns: CockroachColumn[]; +}; + +export class ForeignKeyBuilder { + static readonly [entityKind]: string = 'CockroachForeignKeyBuilder'; + + /** @internal */ + reference: Reference; + + /** @internal */ + _onUpdate: UpdateDeleteAction | undefined = 'no action'; + + /** @internal */ + _onDelete: UpdateDeleteAction | undefined = 'no action'; + + constructor( + config: () => { + name?: string; + columns: CockroachColumn[]; + foreignColumns: CockroachColumn[]; + }, + actions?: { + onUpdate?: UpdateDeleteAction; + onDelete?: UpdateDeleteAction; + } | undefined, + ) { + this.reference = () => { + const { name, columns, foreignColumns } = config(); + return { name, columns, foreignTable: foreignColumns[0]!.table as CockroachTable, foreignColumns }; + }; + if (actions) { + this._onUpdate = actions.onUpdate; + this._onDelete = actions.onDelete; + } + } + + onUpdate(action: UpdateDeleteAction): this { + this._onUpdate = action === undefined ? 'no action' : action; + return this; + } + + onDelete(action: UpdateDeleteAction): this { + this._onDelete = action === undefined ? 'no action' : action; + return this; + } + + /** @internal */ + build(table: CockroachTable): ForeignKey { + return new ForeignKey(table, this); + } +} + +export type AnyForeignKeyBuilder = ForeignKeyBuilder; + +export class ForeignKey { + static readonly [entityKind]: string = 'CockroachForeignKey'; + + readonly reference: Reference; + readonly onUpdate: UpdateDeleteAction | undefined; + readonly onDelete: UpdateDeleteAction | undefined; + readonly name?: string; + + constructor(readonly table: CockroachTable, builder: ForeignKeyBuilder) { + this.reference = builder.reference; + this.onUpdate = builder._onUpdate; + this.onDelete = builder._onDelete; + } + + getName(): string | undefined { + const { name } = this.reference(); + + return name; + } + + isNameExplicit() { + return !!this.reference().name; + } +} + +type ColumnsWithTable< + TTableName extends string, + TColumns extends CockroachColumn[], +> = { [Key in keyof TColumns]: AnyCockroachColumn<{ tableName: TTableName }> }; + +export function foreignKey< + TTableName extends string, + TForeignTableName extends string, + TColumns extends [ + AnyCockroachColumn<{ tableName: TTableName }>, + ...AnyCockroachColumn<{ tableName: TTableName }>[], + ], +>( + config: { + name?: string; + columns: TColumns; + foreignColumns: ColumnsWithTable; + }, +): ForeignKeyBuilder { + function mappedConfig() { + const { name, columns, foreignColumns } = config; + return { + name, + columns, + foreignColumns, + }; + } + + return new ForeignKeyBuilder(mappedConfig); +} diff --git a/drizzle-orm/src/cockroach-core/index.ts b/drizzle-orm/src/cockroach-core/index.ts new file mode 100644 index 0000000000..28542c0f7f --- /dev/null +++ b/drizzle-orm/src/cockroach-core/index.ts @@ -0,0 +1,20 @@ +export * from './alias.ts'; +export * from './checks.ts'; +export * from './columns/index.ts'; +export * from './db.ts'; +export * from './dialect.ts'; +export * from './foreign-keys.ts'; +export * from './indexes.ts'; +export * from './policies.ts'; +export * from './primary-keys.ts'; +export * from './query-builders/index.ts'; +export * from './roles.ts'; +export * from './schema.ts'; +export * from './sequence.ts'; +export * from './session.ts'; +export * from './subquery.ts'; +export * from './table.ts'; +export * from './unique-constraint.ts'; +export * from './utils.ts'; +export * from './utils/index.ts'; +export * from './view.ts'; diff --git a/drizzle-orm/src/cockroach-core/indexes.ts b/drizzle-orm/src/cockroach-core/indexes.ts new file mode 100644 index 0000000000..ba95910f64 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/indexes.ts @@ -0,0 +1,172 @@ +import { SQL } from '~/sql/sql.ts'; + +import { entityKind, is } from '~/entity.ts'; +import type { CockroachColumn, ExtraConfigColumn } from './columns/index.ts'; +import { IndexedColumn } from './columns/index.ts'; +import type { CockroachTable } from './table.ts'; + +interface IndexConfig { + name?: string; + + columns: Partial[]; + + /** + * If true, the index will be created as `create unique index` instead of `create index`. + */ + unique: boolean; + + /** + * If true, the index will be created as `create index ... on only
` instead of `create index ... on
`. + */ + only: boolean; + + /** + * Condition for partial index. + */ + where?: SQL; + + /** + * The optional USING clause method for the index + */ + method?: 'btree' | string; +} + +export type IndexColumn = CockroachColumn; + +export type CockroachIndexMethod = + | 'btree' + | 'hash' + | 'gin' + | 'cspann'; + +export class IndexBuilderOn { + static readonly [entityKind]: string = 'CockroachIndexBuilderOn'; + + constructor(private unique: boolean, private name?: string) {} + + on(...columns: [Partial | SQL, ...Partial[]]): IndexBuilder { + return new IndexBuilder( + columns.map((it) => { + if (is(it, SQL)) { + return it; + } + it = it as ExtraConfigColumn; + const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); + it.indexConfig = JSON.parse(JSON.stringify(it.defaultConfig)); + return clonedIndexedColumn; + }), + this.unique, + false, + this.name, + ); + } + + onOnly(...columns: [Partial, ...Partial[]]): IndexBuilder { + return new IndexBuilder( + columns.map((it) => { + if (is(it, SQL)) { + return it; + } + it = it as ExtraConfigColumn; + const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); + it.indexConfig = it.defaultConfig; + return clonedIndexedColumn; + }), + this.unique, + true, + this.name, + ); + } + + /** + * Specify what index method to use. Choices are `btree`, `hash`, `gin`, `cspann`. The default method is `btree`. + * + * @param method The name of the index method to be used + * @param columns + * @returns + */ + using( + method: CockroachIndexMethod, + ...columns: [Partial, ...Partial[]] + ): IndexBuilder { + return new IndexBuilder( + columns.map((it) => { + if (is(it, SQL)) { + return it; + } + it = it as ExtraConfigColumn; + const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); + it.indexConfig = JSON.parse(JSON.stringify(it.defaultConfig)); + return clonedIndexedColumn; + }), + this.unique, + true, + this.name, + method, + ); + } +} + +export interface AnyIndexBuilder { + build(table: CockroachTable): Index; +} + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IndexBuilder extends AnyIndexBuilder {} + +export class IndexBuilder implements AnyIndexBuilder { + static readonly [entityKind]: string = 'CockroachIndexBuilder'; + + /** @internal */ + config: IndexConfig; + + constructor( + columns: Partial[], + unique: boolean, + only: boolean, + name?: string, + method: string = 'btree', + ) { + this.config = { + name, + columns, + unique, + only, + method, + }; + } + + where(condition: SQL): this { + this.config.where = condition; + return this; + } + + /** @internal */ + build(table: CockroachTable): Index { + return new Index(this.config, table); + } +} + +export class Index { + static readonly [entityKind]: string = 'CockroachIndex'; + + readonly config: IndexConfig & { table: CockroachTable }; + readonly isNameExplicit: boolean; + + constructor(config: IndexConfig, table: CockroachTable) { + this.config = { ...config, table }; + this.isNameExplicit = !!config.name; + } +} + +export type GetColumnsTableName = TColumns extends CockroachColumn ? TColumns['_']['name'] + : TColumns extends CockroachColumn[] ? TColumns[number]['_']['name'] + : never; + +export function index(name?: string): IndexBuilderOn { + return new IndexBuilderOn(false, name); +} + +export function uniqueIndex(name?: string): IndexBuilderOn { + return new IndexBuilderOn(true, name); +} diff --git a/drizzle-orm/src/cockroach-core/policies.ts b/drizzle-orm/src/cockroach-core/policies.ts new file mode 100644 index 0000000000..f2c076c598 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/policies.ts @@ -0,0 +1,55 @@ +import { entityKind } from '~/entity.ts'; +import type { SQL } from '~/sql/sql.ts'; +import type { CockroachRole } from './roles.ts'; +import type { CockroachTable } from './table.ts'; + +export type CockroachPolicyToOption = + | 'public' + | 'current_user' + | 'session_user' + | (string & {}) + | CockroachPolicyToOption[] + | CockroachRole; + +export interface CockroachPolicyConfig { + as?: 'permissive' | 'restrictive'; + for?: 'all' | 'select' | 'insert' | 'update' | 'delete'; + to?: CockroachPolicyToOption; + using?: SQL; + withCheck?: SQL; +} + +export class CockroachPolicy implements CockroachPolicyConfig { + static readonly [entityKind]: string = 'CockroachPolicy'; + + readonly as: CockroachPolicyConfig['as']; + readonly for: CockroachPolicyConfig['for']; + readonly to: CockroachPolicyConfig['to']; + readonly using: CockroachPolicyConfig['using']; + readonly withCheck: CockroachPolicyConfig['withCheck']; + + /** @internal */ + _linkedTable?: CockroachTable; + + constructor( + readonly name: string, + config?: CockroachPolicyConfig, + ) { + if (config) { + this.as = config.as; + this.for = config.for; + this.to = config.to; + this.using = config.using; + this.withCheck = config.withCheck; + } + } + + link(table: CockroachTable): this { + this._linkedTable = table; + return this; + } +} + +export function cockroachPolicy(name: string, config?: CockroachPolicyConfig) { + return new CockroachPolicy(name, config); +} diff --git a/drizzle-orm/src/cockroach-core/primary-keys.ts b/drizzle-orm/src/cockroach-core/primary-keys.ts new file mode 100644 index 0000000000..e89352fbd2 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/primary-keys.ts @@ -0,0 +1,52 @@ +import { entityKind } from '~/entity.ts'; +import type { AnyCockroachColumn, CockroachColumn } from './columns/index.ts'; +import type { CockroachTable } from './table.ts'; + +export function primaryKey< + TTableName extends string, + TColumn extends AnyCockroachColumn<{ tableName: TTableName }>, + TColumns extends AnyCockroachColumn<{ tableName: TTableName }>[], +>(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder { + return new PrimaryKeyBuilder(config.columns, config.name); +} + +export class PrimaryKeyBuilder { + static readonly [entityKind]: string = 'CockroachPrimaryKeyBuilder'; + + /** @internal */ + columns: CockroachColumn[]; + + /** @internal */ + name?: string; + + constructor( + columns: CockroachColumn[], + name?: string, + ) { + this.columns = columns; + this.name = name; + } + + /** @internal */ + build(table: CockroachTable): PrimaryKey { + return new PrimaryKey(table, this.columns, this.name); + } +} + +export class PrimaryKey { + static readonly [entityKind]: string = 'CockroachPrimaryKey'; + + readonly columns: AnyCockroachColumn<{}>[]; + readonly name?: string; + readonly isNameExplicit: boolean; + + constructor(readonly table: CockroachTable, columns: AnyCockroachColumn<{}>[], name?: string) { + this.columns = columns; + this.name = name; + this.isNameExplicit = !!name; + } + + getName(): string | undefined { + return this.name; + } +} diff --git a/drizzle-orm/src/cockroach-core/query-builders/count.ts b/drizzle-orm/src/cockroach-core/query-builders/count.ts new file mode 100644 index 0000000000..640bc96c54 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/count.ts @@ -0,0 +1,87 @@ +import { entityKind } from '~/entity.ts'; +import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; +import type { NeonAuthToken } from '~/utils.ts'; +import type { CockroachSession } from '../session.ts'; +import type { CockroachTable } from '../table.ts'; +import type { CockroachViewBase } from '../view-base.ts'; + +export class CockroachCountBuilder< + TSession extends CockroachSession, +> extends SQL implements Promise, SQLWrapper { + private sql: SQL; + private token?: NeonAuthToken; + + static override readonly [entityKind]: string = 'CockroachCountBuilder'; + [Symbol.toStringTag] = 'CockroachCountBuilder'; + + private session: TSession; + + private static buildEmbeddedCount( + source: CockroachTable | CockroachViewBase | SQL | SQLWrapper, + filters?: SQL, + ): SQL { + return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; + } + + private static buildCount( + source: CockroachTable | CockroachViewBase | SQL | SQLWrapper, + filters?: SQL, + ): SQL { + return sql`select count(*) as count from ${source}${sql.raw(' where ').if(filters)}${filters};`; + } + + constructor( + readonly params: { + source: CockroachTable | CockroachViewBase | SQL | SQLWrapper; + filters?: SQL; + session: TSession; + }, + ) { + super(CockroachCountBuilder.buildEmbeddedCount(params.source, params.filters).queryChunks); + + this.mapWith(Number); + + this.session = params.session; + + this.sql = CockroachCountBuilder.buildCount( + params.source, + params.filters, + ); + } + + /** @intrnal */ + setToken(token?: NeonAuthToken) { + this.token = token; + return this; + } + + then( + onfulfilled?: ((value: number) => TResult1 | PromiseLike) | null | undefined, + onrejected?: ((reason: any) => TResult2 | PromiseLike) | null | undefined, + ): Promise { + return Promise.resolve(this.session.count(this.sql, this.token)) + .then( + onfulfilled, + onrejected, + ); + } + + catch( + onRejected?: ((reason: any) => any) | null | undefined, + ): Promise { + return this.then(undefined, onRejected); + } + + finally(onFinally?: (() => void) | null | undefined): Promise { + return this.then( + (value) => { + onFinally?.(); + return value; + }, + (reason) => { + onFinally?.(); + throw reason; + }, + ); + } +} diff --git a/drizzle-orm/src/cockroach-core/query-builders/delete.ts b/drizzle-orm/src/cockroach-core/query-builders/delete.ts new file mode 100644 index 0000000000..d1cddcb0d9 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/delete.ts @@ -0,0 +1,293 @@ +import type { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import type { + CockroachPreparedQuery, + CockroachQueryResultHKT, + CockroachQueryResultKind, + CockroachSession, + PreparedQueryConfig, +} from '~/cockroach-core/session.ts'; +import type { CockroachTable } from '~/cockroach-core/table.ts'; +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { SelectResultFields } from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import { getTableName, Table } from '~/table.ts'; +import { tracer } from '~/tracing.ts'; +import { type NeonAuthToken, orderSelectedFields } from '~/utils.ts'; +import type { CockroachColumn } from '../columns/common.ts'; +import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; + +export type CockroachDeleteWithout< + T extends AnyCockroachDeleteBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T + : Omit< + CockroachDeleteBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['selectedFields'], + T['_']['returning'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type CockroachDelete< + TTable extends CockroachTable = CockroachTable, + TQueryResult extends CockroachQueryResultHKT = CockroachQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, + TReturning extends Record | undefined = Record | undefined, +> = CockroachDeleteBase; + +export interface CockroachDeleteConfig { + where?: SQL | undefined; + table: CockroachTable; + returningFields?: SelectedFieldsFlat; + returning?: SelectedFieldsOrdered; + withList?: Subquery[]; +} + +export type CockroachDeleteReturningAll< + T extends AnyCockroachDeleteBase, + TDynamic extends boolean, +> = CockroachDeleteWithout< + CockroachDeleteBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['table']['_']['columns'], + T['_']['table']['$inferSelect'], + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + 'returning' +>; + +export type CockroachDeleteReturning< + T extends AnyCockroachDeleteBase, + TDynamic extends boolean, + TSelectedFields extends SelectedFieldsFlat, +> = CockroachDeleteWithout< + CockroachDeleteBase< + T['_']['table'], + T['_']['queryResult'], + TSelectedFields, + SelectResultFields, + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + 'returning' +>; + +export type CockroachDeletePrepare = CockroachPreparedQuery< + PreparedQueryConfig & { + execute: T['_']['returning'] extends undefined ? CockroachQueryResultKind + : T['_']['returning'][]; + } +>; + +export type CockroachDeleteDynamic = CockroachDelete< + T['_']['table'], + T['_']['queryResult'], + T['_']['selectedFields'], + T['_']['returning'] +>; + +export type AnyCockroachDeleteBase = CockroachDeleteBase; + +export interface CockroachDeleteBase< + TTable extends CockroachTable, + TQueryResult extends CockroachQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, + TReturning extends Record | undefined = undefined, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? CockroachQueryResultKind : TReturning[] + >, + QueryPromise : TReturning[]>, + RunnableQuery< + TReturning extends undefined ? CockroachQueryResultKind : TReturning[], + 'cockroach' + >, + SQLWrapper +{ + readonly _: { + readonly dialect: 'cockroach'; + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly selectedFields: TSelectedFields; + readonly returning: TReturning; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TReturning extends undefined ? CockroachQueryResultKind : TReturning[]; + }; +} + +export class CockroachDeleteBase< + TTable extends CockroachTable, + TQueryResult extends CockroachQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, + TReturning extends Record | undefined = undefined, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise : TReturning[]> + implements + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? CockroachQueryResultKind : TReturning[] + >, + RunnableQuery< + TReturning extends undefined ? CockroachQueryResultKind : TReturning[], + 'cockroach' + >, + SQLWrapper +{ + static override readonly [entityKind]: string = 'CockroachDelete'; + + private config: CockroachDeleteConfig; + + constructor( + table: TTable, + private session: CockroachSession, + private dialect: CockroachDialect, + withList?: Subquery[], + ) { + super(); + this.config = { table, withList }; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will delete only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be deleted. + * + * ```ts + * // Delete all cars with green color + * await db.delete(cars).where(eq(cars.color, 'green')); + * // or + * await db.delete(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Delete all BMW cars with a green color + * await db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Delete all cars with the green or blue color + * await db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where(where: SQL | undefined): CockroachDeleteWithout { + this.config.where = where; + return this as any; + } + + /** + * Adds a `returning` clause to the query. + * + * Calling this method will return the specified fields of the deleted rows. If no fields are specified, all fields will be returned. + * + * See docs: {@link https://orm.drizzle.team/docs/delete#delete-with-return} + * + * @example + * ```ts + * // Delete all cars with the green color and return all fields + * const deletedCars: Car[] = await db.delete(cars) + * .where(eq(cars.color, 'green')) + * .returning(); + * + * // Delete all cars with the green color and return only their id and brand fields + * const deletedCarsIdsAndBrands: { id: number, brand: string }[] = await db.delete(cars) + * .where(eq(cars.color, 'green')) + * .returning({ id: cars.id, brand: cars.brand }); + * ``` + */ + returning(): CockroachDeleteReturningAll; + returning( + fields: TSelectedFields, + ): CockroachDeleteReturning; + returning( + fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], + ): CockroachDeleteReturning | CockroachDeleteReturningAll { + this.config.returningFields = fields; + this.config.returning = orderSelectedFields(fields); + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildDeleteQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + /** @internal */ + _prepare(name?: string): CockroachDeletePrepare { + return tracer.startActiveSpan('drizzle.prepareQuery', () => { + return this.session.prepareQuery< + PreparedQueryConfig & { + execute: TReturning extends undefined ? CockroachQueryResultKind : TReturning[]; + } + >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true); + }); + } + + prepare(name: string): CockroachDeletePrepare { + return this._prepare(name); + } + + private authToken?: NeonAuthToken; + /** @internal */ + setToken(token?: NeonAuthToken) { + this.authToken = token; + return this; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return tracer.startActiveSpan('drizzle.operation', () => { + return this._prepare().execute(placeholderValues, this.authToken); + }); + }; + + /** @internal */ + getSelectedFields(): this['_']['selectedFields'] { + return ( + this.config.returningFields + ? new Proxy( + this.config.returningFields, + new SelectionProxyHandler({ + alias: getTableName(this.config.table), + sqlAliasedBehavior: 'alias', + sqlBehavior: 'error', + }), + ) + : undefined + ) as this['_']['selectedFields']; + } + + $dynamic(): CockroachDeleteDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/cockroach-core/query-builders/index.ts b/drizzle-orm/src/cockroach-core/query-builders/index.ts new file mode 100644 index 0000000000..c4821e51d5 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/index.ts @@ -0,0 +1,7 @@ +export * from './delete.ts'; +export * from './insert.ts'; +export * from './query-builder.ts'; +export * from './refresh-materialized-view.ts'; +export * from './select.ts'; +export * from './select.types.ts'; +export * from './update.ts'; diff --git a/drizzle-orm/src/cockroach-core/query-builders/insert.ts b/drizzle-orm/src/cockroach-core/query-builders/insert.ts new file mode 100644 index 0000000000..4b20ed71d7 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/insert.ts @@ -0,0 +1,447 @@ +import type { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import type { IndexColumn } from '~/cockroach-core/indexes.ts'; +import type { + CockroachPreparedQuery, + CockroachQueryResultHKT, + CockroachQueryResultKind, + CockroachSession, + PreparedQueryConfig, +} from '~/cockroach-core/session.ts'; +import type { CockroachTable, TableConfig } from '~/cockroach-core/table.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { SelectResultFields } from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; +import { Param, SQL, sql } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import type { InferInsertModel } from '~/table.ts'; +import { getTableName, Table, TableColumns } from '~/table.ts'; +import { tracer } from '~/tracing.ts'; +import { haveSameKeys, mapUpdateSet, type NeonAuthToken, orderSelectedFields } from '~/utils.ts'; +import type { AnyCockroachColumn, CockroachColumn } from '../columns/common.ts'; +import { QueryBuilder } from './query-builder.ts'; +import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; +import type { CockroachUpdateSetSource } from './update.ts'; + +export interface CockroachInsertConfig { + table: TTable; + values: Record[] | CockroachInsertSelectQueryBuilder | SQL; + withList?: Subquery[]; + onConflict?: SQL; + returningFields?: SelectedFieldsFlat; + returning?: SelectedFieldsOrdered; + select?: boolean; +} + +export type CockroachInsertValue< + TTable extends CockroachTable, + OverrideT extends boolean = false, + TModel extends Record = InferInsertModel, +> = + & { + [Key in keyof TModel]: + | TModel[Key] + | SQL + | Placeholder; + } + & {}; + +export type CockroachInsertSelectQueryBuilder< + TTable extends CockroachTable, + TModel extends Record = InferInsertModel, +> = TypedQueryBuilder< + { [K in keyof TModel]: AnyCockroachColumn | SQL | SQL.Aliased | TModel[K] } +>; + +export class CockroachInsertBuilder< + TTable extends CockroachTable, + TQueryResult extends CockroachQueryResultHKT, + OverrideT extends boolean = false, +> { + static readonly [entityKind]: string = 'CockroachInsertBuilder'; + + constructor( + private table: TTable, + private session: CockroachSession, + private dialect: CockroachDialect, + private withList?: Subquery[], + ) {} + + values(value: CockroachInsertValue): CockroachInsertBase; + values(values: CockroachInsertValue[]): CockroachInsertBase; + values( + values: CockroachInsertValue | CockroachInsertValue[], + ): CockroachInsertBase { + values = Array.isArray(values) ? values : [values]; + if (values.length === 0) { + throw new Error('values() must be called with at least one value'); + } + const mappedValues = values.map((entry) => { + const result: Record = {}; + const cols = this.table[Table.Symbol.Columns]; + for (const colKey of Object.keys(entry)) { + const colValue = entry[colKey as keyof typeof entry]; + result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); + } + return result; + }); + + return new CockroachInsertBase( + this.table, + mappedValues, + this.session, + this.dialect, + this.withList, + false, + ) as any; + } + + select( + selectQuery: (qb: QueryBuilder) => CockroachInsertSelectQueryBuilder, + ): CockroachInsertBase; + select(selectQuery: (qb: QueryBuilder) => SQL): CockroachInsertBase; + select(selectQuery: SQL): CockroachInsertBase; + select(selectQuery: CockroachInsertSelectQueryBuilder): CockroachInsertBase; + select( + selectQuery: + | SQL + | CockroachInsertSelectQueryBuilder + | ((qb: QueryBuilder) => CockroachInsertSelectQueryBuilder | SQL), + ): CockroachInsertBase { + const select = typeof selectQuery === 'function' ? selectQuery(new QueryBuilder()) : selectQuery; + + if ( + !is(select, SQL) + && !haveSameKeys(this.table[TableColumns], select._.selectedFields) + ) { + throw new Error( + 'Insert select error: selected fields are not the same or are in a different order compared to the table definition', + ); + } + + return new CockroachInsertBase(this.table, select, this.session, this.dialect, this.withList, true); + } +} + +export type CockroachInsertWithout< + T extends AnyCockroachInsert, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T + : Omit< + CockroachInsertBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['selectedFields'], + T['_']['returning'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type CockroachInsertReturning< + T extends AnyCockroachInsert, + TDynamic extends boolean, + TSelectedFields extends SelectedFieldsFlat, +> = CockroachInsertBase< + T['_']['table'], + T['_']['queryResult'], + TSelectedFields, + SelectResultFields, + TDynamic, + T['_']['excludedMethods'] +>; + +export type CockroachInsertReturningAll = CockroachInsertBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['table']['_']['columns'], + T['_']['table']['$inferSelect'], + TDynamic, + T['_']['excludedMethods'] +>; + +export interface CockroachInsertOnConflictDoUpdateConfig { + target: IndexColumn | IndexColumn[]; + /** @deprecated use either `targetWhere` or `setWhere` */ + where?: SQL; + // TODO: add tests for targetWhere and setWhere + targetWhere?: SQL; + setWhere?: SQL; + set: CockroachUpdateSetSource; +} + +export type CockroachInsertPrepare = CockroachPreparedQuery< + PreparedQueryConfig & { + execute: T['_']['returning'] extends undefined ? CockroachQueryResultKind + : T['_']['returning'][]; + } +>; + +export type CockroachInsertDynamic = CockroachInsert< + T['_']['table'], + T['_']['queryResult'], + T['_']['returning'] +>; + +export type AnyCockroachInsert = CockroachInsertBase; + +export type CockroachInsert< + TTable extends CockroachTable = CockroachTable, + TQueryResult extends CockroachQueryResultHKT = CockroachQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = ColumnsSelection | undefined, + TReturning extends Record | undefined = Record | undefined, +> = CockroachInsertBase; + +export interface CockroachInsertBase< + TTable extends CockroachTable, + TQueryResult extends CockroachQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, + TReturning extends Record | undefined = undefined, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? CockroachQueryResultKind : TReturning[] + >, + QueryPromise : TReturning[]>, + RunnableQuery< + TReturning extends undefined ? CockroachQueryResultKind : TReturning[], + 'cockroach' + >, + SQLWrapper +{ + readonly _: { + readonly dialect: 'cockroach'; + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly selectedFields: TSelectedFields; + readonly returning: TReturning; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TReturning extends undefined ? CockroachQueryResultKind : TReturning[]; + }; +} + +export class CockroachInsertBase< + TTable extends CockroachTable, + TQueryResult extends CockroachQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, + TReturning extends Record | undefined = undefined, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise : TReturning[]> + implements + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? CockroachQueryResultKind : TReturning[] + >, + RunnableQuery< + TReturning extends undefined ? CockroachQueryResultKind : TReturning[], + 'cockroach' + >, + SQLWrapper +{ + static override readonly [entityKind]: string = 'CockroachInsert'; + + private config: CockroachInsertConfig; + + constructor( + table: TTable, + values: CockroachInsertConfig['values'], + private session: CockroachSession, + private dialect: CockroachDialect, + withList?: Subquery[], + select?: boolean, + ) { + super(); + this.config = { table, values: values as any, withList, select }; + } + + /** + * Adds a `returning` clause to the query. + * + * Calling this method will return the specified fields of the inserted rows. If no fields are specified, all fields will be returned. + * + * See docs: {@link https://orm.drizzle.team/docs/insert#insert-returning} + * + * @example + * ```ts + * // Insert one row and return all fields + * const insertedCar: Car[] = await db.insert(cars) + * .values({ brand: 'BMW' }) + * .returning(); + * + * // Insert one row and return only the id + * const insertedCarId: { id: number }[] = await db.insert(cars) + * .values({ brand: 'BMW' }) + * .returning({ id: cars.id }); + * ``` + */ + returning(): CockroachInsertWithout, TDynamic, 'returning'>; + returning( + fields: TSelectedFields, + ): CockroachInsertWithout, TDynamic, 'returning'>; + returning( + fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], + ): CockroachInsertWithout { + this.config.returningFields = fields; + this.config.returning = orderSelectedFields(fields); + return this as any; + } + + /** + * Adds an `on conflict do nothing` clause to the query. + * + * Calling this method simply avoids inserting a row as its alternative action. + * + * See docs: {@link https://orm.drizzle.team/docs/insert#on-conflict-do-nothing} + * + * @param config The `target` and `where` clauses. + * + * @example + * ```ts + * // Insert one row and cancel the insert if there's a conflict + * await db.insert(cars) + * .values({ id: 1, brand: 'BMW' }) + * .onConflictDoNothing(); + * + * // Explicitly specify conflict target + * await db.insert(cars) + * .values({ id: 1, brand: 'BMW' }) + * .onConflictDoNothing({ target: cars.id }); + * ``` + */ + onConflictDoNothing( + config: { target?: IndexColumn | IndexColumn[]; where?: SQL } = {}, + ): CockroachInsertWithout { + if (config.target === undefined) { + this.config.onConflict = sql`do nothing`; + } else { + let targetColumn = ''; + targetColumn = Array.isArray(config.target) + ? config.target.map((it) => this.dialect.escapeName(this.dialect.casing.getColumnCasing(it))).join(',') + : this.dialect.escapeName(this.dialect.casing.getColumnCasing(config.target)); + + const whereSql = config.where ? sql` where ${config.where}` : undefined; + this.config.onConflict = sql`(${sql.raw(targetColumn)})${whereSql} do nothing`; + } + return this as any; + } + + /** + * Adds an `on conflict do update` clause to the query. + * + * Calling this method will update the existing row that conflicts with the row proposed for insertion as its alternative action. + * + * See docs: {@link https://orm.drizzle.team/docs/insert#upserts-and-conflicts} + * + * @param config The `target`, `set` and `where` clauses. + * + * @example + * ```ts + * // Update the row if there's a conflict + * await db.insert(cars) + * .values({ id: 1, brand: 'BMW' }) + * .onConflictDoUpdate({ + * target: cars.id, + * set: { brand: 'Porsche' } + * }); + * + * // Upsert with 'where' clause + * await db.insert(cars) + * .values({ id: 1, brand: 'BMW' }) + * .onConflictDoUpdate({ + * target: cars.id, + * set: { brand: 'newBMW' }, + * targetWhere: sql`${cars.createdAt} > '2023-01-01'::date`, + * }); + * ``` + */ + onConflictDoUpdate( + config: CockroachInsertOnConflictDoUpdateConfig, + ): CockroachInsertWithout { + if (config.where && (config.targetWhere || config.setWhere)) { + throw new Error( + 'You cannot use both "where" and "targetWhere"/"setWhere" at the same time - "where" is deprecated, use "targetWhere" or "setWhere" instead.', + ); + } + const whereSql = config.where ? sql` where ${config.where}` : undefined; + const targetWhereSql = config.targetWhere ? sql` where ${config.targetWhere}` : undefined; + const setWhereSql = config.setWhere ? sql` where ${config.setWhere}` : undefined; + const setSql = this.dialect.buildUpdateSet(this.config.table, mapUpdateSet(this.config.table, config.set)); + let targetColumn = ''; + targetColumn = Array.isArray(config.target) + ? config.target.map((it) => this.dialect.escapeName(this.dialect.casing.getColumnCasing(it))).join(',') + : this.dialect.escapeName(this.dialect.casing.getColumnCasing(config.target)); + this.config.onConflict = sql`(${ + sql.raw(targetColumn) + })${targetWhereSql} do update set ${setSql}${whereSql}${setWhereSql}`; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildInsertQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + /** @internal */ + _prepare(name?: string): CockroachInsertPrepare { + return tracer.startActiveSpan('drizzle.prepareQuery', () => { + return this.session.prepareQuery< + PreparedQueryConfig & { + execute: TReturning extends undefined ? CockroachQueryResultKind : TReturning[]; + } + >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true); + }); + } + + prepare(name: string): CockroachInsertPrepare { + return this._prepare(name); + } + + private authToken?: NeonAuthToken; + /** @internal */ + setToken(token?: NeonAuthToken) { + this.authToken = token; + return this; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return tracer.startActiveSpan('drizzle.operation', () => { + return this._prepare().execute(placeholderValues, this.authToken); + }); + }; + + /** @internal */ + getSelectedFields(): this['_']['selectedFields'] { + return ( + this.config.returningFields + ? new Proxy( + this.config.returningFields, + new SelectionProxyHandler({ + alias: getTableName(this.config.table), + sqlAliasedBehavior: 'alias', + sqlBehavior: 'error', + }), + ) + : undefined + ) as this['_']['selectedFields']; + } + + $dynamic(): CockroachInsertDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/cockroach-core/query-builders/query-builder.ts b/drizzle-orm/src/cockroach-core/query-builders/query-builder.ts new file mode 100644 index 0000000000..12b00d10f8 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/query-builder.ts @@ -0,0 +1,150 @@ +import type { CockroachDialectConfig } from '~/cockroach-core/dialect.ts'; +import { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { WithSubquery } from '~/subquery.ts'; +import type { CockroachColumn } from '../columns/index.ts'; +import type { WithBuilder } from '../subquery.ts'; +import { CockroachSelectBuilder } from './select.ts'; +import type { SelectedFields } from './select.types.ts'; + +export class QueryBuilder { + static readonly [entityKind]: string = 'CockroachQueryBuilder'; + + private dialect: CockroachDialect | undefined; + private dialectConfig: CockroachDialectConfig | undefined; + + constructor(dialect?: CockroachDialect | CockroachDialectConfig) { + this.dialect = is(dialect, CockroachDialect) ? dialect : undefined; + this.dialectConfig = is(dialect, CockroachDialect) ? undefined : dialect; + } + + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { + const queryBuilder = this; + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(queryBuilder); + } + + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as any; + }; + return { as }; + }; + + with(...queries: WithSubquery[]) { + const self = this; + + function select(): CockroachSelectBuilder; + function select(fields: TSelection): CockroachSelectBuilder; + function select( + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: self.getDialect(), + withList: queries, + }); + } + + function selectDistinct(): CockroachSelectBuilder; + function selectDistinct( + fields: TSelection, + ): CockroachSelectBuilder; + function selectDistinct( + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: self.getDialect(), + distinct: true, + }); + } + + function selectDistinctOn(on: (CockroachColumn | SQLWrapper)[]): CockroachSelectBuilder; + function selectDistinctOn( + on: (CockroachColumn | SQLWrapper)[], + fields: TSelection, + ): CockroachSelectBuilder; + function selectDistinctOn( + on: (CockroachColumn | SQLWrapper)[], + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: self.getDialect(), + distinct: { on }, + }); + } + + return { select, selectDistinct, selectDistinctOn }; + } + + select(): CockroachSelectBuilder; + select(fields: TSelection): CockroachSelectBuilder; + select( + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: this.getDialect(), + }); + } + + selectDistinct(): CockroachSelectBuilder; + selectDistinct(fields: TSelection): CockroachSelectBuilder; + selectDistinct( + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: this.getDialect(), + distinct: true, + }); + } + + selectDistinctOn(on: (CockroachColumn | SQLWrapper)[]): CockroachSelectBuilder; + selectDistinctOn( + on: (CockroachColumn | SQLWrapper)[], + fields: TSelection, + ): CockroachSelectBuilder; + selectDistinctOn( + on: (CockroachColumn | SQLWrapper)[], + fields?: TSelection, + ): CockroachSelectBuilder { + return new CockroachSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: this.getDialect(), + distinct: { on }, + }); + } + + // Lazy load dialect to avoid circular dependency + private getDialect() { + if (!this.dialect) { + this.dialect = new CockroachDialect(this.dialectConfig); + } + + return this.dialect; + } +} diff --git a/drizzle-orm/src/cockroach-core/query-builders/query.ts b/drizzle-orm/src/cockroach-core/query-builders/query.ts new file mode 100644 index 0000000000..df8b834aba --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/query.ts @@ -0,0 +1,157 @@ +import { + type BuildQueryResult, + type BuildRelationalQueryResult, + type DBQueryConfig, + mapRelationalRow, + type TableRelationalConfig, + type TablesRelationalConfig, +} from '~/_relations.ts'; +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { tracer } from '~/tracing.ts'; +import type { KnownKeysOnly, NeonAuthToken } from '~/utils.ts'; +import type { CockroachDialect } from '../dialect.ts'; +import type { CockroachPreparedQuery, CockroachSession, PreparedQueryConfig } from '../session.ts'; +import type { CockroachTable } from '../table.ts'; + +export class RelationalQueryBuilder { + static readonly [entityKind]: string = 'CockroachRelationalQueryBuilder'; + + constructor( + private fullSchema: Record, + private schema: TSchema, + private tableNamesMap: Record, + private table: CockroachTable, + private tableConfig: TableRelationalConfig, + private dialect: CockroachDialect, + private session: CockroachSession, + ) {} + + findMany>( + config?: KnownKeysOnly>, + ): CockroachRelationalQuery[]> { + return new CockroachRelationalQuery( + this.fullSchema, + this.schema, + this.tableNamesMap, + this.table, + this.tableConfig, + this.dialect, + this.session, + config ? (config as DBQueryConfig<'many', true>) : {}, + 'many', + ); + } + + findFirst, 'limit'>>( + config?: KnownKeysOnly, 'limit'>>, + ): CockroachRelationalQuery | undefined> { + return new CockroachRelationalQuery( + this.fullSchema, + this.schema, + this.tableNamesMap, + this.table, + this.tableConfig, + this.dialect, + this.session, + config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, + 'first', + ); + } +} + +export class CockroachRelationalQuery extends QueryPromise + implements RunnableQuery, SQLWrapper +{ + static override readonly [entityKind]: string = 'CockroachRelationalQuery'; + + declare readonly _: { + readonly dialect: 'pg'; + readonly result: TResult; + }; + + constructor( + private fullSchema: Record, + private schema: TablesRelationalConfig, + private tableNamesMap: Record, + private table: CockroachTable, + private tableConfig: TableRelationalConfig, + private dialect: CockroachDialect, + private session: CockroachSession, + private config: DBQueryConfig<'many', true> | true, + private mode: 'many' | 'first', + ) { + super(); + } + + /** @internal */ + _prepare(name?: string): CockroachPreparedQuery { + return tracer.startActiveSpan('drizzle.prepareQuery', () => { + const { query, builtQuery } = this._toSQL(); + + return this.session.prepareQuery( + builtQuery, + undefined, + name, + true, + (rawRows, mapColumnValue) => { + const rows = rawRows.map((row) => + mapRelationalRow(this.schema, this.tableConfig, row, query.selection, mapColumnValue) + ); + if (this.mode === 'first') { + return rows[0] as TResult; + } + return rows as TResult; + }, + ); + }); + } + + prepare(name: string): CockroachPreparedQuery { + return this._prepare(name); + } + + private _getQuery() { + return this.dialect.buildRelationalQueryWithoutPK({ + fullSchema: this.fullSchema, + schema: this.schema, + tableNamesMap: this.tableNamesMap, + table: this.table, + tableConfig: this.tableConfig, + queryConfig: this.config, + tableAlias: this.tableConfig.tsName, + }); + } + + /** @internal */ + getSQL(): SQL { + return this._getQuery().sql as SQL; + } + + private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: Query } { + const query = this._getQuery(); + + const { typings: _typings, ...builtQuery } = this.dialect.sqlToQuery(query.sql as SQL); + + return { query, builtQuery }; + } + + toSQL(): Query { + return this._toSQL().builtQuery; + } + + private authToken?: NeonAuthToken; + /** @internal */ + setToken(token?: NeonAuthToken) { + this.authToken = token; + return this; + } + + override execute(): Promise { + return tracer.startActiveSpan('drizzle.operation', () => { + return this._prepare().execute(undefined, this.authToken); + }); + } +} diff --git a/drizzle-orm/src/cockroach-core/query-builders/raw.ts b/drizzle-orm/src/cockroach-core/query-builders/raw.ts new file mode 100644 index 0000000000..6eecbf096d --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/raw.ts @@ -0,0 +1,49 @@ +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import type { PreparedQuery } from '~/session.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; + +export interface CockroachRaw extends QueryPromise, RunnableQuery, SQLWrapper {} + +export class CockroachRaw extends QueryPromise + implements RunnableQuery, SQLWrapper, PreparedQuery +{ + static override readonly [entityKind]: string = 'CockroachRaw'; + + declare readonly _: { + readonly dialect: 'cockroach'; + readonly result: TResult; + }; + + constructor( + public execute: () => Promise, + private sql: SQL, + private query: Query, + private mapBatchResult: (result: unknown) => unknown, + ) { + super(); + } + + /** @internal */ + getSQL() { + return this.sql; + } + + getQuery() { + return this.query; + } + + mapResult(result: unknown, isFromBatch?: boolean) { + return isFromBatch ? this.mapBatchResult(result) : result; + } + + _prepare(): PreparedQuery { + return this; + } + + /** @internal */ + isResponseInArrayMode() { + return false; + } +} diff --git a/drizzle-orm/src/cockroach-core/query-builders/refresh-materialized-view.ts b/drizzle-orm/src/cockroach-core/query-builders/refresh-materialized-view.ts new file mode 100644 index 0000000000..d8b7a871ab --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/refresh-materialized-view.ts @@ -0,0 +1,108 @@ +import type { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import type { + CockroachPreparedQuery, + CockroachQueryResultHKT, + CockroachQueryResultKind, + CockroachSession, + PreparedQueryConfig, +} from '~/cockroach-core/session.ts'; +import type { CockroachMaterializedView } from '~/cockroach-core/view.ts'; +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { tracer } from '~/tracing.ts'; +import type { NeonAuthToken } from '~/utils'; + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface CockroachRefreshMaterializedView + extends + QueryPromise>, + RunnableQuery, 'cockroach'>, + SQLWrapper +{ + readonly _: { + readonly dialect: 'cockroach'; + readonly result: CockroachQueryResultKind; + }; +} + +export class CockroachRefreshMaterializedView + extends QueryPromise> + implements RunnableQuery, 'cockroach'>, SQLWrapper +{ + static override readonly [entityKind]: string = 'CockroachRefreshMaterializedView'; + + private config: { + view: CockroachMaterializedView; + concurrently?: boolean; + withNoData?: boolean; + }; + + constructor( + view: CockroachMaterializedView, + private session: CockroachSession, + private dialect: CockroachDialect, + ) { + super(); + this.config = { view }; + } + + concurrently(): this { + if (this.config.withNoData !== undefined) { + throw new Error('Cannot use concurrently and withNoData together'); + } + this.config.concurrently = true; + return this; + } + + withNoData(): this { + if (this.config.concurrently !== undefined) { + throw new Error('Cannot use concurrently and withNoData together'); + } + this.config.withNoData = true; + return this; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildRefreshMaterializedViewQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + /** @internal */ + _prepare(name?: string): CockroachPreparedQuery< + PreparedQueryConfig & { + execute: CockroachQueryResultKind; + } + > { + return tracer.startActiveSpan('drizzle.prepareQuery', () => { + return this.session.prepareQuery(this.dialect.sqlToQuery(this.getSQL()), undefined, name, true); + }); + } + + prepare(name: string): CockroachPreparedQuery< + PreparedQueryConfig & { + execute: CockroachQueryResultKind; + } + > { + return this._prepare(name); + } + + private authToken?: NeonAuthToken; + /** @internal */ + setToken(token: NeonAuthToken) { + this.authToken = token; + return this; + } + + execute: ReturnType['execute'] = (placeholderValues) => { + return tracer.startActiveSpan('drizzle.operation', () => { + return this._prepare().execute(placeholderValues, this.authToken); + }); + }; +} diff --git a/drizzle-orm/src/cockroach-core/query-builders/select.ts b/drizzle-orm/src/cockroach-core/query-builders/select.ts new file mode 100644 index 0000000000..1e05f94d4a --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/select.ts @@ -0,0 +1,1312 @@ +import type { CockroachColumn } from '~/cockroach-core/columns/index.ts'; +import type { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import type { CockroachSession, PreparedQueryConfig } from '~/cockroach-core/session.ts'; +import type { SubqueryWithSelection } from '~/cockroach-core/subquery.ts'; +import type { CockroachTable } from '~/cockroach-core/table.ts'; +import { CockroachViewBase } from '~/cockroach-core/view-base.ts'; +import { entityKind, is } from '~/entity.ts'; +import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + BuildSubquerySelection, + GetSelectTableName, + GetSelectTableSelection, + JoinNullability, + JoinType, + SelectMode, + SelectResult, + SetOperator, +} from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import { SQL, View } from '~/sql/sql.ts'; +import type { ColumnsSelection, Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { Table } from '~/table.ts'; +import { tracer } from '~/tracing.ts'; +import { + applyMixins, + type DrizzleTypeError, + getTableColumns, + getTableLikeName, + haveSameKeys, + type NeonAuthToken, + orderSelectedFields, + type ValueOrArray, +} from '~/utils.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import type { + AnyCockroachSelect, + CockroachCreateSetOperatorFn, + CockroachSelectConfig, + CockroachSelectCrossJoinFn, + CockroachSelectDynamic, + CockroachSelectHKT, + CockroachSelectHKTBase, + CockroachSelectJoinFn, + CockroachSelectPrepare, + CockroachSelectWithout, + CockroachSetOperatorExcludedMethods, + CockroachSetOperatorWithResult, + CreateCockroachSelectFromBuilderMode, + GetCockroachSetOperators, + LockConfig, + LockStrength, + SelectedFields, + SetOperatorRightSelect, + TableLikeHasEmptySelection, +} from './select.types.ts'; + +export class CockroachSelectBuilder< + TSelection extends SelectedFields | undefined, + TBuilderMode extends 'db' | 'qb' = 'db', +> { + static readonly [entityKind]: string = 'CockroachSelectBuilder'; + + private fields: TSelection; + private session: CockroachSession | undefined; + private dialect: CockroachDialect; + private withList: Subquery[] = []; + private distinct: boolean | { + on: (CockroachColumn | SQLWrapper)[]; + } | undefined; + + constructor( + config: { + fields: TSelection; + session: CockroachSession | undefined; + dialect: CockroachDialect; + withList?: Subquery[]; + distinct?: boolean | { + on: (CockroachColumn | SQLWrapper)[]; + }; + }, + ) { + this.fields = config.fields; + this.session = config.session; + this.dialect = config.dialect; + if (config.withList) { + this.withList = config.withList; + } + this.distinct = config.distinct; + } + + private authToken?: NeonAuthToken; + /** @internal */ + setToken(token?: NeonAuthToken) { + this.authToken = token; + return this; + } + + /** + * Specify the table, subquery, or other target that you're + * building a select query against. + * + * {@link https://www.postgresql.org/docs/current/sql-select.html#SQL-FROM | Postgres from documentation} + */ + from( + source: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TFrom, + ): CreateCockroachSelectFromBuilderMode< + TBuilderMode, + GetSelectTableName, + TSelection extends undefined ? GetSelectTableSelection : TSelection, + TSelection extends undefined ? 'single' : 'partial' + > { + const isPartialSelect = !!this.fields; + const src = source as TFrom; + + let fields: SelectedFields; + if (this.fields) { + fields = this.fields; + } else if (is(src, Subquery)) { + // This is required to use the proxy handler to get the correct field values from the subquery + fields = Object.fromEntries( + Object.keys(src._.selectedFields).map(( + key, + ) => [key, src[key as unknown as keyof typeof src] as unknown as SelectedFields[string]]), + ); + } else if (is(src, CockroachViewBase)) { + fields = src[ViewBaseConfig].selectedFields as SelectedFields; + } else if (is(src, SQL)) { + fields = {}; + } else { + fields = getTableColumns(src); + } + + return (new CockroachSelectBase({ + table: src, + fields, + isPartialSelect, + session: this.session, + dialect: this.dialect, + withList: this.withList, + distinct: this.distinct, + }).setToken(this.authToken)) as any; + } +} + +export abstract class CockroachSelectQueryBuilderBase< + THKT extends CockroachSelectHKTBase, + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> extends TypedQueryBuilder { + static override readonly [entityKind]: string = 'CockroachSelectQueryBuilder'; + + override readonly _: { + readonly dialect: 'cockroach'; + readonly hkt: THKT; + readonly tableName: TTableName; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; + + protected config: CockroachSelectConfig; + protected joinsNotNullableMap: Record; + private tableName: string | undefined; + private isPartialSelect: boolean; + protected session: CockroachSession | undefined; + protected dialect: CockroachDialect; + + constructor( + { table, fields, isPartialSelect, session, dialect, withList, distinct }: { + table: CockroachSelectConfig['table']; + fields: CockroachSelectConfig['fields']; + isPartialSelect: boolean; + session: CockroachSession | undefined; + dialect: CockroachDialect; + withList: Subquery[]; + distinct: boolean | { + on: (CockroachColumn | SQLWrapper)[]; + } | undefined; + }, + ) { + super(); + this.config = { + withList, + table, + fields: { ...fields }, + distinct, + setOperators: [], + }; + this.isPartialSelect = isPartialSelect; + this.session = session; + this.dialect = dialect; + this._ = { + selectedFields: fields as TSelectedFields, + } as this['_']; + this.tableName = getTableLikeName(table); + this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; + } + + private createJoin< + TJoinType extends JoinType, + TIsLateral extends (TJoinType extends 'full' | 'right' ? false : boolean), + >( + joinType: TJoinType, + lateral: TIsLateral, + ): 'cross' extends TJoinType ? CockroachSelectCrossJoinFn + : CockroachSelectJoinFn + { + return (( + table: TIsLateral extends true ? Subquery | SQL : CockroachTable | Subquery | CockroachViewBase | SQL, + on?: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, + ) => { + const baseTableName = this.tableName; + const tableName = getTableLikeName(table); + + if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { + throw new Error(`Alias "${tableName}" is already used in this query`); + } + + if (!this.isPartialSelect) { + // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object + if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { + this.config.fields = { + [baseTableName]: this.config.fields, + }; + } + if (typeof tableName === 'string' && !is(table, SQL)) { + const selection = is(table, Subquery) + ? table._.selectedFields + : is(table, View) + ? table[ViewBaseConfig].selectedFields + : table[Table.Symbol.Columns]; + this.config.fields[tableName] = selection; + } + } + + if (typeof on === 'function') { + on = on( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + + if (!this.config.joins) { + this.config.joins = []; + } + + this.config.joins.push({ on, table, joinType, alias: tableName, lateral }); + + if (typeof tableName === 'string') { + switch (joinType) { + case 'left': { + this.joinsNotNullableMap[tableName] = false; + break; + } + case 'right': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'cross': + case 'inner': { + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'full': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = false; + break; + } + } + } + + return this as any; + }) as any; + } + + /** + * Executes a `left join` operation by adding another table to the current query. + * + * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User; pets: Pet | null; }[] = await db.select() + * .from(users) + * .leftJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .leftJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + leftJoin = this.createJoin('left', false); + + /** + * Executes a `left join lateral` operation by adding subquery to the current query. + * + * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. + * + * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#left-join-lateral} + * + * @param table the subquery to join. + * @param on the `on` clause. + */ + leftJoinLateral = this.createJoin('left', true); + + /** + * Executes a `right join` operation by adding another table to the current query. + * + * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User | null; pets: Pet; }[] = await db.select() + * .from(users) + * .rightJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number | null; petId: number; }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .rightJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + rightJoin = this.createJoin('right', false); + + /** + * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. + * + * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() + * .from(users) + * .innerJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .innerJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + innerJoin = this.createJoin('inner', false); + + /** + * Executes an `inner join lateral` operation, creating a new table by combining rows from two queries that have matching values. + * + * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. + * + * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join-lateral} + * + * @param table the subquery to join. + * @param on the `on` clause. + */ + innerJoinLateral = this.createJoin('inner', true); + + /** + * Executes a `full join` operation by combining rows from two tables into a new table. + * + * Calling this method retrieves all rows from both main and joined tables, merging rows with matching values and filling in `null` for non-matching columns. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#full-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User | null; pets: Pet | null; }[] = await db.select() + * .from(users) + * .fullJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number | null; petId: number | null; }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .fullJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + fullJoin = this.createJoin('full', false); + + /** + * Executes a `cross join` operation by combining rows from two tables into a new table. + * + * Calling this method retrieves all rows from both main and joined tables, merging all rows from each table. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join} + * + * @param table the table to join. + * + * @example + * + * ```ts + * // Select all users, each user with every pet + * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() + * .from(users) + * .crossJoin(pets) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .crossJoin(pets) + * ``` + */ + crossJoin = this.createJoin('cross', false); + + /** + * Executes a `cross join lateral` operation by combining rows from two queries into a new table. + * + * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. + * + * Calling this method retrieves all rows from both main and joined queries, merging all rows from each query. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join-lateral} + * + * @param table the query to join. + */ + crossJoinLateral = this.createJoin('cross', true); + + private createSetOperator( + type: SetOperator, + isAll: boolean, + ): >( + rightSelection: + | ((setOperators: GetCockroachSetOperators) => SetOperatorRightSelect) + | SetOperatorRightSelect, + ) => CockroachSelectWithout< + this, + TDynamic, + CockroachSetOperatorExcludedMethods, + true + > { + return (rightSelection) => { + const rightSelect = (typeof rightSelection === 'function' + ? rightSelection(getCockroachSetOperators()) + : rightSelection) as TypedQueryBuilder< + any, + TResult + >; + + if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { + throw new Error( + 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', + ); + } + + this.config.setOperators.push({ type, isAll, rightSelect }); + return this as any; + }; + } + + /** + * Adds `union` set operator to the query. + * + * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} + * + * @example + * + * ```ts + * // Select all unique names from customers and users tables + * await db.select({ name: users.name }) + * .from(users) + * .union( + * db.select({ name: customers.name }).from(customers) + * ); + * // or + * import { union } from 'drizzle-orm/cockroach-core' + * + * await union( + * db.select({ name: users.name }).from(users), + * db.select({ name: customers.name }).from(customers) + * ); + * ``` + */ + union = this.createSetOperator('union', false); + + /** + * Adds `union all` set operator to the query. + * + * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} + * + * @example + * + * ```ts + * // Select all transaction ids from both online and in-store sales + * await db.select({ transaction: onlineSales.transactionId }) + * .from(onlineSales) + * .unionAll( + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * // or + * import { unionAll } from 'drizzle-orm/cockroach-core' + * + * await unionAll( + * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * ``` + */ + unionAll = this.createSetOperator('union', true); + + /** + * Adds `intersect` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} + * + * @example + * + * ```ts + * // Select course names that are offered in both departments A and B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .intersect( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { intersect } from 'drizzle-orm/cockroach-core' + * + * await intersect( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + intersect = this.createSetOperator('intersect', false); + + /** + * Adds `intersect all` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets including all duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect-all} + * + * @example + * + * ```ts + * // Select all products and quantities that are ordered by both regular and VIP customers + * await db.select({ + * productId: regularCustomerOrders.productId, + * quantityOrdered: regularCustomerOrders.quantityOrdered + * }) + * .from(regularCustomerOrders) + * .intersectAll( + * db.select({ + * productId: vipCustomerOrders.productId, + * quantityOrdered: vipCustomerOrders.quantityOrdered + * }) + * .from(vipCustomerOrders) + * ); + * // or + * import { intersectAll } from 'drizzle-orm/cockroach-core' + * + * await intersectAll( + * db.select({ + * productId: regularCustomerOrders.productId, + * quantityOrdered: regularCustomerOrders.quantityOrdered + * }) + * .from(regularCustomerOrders), + * db.select({ + * productId: vipCustomerOrders.productId, + * quantityOrdered: vipCustomerOrders.quantityOrdered + * }) + * .from(vipCustomerOrders) + * ); + * ``` + */ + intersectAll = this.createSetOperator('intersect', true); + + /** + * Adds `except` set operator to the query. + * + * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .except( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { except } from 'drizzle-orm/cockroach-core' + * + * await except( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + except = this.createSetOperator('except', false); + + /** + * Adds `except all` set operator to the query. + * + * Calling this method will retrieve all rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except-all} + * + * @example + * + * ```ts + * // Select all products that are ordered by regular customers but not by VIP customers + * await db.select({ + * productId: regularCustomerOrders.productId, + * quantityOrdered: regularCustomerOrders.quantityOrdered, + * }) + * .from(regularCustomerOrders) + * .exceptAll( + * db.select({ + * productId: vipCustomerOrders.productId, + * quantityOrdered: vipCustomerOrders.quantityOrdered, + * }) + * .from(vipCustomerOrders) + * ); + * // or + * import { exceptAll } from 'drizzle-orm/cockroach-core' + * + * await exceptAll( + * db.select({ + * productId: regularCustomerOrders.productId, + * quantityOrdered: regularCustomerOrders.quantityOrdered + * }) + * .from(regularCustomerOrders), + * db.select({ + * productId: vipCustomerOrders.productId, + * quantityOrdered: vipCustomerOrders.quantityOrdered + * }) + * .from(vipCustomerOrders) + * ); + * ``` + */ + exceptAll = this.createSetOperator('except', true); + + /** @internal */ + addSetOperators(setOperators: CockroachSelectConfig['setOperators']): CockroachSelectWithout< + this, + TDynamic, + CockroachSetOperatorExcludedMethods, + true + > { + this.config.setOperators.push(...setOperators); + return this as any; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will select only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/select#filtering} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be selected. + * + * ```ts + * // Select all cars with green color + * await db.select().from(cars).where(eq(cars.color, 'green')); + * // or + * await db.select().from(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Select all BMW cars with a green color + * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Select all cars with the green or blue color + * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where( + where: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, + ): CockroachSelectWithout { + if (typeof where === 'function') { + where = where( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + this.config.where = where; + return this as any; + } + + /** + * Adds a `having` clause to the query. + * + * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} + * + * @param having the `having` clause. + * + * @example + * + * ```ts + * // Select all brands with more than one car + * await db.select({ + * brand: cars.brand, + * count: sql`cast(count(${cars.id}) as int)`, + * }) + * .from(cars) + * .groupBy(cars.brand) + * .having(({ count }) => gt(count, 1)); + * ``` + */ + having( + having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, + ): CockroachSelectWithout { + if (typeof having === 'function') { + having = having( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + this.config.having = having; + return this as any; + } + + /** + * Adds a `group by` clause to the query. + * + * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. + * + * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} + * + * @example + * + * ```ts + * // Group and count people by their last names + * await db.select({ + * lastName: people.lastName, + * count: sql`cast(count(*) as int)` + * }) + * .from(people) + * .groupBy(people.lastName); + * ``` + */ + groupBy( + builder: (aliases: this['_']['selection']) => ValueOrArray, + ): CockroachSelectWithout; + groupBy(...columns: (CockroachColumn | SQL | SQL.Aliased)[]): CockroachSelectWithout; + groupBy( + ...columns: + | [(aliases: this['_']['selection']) => ValueOrArray] + | (CockroachColumn | SQL | SQL.Aliased)[] + ): CockroachSelectWithout { + if (typeof columns[0] === 'function') { + const groupBy = columns[0]( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as TSelection, + ); + this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; + } else { + this.config.groupBy = columns as (CockroachColumn | SQL | SQL.Aliased)[]; + } + return this as any; + } + + /** + * Adds an `order by` clause to the query. + * + * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. + * + * See docs: {@link https://orm.drizzle.team/docs/select#order-by} + * + * @example + * + * ``` + * // Select cars ordered by year + * await db.select().from(cars).orderBy(cars.year); + * ``` + * + * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. + * + * ```ts + * // Select cars ordered by year in descending order + * await db.select().from(cars).orderBy(desc(cars.year)); + * + * // Select cars ordered by year and price + * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); + * ``` + */ + orderBy( + builder: (aliases: this['_']['selection']) => ValueOrArray, + ): CockroachSelectWithout; + orderBy(...columns: (CockroachColumn | SQL | SQL.Aliased)[]): CockroachSelectWithout; + orderBy( + ...columns: + | [(aliases: this['_']['selection']) => ValueOrArray] + | (CockroachColumn | SQL | SQL.Aliased)[] + ): CockroachSelectWithout { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as TSelection, + ); + + const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; + + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.orderBy = orderByArray; + } else { + this.config.orderBy = orderByArray; + } + } else { + const orderByArray = columns as (CockroachColumn | SQL | SQL.Aliased)[]; + + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.orderBy = orderByArray; + } else { + this.config.orderBy = orderByArray; + } + } + return this as any; + } + + /** + * Adds a `limit` clause to the query. + * + * Calling this method will set the maximum number of rows that will be returned by this query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} + * + * @param limit the `limit` clause. + * + * @example + * + * ```ts + * // Get the first 10 people from this query. + * await db.select().from(people).limit(10); + * ``` + */ + limit(limit: number | Placeholder): CockroachSelectWithout { + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.limit = limit; + } else { + this.config.limit = limit; + } + return this as any; + } + + /** + * Adds an `offset` clause to the query. + * + * Calling this method will skip a number of rows when returning results from this query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} + * + * @param offset the `offset` clause. + * + * @example + * + * ```ts + * // Get the 10th-20th people from this query. + * await db.select().from(people).offset(10).limit(10); + * ``` + */ + offset(offset: number | Placeholder): CockroachSelectWithout { + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.offset = offset; + } else { + this.config.offset = offset; + } + return this as any; + } + + /** + * Adds a `for` clause to the query. + * + * Calling this method will specify a lock strength for this query that controls how strictly it acquires exclusive access to the rows being queried. + * + * See docs: {@link https://www.postgresql.org/docs/current/sql-select.html#SQL-FOR-UPDATE-SHARE} + * + * @param strength the lock strength. + * @param config the lock configuration. + */ + for(strength: LockStrength, config: LockConfig = {}): CockroachSelectWithout { + this.config.lockingClause = { strength, config }; + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildSelectQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + as( + alias: TAlias, + ): SubqueryWithSelection { + return new Proxy( + new Subquery(this.getSQL(), this.config.fields, alias), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as SubqueryWithSelection; + } + + /** @internal */ + override getSelectedFields(): this['_']['selectedFields'] { + return new Proxy( + this.config.fields, + new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as this['_']['selectedFields']; + } + + $dynamic(): CockroachSelectDynamic { + return this; + } +} + +export interface CockroachSelectBase< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> extends + CockroachSelectQueryBuilderBase< + CockroachSelectHKT, + TTableName, + TSelection, + TSelectMode, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + QueryPromise, + SQLWrapper +{} + +export class CockroachSelectBase< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> extends CockroachSelectQueryBuilderBase< + CockroachSelectHKT, + TTableName, + TSelection, + TSelectMode, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields +> implements RunnableQuery, SQLWrapper { + static override readonly [entityKind]: string = 'CockroachSelect'; + + /** @internal */ + _prepare(name?: string): CockroachSelectPrepare { + const { session, config, dialect, joinsNotNullableMap, authToken } = this; + if (!session) { + throw new Error('Cannot execute a query on a query builder. Please use a database instance instead.'); + } + return tracer.startActiveSpan('drizzle.prepareQuery', () => { + const fieldsList = orderSelectedFields(config.fields); + const query = session.prepareQuery< + PreparedQueryConfig & { execute: TResult } + >(dialect.sqlToQuery(this.getSQL()), fieldsList, name, true); + query.joinsNotNullableMap = joinsNotNullableMap; + + return query.setToken(authToken); + }); + } + + /** + * Create a prepared statement for this query. This allows + * the database to remember this query for the given session + * and call it by name, rather than specifying the full query. + * + * {@link https://www.postgresql.org/docs/current/sql-prepare.html | Postgres prepare documentation} + */ + prepare(name: string): CockroachSelectPrepare { + return this._prepare(name); + } + + private authToken?: NeonAuthToken; + /** @internal */ + setToken(token?: NeonAuthToken) { + this.authToken = token; + return this; + } + + execute: ReturnType['execute'] = (placeholderValues) => { + return tracer.startActiveSpan('drizzle.operation', () => { + return this._prepare().execute(placeholderValues, this.authToken); + }); + }; +} + +applyMixins(CockroachSelectBase, [QueryPromise]); + +function createSetOperator(type: SetOperator, isAll: boolean): CockroachCreateSetOperatorFn { + return (leftSelect, rightSelect, ...restSelects) => { + const setOperators = [rightSelect, ...restSelects].map((select) => ({ + type, + isAll, + rightSelect: select as AnyCockroachSelect, + })); + + for (const setOperator of setOperators) { + if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { + throw new Error( + 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', + ); + } + } + + return (leftSelect as AnyCockroachSelect).addSetOperators(setOperators) as any; + }; +} + +const getCockroachSetOperators = () => ({ + union, + unionAll, + intersect, + intersectAll, + except, + exceptAll, +}); + +/** + * Adds `union` set operator to the query. + * + * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} + * + * @example + * + * ```ts + * // Select all unique names from customers and users tables + * import { union } from 'drizzle-orm/cockroach-core' + * + * await union( + * db.select({ name: users.name }).from(users), + * db.select({ name: customers.name }).from(customers) + * ); + * // or + * await db.select({ name: users.name }) + * .from(users) + * .union( + * db.select({ name: customers.name }).from(customers) + * ); + * ``` + */ +export const union = createSetOperator('union', false); + +/** + * Adds `union all` set operator to the query. + * + * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} + * + * @example + * + * ```ts + * // Select all transaction ids from both online and in-store sales + * import { unionAll } from 'drizzle-orm/cockroach-core' + * + * await unionAll( + * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * // or + * await db.select({ transaction: onlineSales.transactionId }) + * .from(onlineSales) + * .unionAll( + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * ``` + */ +export const unionAll = createSetOperator('union', true); + +/** + * Adds `intersect` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} + * + * @example + * + * ```ts + * // Select course names that are offered in both departments A and B + * import { intersect } from 'drizzle-orm/cockroach-core' + * + * await intersect( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .intersect( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const intersect = createSetOperator('intersect', false); + +/** + * Adds `intersect all` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets including all duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect-all} + * + * @example + * + * ```ts + * // Select all products and quantities that are ordered by both regular and VIP customers + * import { intersectAll } from 'drizzle-orm/cockroach-core' + * + * await intersectAll( + * db.select({ + * productId: regularCustomerOrders.productId, + * quantityOrdered: regularCustomerOrders.quantityOrdered + * }) + * .from(regularCustomerOrders), + * db.select({ + * productId: vipCustomerOrders.productId, + * quantityOrdered: vipCustomerOrders.quantityOrdered + * }) + * .from(vipCustomerOrders) + * ); + * // or + * await db.select({ + * productId: regularCustomerOrders.productId, + * quantityOrdered: regularCustomerOrders.quantityOrdered + * }) + * .from(regularCustomerOrders) + * .intersectAll( + * db.select({ + * productId: vipCustomerOrders.productId, + * quantityOrdered: vipCustomerOrders.quantityOrdered + * }) + * .from(vipCustomerOrders) + * ); + * ``` + */ +export const intersectAll = createSetOperator('intersect', true); + +/** + * Adds `except` set operator to the query. + * + * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * import { except } from 'drizzle-orm/cockroach-core' + * + * await except( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .except( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const except = createSetOperator('except', false); + +/** + * Adds `except all` set operator to the query. + * + * Calling this method will retrieve all rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except-all} + * + * @example + * + * ```ts + * // Select all products that are ordered by regular customers but not by VIP customers + * import { exceptAll } from 'drizzle-orm/cockroach-core' + * + * await exceptAll( + * db.select({ + * productId: regularCustomerOrders.productId, + * quantityOrdered: regularCustomerOrders.quantityOrdered + * }) + * .from(regularCustomerOrders), + * db.select({ + * productId: vipCustomerOrders.productId, + * quantityOrdered: vipCustomerOrders.quantityOrdered + * }) + * .from(vipCustomerOrders) + * ); + * // or + * await db.select({ + * productId: regularCustomerOrders.productId, + * quantityOrdered: regularCustomerOrders.quantityOrdered, + * }) + * .from(regularCustomerOrders) + * .exceptAll( + * db.select({ + * productId: vipCustomerOrders.productId, + * quantityOrdered: vipCustomerOrders.quantityOrdered, + * }) + * .from(vipCustomerOrders) + * ); + * ``` + */ +export const exceptAll = createSetOperator('except', true); diff --git a/drizzle-orm/src/cockroach-core/query-builders/select.types.ts b/drizzle-orm/src/cockroach-core/query-builders/select.types.ts new file mode 100644 index 0000000000..a62f544b54 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/select.types.ts @@ -0,0 +1,456 @@ +import type { CockroachColumn } from '~/cockroach-core/columns/index.ts'; +import type { CockroachTable, CockroachTableWithColumns } from '~/cockroach-core/table.ts'; +import type { CockroachViewBase } from '~/cockroach-core/view-base.ts'; +import type { CockroachViewWithSelection } from '~/cockroach-core/view.ts'; +import type { + SelectedFields as SelectedFieldsBase, + SelectedFieldsFlat as SelectedFieldsFlatBase, + SelectedFieldsOrdered as SelectedFieldsOrderedBase, +} from '~/operations.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + AppendToNullabilityMap, + AppendToResult, + BuildSubquerySelection, + GetSelectTableName, + JoinNullability, + JoinType, + MapColumnsToTableAlias, + SelectMode, + SelectResult, + SetOperator, +} from '~/query-builders/select.types.ts'; +import type { ColumnsSelection, Placeholder, SQL, SQLWrapper, View } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import type { Table, UpdateTableConfig } from '~/table.ts'; +import type { Assume, DrizzleTypeError, Equal, ValidateShape, ValueOrArray } from '~/utils.ts'; +import type { CockroachPreparedQuery, PreparedQueryConfig } from '../session.ts'; +import type { CockroachSelectBase, CockroachSelectQueryBuilderBase } from './select.ts'; + +export interface CockroachSelectJoinConfig { + on: SQL | undefined; + table: CockroachTable | Subquery | CockroachViewBase | SQL; + alias: string | undefined; + joinType: JoinType; + lateral?: boolean; +} + +export type BuildAliasTable = TTable extends Table + ? CockroachTableWithColumns< + UpdateTableConfig; + }> + > + : TTable extends View ? CockroachViewWithSelection< + TAlias, + TTable['_']['existing'], + MapColumnsToTableAlias + > + : never; + +export interface CockroachSelectConfig { + withList?: Subquery[]; + // Either fields or fieldsFlat must be defined + fields: Record; + fieldsFlat?: SelectedFieldsOrdered; + where?: SQL; + having?: SQL; + table: CockroachTable | Subquery | CockroachViewBase | SQL; + limit?: number | Placeholder; + offset?: number | Placeholder; + joins?: CockroachSelectJoinConfig[]; + orderBy?: (CockroachColumn | SQL | SQL.Aliased)[]; + groupBy?: (CockroachColumn | SQL | SQL.Aliased)[]; + lockingClause?: { + strength: LockStrength; + config: LockConfig; + }; + distinct?: boolean | { + on: (CockroachColumn | SQLWrapper)[]; + }; + setOperators: { + rightSelect: TypedQueryBuilder; + type: SetOperator; + isAll: boolean; + orderBy?: (CockroachColumn | SQL | SQL.Aliased)[]; + limit?: number | Placeholder; + offset?: number | Placeholder; + }[]; +} + +export type TableLikeHasEmptySelection = T extends + Subquery ? Equal extends true ? true : false + : false; + +export type CockroachSelectJoin< + T extends AnyCockroachSelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, + TJoinedTable extends CockroachTable | Subquery | CockroachViewBase | SQL, + TJoinedName extends GetSelectTableName = GetSelectTableName, +> = T extends any ? CockroachSelectWithout< + CockroachSelectKind< + T['_']['hkt'], + T['_']['tableName'], + AppendToResult< + T['_']['tableName'], + T['_']['selection'], + TJoinedName, + TJoinedTable extends Table ? TJoinedTable['_']['columns'] + : TJoinedTable extends Subquery | View ? Assume + : never, + T['_']['selectMode'] + >, + T['_']['selectMode'] extends 'partial' ? T['_']['selectMode'] : 'multiple', + AppendToNullabilityMap, + T['_']['dynamic'], + T['_']['excludedMethods'] + >, + TDynamic, + T['_']['excludedMethods'] + > + : never; + +export type CockroachSelectJoinFn< + T extends AnyCockroachSelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, + TIsLateral extends boolean, +> = < + TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : CockroachTable | Subquery | CockroachViewBase | SQL), + TJoinedName extends GetSelectTableName = GetSelectTableName, +>( + table: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TJoinedTable, + on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, +) => CockroachSelectJoin; + +export type CockroachSelectCrossJoinFn< + T extends AnyCockroachSelectQueryBuilder, + TDynamic extends boolean, + TIsLateral extends boolean, +> = < + TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : CockroachTable | Subquery | CockroachViewBase | SQL), + TJoinedName extends GetSelectTableName = GetSelectTableName, +>( + table: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TJoinedTable, +) => CockroachSelectJoin; + +export type SelectedFieldsFlat = SelectedFieldsFlatBase; + +export type SelectedFields = SelectedFieldsBase; + +export type SelectedFieldsOrdered = SelectedFieldsOrderedBase; + +export type LockStrength = 'update' | 'no key update' | 'share' | 'key share'; + +export type LockConfig = + & { + of?: ValueOrArray; + } + & ({ + noWait: true; + skipLocked?: undefined; + } | { + noWait?: undefined; + skipLocked: true; + } | { + noWait?: undefined; + skipLocked?: undefined; + }); + +export interface CockroachSelectHKTBase { + tableName: string | undefined; + selection: unknown; + selectMode: SelectMode; + nullabilityMap: unknown; + dynamic: boolean; + excludedMethods: string; + result: unknown; + selectedFields: unknown; + _type: unknown; +} + +export type CockroachSelectKind< + T extends CockroachSelectHKTBase, + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TNullabilityMap extends Record, + TDynamic extends boolean, + TExcludedMethods extends string, + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> = (T & { + tableName: TTableName; + selection: TSelection; + selectMode: TSelectMode; + nullabilityMap: TNullabilityMap; + dynamic: TDynamic; + excludedMethods: TExcludedMethods; + result: TResult; + selectedFields: TSelectedFields; +})['_type']; + +export interface CockroachSelectQueryBuilderHKT extends CockroachSelectHKTBase { + _type: CockroachSelectQueryBuilderBase< + CockroachSelectQueryBuilderHKT, + this['tableName'], + Assume, + this['selectMode'], + Assume>, + this['dynamic'], + this['excludedMethods'], + Assume, + Assume + >; +} + +export interface CockroachSelectHKT extends CockroachSelectHKTBase { + _type: CockroachSelectBase< + this['tableName'], + Assume, + this['selectMode'], + Assume>, + this['dynamic'], + this['excludedMethods'], + Assume, + Assume + >; +} + +export type CreateCockroachSelectFromBuilderMode< + TBuilderMode extends 'db' | 'qb', + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, +> = TBuilderMode extends 'db' ? CockroachSelectBase + : CockroachSelectQueryBuilderBase; + +export type CockroachSetOperatorExcludedMethods = + | 'leftJoin' + | 'rightJoin' + | 'innerJoin' + | 'fullJoin' + | 'where' + | 'having' + | 'groupBy' + | 'for'; + +export type CockroachSelectWithout< + T extends AnyCockroachSelectQueryBuilder, + TDynamic extends boolean, + K extends keyof T & string, + TResetExcluded extends boolean = false, +> = TDynamic extends true ? T : Omit< + CockroachSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['nullabilityMap'], + TDynamic, + TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, + T['_']['result'], + T['_']['selectedFields'] + >, + TResetExcluded extends true ? K : T['_']['excludedMethods'] | K +>; + +export type CockroachSelectPrepare = CockroachPreparedQuery< + PreparedQueryConfig & { + execute: T['_']['result']; + } +>; + +export type CockroachSelectDynamic = CockroachSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['nullabilityMap'], + true, + never, + T['_']['result'], + T['_']['selectedFields'] +>; + +export type CockroachSelectQueryBuilder< + THKT extends CockroachSelectHKTBase = CockroachSelectQueryBuilderHKT, + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = ColumnsSelection, + TSelectMode extends SelectMode = SelectMode, + TNullabilityMap extends Record = Record, + TResult extends any[] = unknown[], + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> = CockroachSelectQueryBuilderBase< + THKT, + TTableName, + TSelection, + TSelectMode, + TNullabilityMap, + true, + never, + TResult, + TSelectedFields +>; + +export type AnyCockroachSelectQueryBuilder = CockroachSelectQueryBuilderBase< + any, + any, + any, + any, + any, + any, + any, + any, + any +>; + +export type AnyCockroachSetOperatorInterface = CockroachSetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + any +>; + +export interface CockroachSetOperatorInterface< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> { + _: { + readonly hkt: CockroachSelectHKT; + readonly tableName: TTableName; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; +} + +export type CockroachSetOperatorWithResult = CockroachSetOperatorInterface< + any, + any, + any, + any, + any, + any, + TResult, + any +>; + +export type CockroachSelect< + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = Record, + TSelectMode extends SelectMode = SelectMode, + TNullabilityMap extends Record = Record, +> = CockroachSelectBase; + +export type AnyCockroachSelect = CockroachSelectBase; + +export type CockroachSetOperator< + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = Record, + TSelectMode extends SelectMode = SelectMode, + TNullabilityMap extends Record = Record, +> = CockroachSelectBase< + TTableName, + TSelection, + TSelectMode, + TNullabilityMap, + true, + CockroachSetOperatorExcludedMethods +>; + +export type SetOperatorRightSelect< + TValue extends CockroachSetOperatorWithResult, + TResult extends any[], +> = TValue extends CockroachSetOperatorInterface ? ValidateShape< + TValueResult[number], + TResult[number], + TypedQueryBuilder + > + : TValue; + +export type SetOperatorRestSelect< + TValue extends readonly CockroachSetOperatorWithResult[], + TResult extends any[], +> = TValue extends [infer First, ...infer Rest] + ? First extends CockroachSetOperatorInterface + ? Rest extends AnyCockroachSetOperatorInterface[] ? [ + ValidateShape>, + ...SetOperatorRestSelect, + ] + : ValidateShape[]> + : never + : TValue; + +export type CockroachCreateSetOperatorFn = < + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TValue extends CockroachSetOperatorWithResult, + TRest extends CockroachSetOperatorWithResult[], + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +>( + leftSelect: CockroachSetOperatorInterface< + TTableName, + TSelection, + TSelectMode, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + rightSelect: SetOperatorRightSelect, + ...restSelects: SetOperatorRestSelect +) => CockroachSelectWithout< + CockroachSelectBase< + TTableName, + TSelection, + TSelectMode, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + false, + CockroachSetOperatorExcludedMethods, + true +>; + +export type GetCockroachSetOperators = { + union: CockroachCreateSetOperatorFn; + intersect: CockroachCreateSetOperatorFn; + except: CockroachCreateSetOperatorFn; + unionAll: CockroachCreateSetOperatorFn; + intersectAll: CockroachCreateSetOperatorFn; + exceptAll: CockroachCreateSetOperatorFn; +}; diff --git a/drizzle-orm/src/cockroach-core/query-builders/update.ts b/drizzle-orm/src/cockroach-core/query-builders/update.ts new file mode 100644 index 0000000000..e92f6f2527 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/query-builders/update.ts @@ -0,0 +1,634 @@ +import type { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import type { + CockroachPreparedQuery, + CockroachQueryResultHKT, + CockroachQueryResultKind, + CockroachSession, + PreparedQueryConfig, +} from '~/cockroach-core/session.ts'; +import { CockroachTable } from '~/cockroach-core/table.ts'; +import type { GetColumnData } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + AppendToNullabilityMap, + AppendToResult, + GetSelectTableName, + GetSelectTableSelection, + JoinNullability, + JoinType, + SelectMode, + SelectResult, +} from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { RunnableQuery } from '~/runnable-query.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import { type ColumnsSelection, type Query, SQL, type SQLWrapper } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { getTableName, type InferInsertModel, Table } from '~/table.ts'; +import { + type Assume, + type DrizzleTypeError, + type Equal, + getTableLikeName, + mapUpdateSet, + type NeonAuthToken, + orderSelectedFields, + type Simplify, + type UpdateSet, +} from '~/utils.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import type { CockroachColumn } from '../columns/common.ts'; +import type { CockroachViewBase } from '../view-base.ts'; +import type { + CockroachSelectJoinConfig, + SelectedFields, + SelectedFieldsOrdered, + TableLikeHasEmptySelection, +} from './select.types.ts'; + +export interface CockroachUpdateConfig { + where?: SQL | undefined; + set: UpdateSet; + table: CockroachTable; + from?: CockroachTable | Subquery | CockroachViewBase | SQL; + joins: CockroachSelectJoinConfig[]; + returningFields?: SelectedFields; + returning?: SelectedFieldsOrdered; + withList?: Subquery[]; +} + +export type CockroachUpdateSetSource = + & { + [Key in keyof InferInsertModel]?: + | GetColumnData + | SQL + | CockroachColumn + | undefined; + } + & {}; + +export class CockroachUpdateBuilder { + static readonly [entityKind]: string = 'CockroachUpdateBuilder'; + + declare readonly _: { + readonly table: TTable; + }; + + constructor( + private table: TTable, + private session: CockroachSession, + private dialect: CockroachDialect, + private withList?: Subquery[], + ) {} + + set( + values: CockroachUpdateSetSource, + ): CockroachUpdateWithout< + CockroachUpdateBase, + false, + 'leftJoin' | 'rightJoin' | 'innerJoin' | 'fullJoin' + > { + return new CockroachUpdateBase( + this.table, + mapUpdateSet(this.table, values), + this.session, + this.dialect, + this.withList, + ); + } +} + +export type CockroachUpdateWithout< + T extends AnyCockroachUpdate, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T : Omit< + CockroachUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['from'], + T['_']['selectedFields'], + T['_']['returning'], + T['_']['nullabilityMap'], + T['_']['joins'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K +>; + +export type CockroachUpdateWithJoins< + T extends AnyCockroachUpdate, + TDynamic extends boolean, + TFrom extends CockroachTable | Subquery | CockroachViewBase | SQL, +> = TDynamic extends true ? T : Omit< + CockroachUpdateBase< + T['_']['table'], + T['_']['queryResult'], + TFrom, + T['_']['selectedFields'], + T['_']['returning'], + AppendToNullabilityMap, 'inner'>, + [...T['_']['joins'], { + name: GetSelectTableName; + joinType: 'inner'; + table: TFrom; + }], + TDynamic, + Exclude + >, + Exclude +>; + +export type CockroachUpdateJoinFn< + T extends AnyCockroachUpdate, + TDynamic extends boolean, + TJoinType extends JoinType, +> = < + TJoinedTable extends CockroachTable | Subquery | CockroachViewBase | SQL, +>( + table: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TJoinedTable, + on: + | ( + ( + updateTable: T['_']['table']['_']['columns'], + from: T['_']['from'] extends CockroachTable ? T['_']['from']['_']['columns'] + : T['_']['from'] extends Subquery | CockroachViewBase ? T['_']['from']['_']['selectedFields'] + : never, + ) => SQL | undefined + ) + | SQL + | undefined, +) => CockroachUpdateJoin; + +export type CockroachUpdateJoin< + T extends AnyCockroachUpdate, + TDynamic extends boolean, + TJoinType extends JoinType, + TJoinedTable extends CockroachTable | Subquery | CockroachViewBase | SQL, +> = TDynamic extends true ? T : CockroachUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['from'], + T['_']['selectedFields'], + T['_']['returning'], + AppendToNullabilityMap, TJoinType>, + [...T['_']['joins'], { + name: GetSelectTableName; + joinType: TJoinType; + table: TJoinedTable; + }], + TDynamic, + T['_']['excludedMethods'] +>; + +type Join = { + name: string | undefined; + joinType: JoinType; + table: CockroachTable | Subquery | CockroachViewBase | SQL; +}; + +type AccumulateToResult< + T extends AnyCockroachUpdate, + TSelectMode extends SelectMode, + TJoins extends Join[], + TSelectedFields extends ColumnsSelection, +> = TJoins extends [infer TJoin extends Join, ...infer TRest extends Join[]] ? AccumulateToResult< + T, + TSelectMode extends 'partial' ? TSelectMode : 'multiple', + TRest, + AppendToResult< + T['_']['table']['_']['name'], + TSelectedFields, + TJoin['name'], + TJoin['table'] extends Table ? TJoin['table']['_']['columns'] + : TJoin['table'] extends Subquery ? Assume + : never, + TSelectMode extends 'partial' ? TSelectMode : 'multiple' + > + > + : TSelectedFields; + +export type CockroachUpdateReturningAll = + CockroachUpdateWithout< + CockroachUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['from'], + Equal extends true ? T['_']['table']['_']['columns'] : Simplify< + & Record + & { + [K in keyof T['_']['joins'] as T['_']['joins'][K]['table']['_']['name']]: + T['_']['joins'][K]['table']['_']['columns']; + } + >, + SelectResult< + AccumulateToResult< + T, + 'single', + T['_']['joins'], + GetSelectTableSelection + >, + 'partial', + T['_']['nullabilityMap'] + >, + T['_']['nullabilityMap'], + T['_']['joins'], + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + 'returning' + >; + +export type CockroachUpdateReturning< + T extends AnyCockroachUpdate, + TDynamic extends boolean, + TSelectedFields extends SelectedFields, +> = CockroachUpdateWithout< + CockroachUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['from'], + TSelectedFields, + SelectResult< + AccumulateToResult< + T, + 'partial', + T['_']['joins'], + TSelectedFields + >, + 'partial', + T['_']['nullabilityMap'] + >, + T['_']['nullabilityMap'], + T['_']['joins'], + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + 'returning' +>; + +export type CockroachUpdatePrepare = CockroachPreparedQuery< + PreparedQueryConfig & { + execute: T['_']['returning'] extends undefined ? CockroachQueryResultKind + : T['_']['returning'][]; + } +>; + +export type CockroachUpdateDynamic = CockroachUpdate< + T['_']['table'], + T['_']['queryResult'], + T['_']['from'], + T['_']['returning'], + T['_']['nullabilityMap'] +>; + +export type CockroachUpdate< + TTable extends CockroachTable = CockroachTable, + TQueryResult extends CockroachQueryResultHKT = CockroachQueryResultHKT, + TFrom extends CockroachTable | Subquery | CockroachViewBase | SQL | undefined = undefined, + TSelectedFields extends ColumnsSelection | undefined = undefined, + TReturning extends Record | undefined = Record | undefined, + TNullabilityMap extends Record = Record, + TJoins extends Join[] = [], +> = CockroachUpdateBase< + TTable, + TQueryResult, + TFrom, + TSelectedFields, + TReturning, + TNullabilityMap, + TJoins, + true, + never +>; + +export type AnyCockroachUpdate = CockroachUpdateBase; + +export interface CockroachUpdateBase< + TTable extends CockroachTable, + TQueryResult extends CockroachQueryResultHKT, + TFrom extends CockroachTable | Subquery | CockroachViewBase | SQL | undefined = undefined, + TSelectedFields extends ColumnsSelection | undefined = undefined, + TReturning extends Record | undefined = undefined, + TNullabilityMap extends Record = Record, + TJoins extends Join[] = [], + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? CockroachQueryResultKind : TReturning[] + >, + QueryPromise : TReturning[]>, + RunnableQuery< + TReturning extends undefined ? CockroachQueryResultKind : TReturning[], + 'cockroach' + >, + SQLWrapper +{ + readonly _: { + readonly dialect: 'cockroach'; + readonly table: TTable; + readonly joins: TJoins; + readonly nullabilityMap: TNullabilityMap; + readonly queryResult: TQueryResult; + readonly from: TFrom; + readonly selectedFields: TSelectedFields; + readonly returning: TReturning; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TReturning extends undefined ? CockroachQueryResultKind : TReturning[]; + }; +} + +export class CockroachUpdateBase< + TTable extends CockroachTable, + TQueryResult extends CockroachQueryResultHKT, + TFrom extends CockroachTable | Subquery | CockroachViewBase | SQL | undefined = undefined, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TSelectedFields extends ColumnsSelection | undefined = undefined, + TReturning extends Record | undefined = undefined, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TNullabilityMap extends Record = Record, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TJoins extends Join[] = [], + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise : TReturning[]> + implements + RunnableQuery< + TReturning extends undefined ? CockroachQueryResultKind : TReturning[], + 'cockroach' + >, + SQLWrapper +{ + static override readonly [entityKind]: string = 'CockroachUpdate'; + + private config: CockroachUpdateConfig; + private tableName: string | undefined; + private joinsNotNullableMap: Record; + + constructor( + table: TTable, + set: UpdateSet, + private session: CockroachSession, + private dialect: CockroachDialect, + withList?: Subquery[], + ) { + super(); + this.config = { set, table, withList, joins: [] }; + this.tableName = getTableLikeName(table); + this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; + } + + from( + source: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TFrom, + ): CockroachUpdateWithJoins { + const src = source as TFrom; + const tableName = getTableLikeName(src); + if (typeof tableName === 'string') { + this.joinsNotNullableMap[tableName] = true; + } + this.config.from = src; + return this as any; + } + + private getTableLikeFields(table: CockroachTable | Subquery | CockroachViewBase): Record { + if (is(table, CockroachTable)) { + return table[Table.Symbol.Columns]; + } else if (is(table, Subquery)) { + return table._.selectedFields; + } + return table[ViewBaseConfig].selectedFields; + } + + private createJoin( + joinType: TJoinType, + ): CockroachUpdateJoinFn { + return (( + table: CockroachTable | Subquery | CockroachViewBase | SQL, + on: ((updateTable: TTable, from: TFrom) => SQL | undefined) | SQL | undefined, + ) => { + const tableName = getTableLikeName(table); + + if (typeof tableName === 'string' && this.config.joins.some((join) => join.alias === tableName)) { + throw new Error(`Alias "${tableName}" is already used in this query`); + } + + if (typeof on === 'function') { + const from = this.config.from && !is(this.config.from, SQL) + ? this.getTableLikeFields(this.config.from) + : undefined; + on = on( + new Proxy( + this.config.table[Table.Symbol.Columns], + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as any, + from && new Proxy( + from, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as any, + ); + } + + this.config.joins.push({ on, table, joinType, alias: tableName }); + + if (typeof tableName === 'string') { + switch (joinType) { + case 'left': { + this.joinsNotNullableMap[tableName] = false; + break; + } + case 'right': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'inner': { + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'full': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = false; + break; + } + } + } + + return this as any; + }) as any; + } + + leftJoin = this.createJoin('left'); + + rightJoin = this.createJoin('right'); + + innerJoin = this.createJoin('inner'); + + fullJoin = this.createJoin('full'); + + /** + * Adds a 'where' clause to the query. + * + * Calling this method will update only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param where the 'where' clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be updated. + * + * ```ts + * // Update all cars with green color + * await db.update(cars).set({ color: 'red' }) + * .where(eq(cars.color, 'green')); + * // or + * await db.update(cars).set({ color: 'red' }) + * .where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Update all BMW cars with a green color + * await db.update(cars).set({ color: 'red' }) + * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Update all cars with the green or blue color + * await db.update(cars).set({ color: 'red' }) + * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where(where: SQL | undefined): CockroachUpdateWithout { + this.config.where = where; + return this as any; + } + + /** + * Adds a `returning` clause to the query. + * + * Calling this method will return the specified fields of the updated rows. If no fields are specified, all fields will be returned. + * + * See docs: {@link https://orm.drizzle.team/docs/update#update-with-returning} + * + * @example + * ```ts + * // Update all cars with the green color and return all fields + * const updatedCars: Car[] = await db.update(cars) + * .set({ color: 'red' }) + * .where(eq(cars.color, 'green')) + * .returning(); + * + * // Update all cars with the green color and return only their id and brand fields + * const updatedCarsIdsAndBrands: { id: number, brand: string }[] = await db.update(cars) + * .set({ color: 'red' }) + * .where(eq(cars.color, 'green')) + * .returning({ id: cars.id, brand: cars.brand }); + * ``` + */ + returning(): CockroachUpdateReturningAll; + returning( + fields: TSelectedFields, + ): CockroachUpdateReturning; + returning( + fields?: SelectedFields, + ): CockroachUpdateWithout { + if (!fields) { + fields = Object.assign({}, this.config.table[Table.Symbol.Columns]); + + if (this.config.from) { + const tableName = getTableLikeName(this.config.from); + + if (typeof tableName === 'string' && this.config.from && !is(this.config.from, SQL)) { + const fromFields = this.getTableLikeFields(this.config.from); + fields[tableName] = fromFields as any; + } + + for (const join of this.config.joins) { + const tableName = getTableLikeName(join.table); + + if (typeof tableName === 'string' && !is(join.table, SQL)) { + const fromFields = this.getTableLikeFields(join.table); + fields[tableName] = fromFields as any; + } + } + } + } + + this.config.returningFields = fields; + this.config.returning = orderSelectedFields(fields); + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildUpdateQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + /** @internal */ + _prepare(name?: string): CockroachUpdatePrepare { + const query = this.session.prepareQuery< + PreparedQueryConfig & { execute: TReturning[] } + >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true); + query.joinsNotNullableMap = this.joinsNotNullableMap; + return query; + } + + prepare(name: string): CockroachUpdatePrepare { + return this._prepare(name); + } + + private authToken?: NeonAuthToken; + /** @internal */ + setToken(token?: NeonAuthToken) { + this.authToken = token; + return this; + } + + override execute: ReturnType['execute'] = (placeholderValues) => { + return this._prepare().execute(placeholderValues, this.authToken); + }; + + /** @internal */ + getSelectedFields(): this['_']['selectedFields'] { + return ( + this.config.returningFields + ? new Proxy( + this.config.returningFields, + new SelectionProxyHandler({ + alias: getTableName(this.config.table), + sqlAliasedBehavior: 'alias', + sqlBehavior: 'error', + }), + ) + : undefined + ) as this['_']['selectedFields']; + } + + $dynamic(): CockroachUpdateDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/cockroach-core/roles.ts b/drizzle-orm/src/cockroach-core/roles.ts new file mode 100644 index 0000000000..3af61c0211 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/roles.ts @@ -0,0 +1,37 @@ +import { entityKind } from '~/entity.ts'; + +export interface CockroachRoleConfig { + createDb?: boolean; + createRole?: boolean; +} + +export class CockroachRole implements CockroachRoleConfig { + static readonly [entityKind]: string = 'CockroachRole'; + + /** @internal */ + _existing?: boolean; + + /** @internal */ + readonly createDb: CockroachRoleConfig['createDb']; + /** @internal */ + readonly createRole: CockroachRoleConfig['createRole']; + + constructor( + readonly name: string, + config?: CockroachRoleConfig, + ) { + if (config) { + this.createDb = config.createDb; + this.createRole = config.createRole; + } + } + + existing(): this { + this._existing = true; + return this; + } +} + +export function cockroachRole(name: string, config?: CockroachRoleConfig) { + return new CockroachRole(name, config); +} diff --git a/drizzle-orm/src/cockroach-core/schema.ts b/drizzle-orm/src/cockroach-core/schema.ts new file mode 100644 index 0000000000..d61487f2bc --- /dev/null +++ b/drizzle-orm/src/cockroach-core/schema.ts @@ -0,0 +1,98 @@ +import { entityKind, is } from '~/entity.ts'; +import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; +import type { NonArray, Writable } from '~/utils.ts'; +import { + type CockroachEnum, + type CockroachEnumObject, + cockroachEnumObjectWithSchema, + cockroachEnumWithSchema, +} from './columns/enum.ts'; +import { type cockroachSequence, cockroachSequenceWithSchema } from './sequence.ts'; +import { type CockroachTableFn, type CockroachTableFnInternal, cockroachTableWithSchema, EnableRLS } from './table.ts'; +import { + type cockroachMaterializedView, + cockroachMaterializedViewWithSchema, + type cockroachView, + cockroachViewWithSchema, +} from './view.ts'; + +export class CockroachSchema implements SQLWrapper { + static readonly [entityKind]: string = 'CockroachSchema'; + + isExisting: boolean = false; + constructor( + public readonly schemaName: TName, + ) { + this.table = Object.assign(this.table, { + withRLS: ((name, columns, extraConfig) => { + const table = cockroachTableWithSchema(name, columns, extraConfig, this.schemaName); + table[EnableRLS] = true; + + return table; + }) as CockroachTableFnInternal, + }); + } + + table: CockroachTableFn = ((name, columns, extraConfig) => { + return cockroachTableWithSchema(name, columns, extraConfig, this.schemaName); + }) as CockroachTableFn; + + view = ((name, columns) => { + return cockroachViewWithSchema(name, columns, this.schemaName); + }) as typeof cockroachView; + + materializedView = ((name, columns) => { + return cockroachMaterializedViewWithSchema(name, columns, this.schemaName); + }) as typeof cockroachMaterializedView; + + public enum>( + enumName: string, + values: T | Writable, + ): CockroachEnum>; + + public enum>( + enumName: string, + enumObj: NonArray, + ): CockroachEnumObject; + + public enum(enumName: any, input: any): any { + return Array.isArray(input) + ? cockroachEnumWithSchema( + enumName, + [...input] as [string, ...string[]], + this.schemaName, + ) + : cockroachEnumObjectWithSchema(enumName, input, this.schemaName); + } + + sequence: typeof cockroachSequence = ((name, options) => { + return cockroachSequenceWithSchema(name, options, this.schemaName); + }); + + getSQL(): SQL { + return new SQL([sql.identifier(this.schemaName)]); + } + + shouldOmitSQLParens(): boolean { + return true; + } + + existing(): this { + this.isExisting = true; + return this; + } +} + +export function isCockroachSchema(obj: unknown): obj is CockroachSchema { + return is(obj, CockroachSchema); +} + +export function cockroachSchema(name: T) { + if (name === 'public') { + throw new Error( + `You can't specify 'public' as schema name. Postgres is using public schema by default. If you want to use 'public' schema, just use cockroachTable() instead of creating a schema`, + ); + } + + return new CockroachSchema(name); +} diff --git a/drizzle-orm/src/cockroach-core/sequence.ts b/drizzle-orm/src/cockroach-core/sequence.ts new file mode 100644 index 0000000000..b71b16596d --- /dev/null +++ b/drizzle-orm/src/cockroach-core/sequence.ts @@ -0,0 +1,40 @@ +import { entityKind, is } from '~/entity.ts'; + +export type CockroachSequenceOptions = { + increment?: number | string; + minValue?: number | string; + maxValue?: number | string; + startWith?: number | string; + cache?: number | string; +}; + +export class CockroachSequence { + static readonly [entityKind]: string = 'CockroachSequence'; + + constructor( + public readonly seqName: string, + public readonly seqOptions: CockroachSequenceOptions | undefined, + public readonly schema: string | undefined, + ) { + } +} + +export function cockroachSequence( + name: string, + options?: CockroachSequenceOptions, +): CockroachSequence { + return cockroachSequenceWithSchema(name, options, undefined); +} + +/** @internal */ +export function cockroachSequenceWithSchema( + name: string, + options?: CockroachSequenceOptions, + schema?: string, +): CockroachSequence { + return new CockroachSequence(name, options, schema); +} + +export function isCockroachSequence(obj: unknown): obj is CockroachSequence { + return is(obj, CockroachSequence); +} diff --git a/drizzle-orm/src/cockroach-core/session.ts b/drizzle-orm/src/cockroach-core/session.ts new file mode 100644 index 0000000000..c4edbe8fb6 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/session.ts @@ -0,0 +1,180 @@ +import type * as V1 from '~/_relations.ts'; +import { entityKind } from '~/entity.ts'; +import { TransactionRollbackError } from '~/errors.ts'; +import type { PreparedQuery } from '~/session.ts'; +import { type Query, type SQL, sql } from '~/sql/index.ts'; +import { tracer } from '~/tracing.ts'; +import type { NeonAuthToken } from '~/utils.ts'; +import { CockroachDatabase } from './db.ts'; +import type { CockroachDialect } from './dialect.ts'; +import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; + +export interface PreparedQueryConfig { + execute: unknown; + all: unknown; + values: unknown; +} + +export abstract class CockroachPreparedQuery implements PreparedQuery { + constructor(protected query: Query) {} + + protected authToken?: NeonAuthToken; + + getQuery(): Query { + return this.query; + } + + mapResult(response: unknown, _isFromBatch?: boolean): unknown { + return response; + } + + /** @internal */ + setToken(token?: NeonAuthToken) { + this.authToken = token; + return this; + } + + static readonly [entityKind]: string = 'CockroachPreparedQuery'; + + /** @internal */ + joinsNotNullableMap?: Record; + + abstract execute(placeholderValues?: Record): Promise; + /** @internal */ + abstract execute(placeholderValues?: Record, token?: NeonAuthToken): Promise; + /** @internal */ + abstract execute(placeholderValues?: Record, token?: NeonAuthToken): Promise; + + /** @internal */ + abstract all(placeholderValues?: Record): Promise; + + /** @internal */ + abstract isResponseInArrayMode(): boolean; +} + +export interface CockroachTransactionConfig { + isolationLevel?: 'read uncommitted' | 'read committed' | 'repeatable read' | 'serializable'; + accessMode?: 'read only' | 'read write'; + deferrable?: boolean; +} + +export abstract class CockroachSession< + TQueryResult extends CockroachQueryResultHKT = CockroachQueryResultHKT, + TFullSchema extends Record = Record, + TSchema extends V1.TablesRelationalConfig = Record, +> { + static readonly [entityKind]: string = 'CockroachSession'; + + constructor(protected dialect: CockroachDialect) {} + + abstract prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + name: string | undefined, + isResponseInArrayMode: boolean, + customResultMapper?: (rows: unknown[][], mapColumnValue?: (value: unknown) => unknown) => T['execute'], + ): CockroachPreparedQuery; + + execute(query: SQL): Promise; + /** @internal */ + execute(query: SQL, token?: NeonAuthToken): Promise; + /** @internal */ + execute(query: SQL, token?: NeonAuthToken): Promise { + return tracer.startActiveSpan('drizzle.operation', () => { + const prepared = tracer.startActiveSpan('drizzle.prepareQuery', () => { + return this.prepareQuery( + this.dialect.sqlToQuery(query), + undefined, + undefined, + false, + ); + }); + + return prepared.setToken(token).execute(undefined, token); + }); + } + + all(query: SQL): Promise { + return this.prepareQuery( + this.dialect.sqlToQuery(query), + undefined, + undefined, + false, + ).all(); + } + + async count(sql: SQL): Promise; + /** @internal */ + async count(sql: SQL, token?: NeonAuthToken): Promise; + /** @internal */ + async count(sql: SQL, token?: NeonAuthToken): Promise { + const res = await this.execute<[{ count: string }]>(sql, token); + + return Number( + res[0]['count'], + ); + } + + abstract transaction( + transaction: (tx: CockroachTransaction) => Promise, + config?: CockroachTransactionConfig, + ): Promise; +} + +export abstract class CockroachTransaction< + TQueryResult extends CockroachQueryResultHKT, + TFullSchema extends Record = Record, + TSchema extends V1.TablesRelationalConfig = Record, +> extends CockroachDatabase { + static override readonly [entityKind]: string = 'CockroachTransaction'; + + constructor( + dialect: CockroachDialect, + session: CockroachSession, + protected schema: { + fullSchema: Record; + schema: TSchema; + tableNamesMap: Record; + } | undefined, + protected readonly nestedIndex = 0, + ) { + super(dialect, session, schema); + } + + rollback(): never { + throw new TransactionRollbackError(); + } + + /** @internal */ + getTransactionConfigSQL(config: CockroachTransactionConfig): SQL { + const chunks: string[] = []; + if (config.isolationLevel) { + chunks.push(`isolation level ${config.isolationLevel}`); + } + if (config.accessMode) { + chunks.push(config.accessMode); + } + if (typeof config.deferrable === 'boolean') { + chunks.push(config.deferrable ? 'deferrable' : 'not deferrable'); + } + return sql.raw(chunks.join(' ')); + } + + setTransaction(config: CockroachTransactionConfig): Promise { + return this.session.execute(sql`set transaction ${this.getTransactionConfigSQL(config)}`); + } + + abstract override transaction( + transaction: (tx: CockroachTransaction) => Promise, + ): Promise; +} + +export interface CockroachQueryResultHKT { + readonly $brand: 'CockroachQueryResultHKT'; + readonly row: unknown; + readonly type: unknown; +} + +export type CockroachQueryResultKind = (TKind & { + readonly row: TRow; +})['type']; diff --git a/drizzle-orm/src/cockroach-core/subquery.ts b/drizzle-orm/src/cockroach-core/subquery.ts new file mode 100644 index 0000000000..ca02f0c6f8 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/subquery.ts @@ -0,0 +1,29 @@ +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; +import type { QueryBuilder } from './query-builders/query-builder.ts'; + +export type SubqueryWithSelection = + & Subquery> + & AddAliasToSelection; + +export type WithSubqueryWithSelection = + & WithSubquery> + & AddAliasToSelection; + +export interface WithBuilder { + (alias: TAlias): { + as: { + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection; + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithoutSelection; + }; + }; + (alias: TAlias, selection: TSelection): { + as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; + }; +} diff --git a/drizzle-orm/src/cockroach-core/table.ts b/drizzle-orm/src/cockroach-core/table.ts new file mode 100644 index 0000000000..cd6cfbd32a --- /dev/null +++ b/drizzle-orm/src/cockroach-core/table.ts @@ -0,0 +1,228 @@ +import type { BuildColumns, BuildExtraConfigColumns, ColumnBuilderBase } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import { + type InferTableColumnsModels, + Table, + type TableConfig as TableConfigBase, + type UpdateTableConfig, +} from '~/table.ts'; +import type { CheckBuilder } from './checks.ts'; +import { type CockroachColumnsBuilders, getCockroachColumnBuilders } from './columns/all.ts'; +import type { + CockroachColumn, + CockroachColumns, + CockroachColumnWithArrayBuilder, + ExtraConfigColumn, +} from './columns/common.ts'; +import type { ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; +import type { AnyIndexBuilder } from './indexes.ts'; +import type { CockroachPolicy } from './policies.ts'; +import type { PrimaryKeyBuilder } from './primary-keys.ts'; +import type { UniqueConstraintBuilder } from './unique-constraint.ts'; + +export type CockroachTableExtraConfigValue = + | AnyIndexBuilder + | CheckBuilder + | ForeignKeyBuilder + | PrimaryKeyBuilder + | UniqueConstraintBuilder + | CockroachPolicy; + +export type CockroachTableExtraConfig = Record< + string, + CockroachTableExtraConfigValue +>; + +export type TableConfig = TableConfigBase; + +/** @internal */ +export const InlineForeignKeys = Symbol.for('drizzle:CockroachInlineForeignKeys'); +/** @internal */ +export const EnableRLS = Symbol.for('drizzle:EnableRLS'); + +export class CockroachTable extends Table { + static override readonly [entityKind]: string = 'CockroachTable'; + + /** @internal */ + static override readonly Symbol = Object.assign({}, Table.Symbol, { + InlineForeignKeys: InlineForeignKeys as typeof InlineForeignKeys, + EnableRLS: EnableRLS as typeof EnableRLS, + }); + + /**@internal */ + [InlineForeignKeys]: ForeignKey[] = []; + + /** @internal */ + [EnableRLS]: boolean = false; + + /** @internal */ + override [Table.Symbol.ExtraConfigBuilder]: + | ((self: Record) => CockroachTableExtraConfig) + | undefined = undefined; + + /** @internal */ + override [Table.Symbol.ExtraConfigColumns]: Record = {}; +} + +export type AnyCockroachTable = {}> = CockroachTable< + UpdateTableConfig +>; + +export type CockroachTableWithColumns = + & CockroachTable + & T['columns'] + & InferTableColumnsModels + & { + /** @deprecated use `cockroachTable.withRLS()` instead*/ + enableRLS: () => Omit< + CockroachTableWithColumns, + 'enableRLS' + >; + }; + +/** @internal */ +export function cockroachTableWithSchema< + TTableName extends string, + TSchemaName extends string | undefined, + TColumnsMap extends Record, +>( + name: TTableName, + columns: TColumnsMap | ((columnTypes: CockroachColumnsBuilders) => TColumnsMap), + extraConfig: + | (( + self: BuildExtraConfigColumns, + ) => CockroachTableExtraConfig | CockroachTableExtraConfigValue[]) + | undefined, + schema: TSchemaName, + baseName = name, +): CockroachTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'cockroach'; +}> { + const rawTable = new CockroachTable<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'cockroach'; + }>(name, schema, baseName); + + const parsedColumns: TColumnsMap = typeof columns === 'function' ? columns(getCockroachColumnBuilders()) : columns; + + const builtColumns = Object.fromEntries( + Object.entries(parsedColumns).map(([name, colBuilderBase]) => { + const colBuilder = colBuilderBase as CockroachColumnWithArrayBuilder; + colBuilder.setName(name); + const column = colBuilder.build(rawTable); + rawTable[InlineForeignKeys].push(...colBuilder.buildForeignKeys(column, rawTable)); + return [name, column]; + }), + ) as unknown as BuildColumns; + + const builtColumnsForExtraConfig = Object.fromEntries( + Object.entries(parsedColumns).map(([name, colBuilderBase]) => { + const colBuilder = colBuilderBase as CockroachColumnWithArrayBuilder; + colBuilder.setName(name); + const column = colBuilder.buildExtraConfigColumn(rawTable); + return [name, column]; + }), + ) as unknown as BuildExtraConfigColumns; + + const table = Object.assign(rawTable, builtColumns); + + table[Table.Symbol.Columns] = builtColumns; + table[Table.Symbol.ExtraConfigColumns] = builtColumnsForExtraConfig; + + if (extraConfig) { + table[CockroachTable.Symbol.ExtraConfigBuilder] = extraConfig as any; + } + + return Object.assign(table, { + enableRLS: () => { + table[CockroachTable.Symbol.EnableRLS] = true; + return table as CockroachTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'cockroach'; + }>; + }, + }) as any; +} + +export interface CockroachTableFnInternal { + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: TColumnsMap, + extraConfig?: ( + self: BuildExtraConfigColumns, + ) => CockroachTableExtraConfigValue[], + ): CockroachTableWithColumns<{ + name: TTableName; + schema: TSchema; + columns: BuildColumns; + dialect: 'cockroach'; + }>; + + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: (columnTypes: CockroachColumnsBuilders) => TColumnsMap, + extraConfig?: ( + self: BuildExtraConfigColumns, + ) => CockroachTableExtraConfigValue[], + ): CockroachTableWithColumns<{ + name: TTableName; + schema: TSchema; + columns: BuildColumns; + dialect: 'cockroach'; + }>; +} + +export interface CockroachTableFn + extends CockroachTableFnInternal +{ + withRLS: CockroachTableFnInternal; +} + +const cockroachTableInternal: CockroachTableFnInternal = (name, columns, extraConfig) => { + return cockroachTableWithSchema(name, columns, extraConfig, undefined); +}; + +const cockroachTableWithRLS: CockroachTableFn['withRLS'] = (name, columns, extraConfig) => { + const table = cockroachTableWithSchema(name, columns, extraConfig, undefined); + table[EnableRLS] = true; + + return table; +}; + +export const cockroachTable: CockroachTableFn = Object.assign(cockroachTableInternal, { + withRLS: cockroachTableWithRLS, +}); + +export function cockroachTableCreator(customizeTableName: (name: string) => string): CockroachTableFn { + const fn: CockroachTableFnInternal = (name, columns, extraConfig) => { + return cockroachTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); + }; + + return Object.assign(fn, { + withRLS: ((name, columns, extraConfig) => { + const table = cockroachTableWithSchema( + customizeTableName(name) as typeof name, + columns, + extraConfig, + undefined, + name, + ); + table[EnableRLS] = true; + + return table; + }) as CockroachTableFnInternal, + }); +} diff --git a/drizzle-orm/src/cockroach-core/unique-constraint.ts b/drizzle-orm/src/cockroach-core/unique-constraint.ts new file mode 100644 index 0000000000..83140a0583 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/unique-constraint.ts @@ -0,0 +1,65 @@ +import { entityKind } from '~/entity.ts'; +import type { CockroachColumn } from './columns/index.ts'; +import type { CockroachTable } from './table.ts'; + +export function unique(name?: string): UniqueOnConstraintBuilder { + return new UniqueOnConstraintBuilder(name); +} + +export class UniqueConstraintBuilder { + static readonly [entityKind]: string = 'CockroachUniqueConstraintBuilder'; + + /** @internal */ + columns: CockroachColumn[]; + + constructor( + columns: CockroachColumn[], + private name?: string, + ) { + this.columns = columns; + } + + /** @internal */ + build(table: CockroachTable): UniqueConstraint { + return new UniqueConstraint(table, this.columns, this.name); + } +} + +export class UniqueOnConstraintBuilder { + static readonly [entityKind]: string = 'CockroachUniqueOnConstraintBuilder'; + + /** @internal */ + name?: string; + + constructor( + name?: string, + ) { + this.name = name; + } + + on(...columns: [CockroachColumn, ...CockroachColumn[]]) { + return new UniqueConstraintBuilder(columns, this.name); + } +} + +export class UniqueConstraint { + static readonly [entityKind]: string = 'CockroachUniqueConstraint'; + + readonly columns: CockroachColumn[]; + readonly name?: string; + readonly isNameExplicit: boolean; + + constructor( + readonly table: CockroachTable, + columns: CockroachColumn[], + name?: string, + ) { + this.columns = columns; + this.name = name; + this.isNameExplicit = !!name; + } + + getName(): string | undefined { + return this.name; + } +} diff --git a/drizzle-orm/src/cockroach-core/utils.ts b/drizzle-orm/src/cockroach-core/utils.ts new file mode 100644 index 0000000000..2d6d137cd3 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/utils.ts @@ -0,0 +1,86 @@ +import { CockroachTable } from '~/cockroach-core/table.ts'; +import { is } from '~/entity.ts'; +import { Table } from '~/table.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import { type Check, CheckBuilder } from './checks.ts'; +import type { AnyCockroachColumn } from './columns/index.ts'; +import { type ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; +import type { Index } from './indexes.ts'; +import { IndexBuilder } from './indexes.ts'; +import { CockroachPolicy } from './policies.ts'; +import { type PrimaryKey, PrimaryKeyBuilder } from './primary-keys.ts'; +import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; +import { type CockroachMaterializedView, CockroachMaterializedViewConfig, type CockroachView } from './view.ts'; + +export function getTableConfig(table: TTable) { + const columns = Object.values(table[Table.Symbol.Columns]); + const indexes: Index[] = []; + const checks: Check[] = []; + const primaryKeys: PrimaryKey[] = []; + const foreignKeys: ForeignKey[] = Object.values(table[CockroachTable.Symbol.InlineForeignKeys]); + const uniqueConstraints: UniqueConstraint[] = []; + const name = table[Table.Symbol.Name]; + const schema = table[Table.Symbol.Schema]; + const policies: CockroachPolicy[] = []; + const enableRLS: boolean = table[CockroachTable.Symbol.EnableRLS]; + + const extraConfigBuilder = table[CockroachTable.Symbol.ExtraConfigBuilder]; + + if (extraConfigBuilder !== undefined) { + const extraConfig = extraConfigBuilder(table[Table.Symbol.ExtraConfigColumns]); + const extraValues = Array.isArray(extraConfig) ? extraConfig.flat(1) as any[] : Object.values(extraConfig); + for (const builder of extraValues) { + if (is(builder, IndexBuilder)) { + indexes.push(builder.build(table)); + } else if (is(builder, CheckBuilder)) { + checks.push(builder.build(table)); + } else if (is(builder, UniqueConstraintBuilder)) { + uniqueConstraints.push(builder.build(table)); + } else if (is(builder, PrimaryKeyBuilder)) { + primaryKeys.push(builder.build(table)); + } else if (is(builder, ForeignKeyBuilder)) { + foreignKeys.push(builder.build(table)); + } else if (is(builder, CockroachPolicy)) { + policies.push(builder); + } + } + } + + return { + columns, + indexes, + foreignKeys, + checks, + primaryKeys, + uniqueConstraints, + name, + schema, + policies, + enableRLS, + }; +} + +export function getViewConfig< + TName extends string = string, + TExisting extends boolean = boolean, +>(view: CockroachView) { + return { + ...view[ViewBaseConfig], + }; +} + +export function getMaterializedViewConfig< + TName extends string = string, + TExisting extends boolean = boolean, +>(view: CockroachMaterializedView) { + return { + ...view[ViewBaseConfig], + ...view[CockroachMaterializedViewConfig], + }; +} + +export type ColumnsWithTable< + TTableName extends string, + TForeignTableName extends string, + TColumns extends AnyCockroachColumn<{ tableName: TTableName }>[], +> = { [Key in keyof TColumns]: AnyCockroachColumn<{ tableName: TForeignTableName }> }; diff --git a/drizzle-orm/src/cockroach-core/utils/array.ts b/drizzle-orm/src/cockroach-core/utils/array.ts new file mode 100644 index 0000000000..14bb7819fa --- /dev/null +++ b/drizzle-orm/src/cockroach-core/utils/array.ts @@ -0,0 +1,95 @@ +function parseCockroachArrayValue(arrayString: string, startFrom: number, inQuotes: boolean): [string, number] { + for (let i = startFrom; i < arrayString.length; i++) { + const char = arrayString[i]; + + if (char === '\\') { + i++; + continue; + } + + if (char === '"') { + return [arrayString.slice(startFrom, i).replace(/\\/g, ''), i + 1]; + } + + if (inQuotes) { + continue; + } + + if (char === ',' || char === '}') { + return [arrayString.slice(startFrom, i).replace(/\\/g, ''), i]; + } + } + + return [arrayString.slice(startFrom).replace(/\\/g, ''), arrayString.length]; +} + +export function parseCockroachNestedArray(arrayString: string, startFrom = 0): [any[], number] { + const result: any[] = []; + let i = startFrom; + let lastCharIsComma = false; + + while (i < arrayString.length) { + const char = arrayString[i]; + + if (char === ',') { + if (lastCharIsComma || i === startFrom) { + result.push(''); + } + lastCharIsComma = true; + i++; + continue; + } + + lastCharIsComma = false; + + if (char === '\\') { + i += 2; + continue; + } + + if (char === '"') { + const [value, startFrom] = parseCockroachArrayValue(arrayString, i + 1, true); + result.push(value); + i = startFrom; + continue; + } + + if (char === '}') { + return [result, i + 1]; + } + + if (char === '{') { + const [value, startFrom] = parseCockroachNestedArray(arrayString, i + 1); + result.push(value); + i = startFrom; + continue; + } + + const [value, newStartFrom] = parseCockroachArrayValue(arrayString, i, false); + result.push(value); + i = newStartFrom; + } + + return [result, i]; +} + +export function parseCockroachArray(arrayString: string): any[] { + const [result] = parseCockroachNestedArray(arrayString, 1); + return result; +} + +export function makeCockroachArray(array: any[]): string { + return `{${ + array.map((item) => { + if (Array.isArray(item)) { + return makeCockroachArray(item); + } + + if (typeof item === 'string') { + return `"${item.replace(/\\/g, '\\\\').replace(/"/g, '\\"')}"`; + } + + return `${item}`; + }).join(',') + }}`; +} diff --git a/drizzle-orm/src/cockroach-core/utils/index.ts b/drizzle-orm/src/cockroach-core/utils/index.ts new file mode 100644 index 0000000000..76eb91d0b0 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/utils/index.ts @@ -0,0 +1 @@ +export * from './array.ts'; diff --git a/drizzle-orm/src/cockroach-core/view-base.ts b/drizzle-orm/src/cockroach-core/view-base.ts new file mode 100644 index 0000000000..0a953085dd --- /dev/null +++ b/drizzle-orm/src/cockroach-core/view-base.ts @@ -0,0 +1,14 @@ +import { entityKind } from '~/entity.ts'; +import { type ColumnsSelection, View } from '~/sql/sql.ts'; + +export abstract class CockroachViewBase< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends View { + static override readonly [entityKind]: string = 'CockroachViewBase'; + + declare readonly _: View['_'] & { + readonly viewBrand: 'CockroachViewBase'; + }; +} diff --git a/drizzle-orm/src/cockroach-core/view.ts b/drizzle-orm/src/cockroach-core/view.ts new file mode 100644 index 0000000000..e9828dfee9 --- /dev/null +++ b/drizzle-orm/src/cockroach-core/view.ts @@ -0,0 +1,360 @@ +import type { BuildColumns, ColumnBuilderBase } from '~/column-builder.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import { getTableColumns } from '~/utils.ts'; +import type { CockroachColumn } from './columns/common.ts'; +import { QueryBuilder } from './query-builders/query-builder.ts'; +import { cockroachTable } from './table.ts'; +import { CockroachViewBase } from './view-base.ts'; + +export class DefaultViewBuilderCore { + static readonly [entityKind]: string = 'CockroachDefaultViewBuilderCore'; + + declare readonly _: { + readonly name: TConfig['name']; + readonly columns: TConfig['columns']; + }; + + constructor( + protected name: TConfig['name'], + protected schema: string | undefined, + ) {} +} + +export class ViewBuilder extends DefaultViewBuilderCore<{ name: TName }> { + static override readonly [entityKind]: string = 'CockroachViewBuilder'; + + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): CockroachViewWithSelection> { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder()); + } + const selectionProxy = new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }); + const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); + return new Proxy( + new CockroachView({ + config: { + name: this.name, + schema: this.schema, + selectedFields: aliasedSelection, + query: qb.getSQL().inlineParams(), + }, + }), + selectionProxy as any, + ) as CockroachViewWithSelection>; + } +} + +export class ManualViewBuilder< + TName extends string = string, + TColumns extends Record = Record, +> extends DefaultViewBuilderCore<{ name: TName; columns: TColumns }> { + static override readonly [entityKind]: string = 'CockroachManualViewBuilder'; + + private columns: Record; + + constructor( + name: TName, + columns: TColumns, + schema: string | undefined, + ) { + super(name, schema); + this.columns = getTableColumns(cockroachTable(name, columns)); + } + + existing(): CockroachViewWithSelection> { + return new Proxy( + new CockroachView({ + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: undefined, + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as CockroachViewWithSelection>; + } + + as(query: SQL): CockroachViewWithSelection> { + return new Proxy( + new CockroachView({ + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: query.inlineParams(), + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as CockroachViewWithSelection>; + } +} + +export class MaterializedViewBuilderCore { + static readonly [entityKind]: string = 'CockroachMaterializedViewBuilderCore'; + + declare _: { + readonly name: TConfig['name']; + readonly columns: TConfig['columns']; + }; + + constructor( + protected name: TConfig['name'], + protected schema: string | undefined, + ) {} + + protected config: { + withNoData?: boolean; + } = {}; + + withNoData(): this { + this.config.withNoData = true; + return this; + } +} + +export class MaterializedViewBuilder + extends MaterializedViewBuilderCore<{ name: TName }> +{ + static override readonly [entityKind]: string = 'CockroachMaterializedViewBuilder'; + + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): CockroachMaterializedViewWithSelection< + TName, + false, + AddAliasToSelection + > { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder()); + } + const selectionProxy = new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }); + const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); + return new Proxy( + new CockroachMaterializedView({ + cockroachConfig: { + withNoData: this.config.withNoData, + }, + config: { + name: this.name, + schema: this.schema, + selectedFields: aliasedSelection, + query: qb.getSQL().inlineParams(), + }, + }), + selectionProxy as any, + ) as CockroachMaterializedViewWithSelection< + TName, + false, + AddAliasToSelection + >; + } +} + +export class ManualMaterializedViewBuilder< + TName extends string = string, + TColumns extends Record = Record, +> extends MaterializedViewBuilderCore<{ name: TName; columns: TColumns }> { + static override readonly [entityKind]: string = 'CockroachManualMaterializedViewBuilder'; + + private columns: Record; + + constructor( + name: TName, + columns: TColumns, + schema: string | undefined, + ) { + super(name, schema); + this.columns = getTableColumns(cockroachTable(name, columns)); + } + + existing(): CockroachMaterializedViewWithSelection> { + return new Proxy( + new CockroachMaterializedView({ + cockroachConfig: { + withNoData: this.config.withNoData, + }, + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: undefined, + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as CockroachMaterializedViewWithSelection>; + } + + as(query: SQL): CockroachMaterializedViewWithSelection> { + return new Proxy( + new CockroachMaterializedView({ + cockroachConfig: { + withNoData: this.config.withNoData, + }, + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: query.inlineParams(), + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as CockroachMaterializedViewWithSelection>; + } +} + +export class CockroachView< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends CockroachViewBase { + static override readonly [entityKind]: string = 'CockroachView'; + + constructor({ config }: { + config: { + name: TName; + schema: string | undefined; + selectedFields: ColumnsSelection; + query: SQL | undefined; + }; + }) { + super(config); + } +} + +export type CockroachViewWithSelection< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> = CockroachView & TSelectedFields; + +export const CockroachMaterializedViewConfig = Symbol.for('drizzle:CockroachMaterializedViewConfig'); + +export class CockroachMaterializedView< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends CockroachViewBase { + static override readonly [entityKind]: string = 'CockroachMaterializedView'; + + readonly [CockroachMaterializedViewConfig]: { + readonly withNoData?: boolean; + } | undefined; + + constructor({ cockroachConfig, config }: { + cockroachConfig: { + withNoData: boolean | undefined; + } | undefined; + config: { + name: TName; + schema: string | undefined; + selectedFields: ColumnsSelection; + query: SQL | undefined; + }; + }) { + super(config); + this[CockroachMaterializedViewConfig] = { + withNoData: cockroachConfig?.withNoData, + }; + } +} + +export type CockroachMaterializedViewWithSelection< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> = CockroachMaterializedView & TSelectedFields; + +/** @internal */ +export function cockroachViewWithSchema( + name: string, + selection: Record | undefined, + schema: string | undefined, +): ViewBuilder | ManualViewBuilder { + if (selection) { + return new ManualViewBuilder(name, selection, schema); + } + return new ViewBuilder(name, schema); +} + +/** @internal */ +export function cockroachMaterializedViewWithSchema( + name: string, + selection: Record | undefined, + schema: string | undefined, +): MaterializedViewBuilder | ManualMaterializedViewBuilder { + if (selection) { + return new ManualMaterializedViewBuilder(name, selection, schema); + } + return new MaterializedViewBuilder(name, schema); +} + +export function cockroachView(name: TName): ViewBuilder; +export function cockroachView>( + name: TName, + columns: TColumns, +): ManualViewBuilder; +export function cockroachView( + name: string, + columns?: Record, +): ViewBuilder | ManualViewBuilder { + return cockroachViewWithSchema(name, columns, undefined); +} + +export function cockroachMaterializedView(name: TName): MaterializedViewBuilder; +export function cockroachMaterializedView< + TName extends string, + TColumns extends Record, +>( + name: TName, + columns: TColumns, +): ManualMaterializedViewBuilder; +export function cockroachMaterializedView( + name: string, + columns?: Record, +): MaterializedViewBuilder | ManualMaterializedViewBuilder { + return cockroachMaterializedViewWithSchema(name, columns, undefined); +} + +export function isCockroachView(obj: unknown): obj is CockroachView { + return is(obj, CockroachView); +} + +export function isCockroachMaterializedView(obj: unknown): obj is CockroachMaterializedView { + return is(obj, CockroachMaterializedView); +} diff --git a/drizzle-orm/src/cockroach/driver.ts b/drizzle-orm/src/cockroach/driver.ts new file mode 100644 index 0000000000..03c216e653 --- /dev/null +++ b/drizzle-orm/src/cockroach/driver.ts @@ -0,0 +1,134 @@ +import pg, { type Pool, type PoolConfig } from 'pg'; +import * as V1 from '~/_relations.ts'; +import { CockroachDatabase } from '~/cockroach-core/db.ts'; +import { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import { entityKind } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { DefaultLogger } from '~/logger.ts'; +import type { DrizzleConfig } from '~/utils.ts'; +import type { NodeCockroachClient, NodeCockroachQueryResultHKT } from './session.ts'; +import { NodeCockroachSession } from './session.ts'; + +export interface CockroachDriverOptions { + logger?: Logger; +} + +export class NodeCockroachDriver { + static readonly [entityKind]: string = 'NodeCockroachDriver'; + + constructor( + private client: NodeCockroachClient, + private dialect: CockroachDialect, + private options: CockroachDriverOptions = {}, + ) { + } + + createSession( + schema: V1.RelationalSchemaConfig | undefined, + ): NodeCockroachSession, V1.TablesRelationalConfig> { + return new NodeCockroachSession(this.client, this.dialect, schema, { logger: this.options.logger }); + } +} + +export class NodeCockroachDatabase< + TSchema extends Record = Record, +> extends CockroachDatabase { + static override readonly [entityKind]: string = 'NodeCockroachDatabase'; +} + +function construct< + TSchema extends Record = Record, + TClient extends NodeCockroachClient = NodeCockroachClient, +>( + client: TClient, + config: DrizzleConfig = {}, +): NodeCockroachDatabase & { + $client: TClient; +} { + const dialect = new CockroachDialect({ casing: config.casing }); + let logger; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + + let schema: V1.RelationalSchemaConfig | undefined; + if (config.schema) { + const tablesConfig = V1.extractTablesRelationalConfig( + config.schema, + V1.createTableRelationsHelpers, + ); + schema = { + fullSchema: config.schema, + schema: tablesConfig.tables, + tableNamesMap: tablesConfig.tableNamesMap, + }; + } + + const driver = new NodeCockroachDriver(client, dialect, { logger }); + const session = driver.createSession(schema); + const db = new NodeCockroachDatabase(dialect, session, schema as any) as NodeCockroachDatabase; + ( db).$client = client; + + return db as any; +} + +export function drizzle< + TSchema extends Record = Record, + TClient extends NodeCockroachClient = Pool, +>( + ...params: + | [ + string, + ] + | [ + string, + DrizzleConfig, + ] + | [ + ( + & DrizzleConfig + & ({ + connection: string | PoolConfig; + } | { + client: TClient; + }) + ), + ] +): NodeCockroachDatabase & { + $client: TClient; +} { + if (typeof params[0] === 'string') { + const instance = new pg.Pool({ + connectionString: params[0], + }); + + return construct(instance, params[1] as DrizzleConfig | undefined) as any; + } + + const { connection, client, ...drizzleConfig } = params[0] as ( + & ({ connection?: PoolConfig | string; client?: TClient }) + & DrizzleConfig + ); + + if (client) return construct(client, drizzleConfig); + + const instance = typeof connection === 'string' + ? new pg.Pool({ + connectionString: connection, + }) + : new pg.Pool(connection!); + + return construct(instance, drizzleConfig) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): NodeCockroachDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/cockroach/index.ts b/drizzle-orm/src/cockroach/index.ts new file mode 100644 index 0000000000..b1b6a52e71 --- /dev/null +++ b/drizzle-orm/src/cockroach/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/cockroach/migrator.ts b/drizzle-orm/src/cockroach/migrator.ts new file mode 100644 index 0000000000..d19aaf8f0c --- /dev/null +++ b/drizzle-orm/src/cockroach/migrator.ts @@ -0,0 +1,11 @@ +import type { MigrationConfig } from '~/migrator.ts'; +import { readMigrationFiles } from '~/migrator.ts'; +import type { NodeCockroachDatabase } from './driver.ts'; + +export async function migrate>( + db: NodeCockroachDatabase, + config: MigrationConfig, +) { + const migrations = readMigrationFiles(config); + return await db.dialect.migrate(migrations, db.session, config); +} diff --git a/drizzle-orm/src/cockroach/session.ts b/drizzle-orm/src/cockroach/session.ts new file mode 100644 index 0000000000..9cbd1d2e49 --- /dev/null +++ b/drizzle-orm/src/cockroach/session.ts @@ -0,0 +1,288 @@ +import type { Client, PoolClient, QueryArrayConfig, QueryConfig, QueryResult, QueryResultRow } from 'pg'; +import pg from 'pg'; +import type * as V1 from '~/_relations.ts'; +import type { CockroachDialect } from '~/cockroach-core/dialect.ts'; +import { CockroachTransaction } from '~/cockroach-core/index.ts'; +import type { SelectedFieldsOrdered } from '~/cockroach-core/query-builders/select.types.ts'; +import type { + CockroachQueryResultHKT, + CockroachTransactionConfig, + PreparedQueryConfig, +} from '~/cockroach-core/session.ts'; +import { CockroachPreparedQuery, CockroachSession } from '~/cockroach-core/session.ts'; +import { entityKind } from '~/entity.ts'; +import { type Logger, NoopLogger } from '~/logger.ts'; +import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; +import { tracer } from '~/tracing.ts'; +import { type Assume, mapResultRow } from '~/utils.ts'; + +const { Pool, types } = pg; + +export type NodeCockroachClient = pg.Pool | PoolClient | Client; + +export class NodeCockroachPreparedQuery extends CockroachPreparedQuery { + static override readonly [entityKind]: string = 'NodeCockroachPreparedQuery'; + + private rawQueryConfig: QueryConfig; + private queryConfig: QueryArrayConfig; + + constructor( + private client: NodeCockroachClient, + queryString: string, + private params: unknown[], + private logger: Logger, + private fields: SelectedFieldsOrdered | undefined, + name: string | undefined, + private _isResponseInArrayMode: boolean, + private customResultMapper?: (rows: unknown[][]) => T['execute'], + ) { + super({ sql: queryString, params }); + this.rawQueryConfig = { + name, + text: queryString, + types: { + // @ts-ignore + getTypeParser: (typeId, format) => { + if (typeId === types.builtins.TIMESTAMPTZ) { + return (val: any) => val; + } + if (typeId === types.builtins.TIMESTAMP) { + return (val: any) => val; + } + if (typeId === types.builtins.DATE) { + return (val: any) => val; + } + if (typeId === types.builtins.INTERVAL) { + return (val: any) => val; + } + // numeric[] + if (typeId as number === 1231) { + return (val: any) => val; + } + // timestamp[] + if (typeId as number === 1115) { + return (val: any) => val; + } + // timestamp with timezone[] + if (typeId as number === 1185) { + return (val: any) => val; + } + // interval[] + if (typeId as number === 1187) { + return (val: any) => val; + } + // date[] + if (typeId as number === 1182) { + return (val: any) => val; + } + // @ts-ignore + return types.getTypeParser(typeId, format); + }, + }, + }; + this.queryConfig = { + name, + text: queryString, + rowMode: 'array', + types: { + // @ts-ignore + getTypeParser: (typeId, format) => { + if (typeId === types.builtins.TIMESTAMPTZ) { + return (val: any) => val; + } + if (typeId === types.builtins.TIMESTAMP) { + return (val: any) => val; + } + if (typeId === types.builtins.DATE) { + return (val: any) => val; + } + if (typeId === types.builtins.INTERVAL) { + return (val: any) => val; + } + // numeric[] + if (typeId as number === 1231) { + return (val: any) => val; + } + // timestamp[] + if (typeId as number === 1115) { + return (val: any) => val; + } + // timestamp with timezone[] + if (typeId as number === 1185) { + return (val: any) => val; + } + // interval[] + if (typeId as number === 1187) { + return (val: any) => val; + } + // date[] + if (typeId as number === 1182) { + return (val: any) => val; + } + // @ts-ignore + return types.getTypeParser(typeId, format); + }, + }, + }; + } + + async execute(placeholderValues: Record | undefined = {}): Promise { + return tracer.startActiveSpan('drizzle.execute', async () => { + const params = fillPlaceholders(this.params, placeholderValues); + + this.logger.logQuery(this.rawQueryConfig.text, params); + + const { fields, rawQueryConfig: rawQuery, client, queryConfig: query, joinsNotNullableMap, customResultMapper } = + this; + if (!fields && !customResultMapper) { + return tracer.startActiveSpan('drizzle.driver.execute', async (span) => { + span?.setAttributes({ + 'drizzle.query.name': rawQuery.name, + 'drizzle.query.text': rawQuery.text, + 'drizzle.query.params': JSON.stringify(params), + }); + return client.query(rawQuery, params); + }); + } + + const result = await tracer.startActiveSpan('drizzle.driver.execute', (span) => { + span?.setAttributes({ + 'drizzle.query.name': query.name, + 'drizzle.query.text': query.text, + 'drizzle.query.params': JSON.stringify(params), + }); + return client.query(query, params); + }); + + return tracer.startActiveSpan('drizzle.mapResponse', () => { + return customResultMapper + ? customResultMapper(result.rows) + : result.rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); + }); + }); + } + + all(placeholderValues: Record | undefined = {}): Promise { + return tracer.startActiveSpan('drizzle.execute', () => { + const params = fillPlaceholders(this.params, placeholderValues); + this.logger.logQuery(this.rawQueryConfig.text, params); + return tracer.startActiveSpan('drizzle.driver.execute', (span) => { + span?.setAttributes({ + 'drizzle.query.name': this.rawQueryConfig.name, + 'drizzle.query.text': this.rawQueryConfig.text, + 'drizzle.query.params': JSON.stringify(params), + }); + return this.client.query(this.rawQueryConfig, params).then((result) => result.rows); + }); + }); + } + + /** @internal */ + isResponseInArrayMode(): boolean { + return this._isResponseInArrayMode; + } +} + +export interface NodeCockroachSessionOptions { + logger?: Logger; +} + +export class NodeCockroachSession< + TFullSchema extends Record, + TSchema extends V1.TablesRelationalConfig, +> extends CockroachSession { + static override readonly [entityKind]: string = 'NodeCockroachSession'; + + private logger: Logger; + + constructor( + private client: NodeCockroachClient, + dialect: CockroachDialect, + private schema: V1.RelationalSchemaConfig | undefined, + private options: NodeCockroachSessionOptions = {}, + ) { + super(dialect); + this.logger = options.logger ?? new NoopLogger(); + } + + prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + name: string | undefined, + isResponseInArrayMode: boolean, + customResultMapper?: (rows: unknown[][]) => T['execute'], + ): CockroachPreparedQuery { + return new NodeCockroachPreparedQuery( + this.client, + query.sql, + query.params, + this.logger, + fields, + name, + isResponseInArrayMode, + customResultMapper, + ); + } + + override async transaction( + transaction: (tx: NodeCockroachTransaction) => Promise, + config?: CockroachTransactionConfig | undefined, + ): Promise { + const session = this.client instanceof Pool // oxlint-disable-line drizzle-internal/no-instanceof + ? new NodeCockroachSession(await this.client.connect(), this.dialect, this.schema, this.options) + : this; + const tx = new NodeCockroachTransaction(this.dialect, session, this.schema); + await tx.execute(sql`begin${config ? sql` ${tx.getTransactionConfigSQL(config)}` : undefined}`); + try { + const result = await transaction(tx); + await tx.execute(sql`commit`); + return result; + } catch (error) { + await tx.execute(sql`rollback`); + throw error; + } finally { + if (this.client instanceof Pool) { // oxlint-disable-line drizzle-internal/no-instanceof + (session.client as PoolClient).release(); + } + } + } + + override async count(sql: SQL): Promise { + const res = await this.execute<{ rows: [{ count: string }] }>(sql); + return Number( + res['rows'][0]['count'], + ); + } +} + +export class NodeCockroachTransaction< + TFullSchema extends Record, + TSchema extends V1.TablesRelationalConfig, +> extends CockroachTransaction { + static override readonly [entityKind]: string = 'NodeCockroachTransaction'; + + override async transaction( + transaction: (tx: NodeCockroachTransaction) => Promise, + ): Promise { + const savepointName = `sp${this.nestedIndex + 1}`; + const tx = new NodeCockroachTransaction( + this.dialect, + this.session, + this.schema, + this.nestedIndex + 1, + ); + await tx.execute(sql.raw(`savepoint ${savepointName}`)); + try { + const result = await transaction(tx); + await tx.execute(sql.raw(`release savepoint ${savepointName}`)); + return result; + } catch (err) { + await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); + throw err; + } + } +} + +export interface NodeCockroachQueryResultHKT extends CockroachQueryResultHKT { + type: QueryResult>; +} diff --git a/drizzle-orm/src/column-builder.ts b/drizzle-orm/src/column-builder.ts index 9c31e1e152..14853519b7 100644 --- a/drizzle-orm/src/column-builder.ts +++ b/drizzle-orm/src/column-builder.ts @@ -1,6 +1,8 @@ import { entityKind } from '~/entity.ts'; +import type { CockroachColumn, ExtraConfigColumn as CockroachExtraConfigColumn } from './cockroach-core/index.ts'; import type { Column, ColumnBaseConfig } from './column.ts'; import type { GelColumn, GelExtraConfigColumn } from './gel-core/index.ts'; +import type { MsSqlColumn } from './mssql-core/index.ts'; import type { MySqlColumn } from './mysql-core/index.ts'; import type { ExtraConfigColumn, PgColumn, PgSequenceOptions } from './pg-core/index.ts'; import type { SingleStoreColumn } from './singlestore-core/index.ts'; @@ -62,7 +64,6 @@ export type ColumnDataObjectConstraint = | 'relDuration'; export type ColumnDataStringConstraint = - | 'text' | 'binary' | 'cidr' | 'date' @@ -117,9 +118,15 @@ export function extractExtendedColumnType( return { type, constraint } as any; } -export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'singlestore' | 'common' | 'gel'; +export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'singlestore' | 'mssql' | 'common' | 'gel' | 'cockroach'; -export type GeneratedStorageMode = 'virtual' | 'stored'; +// TODO update description +// 'virtual' | 'stored' for postgres +// 'stored' for mysql +// 'virtual' | 'persisted' for mssql +// We should remove this option from common Column and store it per dialect common +// Was discussed with Andrew +export type GeneratedStorageMode = 'virtual' | 'stored' | 'persisted'; export type GeneratedType = 'always' | 'byDefault'; @@ -136,7 +143,6 @@ export interface GeneratedIdentityConfig { } export interface ColumnBuilderBaseConfig { - name: string; dataType: TDataType; data: unknown; driverParam: unknown; @@ -229,7 +235,7 @@ export type $Type = T & { }; }; -export type HasGenerated = T & { +export type HasGenerated = T & { _: { hasDefault: true; generated: TGenerated; @@ -392,15 +398,18 @@ export type BuildColumn< TBuiltConfig extends ColumnBaseConfig = MakeColumnConfig, > = TDialect extends 'pg' ? PgColumn : TDialect extends 'mysql' ? MySqlColumn + : TDialect extends 'mssql' ? MsSqlColumn : TDialect extends 'sqlite' ? SQLiteColumn - : TDialect extends 'common' ? Column : TDialect extends 'singlestore' ? SingleStoreColumn : TDialect extends 'gel' ? GelColumn + : TDialect extends 'cockroach' ? CockroachColumn + : TDialect extends 'common' ? Column : never; export type BuildIndexColumn< TDialect extends Dialect, > = TDialect extends 'pg' ? ExtraConfigColumn + : TDialect extends 'cockroach' ? CockroachExtraConfigColumn : TDialect extends 'gel' ? GelExtraConfigColumn : never; @@ -444,4 +453,6 @@ export type ChangeColumnTableName< : TDialect extends 'singlestore' ? SingleStoreColumn> : TDialect extends 'sqlite' ? SQLiteColumn> : TDialect extends 'gel' ? GelColumn> + : TDialect extends 'mssql' ? MsSqlColumn> + : TDialect extends 'cockroach' ? CockroachColumn> : never; diff --git a/drizzle-orm/src/column-common.ts b/drizzle-orm/src/column-common.ts new file mode 100644 index 0000000000..f3ac7c5bb1 --- /dev/null +++ b/drizzle-orm/src/column-common.ts @@ -0,0 +1 @@ +export const OriginalColumn = Symbol.for('drizzle:OriginalColumn'); diff --git a/drizzle-orm/src/column.ts b/drizzle-orm/src/column.ts index 009f942372..b2b85eb476 100644 --- a/drizzle-orm/src/column.ts +++ b/drizzle-orm/src/column.ts @@ -4,6 +4,7 @@ import type { GeneratedColumnConfig, GeneratedIdentityConfig, } from './column-builder.ts'; +import { OriginalColumn } from './column-common.ts'; import { entityKind } from './entity.ts'; import type { DriverValueMapper, SQL, SQLWrapper } from './sql/sql.ts'; import type { Table } from './table.ts'; @@ -31,7 +32,10 @@ export interface Column< TRuntimeConfig extends object = object, > extends DriverValueMapper, SQLWrapper { // SQLWrapper runtime implementation is defined in 'sql/sql.ts' + // `as` runtime implementation is defined in 'alias.ts' + as(alias: string): this; } + /* `Column` only accepts a full `ColumnConfig` as its generic. To infer parts of the config, use `AnyColumn` that accepts a partial config. @@ -65,6 +69,7 @@ export abstract class Column< readonly generatedIdentity: GeneratedIdentityConfig | undefined = undefined; readonly length: number | undefined; readonly isLengthExact: boolean | undefined; + readonly isAlias: boolean; /** @internal */ protected config: ColumnBuilderRuntimeConfig & TRuntimeConfig; @@ -84,6 +89,7 @@ export abstract class Column< this.table = table; this.name = config.name; + this.isAlias = false; this.keyAsName = config.keyAsName; this.notNull = config.notNull; this.default = config.default; @@ -116,6 +122,11 @@ export abstract class Column< shouldDisableInsert(): boolean { return this.config.generated !== undefined && this.config.generated.type !== 'byDefault'; } + + /** @internal */ + [OriginalColumn](): this { + return this; + } } export type UpdateColConfig< diff --git a/drizzle-orm/src/durable-sqlite/migrator.ts b/drizzle-orm/src/durable-sqlite/migrator.ts index a6150a67ae..199f6ba77f 100644 --- a/drizzle-orm/src/durable-sqlite/migrator.ts +++ b/drizzle-orm/src/durable-sqlite/migrator.ts @@ -1,25 +1,23 @@ -import type { MigrationMeta, MigratorInitFailResponse } from '~/migrator.ts'; +import { formatToMillis, type MigrationMeta, type MigratorInitFailResponse } from '~/migrator.ts'; import type { AnyRelations } from '~/relations.ts'; import { sql } from '~/sql/index.ts'; import type { DrizzleSqliteDODatabase } from './driver.ts'; interface MigrationConfig { - journal: { - entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; - }; migrations: Record; /** @internal */ init?: boolean; } -function readMigrationFiles({ journal, migrations }: MigrationConfig): MigrationMeta[] { +function readMigrationFiles({ migrations }: MigrationConfig): MigrationMeta[] { const migrationQueries: MigrationMeta[] = []; - for (const journalEntry of journal.entries) { - const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; + const sortedMigrations = Object.keys(migrations).sort(); + for (const key of sortedMigrations) { + const query = migrations[key]; if (!query) { - throw new Error(`Missing migration: ${journalEntry.tag}`); + throw new Error(`Missing migration: ${key}`); } try { @@ -27,14 +25,16 @@ function readMigrationFiles({ journal, migrations }: MigrationConfig): Migration return it; }); + const migrationDate = formatToMillis(key.slice(0, 14)); + migrationQueries.push({ sql: result, - bps: journalEntry.breakpoints, - folderMillis: journalEntry.when, + bps: true, + folderMillis: migrationDate, hash: '', }); } catch { - throw new Error(`Failed to parse migration: ${journalEntry.tag}`); + throw new Error(`Failed to parse migration: ${key}`); } } diff --git a/drizzle-orm/src/durable-sqlite/session.ts b/drizzle-orm/src/durable-sqlite/session.ts index 1132ba3980..00fd4e04ed 100644 --- a/drizzle-orm/src/durable-sqlite/session.ts +++ b/drizzle-orm/src/durable-sqlite/session.ts @@ -9,10 +9,10 @@ import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select. import { type PreparedQueryConfig as PreparedQueryConfigBase, type SQLiteExecuteMethod, + SQLitePreparedQuery as PreparedQueryBase, SQLiteSession, type SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; -import { SQLitePreparedQuery as PreparedQueryBase } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface SQLiteDOSessionOptions { @@ -163,8 +163,11 @@ export class SQLiteDOPreparedQuery< const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); - // oxlint-disable-next-line no-unused-expressions - params.length > 0 ? this.client.sql.exec(this.query.sql, ...params) : this.client.sql.exec(this.query.sql); + if (params.length > 0) { + this.client.sql.exec(this.query.sql, ...params); + return; + } + this.client.sql.exec(this.query.sql); } all(placeholderValues?: Record): T['all'] { diff --git a/drizzle-orm/src/entity.ts b/drizzle-orm/src/entity.ts index 2b6dfb4def..f69bde28c0 100644 --- a/drizzle-orm/src/entity.ts +++ b/drizzle-orm/src/entity.ts @@ -14,7 +14,7 @@ export function is>(value: any, type: T): valu return false; } - if (value instanceof type) { // eslint-disable-line no-instanceof/no-instanceof + if (value instanceof type) { // oxlint-disable-line drizzle-internal/no-instanceof return true; } diff --git a/drizzle-orm/src/expo-sqlite/migrator.ts b/drizzle-orm/src/expo-sqlite/migrator.ts index 47335688b9..a8b79a6c55 100644 --- a/drizzle-orm/src/expo-sqlite/migrator.ts +++ b/drizzle-orm/src/expo-sqlite/migrator.ts @@ -1,23 +1,21 @@ import { useEffect, useReducer } from 'react'; -import type { MigrationMeta } from '~/migrator.ts'; +import { formatToMillis, type MigrationMeta } from '~/migrator.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; import type { ExpoSQLiteDatabase } from './driver.ts'; interface MigrationConfig { - journal: { - entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; - }; migrations: Record; } -async function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise { +async function readMigrationFiles({ migrations }: MigrationConfig): Promise { const migrationQueries: MigrationMeta[] = []; - for await (const journalEntry of journal.entries) { - const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; + const sortedMigrations = Object.keys(migrations).sort(); + for (const key of sortedMigrations) { + const query = migrations[key]; if (!query) { - throw new Error(`Missing migration: ${journalEntry.tag}`); + throw new Error(`Missing migration: ${key}`); } try { @@ -25,14 +23,16 @@ async function readMigrationFiles({ journal, migrations }: MigrationConfig): Pro return it; }); + const migrationDate = formatToMillis(key.slice(0, 14)); + migrationQueries.push({ sql: result, - bps: journalEntry.breakpoints, - folderMillis: journalEntry.when, + bps: true, + folderMillis: migrationDate, hash: '', }); } catch { - throw new Error(`Failed to parse migration: ${journalEntry.tag}`); + throw new Error(`Failed to parse migration: ${key}`); } } diff --git a/drizzle-orm/src/gel-core/columns/bigint.ts b/drizzle-orm/src/gel-core/columns/bigint.ts index 252de05f68..d0921b099f 100644 --- a/drizzle-orm/src/gel-core/columns/bigint.ts +++ b/drizzle-orm/src/gel-core/columns/bigint.ts @@ -5,7 +5,6 @@ import { GelColumn } from './common.ts'; import { GelIntColumnBaseBuilder } from './int.common.ts'; export class GelInt53Builder extends GelIntColumnBaseBuilder<{ - name: string; dataType: 'number int53'; data: number; driverParam: number; diff --git a/drizzle-orm/src/gel-core/columns/bigintT.ts b/drizzle-orm/src/gel-core/columns/bigintT.ts index d50737c55c..0fdcf6b86a 100644 --- a/drizzle-orm/src/gel-core/columns/bigintT.ts +++ b/drizzle-orm/src/gel-core/columns/bigintT.ts @@ -5,7 +5,6 @@ import { GelColumn } from './common.ts'; import { GelIntColumnBaseBuilder } from './int.common.ts'; export class GelBigInt64Builder extends GelIntColumnBaseBuilder<{ - name: string; dataType: 'bigint int64'; data: bigint; driverParam: bigint; diff --git a/drizzle-orm/src/gel-core/columns/boolean.ts b/drizzle-orm/src/gel-core/columns/boolean.ts index be22a7486f..05ee2740e8 100644 --- a/drizzle-orm/src/gel-core/columns/boolean.ts +++ b/drizzle-orm/src/gel-core/columns/boolean.ts @@ -4,7 +4,6 @@ import type { GelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelBooleanBuilder extends GelColumnBuilder<{ - name: string; dataType: 'boolean'; data: boolean; driverParam: boolean; diff --git a/drizzle-orm/src/gel-core/columns/bytes.ts b/drizzle-orm/src/gel-core/columns/bytes.ts index ece174185d..f01c36fc66 100644 --- a/drizzle-orm/src/gel-core/columns/bytes.ts +++ b/drizzle-orm/src/gel-core/columns/bytes.ts @@ -4,7 +4,6 @@ import type { GelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelBytesBuilder extends GelColumnBuilder<{ - name: string; dataType: 'object buffer'; data: Uint8Array; driverParam: Uint8Array | Buffer; diff --git a/drizzle-orm/src/gel-core/columns/common.ts b/drizzle-orm/src/gel-core/columns/common.ts index eda0180861..82a9aad7c1 100644 --- a/drizzle-orm/src/gel-core/columns/common.ts +++ b/drizzle-orm/src/gel-core/columns/common.ts @@ -17,7 +17,6 @@ import type { AnyGelTable, GelTable } from '~/gel-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import { iife } from '~/tracing-utils.ts'; import type { GelIndexOpClass } from '../indexes.ts'; -import { uniqueKeyName } from '../unique-constraint.ts'; export type GelColumns = Record>; @@ -132,9 +131,6 @@ export abstract class GelColumn< table: GelTable, config: ColumnBuilderRuntimeConfig & TRuntimeConfig, ) { - if (!config.uniqueName) { - config.uniqueName = uniqueKeyName(table, [config.name]); - } super(table, config); this.table = table; } @@ -268,7 +264,7 @@ export class GelArrayBuilder< }, {} > { - static override readonly [entityKind] = 'GelArrayBuilder'; + static override readonly [entityKind]: string = 'GelArrayBuilder'; constructor( name: string, diff --git a/drizzle-orm/src/gel-core/columns/custom.ts b/drizzle-orm/src/gel-core/columns/custom.ts index dab5a34a30..8bebf8181c 100644 --- a/drizzle-orm/src/gel-core/columns/custom.ts +++ b/drizzle-orm/src/gel-core/columns/custom.ts @@ -8,7 +8,6 @@ import { GelColumn, GelColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { - name: string; dataType: 'custom'; data: T['data']; driverParam: T['driverData']; @@ -343,6 +342,6 @@ export function customType( b?: T['config'], ): GelCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); - return new GelCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); + return new GelCustomColumnBuilder(name, config, customTypeParams); }; } diff --git a/drizzle-orm/src/gel-core/columns/date-duration.ts b/drizzle-orm/src/gel-core/columns/date-duration.ts index dcd06cce86..76a84d998e 100644 --- a/drizzle-orm/src/gel-core/columns/date-duration.ts +++ b/drizzle-orm/src/gel-core/columns/date-duration.ts @@ -5,7 +5,6 @@ import type { GelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelDateDurationBuilder extends GelColumnBuilder<{ - name: string; dataType: 'object dateDuration'; data: DateDuration; driverParam: DateDuration; diff --git a/drizzle-orm/src/gel-core/columns/decimal.ts b/drizzle-orm/src/gel-core/columns/decimal.ts index 7c5de6daec..8b59fecef6 100644 --- a/drizzle-orm/src/gel-core/columns/decimal.ts +++ b/drizzle-orm/src/gel-core/columns/decimal.ts @@ -5,7 +5,6 @@ import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelDecimalBuilder extends GelColumnBuilder< { - name: string; dataType: 'string numeric'; data: string; driverParam: string; diff --git a/drizzle-orm/src/gel-core/columns/double-precision.ts b/drizzle-orm/src/gel-core/columns/double-precision.ts index 94aaccc12b..f108315853 100644 --- a/drizzle-orm/src/gel-core/columns/double-precision.ts +++ b/drizzle-orm/src/gel-core/columns/double-precision.ts @@ -4,7 +4,6 @@ import type { GelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelDoublePrecisionBuilder extends GelColumnBuilder<{ - name: string; dataType: 'number double'; data: number; driverParam: number; diff --git a/drizzle-orm/src/gel-core/columns/duration.ts b/drizzle-orm/src/gel-core/columns/duration.ts index b2fe33d6d7..3e623c4318 100644 --- a/drizzle-orm/src/gel-core/columns/duration.ts +++ b/drizzle-orm/src/gel-core/columns/duration.ts @@ -5,7 +5,6 @@ import type { GelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelDurationBuilder extends GelColumnBuilder<{ - name: string; dataType: 'object duration'; data: Duration; driverParam: Duration; diff --git a/drizzle-orm/src/gel-core/columns/integer.ts b/drizzle-orm/src/gel-core/columns/integer.ts index 2fb5d0b8b6..a2fb1cf540 100644 --- a/drizzle-orm/src/gel-core/columns/integer.ts +++ b/drizzle-orm/src/gel-core/columns/integer.ts @@ -5,7 +5,6 @@ import { GelColumn } from './common.ts'; import { GelIntColumnBaseBuilder } from './int.common.ts'; export class GelIntegerBuilder extends GelIntColumnBaseBuilder<{ - name: string; dataType: 'number int32'; data: number; driverParam: number; diff --git a/drizzle-orm/src/gel-core/columns/json.ts b/drizzle-orm/src/gel-core/columns/json.ts index 3d333879f8..8324dbbb8e 100644 --- a/drizzle-orm/src/gel-core/columns/json.ts +++ b/drizzle-orm/src/gel-core/columns/json.ts @@ -5,7 +5,6 @@ import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelJsonBuilder extends GelColumnBuilder< { - name: string; dataType: 'object json'; data: unknown; driverParam: unknown; diff --git a/drizzle-orm/src/gel-core/columns/localdate.ts b/drizzle-orm/src/gel-core/columns/localdate.ts index c8267a02c4..3da20392af 100644 --- a/drizzle-orm/src/gel-core/columns/localdate.ts +++ b/drizzle-orm/src/gel-core/columns/localdate.ts @@ -6,7 +6,6 @@ import { GelColumn } from './common.ts'; import { GelLocalDateColumnBaseBuilder } from './date.common.ts'; export class GelLocalDateStringBuilder extends GelLocalDateColumnBaseBuilder<{ - name: string; dataType: 'object localDate'; data: LocalDate; driverParam: LocalDate; diff --git a/drizzle-orm/src/gel-core/columns/localtime.ts b/drizzle-orm/src/gel-core/columns/localtime.ts index 771224cd47..92cdcf15ab 100644 --- a/drizzle-orm/src/gel-core/columns/localtime.ts +++ b/drizzle-orm/src/gel-core/columns/localtime.ts @@ -6,7 +6,6 @@ import { GelColumn } from './common.ts'; import { GelLocalDateColumnBaseBuilder } from './date.common.ts'; export class GelLocalTimeBuilder extends GelLocalDateColumnBaseBuilder<{ - name: string; dataType: 'object localTime'; data: LocalTime; driverParam: LocalTime; diff --git a/drizzle-orm/src/gel-core/columns/real.ts b/drizzle-orm/src/gel-core/columns/real.ts index d7f7147cb6..ee61ade47f 100644 --- a/drizzle-orm/src/gel-core/columns/real.ts +++ b/drizzle-orm/src/gel-core/columns/real.ts @@ -5,7 +5,6 @@ import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelRealBuilder extends GelColumnBuilder< { - name: string; dataType: 'number float'; data: number; driverParam: number; diff --git a/drizzle-orm/src/gel-core/columns/relative-duration.ts b/drizzle-orm/src/gel-core/columns/relative-duration.ts index 883a264d96..2ee4d7f444 100644 --- a/drizzle-orm/src/gel-core/columns/relative-duration.ts +++ b/drizzle-orm/src/gel-core/columns/relative-duration.ts @@ -5,7 +5,6 @@ import type { GelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelRelDurationBuilder extends GelColumnBuilder<{ - name: string; dataType: 'object relDuration'; data: RelativeDuration; driverParam: RelativeDuration; diff --git a/drizzle-orm/src/gel-core/columns/smallint.ts b/drizzle-orm/src/gel-core/columns/smallint.ts index e13b4168a3..4fbc89105d 100644 --- a/drizzle-orm/src/gel-core/columns/smallint.ts +++ b/drizzle-orm/src/gel-core/columns/smallint.ts @@ -5,7 +5,6 @@ import { GelColumn } from './common.ts'; import { GelIntColumnBaseBuilder } from './int.common.ts'; export class GelSmallIntBuilder extends GelIntColumnBaseBuilder<{ - name: string; dataType: 'number int16'; data: number; driverParam: number; diff --git a/drizzle-orm/src/gel-core/columns/text.ts b/drizzle-orm/src/gel-core/columns/text.ts index da4d9cf807..dc64de954b 100644 --- a/drizzle-orm/src/gel-core/columns/text.ts +++ b/drizzle-orm/src/gel-core/columns/text.ts @@ -4,7 +4,6 @@ import type { GelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelTextBuilder extends GelColumnBuilder<{ - name: string; dataType: 'string'; data: string; driverParam: string; diff --git a/drizzle-orm/src/gel-core/columns/timestamp.ts b/drizzle-orm/src/gel-core/columns/timestamp.ts index b1e93cf282..28050b444e 100644 --- a/drizzle-orm/src/gel-core/columns/timestamp.ts +++ b/drizzle-orm/src/gel-core/columns/timestamp.ts @@ -7,7 +7,6 @@ import { GelLocalDateColumnBaseBuilder } from './date.common.ts'; export class GelTimestampBuilder extends GelLocalDateColumnBaseBuilder< { - name: string; dataType: 'object localDateTime'; data: LocalDateTime; driverParam: LocalDateTime; diff --git a/drizzle-orm/src/gel-core/columns/timestamptz.ts b/drizzle-orm/src/gel-core/columns/timestamptz.ts index 1453c0cfcc..0f51b334ab 100644 --- a/drizzle-orm/src/gel-core/columns/timestamptz.ts +++ b/drizzle-orm/src/gel-core/columns/timestamptz.ts @@ -6,7 +6,6 @@ import { GelLocalDateColumnBaseBuilder } from './date.common.ts'; export class GelTimestampTzBuilder extends GelLocalDateColumnBaseBuilder< { - name: string; dataType: 'object date'; data: Date; driverParam: Date; diff --git a/drizzle-orm/src/gel-core/columns/uuid.ts b/drizzle-orm/src/gel-core/columns/uuid.ts index 829bf803e4..eb5115feab 100644 --- a/drizzle-orm/src/gel-core/columns/uuid.ts +++ b/drizzle-orm/src/gel-core/columns/uuid.ts @@ -4,7 +4,6 @@ import type { GelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export class GelUUIDBuilder extends GelColumnBuilder<{ - name: string; dataType: 'string uuid'; data: string; driverParam: string; diff --git a/drizzle-orm/src/gel-core/dialect.ts b/drizzle-orm/src/gel-core/dialect.ts index 7b6b214642..02251c8cf5 100644 --- a/drizzle-orm/src/gel-core/dialect.ts +++ b/drizzle-orm/src/gel-core/dialect.ts @@ -1,5 +1,11 @@ import * as V1 from '~/_relations.ts'; -import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; +import { + aliasedTable, + aliasedTableColumn, + getOriginalColumnFromAlias, + mapColumnsInAliasedSQLToAlias, + mapColumnsInSQLToAlias, +} from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; @@ -264,9 +270,13 @@ export class GelDialect { // Gel throws an error when more than one similarly named columns exist within context instead of preferring the closest one // thus forcing us to be explicit about column's source // if (isSingleTable) { - // chunk.push(sql.identifier(this.casing.getColumnCasing(field))); + // chunk.push( + // field.isAlias + // ? sql`${sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field)))} as ${field}` + // : sql.identifier(this.casing.getColumnCasing(field)), + // ); // } else { - chunk.push(field); + chunk.push(field.isAlias ? sql`${getOriginalColumnFromAlias(field)} as ${field}` : field); // } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; diff --git a/drizzle-orm/src/gel-core/query-builders/count.ts b/drizzle-orm/src/gel-core/query-builders/count.ts index b795d5a28b..4a59d0f915 100644 --- a/drizzle-orm/src/gel-core/query-builders/count.ts +++ b/drizzle-orm/src/gel-core/query-builders/count.ts @@ -2,26 +2,27 @@ import { entityKind } from '~/entity.ts'; import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { GelSession } from '../session.ts'; import type { GelTable } from '../table.ts'; +import type { GelViewBase } from '../view-base.ts'; export class GelCountBuilder< TSession extends GelSession, > extends SQL implements Promise, SQLWrapper { private sql: SQL; - static override readonly [entityKind] = 'GelCountBuilder'; + static override readonly [entityKind]: string = 'GelCountBuilder'; [Symbol.toStringTag] = 'GelCountBuilder'; private session: TSession; private static buildEmbeddedCount( - source: GelTable | SQL | SQLWrapper, + source: GelTable | GelViewBase | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; } private static buildCount( - source: GelTable | SQL | SQLWrapper, + source: GelTable | GelViewBase | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`select count(*) as count from ${source}${sql.raw(' where ').if(filters)}${filters};`; @@ -29,7 +30,7 @@ export class GelCountBuilder< constructor( readonly params: { - source: GelTable | SQL | SQLWrapper; + source: GelTable | GelViewBase | SQL | SQLWrapper; filters?: SQL; session: TSession; }, diff --git a/drizzle-orm/src/gel-core/query-builders/insert.ts b/drizzle-orm/src/gel-core/query-builders/insert.ts index 6651f1185c..d118862330 100644 --- a/drizzle-orm/src/gel-core/query-builders/insert.ts +++ b/drizzle-orm/src/gel-core/query-builders/insert.ts @@ -36,10 +36,14 @@ export interface GelInsertConfig { overridingSystemValue_?: boolean; } -export type GelInsertValue, OverrideT extends boolean = false> = +export type GelInsertValue< + TTable extends GelTable, + OverrideT extends boolean = false, + TModel extends Record = InferInsertModel, +> = & { - [Key in keyof InferInsertModel]: - | InferInsertModel[Key] + [Key in keyof TModel]: + | TModel[Key] | SQL | Placeholder; } @@ -47,7 +51,7 @@ export type GelInsertValue, OverrideT exten export type GelInsertSelectQueryBuilder< TTable extends GelTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = TypedQueryBuilder< { [K in keyof TModel]: AnyGelColumn | SQL | SQL.Aliased | TModel[K] } >; diff --git a/drizzle-orm/src/gel-core/query-builders/select.ts b/drizzle-orm/src/gel-core/query-builders/select.ts index 2e1f0675e5..e1de2b9a17 100644 --- a/drizzle-orm/src/gel-core/query-builders/select.ts +++ b/drizzle-orm/src/gel-core/query-builders/select.ts @@ -31,9 +31,9 @@ import { getTableLikeName, haveSameKeys, type NeonAuthToken, + orderSelectedFields, type ValueOrArray, } from '~/utils.ts'; -import { orderSelectedFields } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { extractUsedTable } from '../utils.ts'; import type { diff --git a/drizzle-orm/src/gel-core/query-builders/update.ts b/drizzle-orm/src/gel-core/query-builders/update.ts index ed7bc326a7..6b2b2be0c4 100644 --- a/drizzle-orm/src/gel-core/query-builders/update.ts +++ b/drizzle-orm/src/gel-core/query-builders/update.ts @@ -51,7 +51,7 @@ export interface GelUpdateConfig { export type GelUpdateSetSource< TTable extends GelTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = & { [Key in keyof TModel & string]?: diff --git a/drizzle-orm/src/gel-core/utils.ts b/drizzle-orm/src/gel-core/utils.ts index c638de8ba8..d9c90405a3 100644 --- a/drizzle-orm/src/gel-core/utils.ts +++ b/drizzle-orm/src/gel-core/utils.ts @@ -13,8 +13,8 @@ import { type PrimaryKey, PrimaryKeyBuilder } from './primary-keys.ts'; import { GelTable } from './table.ts'; import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; import type { GelViewBase } from './view-base.ts'; -import { GelViewConfig } from './view-common.ts'; -import { type GelMaterializedView, GelMaterializedViewConfig, type GelView } from './view.ts'; +import { GelMaterializedViewConfig, GelViewConfig } from './view-common.ts'; +import type { GelMaterializedView, GelView } from './view.ts'; export function getTableConfig(table: TTable) { const columns = Object.values(table[Table.Symbol.Columns]); diff --git a/drizzle-orm/src/gel-core/view-common.ts b/drizzle-orm/src/gel-core/view-common.ts index 1980402f34..8a1cb21ec0 100644 --- a/drizzle-orm/src/gel-core/view-common.ts +++ b/drizzle-orm/src/gel-core/view-common.ts @@ -1 +1,3 @@ export const GelViewConfig = Symbol.for('drizzle:GelViewConfig'); + +export const GelMaterializedViewConfig = Symbol.for('drizzle:GelMaterializedViewConfig'); diff --git a/drizzle-orm/src/gel-core/view.ts b/drizzle-orm/src/gel-core/view.ts index a5a613544a..f863d418fe 100644 --- a/drizzle-orm/src/gel-core/view.ts +++ b/drizzle-orm/src/gel-core/view.ts @@ -10,7 +10,7 @@ import type { GelColumn } from './columns/common.ts'; import { QueryBuilder } from './query-builders/query-builder.ts'; import { gelTable } from './table.ts'; import { GelViewBase } from './view-base.ts'; -import { GelViewConfig } from './view-common.ts'; +import { GelMaterializedViewConfig, GelViewConfig } from './view-common.ts'; export type ViewWithConfig = RequireAtLeastOne<{ checkOption: 'local' | 'cascaded'; @@ -335,8 +335,6 @@ export type GelViewWithSelection< TSelectedFields extends ColumnsSelection = ColumnsSelection, > = GelView & TSelectedFields; -export const GelMaterializedViewConfig = Symbol.for('drizzle:GelMaterializedViewConfig'); - export class GelMaterializedView< TName extends string = string, TExisting extends boolean = boolean, diff --git a/drizzle-orm/src/gel/driver.ts b/drizzle-orm/src/gel/driver.ts index 5382f848f4..c0374d1b9c 100644 --- a/drizzle-orm/src/gel/driver.ts +++ b/drizzle-orm/src/gel/driver.ts @@ -8,7 +8,7 @@ import type { GelQueryResultHKT } from '~/gel-core/session.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { GelClient } from './session.ts'; import { GelDbSession } from './session.ts'; @@ -91,8 +91,8 @@ export function drizzle< TClient extends GelClient = Client, >( ...params: - | [TClient | string] - | [TClient | string, DrizzleConfig] + | [string] + | [string, DrizzleConfig] | [ & DrizzleConfig & ( @@ -113,20 +113,16 @@ export function drizzle< return construct(instance, params[1] as DrizzleConfig | undefined) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as ( - & ({ connection?: ConnectOptions | string; client?: TClient }) - & DrizzleConfig - ); - - if (client) return construct(client, drizzleConfig); + const { connection, client, ...drizzleConfig } = params[0] as ( + & ({ connection?: ConnectOptions | string; client?: TClient }) + & DrizzleConfig + ); - const instance = createClient(connection); + if (client) return construct(client, drizzleConfig); - return construct(instance, drizzleConfig) as any; - } + const instance = createClient(connection); - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/libsql/driver.ts b/drizzle-orm/src/libsql/driver.ts index dc022ffffd..106d581631 100644 --- a/drizzle-orm/src/libsql/driver.ts +++ b/drizzle-orm/src/libsql/driver.ts @@ -1,7 +1,7 @@ import { type Client, type Config, createClient } from '@libsql/client'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; -import { construct as construct, type LibSQLDatabase } from './driver-core.ts'; +import type { DrizzleConfig } from '~/utils.ts'; +import { construct, type LibSQLDatabase } from './driver-core.ts'; export { LibSQLDatabase } from './driver-core.ts'; @@ -11,9 +11,9 @@ export function drizzle< TClient extends Client = Client, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -36,19 +36,15 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); + const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/libsql/http/index.ts b/drizzle-orm/src/libsql/http/index.ts index 5c1491fa16..2fb367a3d0 100644 --- a/drizzle-orm/src/libsql/http/index.ts +++ b/drizzle-orm/src/libsql/http/index.ts @@ -1,6 +1,6 @@ import { type Client, type Config, createClient } from '@libsql/client/http'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< @@ -9,9 +9,9 @@ export function drizzle< TClient extends Client = Client, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -34,19 +34,15 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); + const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/libsql/node/index.ts b/drizzle-orm/src/libsql/node/index.ts index 279481fb22..32978f4206 100644 --- a/drizzle-orm/src/libsql/node/index.ts +++ b/drizzle-orm/src/libsql/node/index.ts @@ -1,6 +1,6 @@ import { type Client, type Config, createClient } from '@libsql/client/node'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< @@ -9,9 +9,9 @@ export function drizzle< TClient extends Client = Client, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -34,19 +34,15 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); + const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/libsql/session.ts b/drizzle-orm/src/libsql/session.ts index c3420b5761..40df55f139 100644 --- a/drizzle-orm/src/libsql/session.ts +++ b/drizzle-orm/src/libsql/session.ts @@ -1,6 +1,6 @@ import type { Client, InArgs, InStatement, ResultSet, Transaction } from '@libsql/client'; import type * as V1 from '~/_relations.ts'; -import type { BatchItem as BatchItem } from '~/batch.ts'; +import type { BatchItem } from '~/batch.ts'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; @@ -232,8 +232,6 @@ export class LibSQLPreparedQuery): Promise { @@ -399,9 +397,9 @@ function normalizeRow(obj: any) { } function normalizeFieldValue(value: unknown) { - if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { // eslint-disable-line no-instanceof/no-instanceof + if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { // oxlint-disable-line drizzle-internal/no-instanceof if (typeof Buffer !== 'undefined') { - if (!(value instanceof Buffer)) { // eslint-disable-line no-instanceof/no-instanceof + if (!(value instanceof Buffer)) { // oxlint-disable-line drizzle-internal/no-instanceof return Buffer.from(value); } return value; diff --git a/drizzle-orm/src/libsql/sqlite3/index.ts b/drizzle-orm/src/libsql/sqlite3/index.ts index 0197ce347b..9ccd381d9a 100644 --- a/drizzle-orm/src/libsql/sqlite3/index.ts +++ b/drizzle-orm/src/libsql/sqlite3/index.ts @@ -1,6 +1,6 @@ import { type Client, type Config, createClient } from '@libsql/client/sqlite3'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< @@ -9,9 +9,9 @@ export function drizzle< TClient extends Client = Client, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -34,19 +34,15 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); + const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/libsql/wasm/index.ts b/drizzle-orm/src/libsql/wasm/index.ts index 15497a6d7b..5bb4cfbb0b 100644 --- a/drizzle-orm/src/libsql/wasm/index.ts +++ b/drizzle-orm/src/libsql/wasm/index.ts @@ -1,6 +1,6 @@ import { type Client, type Config, createClient } from '@libsql/client-wasm'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< @@ -9,9 +9,9 @@ export function drizzle< TClient extends Client = Client, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -34,19 +34,15 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); + const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/libsql/web/index.ts b/drizzle-orm/src/libsql/web/index.ts index e052ddce7a..74f997c2ce 100644 --- a/drizzle-orm/src/libsql/web/index.ts +++ b/drizzle-orm/src/libsql/web/index.ts @@ -1,6 +1,6 @@ import { type Client, type Config, createClient } from '@libsql/client/web'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< @@ -9,9 +9,9 @@ export function drizzle< TClient extends Client = Client, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -34,19 +34,15 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); + const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/libsql/ws/index.ts b/drizzle-orm/src/libsql/ws/index.ts index 1ccfa783cb..806c64eab8 100644 --- a/drizzle-orm/src/libsql/ws/index.ts +++ b/drizzle-orm/src/libsql/ws/index.ts @@ -1,6 +1,6 @@ import { type Client, type Config, createClient } from '@libsql/client/ws'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< @@ -9,9 +9,9 @@ export function drizzle< TClient extends Client = Client, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -34,19 +34,15 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); + const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/migrator.ts b/drizzle-orm/src/migrator.ts index aef9db32ce..dadb81e695 100644 --- a/drizzle-orm/src/migrator.ts +++ b/drizzle-orm/src/migrator.ts @@ -1,5 +1,6 @@ import crypto from 'node:crypto'; -import fs from 'node:fs'; +import fs, { existsSync, readdirSync } from 'node:fs'; +import { join } from 'node:path'; export interface KitConfig { out: string; @@ -26,17 +27,30 @@ export interface MigratorInitFailResponse { exitCode: 'databaseMigrations' | 'localMigrations'; } -export function readMigrationFiles(config: MigrationConfig): MigrationMeta[] { +/** Only gets returned if migrator failed with `init: true` used by `drizzle-kit pull --init`*/ +export interface MigratorInitFailResponse { + exitCode: 'databaseMigrations' | 'localMigrations'; +} + +export function formatToMillis(dateStr: string): number { + const year = parseInt(dateStr.slice(0, 4), 10); + const month = parseInt(dateStr.slice(4, 6), 10) - 1; + const day = parseInt(dateStr.slice(6, 8), 10); + const hour = parseInt(dateStr.slice(8, 10), 10); + const minute = parseInt(dateStr.slice(10, 12), 10); + const second = parseInt(dateStr.slice(12, 14), 10); + + return Date.UTC(year, month, day, hour, minute, second); +} + +function readMigrationFilesOLD(config: MigrationConfig): MigrationMeta[] { const migrationFolderTo = config.migrationsFolder; const migrationQueries: MigrationMeta[] = []; const journalPath = `${migrationFolderTo}/meta/_journal.json`; - if (!fs.existsSync(journalPath)) { - throw new Error(`Can't find meta/_journal.json file`); - } - const journalAsString = fs.readFileSync(`${migrationFolderTo}/meta/_journal.json`).toString(); + const journalAsString = fs.readFileSync(journalPath).toString(); const journal = JSON.parse(journalAsString) as { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; @@ -65,3 +79,46 @@ export function readMigrationFiles(config: MigrationConfig): MigrationMeta[] { return migrationQueries; } + +export function readMigrationFiles(config: MigrationConfig): MigrationMeta[] { + if (fs.existsSync(`${config.migrationsFolder}/meta/_journal.json`)) { + // it means user has folders V2 + // we need to warn to up the folders version but still apply migrations + console.log( + '\nWarning: We detected that you have old drizzle-kit migration folders. We suggest to upgrade drizzle-kit and run "drizzle-kit up"\n', + ); + return readMigrationFilesOLD(config); + } + + const migrationFolderTo = config.migrationsFolder; + + const migrationQueries: MigrationMeta[] = []; + + const migrations = readdirSync(migrationFolderTo) + .map((subdir) => ({ path: join(migrationFolderTo, subdir, 'migration.sql'), name: subdir })) + .filter((it) => existsSync(it.path)); + + migrations.sort(); + + for (const migration of migrations) { + const migrationPath = migration.path; + const migrationDate = migration.name.slice(0, 14); + + const query = fs.readFileSync(migrationPath).toString(); + + const result = query.split('--> statement-breakpoint').map((it) => { + return it; + }); + + const millis = formatToMillis(migrationDate); + + migrationQueries.push({ + sql: result, + bps: true, + folderMillis: millis, + hash: crypto.createHash('sha256').update(query).digest('hex'), + }); + } + + return migrationQueries; +} diff --git a/drizzle-orm/src/mssql-core/alias.ts b/drizzle-orm/src/mssql-core/alias.ts new file mode 100644 index 0000000000..be646fb9c8 --- /dev/null +++ b/drizzle-orm/src/mssql-core/alias.ts @@ -0,0 +1,11 @@ +import { TableAliasProxyHandler } from '~/alias.ts'; +import type { BuildAliasTable } from './query-builders/select.types.ts'; +import type { MsSqlTable } from './table.ts'; +import type { MsSqlView } from './view.ts'; + +export function alias( + table: TTable, + alias: TAlias, +): BuildAliasTable { + return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; +} diff --git a/drizzle-orm/src/mssql-core/checks.ts b/drizzle-orm/src/mssql-core/checks.ts new file mode 100644 index 0000000000..fc580c9d8a --- /dev/null +++ b/drizzle-orm/src/mssql-core/checks.ts @@ -0,0 +1,32 @@ +import { entityKind } from '~/entity.ts'; +import type { SQL } from '~/sql/sql.ts'; +import type { MsSqlTable } from './table.ts'; + +export class CheckBuilder { + static readonly [entityKind]: string = 'MsSqlCheckBuilder'; + + protected brand!: 'MsSqlConstraintBuilder'; + + constructor(public name: string, public value: SQL) {} + + /** @internal */ + build(table: MsSqlTable): Check { + return new Check(table, this); + } +} + +export class Check { + static readonly [entityKind]: string = 'MsSqlCheck'; + + readonly name: string; + readonly value: SQL; + + constructor(public table: MsSqlTable, builder: CheckBuilder) { + this.name = builder.name; + this.value = builder.value; + } +} + +export function check(name: string, value: SQL): CheckBuilder { + return new CheckBuilder(name, value); +} diff --git a/drizzle-orm/src/mssql-core/columns/all.ts b/drizzle-orm/src/mssql-core/columns/all.ts new file mode 100644 index 0000000000..e6501aa234 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/all.ts @@ -0,0 +1,49 @@ +import { bigint } from './bigint.ts'; +import { binary } from './binary.ts'; +import { bit } from './bit.ts'; +import { char } from './char.ts'; +import { customType } from './custom.ts'; +import { date } from './date.ts'; +import { datetime } from './datetime.ts'; +import { datetime2 } from './datetime2.ts'; +import { datetimeoffset } from './datetimeoffset.ts'; +import { decimal } from './decimal.ts'; +import { float } from './float.ts'; +import { int } from './int.ts'; +import { numeric } from './numeric.ts'; +import { real } from './real.ts'; +import { smallint } from './smallint.ts'; +import { ntext, text } from './text.ts'; +import { time } from './time.ts'; +import { tinyint } from './tinyint.ts'; +import { varbinary } from './varbinary.ts'; +import { nvarchar, varchar } from './varchar.ts'; + +export function getMsSqlColumnBuilders() { + return { + bigint, + binary, + bit, + char, + customType, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + int, + real, + numeric, + smallint, + text, + ntext, + time, + tinyint, + varbinary, + varchar, + nvarchar, + }; +} + +export type MsSqlColumnBuilders = ReturnType; diff --git a/drizzle-orm/src/mssql-core/columns/bigint.ts b/drizzle-orm/src/mssql-core/columns/bigint.ts new file mode 100644 index 0000000000..f33364cae1 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/bigint.ts @@ -0,0 +1,70 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable, MsSqlTable } from '~/mssql-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlBigIntBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: TMode extends 'string' ? 'string int64' : TMode extends 'number' ? 'number int53' : 'bigint int64'; + data: TMode extends 'string' ? string : TMode extends 'number' ? number : bigint; + driverParam: string; +}, MsSqlBigIntConfig> { + static override readonly [entityKind]: string = 'MsSqlBigIntBuilder'; + + constructor(name: string, config: MsSqlBigIntConfig) { + const { mode } = config; + super( + name, + mode === 'string' ? 'string int64' : mode === 'number' ? 'number int53' : 'bigint int64' as any, + mode === 'string' ? 'MsSqlBigIntString' : mode === 'number' ? 'MsSqlBigIntNumber' : 'MsSqlBigInt', + ); + this.config.mode = mode; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlBigInt( + table, + this.config, + ); + } +} + +export class MsSqlBigInt> + extends MsSqlColumnWithIdentity +{ + static override readonly [entityKind]: string = 'MsSqlBigInt'; + + readonly mode: 'number' | 'bigint' | 'string' = this.config.mode; + + getSQLType(): string { + return `bigint`; + } + + constructor(table: MsSqlTable, config: MsSqlBigIntBuilder<'string' | 'number' | 'bigint'>['config']) { + super(table, config); + this.mode = config.mode; + } + + override mapFromDriverValue(value: string): T['data'] { + return this.mode === 'string' ? value.toString() : this.mode === 'number' ? Number(value) : BigInt(value); + } +} + +interface MsSqlBigIntConfig { + mode: T; +} + +export function bigint( + config: MsSqlBigIntConfig, +): MsSqlBigIntBuilder; +export function bigint( + name: string, + config: MsSqlBigIntConfig, +): MsSqlBigIntBuilder; +export function bigint(a: string | MsSqlBigIntConfig, b?: MsSqlBigIntConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + return new MsSqlBigIntBuilder(name, config); +} diff --git a/drizzle-orm/src/mssql-core/columns/binary.ts b/drizzle-orm/src/mssql-core/columns/binary.ts new file mode 100644 index 0000000000..f5dd9d0a6e --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/binary.ts @@ -0,0 +1,58 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumn, MsSqlColumnBuilder } from './common.ts'; + +export class MsSqlBinaryBuilder extends MsSqlColumnBuilder< + { + dataType: 'object buffer'; + data: Buffer; + driverParam: Buffer; + }, + MsSqlBinaryConfig & { + setLength: boolean; + } +> { + static override readonly [entityKind]: string = 'MsSqlBinaryBuilder'; + + constructor(name: string, length: number | undefined) { + super(name, 'object buffer', 'MsSqlBinary'); + this.config.length = length ?? 1; + this.config.setLength = length !== undefined; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlBinary(table, this.config); + } +} + +export class MsSqlBinary> extends MsSqlColumn< + T, + MsSqlBinaryConfig & { setLength: boolean } +> { + static override readonly [entityKind]: string = 'MsSqlBinary'; + + getSQLType(): string { + return this.config.setLength ? `binary(${this.length})` : `binary`; + } +} + +export interface MsSqlBinaryConfig { + length?: number; +} + +export function binary( + config?: MsSqlBinaryConfig, +): MsSqlBinaryBuilder; +export function binary( + name: string, + config?: MsSqlBinaryConfig, +): MsSqlBinaryBuilder; +export function binary(a?: string | MsSqlBinaryConfig, b: MsSqlBinaryConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + return new MsSqlBinaryBuilder(name, config.length); +} diff --git a/drizzle-orm/src/mssql-core/columns/bit.ts b/drizzle-orm/src/mssql-core/columns/bit.ts new file mode 100644 index 0000000000..e35019aa1a --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/bit.ts @@ -0,0 +1,37 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlBitBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'boolean'; + data: boolean; + driverParam: number | string; +}> { + static override readonly [entityKind]: string = 'MsSqlBitBuilder'; + + constructor(name: string) { + super(name, 'boolean', 'MsSqlBit'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlBit(table, this.config); + } +} + +export class MsSqlBit> extends MsSqlColumnWithIdentity { + static override readonly [entityKind]: string = 'MsSqlBit'; + + getSQLType(): string { + return `bit`; + } + + override mapFromDriverValue = Boolean; +} + +export function bit(name?: string) { + return new MsSqlBitBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/mssql-core/columns/char.ts b/drizzle-orm/src/mssql-core/columns/char.ts new file mode 100644 index 0000000000..b5e223ea10 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/char.ts @@ -0,0 +1,97 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { MsSqlColumn, MsSqlColumnBuilder } from './common.ts'; + +export class MsSqlCharBuilder extends MsSqlColumnBuilder<{ + dataType: Equal extends true ? 'string' : 'string enum'; + data: TEnum[number]; + driverParam: number | string; + enumValues: TEnum; +}, MsSqlCharConfig> { + static override readonly [entityKind]: string = 'MsSqlCharBuilder'; + + /** @internal */ + constructor(name: string, config: MsSqlCharConfig) { + super(name, config.enum?.length ? 'string enum' : 'string', 'MsSqlChar'); + this.config.length = config.length ?? 1; + this.config.enum = config.enum; + this.config.nonUnicode = config.nonUnicode; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlChar( + table, + this.config, + ); + } +} + +export class MsSqlChar> + extends MsSqlColumn> +{ + static override readonly [entityKind]: string = 'MsSqlChar'; + + override readonly enumValues = this.config.enum; + + readonly nonUnicode: boolean = this.config.nonUnicode; + + getSQLType(): string { + return this.length === undefined + ? this.nonUnicode ? `nchar` : `char` + : this.nonUnicode + ? `nchar(${this.length})` + : `char(${this.length})`; + } +} + +export type MsSqlCharConfig = + & MsSqlCharConfigInitial + & { + nonUnicode: boolean; + }; + +export type MsSqlCharConfigInitial< + TEnum extends string[] | readonly string[] | undefined = string[] | readonly string[] | undefined, +> = { + length?: number; + enum?: TEnum; +}; + +export function char>( + config?: MsSqlCharConfigInitial>, +): MsSqlCharBuilder>; +export function char>( + name: string, + config?: MsSqlCharConfigInitial>, +): MsSqlCharBuilder>; +export function char( + a?: string | MsSqlCharConfigInitial, + b?: MsSqlCharConfigInitial, +): any { + const { name, config } = getColumnNameAndConfig(a, b); + + return new MsSqlCharBuilder(name, { ...config, nonUnicode: false } as any); +} + +export function nchar>( + config?: MsSqlCharConfigInitial>, +): MsSqlCharBuilder>; +export function nchar>( + name: string, + config?: MsSqlCharConfigInitial>, +): MsSqlCharBuilder>; +export function nchar( + a?: string | MsSqlCharConfigInitial, + b?: MsSqlCharConfigInitial, +): any { + const { name, config } = getColumnNameAndConfig(a, b); + return new MsSqlCharBuilder(name, { + ...config, + nonUnicode: true, + } as any); +} diff --git a/drizzle-orm/src/mssql-core/columns/common.ts b/drizzle-orm/src/mssql-core/columns/common.ts new file mode 100644 index 0000000000..57e3d92248 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/common.ts @@ -0,0 +1,161 @@ +import { ColumnBuilder } from '~/column-builder.ts'; +import type { + ColumnBuilderBaseConfig, + ColumnBuilderExtraConfig, + ColumnBuilderRuntimeConfig, + ColumnType, + HasGenerated, + NotNull, +} from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { Column } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { ForeignKey, UpdateDeleteAction } from '~/mssql-core/foreign-keys.ts'; +import { ForeignKeyBuilder } from '~/mssql-core/foreign-keys.ts'; +import type { AnyMsSqlTable, MsSqlTable } from '~/mssql-core/table.ts'; +import type { SQL } from '~/sql/index.ts'; +import type { Update } from '~/utils.ts'; + +export type MsSqlColumns = Record>; + +export interface ReferenceConfig { + ref: () => MsSqlColumn; + actions: { + onUpdate?: UpdateDeleteAction; + onDelete?: UpdateDeleteAction; + }; +} + +export interface MsSqlGeneratedColumnConfig { + mode?: 'virtual' | 'persisted'; +} + +export abstract class MsSqlColumnBuilder< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends ColumnBuilder { + static override readonly [entityKind]: string = 'MsSqlColumnBuilder'; + + private foreignKeyConfigs: ReferenceConfig[] = []; + + references(ref: ReferenceConfig['ref'], actions: ReferenceConfig['actions'] = {}): this { + this.foreignKeyConfigs.push({ ref, actions }); + return this; + } + + unique(name?: string): this { + this.config.isUnique = true; + this.config.uniqueName = name; + return this; + } + + generatedAlwaysAs( + as: SQL | T['data'] | (() => SQL), + config?: MsSqlGeneratedColumnConfig, + ): HasGenerated { + this.config.generated = { + as, + type: 'always', + mode: config?.mode ?? 'virtual', + }; + return this as any; + } + + /** @internal */ + buildForeignKeys(column: MsSqlColumn, table: MsSqlTable): ForeignKey[] { + return this.foreignKeyConfigs.map(({ ref, actions }) => { + return ((ref, actions) => { + const builder = new ForeignKeyBuilder(() => { + const foreignColumn = ref(); + return { columns: [column], foreignColumns: [foreignColumn] }; + }); + if (actions.onUpdate) { + builder.onUpdate(actions.onUpdate); + } + if (actions.onDelete) { + builder.onDelete(actions.onDelete); + } + return builder.build(table); + })(ref, actions); + }); + } + + /** @internal */ + abstract build( + table: AnyMsSqlTable<{ name: TTableName }>, + ): MsSqlColumn; +} + +// To understand how to use `MsSqlColumn` and `AnyMsSqlColumn`, see `Column` and `AnyColumn` documentation. +export abstract class MsSqlColumn< + T extends ColumnBaseConfig = ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends Column { + static override readonly [entityKind]: string = 'MsSqlColumn'; + + /** @internal */ + override readonly table: MsSqlTable; + + constructor( + table: MsSqlTable, + config: ColumnBuilderRuntimeConfig & TRuntimeConfig, + ) { + super(table, config); + this.table = table; + } + + /** @internal */ + override shouldDisableInsert(): boolean { + return false; + } +} + +export type AnyMsSqlColumn> = {}> = MsSqlColumn< + Required, TPartial>> +>; + +export interface MsSqlColumnWithIdentityConfig { + identity: { seed?: number; increment?: number } | undefined; +} + +export abstract class MsSqlColumnBuilderWithIdentity< + T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig< + ColumnType + >, + TRuntimeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends MsSqlColumnBuilder { + static override readonly [entityKind]: string = 'MsSqlColumnBuilderWithAutoIncrement'; + + constructor(name: string, dataType: T['dataType'], columnType: string) { + super(name, dataType, columnType); + } + + identity(): NotNull>; + identity(config: { seed: number; increment: number }): NotNull>; + identity(config?: { seed: number; increment: number }): NotNull> { + this.config.identity = { + seed: config ? config.seed : 1, + increment: config ? config.increment : 1, + }; + this.config.hasDefault = true; + this.config.notNull = true; + return this as NotNull>; + } +} + +export abstract class MsSqlColumnWithIdentity< + T extends ColumnBaseConfig = ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlColumnWithAutoIncrement'; + + readonly identity = this.config.identity; + + override shouldDisableInsert(): boolean { + return !!this.identity; + } +} diff --git a/drizzle-orm/src/mssql-core/columns/custom.ts b/drizzle-orm/src/mssql-core/columns/custom.ts new file mode 100644 index 0000000000..d21e795cfb --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/custom.ts @@ -0,0 +1,369 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { ColumnBuilderBaseConfig } from '~/index.ts'; +import type { AnyMsSqlTable, MsSqlTable } from '~/mssql-core/table.ts'; +import type { SQL, SQLGenerator } from '~/sql/sql.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumn, MsSqlColumnBuilder } from './common.ts'; + +export type ConvertCustomConfig> = + & { + dataType: 'custom'; + data: T['data']; + driverParam: T['driverData']; + } + & (T['notNull'] extends true ? { notNull: true } : {}) + & (T['default'] extends true ? { hasDefault: true } : {}); + +export interface MsSqlCustomColumnInnerConfig { + customTypeValues: CustomTypeValues; +} + +export class MsSqlCustomColumnBuilder> extends MsSqlColumnBuilder< + T, + { + fieldConfig: CustomTypeValues['config']; + customTypeParams: CustomTypeParams; + } +> { + static override readonly [entityKind]: string = 'MsSqlCustomColumnBuilder'; + + constructor( + name: string, + fieldConfig: CustomTypeValues['config'], + customTypeParams: CustomTypeParams, + ) { + super(name, 'custom', 'MsSqlCustomColumn'); + this.config.fieldConfig = fieldConfig; + this.config.customTypeParams = customTypeParams; + } + + /** @internal */ + build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlCustomColumn( + table, + this.config, + ); + } +} + +export class MsSqlCustomColumn> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlCustomColumn'; + + private sqlName: string; + private mapTo?: (value: T['data']) => T['driverParam']; + private mapFrom?: (value: T['driverParam']) => T['data']; + private mapJson?: (value: unknown) => T['data']; + private forJsonSelect?: (name: SQL, sql: SQLGenerator) => SQL; + + constructor( + table: MsSqlTable, + config: MsSqlCustomColumnBuilder['config'], + ) { + super(table, config); + this.sqlName = config.customTypeParams.dataType(config.fieldConfig); + this.mapTo = config.customTypeParams.toDriver; + this.mapFrom = config.customTypeParams.fromDriver; + this.mapJson = config.customTypeParams.fromJson; + this.forJsonSelect = config.customTypeParams.forJsonSelect; + } + + getSQLType(): string { + return this.sqlName; + } + + override mapFromDriverValue(value: T['driverParam']): T['data'] { + return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; + } + + mapFromJsonValue(value: unknown): T['data'] { + return typeof this.mapJson === 'function' ? this.mapJson(value) : this.mapFromDriverValue(value) as T['data']; + } + + jsonSelectIdentifier(identifier: SQL, sql: SQLGenerator): SQL { + if (typeof this.forJsonSelect === 'function') return this.forJsonSelect(identifier, sql); + + const rawType = this.getSQLType().toLowerCase(); + const parenPos = rawType.indexOf('('); + const type = (parenPos + 1) ? rawType.slice(0, parenPos) : rawType; + + switch (type) { + case 'binary': + case 'varbinary': + case 'time': + case 'datetime': + case 'datetime2': + case 'decimal': + case 'float': + case 'bigint': { + return sql`cast(${identifier} as char)`; + } + default: { + return identifier; + } + } + } + + override mapToDriverValue(value: T['data']): T['driverParam'] { + return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; + } +} + +export type CustomTypeValues = { + /** + * Required type for custom column, that will infer proper type model + * + * Examples: + * + * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` + * + * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` + */ + data: unknown; + + /** + * Type helper, that represents what type database driver is returning for specific database data type + * + * Needed only in case driver's output and input for type differ + * + * Defaults to {@link driverData} + */ + driverOutput?: unknown; + + /** + * Type helper, that represents what type database driver is accepting for specific database data type + */ + driverData?: unknown; + + /** + * Type helper, that represents what type field returns after being aggregated to JSON + */ + jsonData?: unknown; + + /** + * What config type should be used for {@link CustomTypeParams} `dataType` generation + */ + config?: Record; + + /** + * Whether the config argument should be required or not + * @default false + */ + configRequired?: boolean; + + /** + * If your custom data type should be notNull by default you can use `notNull: true` + * + * @example + * const customSerial = customType<{ data: number, notNull: true, default: true }>({ + * dataType() { + * return 'serial'; + * }, + * }); + */ + notNull?: boolean; + + /** + * If your custom data type has default you can use `default: true` + * + * @example + * const customSerial = customType<{ data: number, notNull: true, default: true }>({ + * dataType() { + * return 'serial'; + * }, + * }); + */ + default?: boolean; +}; + +export interface CustomTypeParams { + /** + * Database data type string representation, that is used for migrations + * @example + * ``` + * `jsonb`, `text` + * ``` + * + * If database data type needs additional params you can use them from `config` param + * @example + * ``` + * `varchar(256)`, `numeric(2,3)` + * ``` + * + * To make `config` be of specific type please use config generic in {@link CustomTypeValues} + * + * @example + * Usage example + * ``` + * dataType() { + * return 'boolean'; + * }, + * ``` + * Or + * ``` + * dataType(config) { + * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; + * } + * ``` + */ + dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; + + /** + * Optional mapping function, that is used to transform inputs from desired to be used in code format to one suitable for driver + * @example + * For example, when using jsonb we need to map JS/TS object to string before writing to database + * ``` + * toDriver(value: TData): string { + * return JSON.stringify(value); + * } + * ``` + */ + toDriver?: (value: T['data']) => T['driverData'] | SQL; + + /** + * Optional mapping function, that is used for transforming data returned by driver to desired column's output format + * @example + * For example, when using timestamp we need to map string Date representation to JS Date + * ``` + * fromDriver(value: string): Date { + * return new Date(value); + * } + * ``` + * + * It'll cause the returned data to change from: + * ``` + * { + * customField: "2025-04-07 03:25:16.635"; + * } + * ``` + * to: + * ``` + * { + * customField: new Date("2025-04-07 03:25:16.635"); + * } + * ``` + */ + fromDriver?: (value: 'driverOutput' extends keyof T ? T['driverOutput'] : T['driverData']) => T['data']; + + /** + * Optional mapping function, that is used for transforming data returned by transofmed to JSON in database data to desired format + * + * Used by [relational queries](https://orm.drizzle.team/docs/rqb-v2) + * + * Defaults to {@link fromDriver} function + * @example + * For example, when querying bigint column via [RQB](https://orm.drizzle.team/docs/rqb-v2) or [JSON functions](https://orm.drizzle.team/docs/json-functions), the result field will be returned as it's string representation, as opposed to bigint from regular query + * To handle that, we need a separate function to handle such field's mapping: + * ``` + * fromJson(value: string): bigint { + * return BigInt(value); + * }, + * ``` + * + * It'll cause the returned data to change from: + * ``` + * { + * customField: "5044565289845416380"; + * } + * ``` + * to: + * ``` + * { + * customField: 5044565289845416380n; + * } + * ``` + */ + fromJson?: (value: T['jsonData']) => T['data']; + + /** + * Optional selection modifier function, that is used for modifying selection of column inside [JSON functions](https://orm.drizzle.team/docs/json-functions) + * + * Additional mapping that could be required for such scenarios can be handled using {@link fromJson} function + * + * Used by [relational queries](https://orm.drizzle.team/docs/rqb-v2) + * + * Following types are being casted to text by default: `binary`, `varbinary`, `time`, `datetime`, `decimal`, `float`, `bigint` + * @example + * For example, when using bigint we need to cast field to text to preserve data integrity + * ``` + * forJsonSelect(identifier: SQL, sql: SQLGenerator): SQL { + * return sql`cast(${identifier} as char)` + * }, + * ``` + * + * This will change query from: + * ``` + * SELECT + * json_build_object('bigint', `t`.`bigint`) + * FROM + * ( + * SELECT + * `table`.`custom_bigint` AS `bigint` + * FROM + * `table` + * ) AS `t` + * ``` + * to: + * ``` + * SELECT + * json_build_object('bigint', `t`.`bigint`) + * FROM + * ( + * SELECT + * cast(`table`.`custom_bigint` as char) AS `bigint` + * FROM + * `table` + * ) AS `t` + * ``` + * + * Returned by query object will change from: + * ``` + * { + * bigint: 5044565289845416000; // Partial data loss due to direct conversion to JSON format + * } + * ``` + * to: + * ``` + * { + * bigint: "5044565289845416380"; // Data is preserved due to conversion of field to text before JSON-ification + * } + * ``` + */ + forJsonSelect?: (identifier: SQL, sql: SQLGenerator) => SQL; +} + +/** + * Custom mssql database data type generator + */ +export function customType( + customTypeParams: CustomTypeParams, +): Equal extends true ? { + & T['config']>( + fieldConfig: TConfig, + ): MsSqlCustomColumnBuilder>; + ( + dbName: string, + fieldConfig: T['config'], + ): MsSqlCustomColumnBuilder>; + } + : { + (): MsSqlCustomColumnBuilder>; + & T['config']>( + fieldConfig?: TConfig, + ): MsSqlCustomColumnBuilder>; + ( + dbName: string, + fieldConfig?: T['config'], + ): MsSqlCustomColumnBuilder>; + } +{ + return ( + a?: string | T['config'], + b?: T['config'], + ): MsSqlCustomColumnBuilder> => { + const { name, config } = getColumnNameAndConfig(a, b); + return new MsSqlCustomColumnBuilder(name, config, customTypeParams); + }; +} diff --git a/drizzle-orm/src/mssql-core/columns/date.common.ts b/drizzle-orm/src/mssql-core/columns/date.common.ts new file mode 100644 index 0000000000..c122a5371e --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/date.common.ts @@ -0,0 +1,23 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnType } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import { sql } from '~/sql/sql.ts'; +import { MsSqlColumnBuilder } from './common.ts'; + +export abstract class MsSqlDateColumnBaseBuilder< + T extends ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends MsSqlColumnBuilder { + static override readonly [entityKind]: string = 'MsSqlDateColumnBuilder'; + + defaultGetDate() { + return this.default(sql`(getdate())`); + } +} + +export type DatetimePrecision = 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7; + +export interface MsSqlDatetimeConfig { + mode?: TMode; + precision?: DatetimePrecision; +} diff --git a/drizzle-orm/src/mssql-core/columns/date.ts b/drizzle-orm/src/mssql-core/columns/date.ts new file mode 100644 index 0000000000..108791812a --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/date.ts @@ -0,0 +1,105 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable, MsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumn } from './common.ts'; +import { MsSqlDateColumnBaseBuilder } from './date.common.ts'; + +export class MsSqlDateBuilder extends MsSqlDateColumnBaseBuilder<{ + dataType: 'object date'; + data: Date; + driverParam: string | number; +}> { + static override readonly [entityKind]: string = 'MsSqlDateBuilder'; + + constructor(name: string) { + super(name, 'object date', 'MsSqlDate'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDate(table, this.config); + } +} + +export class MsSqlDate> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlDate'; + + constructor( + table: MsSqlTable, + config: MsSqlDateBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `date`; + } + + override mapFromDriverValue(value: Date | string): Date { + return new Date(value); + } +} + +export class MsSqlDateStringBuilder extends MsSqlDateColumnBaseBuilder<{ + dataType: 'string date'; + data: string; + driverParam: string | number; +}> { + static override readonly [entityKind]: string = 'MsSqlDateStringBuilder'; + + constructor(name: string) { + super(name, 'string date', 'MsSqlDateString'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDateString( + table, + this.config, + ); + } +} + +export class MsSqlDateString> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlDateString'; + + constructor( + table: MsSqlTable, + config: MsSqlDateStringBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `date`; + } + + override mapFromDriverValue(value: Date | string | null): string | null { + return typeof value === 'string' ? value : value?.toISOString().split('T')[0] ?? null; + } +} + +export interface MsSqlDateConfig { + mode?: TMode; +} + +export function date( + config?: MsSqlDateConfig, +): Equal extends true ? MsSqlDateStringBuilder : MsSqlDateBuilder; +export function date( + name: string, + config?: MsSqlDateConfig, +): Equal extends true ? MsSqlDateStringBuilder : MsSqlDateBuilder; +export function date(a?: string | MsSqlDateConfig, b?: MsSqlDateConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + + if (config?.mode === 'string') { + return new MsSqlDateStringBuilder(name); + } + return new MsSqlDateBuilder(name); +} diff --git a/drizzle-orm/src/mssql-core/columns/datetime.ts b/drizzle-orm/src/mssql-core/columns/datetime.ts new file mode 100644 index 0000000000..83c99804da --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/datetime.ts @@ -0,0 +1,103 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable, MsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumn } from './common.ts'; +import { MsSqlDateColumnBaseBuilder } from './date.common.ts'; + +export class MsSqlDateTimeBuilder extends MsSqlDateColumnBaseBuilder<{ + dataType: 'object date'; + data: Date; + driverParam: string | Date; +}, MsSqlDatetimeConfig> { + static override readonly [entityKind]: string = 'MsSqlDateTimeBuilder'; + + constructor(name: string) { + super(name, 'object date', 'MsSqlDateTime'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDateTime( + table, + this.config, + ); + } +} + +export class MsSqlDateTime> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlDateTime'; + + constructor( + table: MsSqlTable, + config: MsSqlDateTimeBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `datetime`; + } +} + +export class MsSqlDateTimeStringBuilder extends MsSqlDateColumnBaseBuilder<{ + dataType: 'string datetime'; + data: string; + driverParam: string | Date; +}, MsSqlDatetimeConfig> { + static override readonly [entityKind]: string = 'MsSqlDateTimeStringBuilder'; + + constructor(name: string) { + super(name, 'string datetime', 'MsSqlDateTimeString'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDateTimeString( + table, + this.config, + ); + } +} + +export class MsSqlDateTimeString> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlDateTimeString'; + + constructor( + table: MsSqlTable, + config: MsSqlDateTimeStringBuilder['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return 'datetime'; + } + + override mapFromDriverValue(value: Date | string | null): string | null { + return typeof value === 'string' ? value : value?.toISOString() ?? null; + } +} + +export interface MsSqlDatetimeConfig { + mode?: TMode; +} + +export function datetime( + config?: MsSqlDatetimeConfig, +): Equal extends true ? MsSqlDateTimeStringBuilder : MsSqlDateTimeBuilder; +export function datetime( + name: string, + config?: MsSqlDatetimeConfig, +): Equal extends true ? MsSqlDateTimeStringBuilder : MsSqlDateTimeBuilder; +export function datetime(a?: string | MsSqlDatetimeConfig, b?: MsSqlDatetimeConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new MsSqlDateTimeStringBuilder(name); + } + return new MsSqlDateTimeBuilder(name); +} diff --git a/drizzle-orm/src/mssql-core/columns/datetime2.ts b/drizzle-orm/src/mssql-core/columns/datetime2.ts new file mode 100644 index 0000000000..5c02133ae1 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/datetime2.ts @@ -0,0 +1,110 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable, MsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumn } from './common.ts'; +import type { MsSqlDatetimeConfig } from './date.common.ts'; +import { MsSqlDateColumnBaseBuilder } from './date.common.ts'; + +export class MsSqlDateTime2Builder extends MsSqlDateColumnBaseBuilder<{ + dataType: 'object date'; + data: Date; + driverParam: string | Date; +}, MsSqlDatetimeConfig> { + static override readonly [entityKind]: string = 'MsSqlDateTime2Builder'; + + constructor(name: string, config: MsSqlDatetimeConfig | undefined) { + super(name, 'object date', 'MsSqlDateTime2'); + this.config.precision = config?.precision; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDateTime2( + table, + this.config, + ); + } +} + +export class MsSqlDateTime2> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlDateTime2'; + + readonly precision: number | undefined; + + constructor( + table: MsSqlTable, + config: MsSqlDateTime2Builder['config'], + ) { + super(table, config); + this.precision = config.precision; + } + + getSQLType(): string { + const precision = this.precision === undefined ? '' : `(${this.precision})`; + return `datetime2${precision}`; + } +} + +export class MsSqlDateTime2StringBuilder extends MsSqlDateColumnBaseBuilder<{ + dataType: 'string datetime'; + data: string; + driverParam: string | Date; +}, MsSqlDatetimeConfig> { + static override readonly [entityKind]: string = 'MsSqlDateTime2StringBuilder'; + + constructor(name: string, config: MsSqlDatetimeConfig | undefined) { + super(name, 'string datetime', 'MsSqlDateTime2String'); + this.config.precision = config?.precision; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDateTime2String( + table, + this.config, + ); + } +} + +export class MsSqlDateTime2String> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlDateTime2String'; + + readonly precision: number | undefined; + + constructor( + table: MsSqlTable, + config: MsSqlDateTime2StringBuilder['config'], + ) { + super(table, config); + this.precision = config.precision; + } + + getSQLType(): string { + const precision = this.precision === undefined ? '' : `(${this.precision})`; + return `datetime2${precision}`; + } + + override mapFromDriverValue(value: Date | string | null): string | null { + return typeof value === 'string' ? value : value?.toISOString() ?? null; + } +} +export function datetime2( + config?: MsSqlDatetimeConfig, +): Equal extends true ? MsSqlDateTime2StringBuilder : MsSqlDateTime2Builder; +export function datetime2( + name: string, + config?: MsSqlDatetimeConfig, +): Equal extends true ? MsSqlDateTime2StringBuilder + : MsSqlDateTime2Builder; +export function datetime2(a?: string | MsSqlDatetimeConfig, b?: MsSqlDatetimeConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new MsSqlDateTime2StringBuilder(name, config); + } + return new MsSqlDateTime2Builder(name, config); +} diff --git a/drizzle-orm/src/mssql-core/columns/datetimeoffset.ts b/drizzle-orm/src/mssql-core/columns/datetimeoffset.ts new file mode 100644 index 0000000000..05fd21b681 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/datetimeoffset.ts @@ -0,0 +1,113 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable, MsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumn } from './common.ts'; +import type { MsSqlDatetimeConfig } from './date.common.ts'; +import { MsSqlDateColumnBaseBuilder } from './date.common.ts'; + +export class MsSqlDateTimeOffsetBuilder extends MsSqlDateColumnBaseBuilder<{ + dataType: 'object date'; + data: Date; + driverParam: string | Date; +}, MsSqlDatetimeConfig> { + static override readonly [entityKind]: string = 'MsSqlDateTimeOffsetBuilder'; + + constructor(name: string, config: MsSqlDatetimeConfig | undefined) { + super(name, 'object date', 'MsSqlDateTimeOffset'); + this.config.precision = config?.precision; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDateTimeOffset( + table, + this.config, + ); + } +} + +export class MsSqlDateTimeOffset> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlDateTimeOffset'; + + readonly precision: number | undefined; + + constructor( + table: MsSqlTable, + config: MsSqlDateTimeOffsetBuilder['config'], + ) { + super(table, config); + this.precision = config.precision; + } + + getSQLType(): string { + const precision = this.precision === undefined ? '' : `(${this.precision})`; + return `datetimeoffset${precision}`; + } +} + +export class MsSqlDateTimeOffsetStringBuilder extends MsSqlDateColumnBaseBuilder<{ + dataType: 'string datetime'; + data: string; + driverParam: string | Date; +}, MsSqlDatetimeConfig> { + static override readonly [entityKind]: string = 'MsSqlDateTimeOffsetStringBuilder'; + + constructor(name: string, config: MsSqlDatetimeConfig | undefined) { + super(name, 'string datetime', 'MsSqlDateTimeOffsetString'); + this.config.precision = config?.precision; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDateTimeOffsetString( + table, + this.config, + ); + } +} + +export class MsSqlDateTimeOffsetString> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlDateTimeOffsetString'; + + readonly precision: number | undefined; + + constructor( + table: MsSqlTable, + config: MsSqlDateTimeOffsetStringBuilder['config'], + ) { + super(table, config); + this.precision = config.precision; + } + + getSQLType(): string { + const precision = this.precision === undefined ? '' : `(${this.precision})`; + return `datetimeoffset${precision}`; + } + + override mapFromDriverValue(value: Date | string | null): string | null { + return typeof value === 'string' ? value : value?.toISOString() ?? null; + } +} + +export function datetimeoffset(): MsSqlDateTimeOffsetBuilder; +export function datetimeoffset( + config?: MsSqlDatetimeConfig, +): Equal extends true ? MsSqlDateTimeOffsetStringBuilder + : MsSqlDateTimeOffsetBuilder; +export function datetimeoffset( + name: string, + config?: MsSqlDatetimeConfig, +): Equal extends true ? MsSqlDateTimeOffsetStringBuilder + : MsSqlDateTimeOffsetBuilder; +export function datetimeoffset(a?: string | MsSqlDatetimeConfig, b?: MsSqlDatetimeConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new MsSqlDateTimeOffsetStringBuilder(name, config); + } + return new MsSqlDateTimeOffsetBuilder(name, config); +} diff --git a/drizzle-orm/src/mssql-core/columns/decimal.ts b/drizzle-orm/src/mssql-core/columns/decimal.ts new file mode 100644 index 0000000000..de98e47988 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/decimal.ts @@ -0,0 +1,184 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlDecimalBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'string numeric'; + data: string; + driverParam: string; +}, MsSqlDecimalConfig> { + static override readonly [entityKind]: string = 'MsSqlDecimalBuilder'; + + constructor(name: string, config: MsSqlDecimalConfig | undefined) { + super(name, 'string numeric', 'MsSqlDecimal'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDecimal( + table, + this.config, + ); + } +} + +export class MsSqlDecimal> + extends MsSqlColumnWithIdentity +{ + static override readonly [entityKind]: string = 'MsSqlDecimal'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + + override mapFromDriverValue(value: unknown): string { + if (typeof value === 'string') return value; + + return String(value); + } + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `decimal(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'decimal'; + } else { + return `decimal(${this.precision})`; + } + } +} + +export class MsSqlDecimalNumberBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'number'; + data: number; + driverParam: string; +}, MsSqlDecimalConfig> { + static override readonly [entityKind]: string = 'MsSqlDecimalNumberBuilder'; + + constructor(name: string, config: MsSqlDecimalConfig | undefined) { + super(name, 'number', 'MsSqlDecimalNumber'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDecimalNumber( + table, + this.config, + ); + } +} + +export class MsSqlDecimalNumber> + extends MsSqlColumnWithIdentity +{ + static override readonly [entityKind]: string = 'MsSqlDecimalNumber'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + + override mapFromDriverValue(value: unknown): number { + if (typeof value === 'number') return value; + + return Number(value); + } + + override mapToDriverValue = String; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `decimal(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'decimal'; + } else { + return `decimal(${this.precision})`; + } + } +} + +export class MsSqlDecimalBigIntBuilder extends MsSqlColumnBuilderWithIdentity< + { + dataType: 'bigint int64'; + data: bigint; + driverParam: string; + }, + MsSqlDecimalConfig +> { + static override readonly [entityKind]: string = 'MsSqlDecimalBigIntBuilder'; + + constructor(name: string, config: MsSqlDecimalConfig | undefined) { + super(name, 'bigint int64', 'MsSqlDecimalBigInt'); + this.config.precision = config?.precision ?? 18; + this.config.scale = config?.scale ?? 0; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlDecimalBigInt( + table, + this.config, + ); + } +} + +export class MsSqlDecimalBigInt> + extends MsSqlColumnWithIdentity +{ + static override readonly [entityKind]: string = 'MsSqlDecimalBigInt'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + + override mapFromDriverValue = BigInt; + + override mapToDriverValue = String; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `decimal(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'decimal'; + } else { + return `decimal(${this.precision})`; + } + } +} + +export interface MsSqlDecimalConfig< + T extends 'string' | 'number' | 'bigint' = 'string' | 'number' | 'bigint', +> { + precision?: number; + scale?: number; + mode?: T; +} + +export function decimal( + config?: MsSqlDecimalConfig, +): Equal extends true ? MsSqlDecimalNumberBuilder + : Equal extends true ? MsSqlDecimalBigIntBuilder + : MsSqlDecimalBuilder; +export function decimal( + name: string, + config?: MsSqlDecimalConfig, +): Equal extends true ? MsSqlDecimalNumberBuilder + : Equal extends true ? MsSqlDecimalBigIntBuilder + : MsSqlDecimalBuilder; +export function decimal(a?: string | MsSqlDecimalConfig, b?: MsSqlDecimalConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + const mode = config?.mode; + return mode === 'number' + ? new MsSqlDecimalNumberBuilder(name, config) + : mode === 'bigint' + ? new MsSqlDecimalBigIntBuilder(name, config) + : new MsSqlDecimalBuilder(name, config); +} diff --git a/drizzle-orm/src/mssql-core/columns/float.ts b/drizzle-orm/src/mssql-core/columns/float.ts new file mode 100644 index 0000000000..229275171d --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/float.ts @@ -0,0 +1,54 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlFloatBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'number double'; + data: number; + driverParam: number; +}, MsSqlFloatConfig> { + static override readonly [entityKind]: string = 'MsSqlFloatBuilder'; + + constructor(name: string, config?: MsSqlFloatConfig) { + super(name, 'number double', 'MsSqlFloat'); + this.config.precision = config?.precision; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlFloat(table, this.config); + } +} + +export class MsSqlFloat> + extends MsSqlColumnWithIdentity +{ + static override readonly [entityKind]: string = 'MsSqlFloat'; + + readonly precision: number | undefined = this.config.precision; + + getSQLType(): string { + const precision = this.precision === undefined ? '' : `(${this.precision})`; + return `float${precision}`; + } +} + +export interface MsSqlFloatConfig { + precision?: number; +} + +export function float( + config?: MsSqlFloatConfig, +): MsSqlFloatBuilder; +export function float( + name: string, + config?: MsSqlFloatConfig, +): MsSqlFloatBuilder; +export function float(a?: string | MsSqlFloatConfig, b: MsSqlFloatConfig = {}) { + const { name, config } = getColumnNameAndConfig(a, b); + return new MsSqlFloatBuilder(name, config); +} diff --git a/drizzle-orm/src/mssql-core/columns/index.ts b/drizzle-orm/src/mssql-core/columns/index.ts new file mode 100644 index 0000000000..fcc2c30808 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/index.ts @@ -0,0 +1,21 @@ +export * from './bigint.ts'; +export * from './binary.ts'; +export * from './bit.ts'; +export * from './char.ts'; +export * from './common.ts'; +export * from './custom.ts'; +export * from './date.ts'; +export * from './datetime.ts'; +export * from './datetime2.ts'; +export * from './datetimeoffset.ts'; +export * from './decimal.ts'; +export * from './float.ts'; +export * from './int.ts'; +export * from './numeric.ts'; +export * from './real.ts'; +export * from './smallint.ts'; +export * from './text.ts'; +export * from './time.ts'; +export * from './tinyint.ts'; +export * from './varbinary.ts'; +export * from './varchar.ts'; diff --git a/drizzle-orm/src/mssql-core/columns/int.ts b/drizzle-orm/src/mssql-core/columns/int.ts new file mode 100644 index 0000000000..ad569404f9 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/int.ts @@ -0,0 +1,35 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlIntBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'number int32'; + data: number; + driverParam: number; +}> { + static override readonly [entityKind]: string = 'MsSqlIntBuilder'; + + constructor(name: string) { + super(name, 'number int32', 'MsSqlInt'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlInt(table, this.config); + } +} + +export class MsSqlInt> extends MsSqlColumnWithIdentity { + static override readonly [entityKind]: string = 'MsSqlInt'; + + getSQLType(): string { + return `int`; + } +} + +export function int(name?: string) { + return new MsSqlIntBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/mssql-core/columns/numeric.ts b/drizzle-orm/src/mssql-core/columns/numeric.ts new file mode 100644 index 0000000000..e5fbb95e5d --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/numeric.ts @@ -0,0 +1,181 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlNumericBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'string numeric'; + data: string; + driverParam: string; +}, MsSqlNumericConfig> { + static override readonly [entityKind]: string = 'MsSqlNumericBuilder'; + + constructor(name: string, config: MsSqlNumericConfig | undefined) { + super(name, 'string numeric', 'MsSqlNumeric'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlNumeric( + table, + this.config, + ); + } +} + +export class MsSqlNumeric> + extends MsSqlColumnWithIdentity +{ + static override readonly [entityKind]: string = 'MsSqlNumeric'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + + override mapFromDriverValue(value: unknown): string { + if (typeof value === 'string') return value; + + return String(value); + } + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `numeric(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'numeric'; + } else { + return `numeric(${this.precision})`; + } + } +} + +export class MsSqlNumericNumberBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'number'; + data: number; + driverParam: string; +}, MsSqlNumericConfig> { + static override readonly [entityKind]: string = 'MsSqlNumericNumberBuilder'; + + constructor(name: string, config: MsSqlNumericConfig | undefined) { + super(name, 'number', 'MsSqlNumericNumber'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlNumericNumber( + table, + this.config, + ); + } +} + +export class MsSqlNumericNumber> + extends MsSqlColumnWithIdentity +{ + static override readonly [entityKind]: string = 'MsSqlNumericNumber'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + + override mapFromDriverValue(value: unknown): number { + if (typeof value === 'number') return value; + + return Number(value); + } + + override mapToDriverValue = String; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `numeric(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'numeric'; + } else { + return `numeric(${this.precision})`; + } + } +} + +export class MsSqlNumericBigIntBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'bigint int64'; + data: bigint; + driverParam: string; +}, MsSqlNumericConfig> { + static override readonly [entityKind]: string = 'MsSqlNumericBigIntBuilder'; + + constructor(name: string, config: MsSqlNumericConfig | undefined) { + super(name, 'bigint int64', 'MsSqlNumericBigInt'); + this.config.precision = config?.precision; + this.config.scale = config?.scale; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlNumericBigInt( + table, + this.config, + ); + } +} + +export class MsSqlNumericBigInt> + extends MsSqlColumnWithIdentity +{ + static override readonly [entityKind]: string = 'MsSqlNumericBigInt'; + + readonly precision: number | undefined = this.config.precision; + readonly scale: number | undefined = this.config.scale; + + override mapFromDriverValue = BigInt; + + override mapToDriverValue = String; + + getSQLType(): string { + if (this.precision !== undefined && this.scale !== undefined) { + return `numeric(${this.precision},${this.scale})`; + } else if (this.precision === undefined) { + return 'numeric'; + } else { + return `numeric(${this.precision})`; + } + } +} + +export interface MsSqlNumericConfig< + T extends 'string' | 'number' | 'bigint' = 'string' | 'number' | 'bigint', +> { + precision?: number; + scale?: number; + mode?: T; +} + +export function numeric( + config?: MsSqlNumericConfig, +): Equal extends true ? MsSqlNumericNumberBuilder + : Equal extends true ? MsSqlNumericBigIntBuilder + : MsSqlNumericBuilder; +export function numeric( + name: string, + config?: MsSqlNumericConfig, +): Equal extends true ? MsSqlNumericNumberBuilder + : Equal extends true ? MsSqlNumericBigIntBuilder + : MsSqlNumericBuilder; +export function numeric(a?: string | MsSqlNumericConfig, b?: MsSqlNumericConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + const mode = config?.mode; + return mode === 'number' + ? new MsSqlNumericNumberBuilder(name, config) + : mode === 'bigint' + ? new MsSqlNumericBigIntBuilder(name, config) + : new MsSqlNumericBuilder(name, config); +} diff --git a/drizzle-orm/src/mssql-core/columns/real.ts b/drizzle-orm/src/mssql-core/columns/real.ts new file mode 100644 index 0000000000..f74c04dd88 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/real.ts @@ -0,0 +1,35 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlRealBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'number float'; + data: number; + driverParam: number; +}> { + static override readonly [entityKind]: string = 'MsSqlRealBuilder'; + + constructor(name: string) { + super(name, 'number float', 'MsSqlReal'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlReal(table, this.config); + } +} + +export class MsSqlReal> extends MsSqlColumnWithIdentity { + static override readonly [entityKind]: string = 'MsSqlReal'; + + getSQLType(): string { + return 'real'; + } +} + +export function real(name?: string) { + return new MsSqlRealBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/mssql-core/columns/smallint.ts b/drizzle-orm/src/mssql-core/columns/smallint.ts new file mode 100644 index 0000000000..9195f96969 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/smallint.ts @@ -0,0 +1,45 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlSmallIntBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'number int16'; + data: number; + driverParam: number | string; +}> { + static override readonly [entityKind]: string = 'MsSqlSmallIntBuilder'; + + constructor(name: string) { + super(name, 'number int16', 'MsSqlSmallInt'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlSmallInt( + table, + this.config, + ); + } +} + +export class MsSqlSmallInt> extends MsSqlColumnWithIdentity { + static override readonly [entityKind]: string = 'MsSqlSmallInt'; + + getSQLType(): string { + return `smallint`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function smallint(name?: string) { + return new MsSqlSmallIntBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/mssql-core/columns/text.ts b/drizzle-orm/src/mssql-core/columns/text.ts new file mode 100644 index 0000000000..8427756f43 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/text.ts @@ -0,0 +1,89 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable, MsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { MsSqlColumn, MsSqlColumnBuilder } from './common.ts'; + +export class MsSqlTextBuilder extends MsSqlColumnBuilder< + { + dataType: Equal extends true ? 'string' : 'string enum'; + data: TEnum[number]; + driverParam: string; + enumValues: TEnum; + }, + { enumValues: TEnum | undefined; nonUnicode: boolean } +> { + static override readonly [entityKind]: string = 'MsSqlTextBuilder'; + + constructor(name: string, config: MsSqlTextConfig & { nonUnicode: boolean }) { + super(name, config.enum?.length ? 'string enum' : 'string', 'MsSqlText'); + this.config.enumValues = config.enum; + this.config.nonUnicode = config.nonUnicode; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlText(table, this.config); + } +} + +export class MsSqlText> + extends MsSqlColumn +{ + static override readonly [entityKind]: string = 'MsSqlText'; + + override readonly enumValues = this.config.enumValues; + + readonly nonUnicode: boolean = this.config.nonUnicode; + + constructor( + table: MsSqlTable, + config: MsSqlTextBuilder<[string, ...string[]]>['config'], + ) { + super(table, config); + } + + getSQLType(): string { + return `${this.nonUnicode ? 'n' : ''}text`; + } +} + +export type MsSqlTextConfig< + TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, +> = { + enum?: TEnum; +}; + +export function text>( + config?: MsSqlTextConfig>, +): MsSqlTextBuilder>; +export function text>( + name: string, + config?: MsSqlTextConfig>, +): MsSqlTextBuilder>; +export function text( + a?: string | MsSqlTextConfig, + b?: MsSqlTextConfig, +): any { + const { name, config } = getColumnNameAndConfig(a, b); + + return new MsSqlTextBuilder(name, { ...config, nonUnicode: false } as any); +} + +export function ntext>( + config?: MsSqlTextConfig>, +): MsSqlTextBuilder>; +export function ntext>( + name: string, + config?: MsSqlTextConfig>, +): MsSqlTextBuilder>; +export function ntext( + a?: string | MsSqlTextConfig, + b?: MsSqlTextConfig, +): any { + const { name, config } = getColumnNameAndConfig(a, b); + + return new MsSqlTextBuilder(name, { ...config, nonUnicode: true } as any); +} diff --git a/drizzle-orm/src/mssql-core/columns/time.ts b/drizzle-orm/src/mssql-core/columns/time.ts new file mode 100644 index 0000000000..a80efc03c3 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/time.ts @@ -0,0 +1,108 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumn, MsSqlColumnBuilder } from './common.ts'; + +export class MsSqlTimeStringBuilder extends MsSqlColumnBuilder< + { + dataType: 'string time'; + data: string; + driverParam: string | Date; + }, + TimeConfig +> { + static override readonly [entityKind]: string = 'MsSqlTimeBuilder'; + + constructor( + name: string, + config: TimeConfig | undefined, + ) { + super(name, 'string time', 'MsSqlTime'); + this.config.precision = config?.precision; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlTimeString( + table, + this.config, + ); + } +} + +export class MsSqlTimeString> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlTime'; + + readonly fsp: number | undefined = this.config.precision; + + getSQLType(): string { + const precision = this.fsp === undefined ? '' : `(${this.fsp})`; + return `time${precision}`; + } + + override mapFromDriverValue(value: Date | string | null): string | null { + return typeof value === 'string' ? value : value?.toISOString().split('T')[1]?.split('Z')[0] ?? null; + } +} + +export class MsSqlTimeBuilder extends MsSqlColumnBuilder< + { + dataType: 'object date'; + data: Date; + driverParam: string | Date; + }, + TimeConfig +> { + static override readonly [entityKind]: string = 'MsSqlTimeBuilder'; + + constructor( + name: string, + config: TimeConfig | undefined, + ) { + super(name, 'object date', 'MsSqlTime'); + this.config.precision = config?.precision; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlTime(table, this.config); + } +} + +export class MsSqlTime< + T extends ColumnBaseConfig<'object date'>, +> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlTime'; + + readonly fsp: number | undefined = this.config.precision; + + getSQLType(): string { + const precision = this.fsp === undefined ? '' : `(${this.fsp})`; + return `time${precision}`; + } +} +export type TimeConfig = { + precision?: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7; + mode?: TMode; +}; + +export function time( + config?: TimeConfig, +): Equal extends true ? MsSqlTimeStringBuilder : MsSqlTimeBuilder; +export function time( + name: string, + config?: TimeConfig, +): Equal extends true ? MsSqlTimeStringBuilder + : MsSqlTimeBuilder; +export function time(a?: string | TimeConfig, b?: TimeConfig) { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new MsSqlTimeStringBuilder(name, config); + } + return new MsSqlTimeBuilder(name, config); +} diff --git a/drizzle-orm/src/mssql-core/columns/tinyint.ts b/drizzle-orm/src/mssql-core/columns/tinyint.ts new file mode 100644 index 0000000000..148e627c45 --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/tinyint.ts @@ -0,0 +1,45 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { MsSqlColumnBuilderWithIdentity, MsSqlColumnWithIdentity } from './common.ts'; + +export class MsSqlTinyIntBuilder extends MsSqlColumnBuilderWithIdentity<{ + dataType: 'number uint8'; + data: number; + driverParam: number | string; +}> { + static override readonly [entityKind]: string = 'MsSqlTinyIntBuilder'; + + constructor(name: string) { + super(name, 'number uint8', 'MsSqlTinyInt'); + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlTinyInt( + table, + this.config, + ); + } +} + +export class MsSqlTinyInt> extends MsSqlColumnWithIdentity { + static override readonly [entityKind]: string = 'MsSqlTinyInt'; + + getSQLType(): string { + return `tinyint`; + } + + override mapFromDriverValue(value: number | string): number { + if (typeof value === 'string') { + return Number(value); + } + return value; + } +} + +export function tinyint(name?: string) { + return new MsSqlTinyIntBuilder(name ?? ''); +} diff --git a/drizzle-orm/src/mssql-core/columns/varbinary.ts b/drizzle-orm/src/mssql-core/columns/varbinary.ts new file mode 100644 index 0000000000..896d8fc55d --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/varbinary.ts @@ -0,0 +1,56 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { getColumnNameAndConfig } from '~/utils.ts'; +import { MsSqlColumn, MsSqlColumnBuilder } from './common.ts'; + +export class MsSqlVarBinaryBuilder extends MsSqlColumnBuilder<{ + dataType: 'object buffer'; + data: Buffer; + driverParam: Buffer; +}, MsSqlVarbinaryOptions & { rawLength: MsSqlVarbinaryOptions['length'] | undefined }> { + static override readonly [entityKind]: string = 'MsSqlVarBinaryBuilder'; + + /** @internal */ + constructor(name: string, config?: MsSqlVarbinaryOptions) { + super(name, 'object buffer', 'MsSqlVarBinary'); + this.config.length = typeof config?.length === 'number' ? config.length : config?.length === 'max' ? 2147483647 : 1; + this.config.rawLength = config?.length; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlVarBinary( + table, + this.config, + ); + } +} + +export class MsSqlVarBinary< + T extends ColumnBaseConfig<'object buffer'>, +> extends MsSqlColumn { + static override readonly [entityKind]: string = 'MsSqlVarBinary'; + + getSQLType(): string { + return this.config.rawLength === undefined ? `varbinary` : `varbinary(${this.config.rawLength})`; + } +} + +export interface MsSqlVarbinaryOptions { + length: number | 'max'; +} + +export function varbinary( + config?: MsSqlVarbinaryOptions, +): MsSqlVarBinaryBuilder; +export function varbinary( + name: string, + config?: MsSqlVarbinaryOptions, +): MsSqlVarBinaryBuilder; +export function varbinary(a?: string | MsSqlVarbinaryOptions, b?: MsSqlVarbinaryOptions) { + const { name, config } = getColumnNameAndConfig(a, b); + return new MsSqlVarBinaryBuilder(name, config); +} diff --git a/drizzle-orm/src/mssql-core/columns/varchar.ts b/drizzle-orm/src/mssql-core/columns/varchar.ts new file mode 100644 index 0000000000..21c9f2415d --- /dev/null +++ b/drizzle-orm/src/mssql-core/columns/varchar.ts @@ -0,0 +1,180 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlTable } from '~/mssql-core/table.ts'; +import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; +import { MsSqlColumn, MsSqlColumnBuilder } from './common.ts'; + +export class MsSqlVarCharBuilder extends MsSqlColumnBuilder<{ + dataType: Equal extends true ? 'string' : 'string enum'; + data: TEnum[number]; + driverParam: number | string; + enumValues: TEnum; +}, MsSqlVarCharConfig<'text', TEnum> & { rawLength: number | 'max' | undefined }> { + static override readonly [entityKind]: string = 'MsSqlVarCharBuilder'; + + /** @internal */ + constructor(name: string, config: MsSqlVarCharConfig<'text', TEnum>) { + super(name, config.enum?.length ? 'string enum' : 'string', 'MsSqlVarChar'); + this.config.length = typeof config?.length === 'number' ? config.length : config?.length === 'max' ? 2147483647 : 1; + this.config.rawLength = config?.length; + this.config.enum = config.enum; + this.config.nonUnicode = config.nonUnicode; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlVarChar( + table, + this.config, + ); + } +} + +export class MsSqlVarChar> extends MsSqlColumn< + T, + MsSqlVarCharConfig< + 'text', + T['enumValues'] + > & { rawLength: number | 'max' | undefined } +> { + static override readonly [entityKind]: string = 'MsSqlVarChar'; + + override readonly enumValues = this.config.enum; + + readonly nonUnicode: boolean = this.config.nonUnicode; + + getSQLType(): string { + return this.config.rawLength === undefined + ? this.nonUnicode ? `nvarchar` : `varchar` + : this.nonUnicode + ? `nvarchar(${this.config.rawLength})` + : `varchar(${this.config.rawLength})`; + } +} + +export class MsSqlVarCharJsonBuilder extends MsSqlColumnBuilder<{ + dataType: 'object json'; + data: unknown; + driverParam: string; +}, { length: number; nonUnicode: boolean; rawLength: number | 'max' | undefined }> { + static override readonly [entityKind]: string = 'MsSqlVarCharJsonBuilder'; + + /** @internal */ + constructor(name: string, config: { length: number | 'max' | undefined }) { + super(name, 'object json', 'MsSqlNVarCharJson'); + this.config.length = typeof config?.length === 'number' ? config.length : config?.length === 'max' ? 2147483647 : 1; + this.config.rawLength = config?.length; + this.config.nonUnicode = true; + } + + /** @internal */ + override build( + table: AnyMsSqlTable<{ name: TTableName }>, + ) { + return new MsSqlVarCharJson( + table, + this.config, + ); + } +} + +export class MsSqlVarCharJson> + extends MsSqlColumn +{ + static override readonly [entityKind]: string = 'MsSqlVarCharJson'; + + getSQLType(): string { + return this.config.rawLength === undefined + ? `nvarchar` + : `nvarchar(${this.config.rawLength})`; + } + + override mapFromDriverValue(value: string): T['data'] { + return JSON.parse(value); + } + + override mapToDriverValue(value: T['data']): string { + return JSON.stringify(value); + } +} + +export type MsSqlVarCharConfig< + TMode extends 'text' | 'json', + TEnum extends string[] | readonly string[] | undefined, +> = + & MsSqlVarCharConfigInitial + & { + nonUnicode: boolean; + }; + +export type MsSqlVarCharConfigInitial< + TMode extends 'text' | 'json' = 'text' | 'json', + TEnum extends string[] | readonly string[] | undefined = string[] | readonly string[] | undefined, +> = TMode extends 'text' ? { + mode?: TMode; + length?: number | 'max'; + enum?: TEnum; + } + : { + mode?: TMode; + length?: number | 'max'; + }; + +export function varchar(): MsSqlVarCharBuilder<[string, ...string[]]>; +export function varchar>( + config?: MsSqlVarCharConfigInitial<'text', T | Writable>, +): MsSqlVarCharBuilder>; +export function varchar>( + name: string, + config?: MsSqlVarCharConfigInitial<'text', T | Writable>, +): MsSqlVarCharBuilder>; +export function varchar( + a?: string | MsSqlVarCharConfigInitial<'text'>, + b?: MsSqlVarCharConfigInitial<'text'>, +): any { + const { name, config } = getColumnNameAndConfig>(a, b); + + return new MsSqlVarCharBuilder(name, { + ...config, + mode: 'text', + nonUnicode: false, + } as any); +} + +export function nvarchar< + U extends string, + T extends Readonly<[U, ...U[]]>, + TMode extends 'text' | 'json' = 'text' | 'json', +>( + config?: MsSqlVarCharConfigInitial>, +): Equal extends true ? MsSqlVarCharJsonBuilder + : MsSqlVarCharBuilder>; +export function nvarchar< + U extends string, + T extends Readonly<[U, ...U[]]>, + TMode extends 'text' | 'json' = 'text' | 'json', +>( + name: string, + config?: MsSqlVarCharConfigInitial>, +): Equal extends true ? MsSqlVarCharJsonBuilder + : MsSqlVarCharBuilder>; +export function nvarchar( + a?: string | MsSqlVarCharConfigInitial, + b?: MsSqlVarCharConfigInitial, +): any { + const { name, config } = getColumnNameAndConfig(a, b); + + if (config?.mode === 'json') { + return new MsSqlVarCharJsonBuilder(name, { + length: config.length, + }); + } + + return new MsSqlVarCharBuilder(name, { + length: config?.length, + enum: (config as any)?.enum, + nonUnicode: true, + }); +} diff --git a/drizzle-orm/src/mssql-core/db.ts b/drizzle-orm/src/mssql-core/db.ts new file mode 100644 index 0000000000..f5dd6eaded --- /dev/null +++ b/drizzle-orm/src/mssql-core/db.ts @@ -0,0 +1,391 @@ +import type * as V1 from '~/_relations.ts'; +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import { type ColumnsSelection, sql, type SQLWrapper } from '~/sql/sql.ts'; +import { WithSubquery } from '~/subquery.ts'; +import type { DrizzleTypeError } from '~/utils.ts'; +import type { MsSqlDialect } from './dialect.ts'; +import { + MsSqlDeleteBase, + MsSqlInsertBuilder, + MsSqlSelectBuilder, + MsSqlUpdateBuilder, + QueryBuilder, +} from './query-builders/index.ts'; +import { RelationalQueryBuilder } from './query-builders/query.ts'; +import type { SelectedFields } from './query-builders/select.types.ts'; +import type { + MsSqlSession, + MsSqlTransaction, + MsSqlTransactionConfig, + PreparedQueryHKTBase, + QueryResultHKT, + QueryResultKind, +} from './session.ts'; +import type { WithSubqueryWithSelection } from './subquery.ts'; +import type { MsSqlTable } from './table.ts'; + +export class MsSqlDatabase< + TQueryResult extends QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TFullSchema extends Record = {}, + TSchema extends V1.TablesRelationalConfig = V1.ExtractTablesWithRelations, +> { + static readonly [entityKind]: string = 'MsSqlDatabase'; + + declare readonly _: { + readonly schema: TSchema | undefined; + readonly tableNamesMap: Record; + }; + + _query: TFullSchema extends Record + ? DrizzleTypeError<'Seems like the schema generic is missing - did you forget to add it to your DB type?'> + : { + [K in keyof TSchema]: RelationalQueryBuilder; + }; + + constructor( + /** @internal */ + readonly dialect: MsSqlDialect, + /** @internal */ + readonly session: MsSqlSession, + schema: V1.RelationalSchemaConfig | undefined, + ) { + this._ = schema + ? { schema: schema.schema, tableNamesMap: schema.tableNamesMap } + : { schema: undefined, tableNamesMap: {} }; + this._query = {} as typeof this['_query']; + if (this._.schema) { + for (const [tableName, columns] of Object.entries(this._.schema)) { + (this._query as MsSqlDatabase>['_query'])[tableName] = + new RelationalQueryBuilder( + schema!.fullSchema, + this._.schema, + this._.tableNamesMap, + schema!.fullSchema[tableName] as MsSqlTable, + columns, + dialect, + session, + ); + } + } + } + + /** + * Creates a subquery that defines a temporary named result set as a CTE. + * + * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. + * + * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} + * + * @param alias The alias for the subquery. + * + * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries. + * + * @example + * + * ```ts + * // Create a subquery with alias 'sq' and use it in the select query + * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); + * + * const result = await db.with(sq).select().from(sq); + * ``` + * + * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: + * + * ```ts + * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query + * const sq = db.$with('sq').as(db.select({ + * name: sql`upper(${users.name})`.as('name'), + * }) + * .from(users)); + * + * const result = await db.with(sq).select({ name: sq.name }).from(sq); + * ``` + */ + $with(alias: TAlias) { + const self = this; + + return { + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder(self.dialect)); + } + + return new Proxy( + new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as WithSubqueryWithSelection; + }, + }; + } + + /** + * Incorporates a previously defined CTE (using `$with`) into the main query. + * + * This method allows the main query to reference a temporary named result set. + * + * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} + * + * @param queries The CTEs to incorporate into the main query. + * + * @example + * + * ```ts + * // Define a subquery 'sq' as a CTE using $with + * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); + * + * // Incorporate the CTE 'sq' into the main query and select from it + * const result = await db.with(sq).select().from(sq); + * ``` + */ + with(...queries: WithSubquery[]) { + const self = this; + + function select(): MsSqlSelectBuilder; + function select( + fields: TSelection, + ): MsSqlSelectBuilder; + function select(fields?: SelectedFields): MsSqlSelectBuilder { + return new MsSqlSelectBuilder({ + fields: fields ?? undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + }); + } + + function selectDistinct(): MsSqlSelectBuilder; + function selectDistinct( + fields: TSelection, + ): MsSqlSelectBuilder; + function selectDistinct( + fields?: SelectedFields, + ): MsSqlSelectBuilder { + return new MsSqlSelectBuilder({ + fields: fields ?? undefined, + session: self.session, + dialect: self.dialect, + withList: queries, + distinct: true, + }); + } + + return { select, selectDistinct }; + } + + /** + * Creates a select query. + * + * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select} + * + * @param fields The selection object. + * + * @example + * + * ```ts + * // Select all columns and all rows from the 'cars' table + * const allCars: Car[] = await db.select().from(cars); + * + * // Select specific columns and all rows from the 'cars' table + * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ + * id: cars.id, + * brand: cars.brand + * }) + * .from(cars); + * ``` + * + * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: + * + * ```ts + * // Select specific columns along with expression and all rows from the 'cars' table + * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ + * id: cars.id, + * lowerBrand: sql`lower(${cars.brand})`, + * }) + * .from(cars); + * ``` + */ + select(): MsSqlSelectBuilder; + select(fields: TSelection): MsSqlSelectBuilder; + select(fields?: SelectedFields): MsSqlSelectBuilder { + return new MsSqlSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect }); + } + + /** + * Adds `distinct` expression to the select query. + * + * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. + * + * Use `.from()` method to specify which table to select from. + * + * See docs: {@link https://orm.drizzle.team/docs/select#distinct} + * + * @param fields The selection object. + * + * @example + * ```ts + * // Select all unique rows from the 'cars' table + * await db.selectDistinct() + * .from(cars) + * .orderBy(cars.id, cars.brand, cars.color); + * + * // Select all unique brands from the 'cars' table + * await db.selectDistinct({ brand: cars.brand }) + * .from(cars) + * .orderBy(cars.brand); + * ``` + */ + selectDistinct(): MsSqlSelectBuilder; + selectDistinct( + fields: TSelection, + ): MsSqlSelectBuilder; + selectDistinct(fields?: SelectedFields): MsSqlSelectBuilder { + return new MsSqlSelectBuilder({ + fields: fields ?? undefined, + session: this.session, + dialect: this.dialect, + distinct: true, + }); + } + + /** + * Creates an update query. + * + * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. + * + * Use `.set()` method to specify which values to update. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param table The table to update. + * + * @example + * + * ```ts + * // Update all rows in the 'cars' table + * await db.update(cars).set({ color: 'red' }); + * + * // Update rows with filters and conditions + * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); + * ``` + */ + update(table: TTable): MsSqlUpdateBuilder { + return new MsSqlUpdateBuilder(table, this.session, this.dialect); + } + + /** + * Creates an insert query. + * + * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. + * + * See docs: {@link https://orm.drizzle.team/docs/insert} + * + * @param table The table to insert into. + * + * @example + * + * ```ts + * // Insert one row + * await db.insert(cars).values({ brand: 'BMW' }); + * + * // Insert multiple rows + * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); + * ``` + */ + insert(table: TTable): MsSqlInsertBuilder { + return new MsSqlInsertBuilder(table, this.session, this.dialect); + } + + /** + * Creates a delete query. + * + * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param table The table to delete from. + * + * @example + * + * ```ts + * // Delete all rows in the 'cars' table + * await db.delete(cars); + * + * // Delete rows with filters and conditions + * await db.delete(cars).where(eq(cars.color, 'green')); + * ``` + */ + delete(table: TTable): MsSqlDeleteBase { + return new MsSqlDeleteBase(table, this.session, this.dialect); + } + + execute( + query: SQLWrapper | string, + ): Promise> { + return this.session.execute((typeof query === 'string' ? sql.raw(query) : query).getSQL()); + } + + transaction( + transaction: ( + tx: MsSqlTransaction, + config?: MsSqlTransactionConfig, + ) => Promise, + config?: MsSqlTransactionConfig, + ): Promise { + return this.session.transaction(transaction, config); + } +} + +export type MySQLWithReplicas = Q & { $primary: Q }; + +export const withReplicas = < + HKT extends QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TFullSchema extends Record, + TSchema extends V1.TablesRelationalConfig, + Q extends MsSqlDatabase< + HKT, + TPreparedQueryHKT, + TFullSchema, + TSchema extends Record ? V1.ExtractTablesWithRelations : TSchema + >, +>( + primary: Q, + replicas: [Q, ...Q[]], + getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, +): MySQLWithReplicas => { + const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); + const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); + const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); + + const update: Q['update'] = (...args: [any]) => primary.update(...args); + const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); + const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); + const execute: Q['execute'] = (...args: [any]) => primary.execute(...args); + const transaction: Q['transaction'] = (...args: [any, any]) => primary.transaction(...args); + + return { + ...primary, + update, + insert, + delete: $delete, + execute, + transaction, + $primary: primary, + $replicas: replicas, + select, + selectDistinct, + with: $with, + get _query() { + return getReplica(replicas)._query; + }, + }; +}; diff --git a/drizzle-orm/src/mssql-core/dialect.ts b/drizzle-orm/src/mssql-core/dialect.ts new file mode 100644 index 0000000000..b1ffcc8811 --- /dev/null +++ b/drizzle-orm/src/mssql-core/dialect.ts @@ -0,0 +1,902 @@ +import * as V1 from '~/_relations.ts'; +import { + aliasedTable, + aliasedTableColumn, + getOriginalColumnFromAlias, + mapColumnsInAliasedSQLToAlias, + mapColumnsInSQLToAlias, +} from '~/alias.ts'; +import { CasingCache } from '~/casing.ts'; +import { Column } from '~/column.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { MigrationConfig, MigrationMeta, MigratorInitFailResponse } from '~/migrator.ts'; +import { Param, type QueryWithTypings, SQL, sql, type SQLChunk, View } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { getTableName, getTableUniqueName, Table } from '~/table.ts'; +import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; +import { and, DrizzleError, eq, type Name, ViewBaseConfig } from '../index.ts'; +import { MsSqlColumn } from './columns/common.ts'; +import type { MsSqlDeleteConfig } from './query-builders/delete.ts'; +import type { MsSqlInsertConfig } from './query-builders/insert.ts'; +import type { MsSqlSelectConfig, SelectedFieldsOrdered } from './query-builders/select.types.ts'; +import type { MsSqlUpdateConfig } from './query-builders/update.ts'; +import type { MsSqlSession } from './session.ts'; +import { MsSqlTable } from './table.ts'; +import { MsSqlViewBase } from './view-base.ts'; + +export interface MsSqlDialectConfig { + casing?: Casing; +} +export class MsSqlDialect { + static readonly [entityKind]: string = 'MsSqlDialect'; + + /** @internal */ + readonly casing: CasingCache; + + constructor(config?: MsSqlDialectConfig) { + this.casing = new CasingCache(config?.casing); + } + + async migrate( + migrations: MigrationMeta[], + session: MsSqlSession, + config: MigrationConfig, + ): Promise { + const migrationsTable = typeof config === 'string' + ? '__drizzle_migrations' + : config.migrationsTable ?? '__drizzle_migrations'; + const migrationsSchema = typeof config === 'string' ? 'drizzle' : config.migrationsSchema ?? 'drizzle'; + const migrationTableCreate = sql` + IF NOT EXISTS ( + SELECT 1 FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${migrationsTable} AND TABLE_SCHEMA = ${migrationsSchema} + ) + CREATE TABLE ${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)} ( + id bigint identity PRIMARY KEY, + hash text NOT NULL, + created_at bigint + ) + `; + + const migrationSchemaCreate = sql` + IF NOT EXISTS ( + SELECT 1 FROM sys.schemas WHERE name = ${migrationsSchema} + ) + EXEC(\'CREATE SCHEMA ${sql.identifier(migrationsSchema)}\') + `; + + await session.execute(migrationSchemaCreate); + await session.execute(migrationTableCreate); + + const { recordset: dbMigrations } = await session.execute( + sql`select id, hash, created_at from ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } order by created_at desc offset 0 rows fetch next 1 rows only`, + ); + + if (typeof config === 'object' && config.init) { + if (dbMigrations.length) { + return { exitCode: 'databaseMigrations' as const }; + } + + if (migrations.length > 1) { + return { exitCode: 'localMigrations' as const }; + } + + const [migration] = migrations; + + if (!migration) return; + + await session.execute( + sql`insert into ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } ([hash], [created_at]) values(${migration.hash}, ${migration.folderMillis})`, + ); + + return; + } + + const lastDbMigration = dbMigrations[0]; + await session.transaction(async (tx) => { + for (const migration of migrations) { + if ( + !lastDbMigration + || Number(lastDbMigration.created_at) < migration.folderMillis + ) { + for (const stmt of migration.sql) { + await tx.execute(sql.raw(stmt)); + } + await tx.execute( + sql`insert into ${sql.identifier(migrationsSchema)}.${ + sql.identifier(migrationsTable) + } ([hash], [created_at]) values(${migration.hash}, ${migration.folderMillis})`, + ); + } + } + }); + } + + escapeName(name: string): string { + return `[${name}]`; + } + + escapeParam(_num: number): string { + return `@par${_num}`; + } + + escapeString(str: string): string { + return `'${str.replace(/'/g, "''")}'`; + } + + buildDeleteQuery({ table, where, output }: MsSqlDeleteConfig): SQL { + const outputSql = output + ? sql` output ${this.buildSelectionOutput(output, { type: 'DELETED' })}` + : undefined; + + const whereSql = where ? sql` where ${where}` : undefined; + + return sql`delete from ${table}${outputSql}${whereSql}`; + } + + buildUpdateSet(table: MsSqlTable, set: UpdateSet): SQL { + const tableColumns = table[Table.Symbol.Columns]; + + const columnNames = Object.keys(tableColumns).filter((colName) => + set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined + ); + + const setSize = columnNames.length; + return sql.join(columnNames.flatMap((colName, i) => { + const col = tableColumns[colName]!; + + const onUpdateFnResult = col.onUpdateFn?.(); + const value = set[colName] ?? (is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col)); + const res = sql`${sql.identifier(this.casing.getColumnCasing(col))} = ${value}`; + + if (i < setSize - 1) { + return [res, sql.raw(', ')]; + } + return [res]; + })); + // const setEntries = Object.entries(set); + // + // const setSize = setEntries.length; + // return sql.join( + // setEntries + // .flatMap(([colName, value], i): SQL[] => { + // const col: MsSqlColumn = table[Table.Symbol.Columns][colName]!; + // const res = sql`${sql.identifier(col.name)} = ${value}`; + // if (i < setSize - 1) { + // return [res, sql.raw(', ')]; + // } + // return [res]; + // }), + // ); + } + + buildUpdateQuery({ table, set, where, output }: MsSqlUpdateConfig): SQL { + const setSql = this.buildUpdateSet(table, set); + + const outputSql = sql``; + + if (output) { + outputSql.append(sql` output `); + + if (output.inserted) { + outputSql.append(this.buildSelectionOutput(output.inserted, { type: 'INSERTED' })); + } + + if (output.deleted) { + if (output.inserted) outputSql.append(sql`, `); // add space if both are present + outputSql.append(this.buildSelectionOutput(output.deleted, { type: 'DELETED' })); + } + } + + const whereSql = where ? sql` where ${where}` : undefined; + + return sql`update ${table} set ${setSql}${outputSql}${whereSql}`; + } + + /** + * Builds selection SQL with provided fields/expressions + * + * Examples: + * + * `select from` + * + * `insert ... returning ` + * + * If `isSingleTable` is true, then columns won't be prefixed with table name + */ + private buildSelection( + fields: SelectedFieldsOrdered, + { isSingleTable = false }: { isSingleTable?: boolean } = {}, + ): SQL { + const columnsLen = fields.length; + + const chunks = fields + .flatMap(({ field }, i) => { + const chunk: SQLChunk[] = []; + + if (is(field, SQL.Aliased) && field.isSelectionField) { + chunk.push(sql.identifier(field.fieldAlias)); + } else if (is(field, SQL.Aliased) || is(field, SQL)) { + const query = is(field, SQL.Aliased) ? field.sql : field; + + if (isSingleTable) { + chunk.push( + new SQL( + query.queryChunks.map((c) => { + if (is(c, MsSqlColumn)) { + return sql.identifier(this.casing.getColumnCasing(c)); + } + return c; + }), + ), + ); + } else { + chunk.push(query); + } + + if (is(field, SQL.Aliased)) { + chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); + } + } else if (is(field, Column)) { + if (isSingleTable) { + chunk.push( + field.isAlias + ? sql`${sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field)))} as ${field}` + : sql.identifier(this.casing.getColumnCasing(field)), + ); + } else { + chunk.push(field.isAlias ? sql`${getOriginalColumnFromAlias(field)} as ${field}` : field); + } + } else if (is(field, Subquery)) { + const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; + + if (entries.length === 1) { + const entry = entries[0]![1]; + + const fieldDecoder = is(entry, SQL) + ? entry.decoder + : is(entry, Column) + ? { mapFromDriverValue: (v: any) => entry.mapFromDriverValue(v) } + : entry.sql.decoder; + + if (fieldDecoder) { + field._.sql.decoder = fieldDecoder; + } + } + chunk.push(field); + } + + if (i < columnsLen - 1) { + chunk.push(sql`, `); + } + + return chunk; + }); + + return sql.join(chunks); + } + + private buildSelectionOutput( + fields: SelectedFieldsOrdered, + { type }: { type: 'INSERTED' | 'DELETED' }, + ): SQL { + const columnsLen = fields.length; + + const chunks = fields + .flatMap(({ field }, i) => { + const chunk: SQLChunk[] = []; + + if (is(field, SQL.Aliased) && field.isSelectionField) { + chunk.push(sql.join([sql.raw(`${type}.`), sql.identifier(field.fieldAlias)])); + } else if (is(field, SQL.Aliased) || is(field, SQL)) { + const query = is(field, SQL.Aliased) ? field.sql : field; + + chunk.push( + new SQL( + query.queryChunks.map((c) => { + if (is(c, MsSqlColumn)) { + return sql.join([sql.raw(`${type}.`), sql.identifier(this.casing.getColumnCasing(c))]); + } + return c; + }), + ), + ); + + if (is(field, SQL.Aliased)) { + chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); + } + } else if (is(field, Column)) { + chunk.push( + sql.join([ + sql.raw(`${type}.`), + field.isAlias + ? sql`${sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field)))} as ${field}` + : sql.identifier(this.casing.getColumnCasing(field)), + ]), + ); + } + + if (i < columnsLen - 1) { + chunk.push(sql`, `); + } + + return chunk; + }); + + return sql.join(chunks); + } + + buildSelectQuery( + { + withList, + fields, + fieldsFlat, + where, + having, + table, + joins, + orderBy, + groupBy, + fetch, + for: _for, + top, + offset, + distinct, + setOperators, + }: MsSqlSelectConfig, + ): SQL { + const fieldsList = fieldsFlat ?? orderSelectedFields(fields); + for (const f of fieldsList) { + if ( + is(f.field, Column) + && getTableName(f.field.table) + !== (is(table, Subquery) + ? table._.alias + : is(table, MsSqlViewBase) + ? table[ViewBaseConfig].name + : is(table, SQL) + ? undefined + : getTableName(table)) + && !((table) => + joins?.some(({ alias }) => + alias === (table[Table.Symbol.IsAlias] ? getTableName(table) : table[Table.Symbol.BaseName]) + ))(f.field.table) + ) { + const tableName = getTableName(f.field.table); + throw new Error( + `Your "${ + f.path.join('->') + }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, + ); + } + } + + const isSingleTable = !joins || joins.length === 0; + + let withSql: SQL | undefined; + if (withList?.length) { + const withSqlChunks = [sql`with `]; + for (const [i, w] of withList.entries()) { + withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); + if (i < withList.length - 1) { + withSqlChunks.push(sql`, `); + } + } + withSqlChunks.push(sql` `); + withSql = sql.join(withSqlChunks); + } + + const distinctSql = distinct ? sql` distinct` : undefined; + + const topSql = top + ? sql` top(${top})` + : undefined; + + const selection = this.buildSelection(fieldsList, { isSingleTable }); + + const tableSql = (() => { + if (is(table, Table) && table[Table.Symbol.OriginalName] !== table[Table.Symbol.Name]) { + let fullName = sql`${sql.identifier(table[Table.Symbol.OriginalName])} ${ + sql.identifier(table[Table.Symbol.Name]) + }`; + if (table[Table.Symbol.Schema]) { + fullName = sql`${sql.identifier(table[Table.Symbol.Schema]!)}.${fullName}`; + } + return fullName; + } + + return table; + })(); + + const joinsArray: SQL[] = []; + + if (joins) { + for (const [index, joinMeta] of joins.entries()) { + if (index === 0) { + joinsArray.push(sql` `); + } + const table = joinMeta.table; + const lateralSql = joinMeta.lateral ? sql` lateral` : undefined; + + if (is(table, MsSqlTable)) { + const tableName = table[MsSqlTable.Symbol.Name]; + const tableSchema = table[MsSqlTable.Symbol.Schema]; + const origTableName = table[MsSqlTable.Symbol.OriginalName]; + const alias = tableName === origTableName ? undefined : joinMeta.alias; + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ + tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined + }${sql.identifier(origTableName)}${alias && sql` ${sql.identifier(alias)}`} on ${joinMeta.on}`, + ); + } else if (is(table, View)) { + const viewName = table[ViewBaseConfig].name; + const viewSchema = table[ViewBaseConfig].schema; + const origViewName = table[ViewBaseConfig].originalName; + const alias = viewName === origViewName ? undefined : joinMeta.alias; + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ + viewSchema ? sql`${sql.identifier(viewSchema)}.` : undefined + }${sql.identifier(origViewName)}${alias && sql` ${sql.identifier(alias)}`} on ${joinMeta.on}`, + ); + } else { + joinsArray.push( + sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${table} on ${joinMeta.on}`, + ); + } + if (index < joins.length - 1) { + joinsArray.push(sql` `); + } + } + } + + const joinsSql = sql.join(joinsArray); + + const whereSql = where ? sql` where ${where}` : undefined; + + const havingSql = having ? sql` having ${having}` : undefined; + + let orderBySql; + if (orderBy && orderBy.length > 0) { + orderBySql = sql` order by ${sql.join(orderBy, sql`, `)}`; + } + + let groupBySql; + if (groupBy && groupBy.length > 0) { + groupBySql = sql` group by ${sql.join(groupBy, sql`, `)}`; + } + + const offsetSql = offset === undefined ? undefined : sql` offset ${offset} rows`; + + const fetchSql = fetch === undefined ? undefined : sql` fetch next ${fetch} rows only`; + + let forSQL: SQL | undefined; + if (_for && _for.mode === 'json') { + forSQL = sql` for json ${sql.raw(_for.type)}${ + _for.options?.root ? sql` root(${sql.identifier(_for.options.root)})` : undefined + }${_for.options?.includeNullValues ? sql` include_null_values` : undefined}${ + _for.options?.withoutArrayWrapper ? sql` without_array_wrapper` : undefined + }`; + } + + const finalQuery = + sql`${withSql}select${distinctSql}${topSql} ${selection} from ${tableSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${offsetSql}${fetchSql}${forSQL}`; + + if (setOperators.length > 0) { + return this.buildSetOperations(finalQuery, setOperators); + } + + return finalQuery; + } + + buildSetOperations(leftSelect: SQL, setOperators: MsSqlSelectConfig['setOperators']): SQL { + const [setOperator, ...rest] = setOperators; + + if (!setOperator) { + throw new Error('Cannot pass undefined values to any set operator'); + } + + if (rest.length === 0) { + return this.buildSetOperationQuery({ leftSelect, setOperator }); + } + + // Some recursive magic here + return this.buildSetOperations( + this.buildSetOperationQuery({ leftSelect, setOperator }), + rest, + ); + } + + buildSetOperationQuery({ + leftSelect, + setOperator: { type, isAll, rightSelect, fetch, orderBy, offset }, + }: { leftSelect: SQL; setOperator: MsSqlSelectConfig['setOperators'][number] }): SQL { + const leftChunk = sql`(${leftSelect.getSQL()}) `; + const rightChunk = sql`(${rightSelect.getSQL()})`; + + let orderBySql; + if (orderBy && orderBy.length > 0) { + const orderByValues: (SQL | Name)[] = []; + + // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` + // which is invalid MsSql syntax, Table from one of the SELECTs cannot be used in global ORDER clause + for (const orderByUnit of orderBy) { + if (is(orderByUnit, MsSqlColumn)) { + orderByValues.push(sql.identifier(orderByUnit.name)); + } else if (is(orderByUnit, SQL)) { + for (let i = 0; i < orderByUnit.queryChunks.length; i++) { + const chunk = orderByUnit.queryChunks[i]; + + if (is(chunk, MsSqlColumn)) { + orderByUnit.queryChunks[i] = sql.identifier(chunk.name); + } + } + + orderByValues.push(sql`${orderByUnit}`); + } else { + orderByValues.push(sql`${orderByUnit}`); + } + } + + orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)} `; + } + + const offsetSql = offset === undefined ? undefined : sql` offset ${offset} rows`; + + const fetchSql = fetch === undefined ? undefined : sql` fetch next ${fetch} rows only`; + + const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); + + return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${offsetSql}${fetchSql}`; + } + + buildInsertQuery({ table, values, output }: MsSqlInsertConfig): SQL { + // const isSingleValue = values.length === 1; + const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; + const columns: Record = table[Table.Symbol.Columns]; + const colEntries: [string, MsSqlColumn][] = Object.entries(columns).filter( + ([_, col]) => !col.shouldDisableInsert(), + ); + + const insertOrder = colEntries.map(([, column]) => sql.identifier(this.casing.getColumnCasing(column))); + + for (const [valueIndex, value] of values.entries()) { + const valueList: (SQLChunk | SQL)[] = []; + for (const [fieldName, col] of colEntries) { + const colValue = value[fieldName]; + if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { + if (col.defaultFn !== undefined) { + const defaultFnResult = col.defaultFn(); + const defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); + valueList.push(defaultValue); + } else if (!col.default && col.onUpdateFn !== undefined) { + const onUpdateFnResult = col.onUpdateFn(); + const newValue = is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col); + valueList.push(newValue); + } else { + valueList.push(sql`default`); + } + } else { + valueList.push(colValue); + } + } + valuesSqlList.push(valueList); + if (valueIndex < values.length - 1) { + valuesSqlList.push(sql`, `); + } + } + + const valuesSql = insertOrder.length === 0 ? undefined : sql.join(valuesSqlList); + + const outputSql = output + ? sql` output ${this.buildSelectionOutput(output, { type: 'INSERTED' })}` + : undefined; + + return sql`insert into ${table} ${ + insertOrder.length === 0 ? sql`default` : insertOrder + }${outputSql} values ${valuesSql}`; + } + + sqlToQuery( + sql: SQL, + invokeSource?: 'indexes' | 'mssql-check' | 'mssql-view-with-schemabinding', + ): QueryWithTypings { + const res = sql.toQuery({ + casing: this.casing, + escapeName: this.escapeName, + escapeParam: this.escapeParam, + escapeString: this.escapeString, + invokeSource, + }); + return res; + } + + buildRelationalQuery({ + fullSchema, + schema, + tableNamesMap, + table, + tableConfig, + queryConfig: config, + tableAlias, + nestedQueryRelation, + joinOn, + }: { + fullSchema: Record; + schema: V1.TablesRelationalConfig; + tableNamesMap: Record; + table: MsSqlTable; + tableConfig: V1.TableRelationalConfig; + queryConfig: true | V1.DBQueryConfig<'many', true>; + tableAlias: string; + nestedQueryRelation?: V1.Relation; + joinOn?: SQL; + }): V1.BuildRelationalQueryResult { + let selection: V1.BuildRelationalQueryResult['selection'] = []; + let limit, offset, orderBy: MsSqlSelectConfig['orderBy'] = [], where; + + if (config === true) { + const selectionEntries = Object.entries(tableConfig.columns); + selection = selectionEntries.map(( + [key, value], + ) => ({ + dbKey: value.name, + tsKey: key, + field: aliasedTableColumn(value as MsSqlColumn, tableAlias), + relationTableTsKey: undefined, + isJson: false, + selection: [], + })); + } else { + const aliasedColumns = Object.fromEntries( + Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), + ); + + if (config.where) { + const whereSql = typeof config.where === 'function' + ? config.where(aliasedColumns, V1.getOperators()) + : config.where; + where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); + } + + const fieldsSelection: { tsKey: string; value: MsSqlColumn | SQL.Aliased }[] = []; + let selectedColumns: string[] = []; + + // Figure out which columns to select + if (config.columns) { + let isIncludeMode = false; + + for (const [field, value] of Object.entries(config.columns)) { + if (value === undefined) { + continue; + } + + if (field in tableConfig.columns) { + if (!isIncludeMode && value === true) { + isIncludeMode = true; + } + selectedColumns.push(field); + } + } + + if (selectedColumns.length > 0) { + selectedColumns = isIncludeMode + ? selectedColumns.filter((c) => config.columns?.[c] === true) + : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); + } + } else { + // Select all columns if selection is not specified + selectedColumns = Object.keys(tableConfig.columns); + } + + for (const field of selectedColumns) { + const column = tableConfig.columns[field]! as MsSqlColumn; + fieldsSelection.push({ tsKey: field, value: column }); + } + + let selectedRelations: { + tsKey: string; + queryConfig: true | V1.DBQueryConfig<'many', false>; + relation: V1.Relation; + }[] = []; + + // Figure out which relations to select + if (config.with) { + selectedRelations = Object.entries(config.with) + .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) + .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); + } + + let extras; + + // Figure out which extras to select + if (config.extras) { + extras = typeof config.extras === 'function' + ? config.extras(aliasedColumns, { sql }) + : config.extras; + for (const [tsKey, value] of Object.entries(extras)) { + fieldsSelection.push({ + tsKey, + value: mapColumnsInAliasedSQLToAlias(value, tableAlias), + }); + } + } + + // Transform `fieldsSelection` into `selection` + // `fieldsSelection` shouldn't be used after this point + for (const { tsKey, value } of fieldsSelection) { + selection.push({ + dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, + tsKey, + field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, + relationTableTsKey: undefined, + isJson: false, + selection: [], + }); + } + + let orderByOrig = typeof config.orderBy === 'function' + ? config.orderBy(aliasedColumns, V1.getOrderByOperators()) + : config.orderBy ?? []; + if (!Array.isArray(orderByOrig)) { + orderByOrig = [orderByOrig]; + } + orderBy = orderByOrig.map((orderByValue) => { + if (is(orderByValue, Column)) { + return aliasedTableColumn(orderByValue, tableAlias) as MsSqlColumn; + } + return mapColumnsInSQLToAlias(orderByValue, tableAlias); + }); + + limit = config.limit; + offset = config.offset; + + // Process all relations + for ( + const { + tsKey: selectedRelationTsKey, + queryConfig: selectedRelationConfigValue, + relation, + } of selectedRelations + ) { + const normalizedRelation = V1.normalizeRelation(schema, tableNamesMap, relation); + const relationTableName = getTableUniqueName(relation.referencedTable); + const relationTableTsName = tableNamesMap[relationTableName]!; + const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; + const joinOn = and( + ...normalizedRelation.fields.map((field, i) => + eq( + aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), + aliasedTableColumn(field, tableAlias), + ) + ), + ); + const builtRelation = this.buildRelationalQuery({ + fullSchema, + schema, + tableNamesMap, + table: fullSchema[relationTableTsName] as MsSqlTable, + tableConfig: schema[relationTableTsName]!, + queryConfig: is(relation, V1.One) + ? (selectedRelationConfigValue === true + ? { limit: 1 } + : { ...selectedRelationConfigValue, limit: 1 }) + : selectedRelationConfigValue, + tableAlias: relationTableAlias, + joinOn, + nestedQueryRelation: relation, + }); + let fieldSql = sql`(${builtRelation.sql} for json auto, include_null_values)${ + nestedQueryRelation ? sql` as ${sql.identifier(relationTableAlias)}` : undefined + }`; + if (is(relation, V1.Many)) { + fieldSql = sql`${fieldSql}`; + } + const field = fieldSql.as(selectedRelationTsKey); + selection.push({ + dbKey: selectedRelationTsKey, + tsKey: selectedRelationTsKey, + field, + relationTableTsKey: relationTableTsName, + isJson: true, + selection: builtRelation.selection, + }); + } + } + + if (selection.length === 0) { + throw new DrizzleError({ + message: + `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}"). You need to have at least one item in "columns", "with" or "extras". If you need to select all columns, omit the "columns" key or set it to undefined.`, + }); + } + + let result; + + where = and(joinOn, where); + + if (nestedQueryRelation) { + let field = sql`${ + sql.join( + selection.map((sel) => { + return is(sel.field, MsSqlColumn) + ? sql.identifier(sel.field.name) + : is(sel.field, SQL.Aliased) + ? sel.isJson + ? sel.field.sql + : sql`${sel.field.sql} as ${sql.identifier(sel.field.fieldAlias)}` + : sel.field; + }), + sql`, `, + ) + }`; + if (is(nestedQueryRelation, V1.Many)) { + field = sql`${field}`; + } + const nestedSelection = [{ + dbKey: 'data', + tsKey: 'data', + field, + isJson: true, + relationTableTsKey: tableConfig.tsName, + selection, + }]; + + result = aliasedTable(table, tableAlias); + + const top = offset ? undefined : limit ?? undefined; + const fetch = offset && limit ? limit : undefined; + + // Mssql required order by to be present in the query if using offset and fetch(limit) + // With order by 1, the query will be ordered by the first column in the selection + if (orderBy.length === 0 && offset !== undefined && fetch !== undefined) { + orderBy = [sql`1`]; + } + + result = this.buildSelectQuery({ + table: is(result, MsSqlTable) ? result : new Subquery(result, {}, tableAlias), + fields: {}, + fieldsFlat: nestedSelection.map(({ field }) => ({ + path: [], + field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + })), + where, + top, + offset, + fetch, + orderBy, + setOperators: [], + }); + } else { + const top = offset ? undefined : limit ?? undefined; + const fetch = offset && limit ? limit : undefined; + + if (orderBy.length === 0 && offset !== undefined && fetch !== undefined) { + orderBy = [sql`1`]; + } + result = this.buildSelectQuery({ + table: aliasedTable(table, tableAlias), + fields: {}, + fieldsFlat: selection.map(({ field }) => ({ + path: [], + field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, + })), + where, + top, + offset, + fetch, + orderBy, + setOperators: [], + }); + } + + return { + tableTsKey: tableConfig.tsName, + sql: result, + selection, + }; + } +} diff --git a/drizzle-orm/src/mssql-core/expressions.ts b/drizzle-orm/src/mssql-core/expressions.ts new file mode 100644 index 0000000000..9ec4d5c1e1 --- /dev/null +++ b/drizzle-orm/src/mssql-core/expressions.ts @@ -0,0 +1,31 @@ +import { bindIfParam } from '~/sql/expressions/index.ts'; +import type { Placeholder, SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; +import { sql } from '~/sql/sql.ts'; +import type { MsSqlColumn } from './columns/index.ts'; + +export * from '~/sql/expressions/index.ts'; + +// type ConcatValue = string | number | Placeholder | SQLWrapper; +// +// export function concat(...values: [ConcatValue, ConcatValue, ...ConcatValue[]]): SQL { +// return sql.join(values.map((value) => sql`${value}`), sql`, `) as SQL; +// } + +export function concat(column: MsSqlColumn | SQL.Aliased, value: string | Placeholder | SQLWrapper): SQL { + return sql`${column} || ${bindIfParam(value, column)}`; +} + +export function substring( + column: MsSqlColumn | SQL.Aliased, + { from, for: _for }: { from?: number | Placeholder | SQLWrapper; for?: number | Placeholder | SQLWrapper }, +): SQL { + const chunks: SQLChunk[] = [sql`substring(`, column]; + if (from !== undefined) { + chunks.push(sql` from `, bindIfParam(from, column)); + } + if (_for !== undefined) { + chunks.push(sql` for `, bindIfParam(_for, column)); + } + chunks.push(sql`)`); + return sql.join(chunks); +} diff --git a/drizzle-orm/src/mssql-core/foreign-keys.ts b/drizzle-orm/src/mssql-core/foreign-keys.ts new file mode 100644 index 0000000000..0ef560604d --- /dev/null +++ b/drizzle-orm/src/mssql-core/foreign-keys.ts @@ -0,0 +1,121 @@ +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlColumn, MsSqlColumn } from './columns/index.ts'; +import type { MsSqlTable } from './table.ts'; + +export type UpdateDeleteAction = 'cascade' | 'no action' | 'set null' | 'set default'; + +export type Reference = () => { + readonly name?: string; + readonly columns: MsSqlColumn[]; + readonly foreignTable: MsSqlTable; + readonly foreignColumns: MsSqlColumn[]; +}; + +export class ForeignKeyBuilder { + static readonly [entityKind]: string = 'MsSqlForeignKeyBuilder'; + + /** @internal */ + reference: Reference; + + /** @internal */ + _onUpdate: UpdateDeleteAction | undefined; + + /** @internal */ + _onDelete: UpdateDeleteAction | undefined; + + constructor( + config: () => { + name?: string; + columns: MsSqlColumn[]; + foreignColumns: MsSqlColumn[]; + }, + actions?: { + onUpdate?: UpdateDeleteAction; + onDelete?: UpdateDeleteAction; + } | undefined, + ) { + this.reference = () => { + const { name, columns, foreignColumns } = config(); + return { name, columns, foreignTable: foreignColumns[0]!.table as MsSqlTable, foreignColumns }; + }; + if (actions) { + this._onUpdate = actions.onUpdate; + this._onDelete = actions.onDelete; + } + } + + onUpdate(action: UpdateDeleteAction): this { + this._onUpdate = action; + return this; + } + + onDelete(action: UpdateDeleteAction): this { + this._onDelete = action; + return this; + } + + /** @internal */ + build(table: MsSqlTable): ForeignKey { + return new ForeignKey(table, this); + } +} + +export type AnyForeignKeyBuilder = ForeignKeyBuilder; + +export class ForeignKey { + static readonly [entityKind]: string = 'MsSqlForeignKey'; + + readonly reference: Reference; + readonly onUpdate: UpdateDeleteAction | undefined; + readonly onDelete: UpdateDeleteAction | undefined; + + constructor(readonly table: MsSqlTable, builder: ForeignKeyBuilder) { + this.reference = builder.reference; + this.onUpdate = builder._onUpdate; + this.onDelete = builder._onDelete; + } + + getName() { + const { name } = this.reference(); + return name; + } + + isNameExplicit() { + return !!this.reference().name; + } +} + +type ColumnsWithTable< + TTableName extends string, + TColumns extends MsSqlColumn[], +> = { [Key in keyof TColumns]: AnyMsSqlColumn<{ tableName: TTableName }> }; + +export type GetColumnsTable = ( + TColumns extends MsSqlColumn ? TColumns + : TColumns extends MsSqlColumn[] ? TColumns[number] + : never +) extends AnyMsSqlColumn<{ tableName: infer TTableName extends string }> ? TTableName + : never; + +export function foreignKey< + TTableName extends string, + TForeignTableName extends string, + TColumns extends [AnyMsSqlColumn<{ tableName: TTableName }>, ...AnyMsSqlColumn<{ tableName: TTableName }>[]], +>( + config: { + name?: string; + columns: TColumns; + foreignColumns: ColumnsWithTable; + }, +): ForeignKeyBuilder { + function mappedConfig() { + const { name, columns, foreignColumns } = config; + return { + name, + columns, + foreignColumns, + }; + } + + return new ForeignKeyBuilder(mappedConfig); +} diff --git a/drizzle-orm/src/mssql-core/index.ts b/drizzle-orm/src/mssql-core/index.ts new file mode 100644 index 0000000000..204e0af3c4 --- /dev/null +++ b/drizzle-orm/src/mssql-core/index.ts @@ -0,0 +1,17 @@ +export * from './alias.ts'; +export * from './checks.ts'; +export * from './columns/index.ts'; +export * from './db.ts'; +export * from './dialect.ts'; +export * from './foreign-keys.ts'; +export * from './indexes.ts'; +export * from './primary-keys.ts'; +export * from './query-builders/index.ts'; +export * from './schema.ts'; +export * from './session.ts'; +export * from './subquery.ts'; +export * from './table.ts'; +export * from './unique-constraint.ts'; +export * from './utils.ts'; +export * from './view-common.ts'; +export * from './view.ts'; diff --git a/drizzle-orm/src/mssql-core/indexes.ts b/drizzle-orm/src/mssql-core/indexes.ts new file mode 100644 index 0000000000..650d19be74 --- /dev/null +++ b/drizzle-orm/src/mssql-core/indexes.ts @@ -0,0 +1,90 @@ +import { entityKind } from '~/entity.ts'; +import type { SQL } from '~/sql/sql.ts'; +import type { AnyMsSqlColumn, MsSqlColumn } from './columns/index.ts'; +import type { MsSqlTable } from './table.ts'; + +interface IndexConfig { + name: string; + + columns: IndexColumn[]; + + /** + * If true, the index will be created as `create unique index` instead of `create index`. + */ + unique?: boolean; + + /** + * Condition for partial index. + */ + where?: SQL; +} + +export type IndexColumn = MsSqlColumn | SQL; + +export class IndexBuilderOn { + static readonly [entityKind]: string = 'MsSqlIndexBuilderOn'; + + constructor(private name: string, private unique: boolean) {} + + on(...columns: [IndexColumn, ...IndexColumn[]]): IndexBuilder { + return new IndexBuilder(this.name, columns, this.unique); + } +} + +export interface AnyIndexBuilder { + build(table: MsSqlTable): Index; +} + +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface IndexBuilder extends AnyIndexBuilder {} + +export class IndexBuilder implements AnyIndexBuilder { + static readonly [entityKind]: string = 'MsSqlIndexBuilder'; + + /** @internal */ + config: IndexConfig; + + constructor(name: string, columns: IndexColumn[], unique: boolean) { + this.config = { + name, + columns, + unique, + }; + } + + where(condition: SQL): this { + this.config.where = condition; + return this; + } + + /** @internal */ + build(table: MsSqlTable): Index { + return new Index(this.config, table); + } +} + +export class Index { + static readonly [entityKind]: string = 'MsSqlIndex'; + + readonly config: IndexConfig & { table: MsSqlTable }; + readonly isNameExplicit: boolean; + + constructor(config: IndexConfig, table: MsSqlTable) { + this.config = { ...config, table }; + this.isNameExplicit = !!config.name; + } +} + +export type GetColumnsTableName = TColumns extends + AnyMsSqlColumn<{ tableName: infer TTableName extends string }> | AnyMsSqlColumn< + { tableName: infer TTableName extends string } + >[] ? TTableName + : never; + +export function index(name: string): IndexBuilderOn { + return new IndexBuilderOn(name, false); +} + +export function uniqueIndex(name: string): IndexBuilderOn { + return new IndexBuilderOn(name, true); +} diff --git a/drizzle-orm/src/mssql-core/primary-keys.ts b/drizzle-orm/src/mssql-core/primary-keys.ts new file mode 100644 index 0000000000..3f320874f2 --- /dev/null +++ b/drizzle-orm/src/mssql-core/primary-keys.ts @@ -0,0 +1,52 @@ +import { entityKind } from '~/entity.ts'; +import type { AnyMsSqlColumn, MsSqlColumn } from './columns/index.ts'; +import type { MsSqlTable } from './table.ts'; + +export function primaryKey< + TTableName extends string, + TColumn extends AnyMsSqlColumn<{ tableName: TTableName }>, + TColumns extends AnyMsSqlColumn<{ tableName: TTableName }>[], +>(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder { + return new PrimaryKeyBuilder(config.columns, config.name); +} + +export class PrimaryKeyBuilder { + static readonly [entityKind]: string = 'MsSqlPrimaryKeyBuilder'; + + /** @internal */ + columns: MsSqlColumn[]; + + /** @internal */ + name?: string; + + constructor( + columns: MsSqlColumn[], + name?: string, + ) { + this.columns = columns; + this.name = name; + } + + /** @internal */ + build(table: MsSqlTable): PrimaryKey { + return new PrimaryKey(table, this.columns, this.name); + } +} + +export class PrimaryKey { + static readonly [entityKind]: string = 'MsSqlPrimaryKey'; + + readonly columns: MsSqlColumn[]; + readonly name?: string; + readonly isNameExplicit: boolean; + + constructor(readonly table: MsSqlTable, columns: MsSqlColumn[], name?: string) { + this.columns = columns; + this.name = name; + this.isNameExplicit = !!name; + } + + getName() { + return this.name; + } +} diff --git a/drizzle-orm/src/mssql-core/query-builders/delete.ts b/drizzle-orm/src/mssql-core/query-builders/delete.ts new file mode 100644 index 0000000000..814e3dae94 --- /dev/null +++ b/drizzle-orm/src/mssql-core/query-builders/delete.ts @@ -0,0 +1,243 @@ +import { entityKind } from '~/entity.ts'; +import type { MsSqlDialect } from '~/mssql-core/dialect.ts'; +import type { + AnyQueryResultHKT, + MsSqlSession, + PreparedQueryConfig, + PreparedQueryHKTBase, + PreparedQueryKind, + QueryResultHKT, + QueryResultKind, +} from '~/mssql-core/session.ts'; +import type { MsSqlTable } from '~/mssql-core/table.ts'; +import type { SelectResultFields } from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { Table } from '~/table.ts'; +import { orderSelectedFields } from '~/utils.ts'; +import type { MsSqlColumn } from '../columns/common.ts'; +import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; + +export type MsSqlDeleteWithout< + T extends AnyMsSqlDeleteBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T + : Omit< + MsSqlDeleteBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['output'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type MsSqlDeleteReturningAll< + T extends AnyMsSqlDeleteBase, + TDynamic extends boolean, +> = MsSqlDeleteWithout< + MsSqlDeleteBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['table']['$inferSelect'], + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + 'output' +>; + +export type MsSqlDeleteReturning< + T extends AnyMsSqlDeleteBase, + TDynamic extends boolean, + TSelectedFields extends SelectedFieldsFlat, +> = MsSqlDeleteWithout< + MsSqlDeleteBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + SelectResultFields, + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + 'output' +>; + +export type MsSqlDelete< + TTable extends MsSqlTable = MsSqlTable, + TQueryResult extends QueryResultHKT = AnyQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TOutput extends Record | undefined = Record | undefined, +> = MsSqlDeleteBase; + +export interface MsSqlDeleteConfig { + where?: SQL | undefined; + table: MsSqlTable; + output?: SelectedFieldsOrdered; +} + +export type MsSqlDeletePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + PreparedQueryConfig & { + execute: T['_']['output'] extends undefined ? QueryResultKind : T['_']['output'][]; + iterator: never; + } +>; + +type MsSqlDeleteDynamic = MsSqlDelete< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['output'] +>; + +type AnyMsSqlDeleteBase = MsSqlDeleteBase; + +export interface MsSqlDeleteBase< + TTable extends MsSqlTable, + TQueryResult extends QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TOutput extends Record | undefined = undefined, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise : TOutput[]> { + readonly _: { + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly dynamic: TDynamic; + readonly output: TOutput; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class MsSqlDeleteBase< + TTable extends MsSqlTable, + TQueryResult extends QueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TOutput extends Record | undefined, + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise : TOutput[]> + implements SQLWrapper +{ + static override readonly [entityKind]: string = 'MsSqlDelete'; + + private config: MsSqlDeleteConfig; + + constructor( + private table: TTable, + private session: MsSqlSession, + private dialect: MsSqlDialect, + ) { + super(); + this.config = { table }; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will delete only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/delete} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be deleted. + * + * ```ts + * // Delete all cars with green color + * db.delete(cars).where(eq(cars.color, 'green')); + * // or + * db.delete(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Delete all BMW cars with a green color + * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Delete all cars with the green or blue color + * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where(where: SQL | undefined): MsSqlDeleteWithout { + this.config.where = where; + return this as any; + } + + /** + * Adds an `output` clause to the query. + * + * Calling this method will return the specified fields of the deleted rows. If no fields are specified, all fields will be returned. + * + * @example + * ```ts + * // Delete all cars with the green color and return all fields + * const deletedCars: Car[] = await db.delete(cars) + * .output(); + * .where(eq(cars.color, 'green')) + * + * // Delete all cars with the green color and return only their id and brand fields + * const deletedCarsIdsAndBrands: { id: number, brand: string }[] = await db.delete(cars) + * .output({ id: cars.id, brand: cars.brand }); + * .where(eq(cars.color, 'green')) + * ``` + */ + output(): MsSqlDeleteReturningAll; + output( + fields: TSelectedFields, + ): MsSqlDeleteReturning; + output( + fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], + ): MsSqlDeleteWithout { + this.config.output = orderSelectedFields(fields); + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildDeleteQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): MsSqlDeletePrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + this.config.output, + ) as MsSqlDeletePrepare; + } + + override execute( + placeholderValues?: Record, + ): Promise : TOutput[]> { + return this.prepare().execute(placeholderValues) as any; + } + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): MsSqlDeleteDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/mssql-core/query-builders/index.ts b/drizzle-orm/src/mssql-core/query-builders/index.ts new file mode 100644 index 0000000000..16f0e1d4d9 --- /dev/null +++ b/drizzle-orm/src/mssql-core/query-builders/index.ts @@ -0,0 +1,6 @@ +export * from './delete.ts'; +export * from './insert.ts'; +export * from './query-builder.ts'; +export * from './select.ts'; +export * from './select.types.ts'; +export * from './update.ts'; diff --git a/drizzle-orm/src/mssql-core/query-builders/insert.ts b/drizzle-orm/src/mssql-core/query-builders/insert.ts new file mode 100644 index 0000000000..34e703f67c --- /dev/null +++ b/drizzle-orm/src/mssql-core/query-builders/insert.ts @@ -0,0 +1,243 @@ +import { entityKind, is } from '~/entity.ts'; +import type { MsSqlDialect } from '~/mssql-core/dialect.ts'; +import type { + AnyQueryResultHKT, + MsSqlSession, + PreparedQueryConfig, + PreparedQueryHKTBase, + PreparedQueryKind, + QueryResultHKT, + QueryResultKind, +} from '~/mssql-core/session.ts'; +import type { MsSqlTable } from '~/mssql-core/table.ts'; +import type { SelectResultFields } from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; +import { Param, SQL } from '~/sql/sql.ts'; +import { type InferInsertModel, type InferSelectModel, Table } from '~/table.ts'; +import { orderSelectedFields } from '~/utils.ts'; +import type { MsSqlColumn } from '../columns/common.ts'; +import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; + +export interface MsSqlInsertConfig { + table: TTable; + values: Record[]; + output?: SelectedFieldsOrdered; +} + +export type MsSqlInsertValue< + TTable extends MsSqlTable, + TModel extends Record = InferInsertModel, +> = + & { + [Key in keyof TModel]: TModel[Key] | SQL | Placeholder; + } + & {}; + +export class MsSqlInsertBuilder< + TTable extends MsSqlTable, + TQueryResult extends QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TOutput extends Record | undefined = undefined, +> { + static readonly [entityKind]: string = 'MsSqlInsertBuilder'; + + private config: { + output?: SelectedFieldsOrdered; + table: TTable; + }; + + protected table: TTable; + protected session: MsSqlSession; + protected dialect: MsSqlDialect; + + constructor( + table: TTable, + session: MsSqlSession, + dialect: MsSqlDialect, + output?: SelectedFieldsOrdered, + ) { + this.table = table; + this.session = session; + this.dialect = dialect; + + this.config = { table, output }; + } + + values( + value: MsSqlInsertValue, + ): MsSqlInsertBase; + values(values: MsSqlInsertValue[]): MsSqlInsertBase; + values( + values: MsSqlInsertValue | MsSqlInsertValue[], + ): MsSqlInsertBase { + values = Array.isArray(values) ? values : [values]; + if (values.length === 0) { + throw new Error('values() must be called with at least one value'); + } + const mappedValues = values.map((entry) => { + const result: Record = {}; + const cols = this.table[Table.Symbol.Columns]; + for (const colKey of Object.keys(entry)) { + const colValue = entry[colKey as keyof typeof entry]; + result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); + } + return result; + }); + + return new MsSqlInsertBase(this.table, mappedValues, this.session, this.dialect, this.config.output); + } + + /** + * Adds an `output` clause to the query. + * + * Calling this method will return the specified fields of the inserted rows. If no fields are specified, all fields will be returned. + * + * @example + * ```ts + * // Insert one row and return all fields + * const insertedCar: Car[] = await db.insert(cars) + * .output(); + * .values({ brand: 'BMW' }) + * + * // Insert one row and return only the id + * const insertedCarId: { id: number }[] = await db.insert(cars) + * .output({ id: cars.id }); + * .values({ brand: 'BMW' }) + * ``` + */ + output(): Omit>, 'output'>; + output( + fields: SelectedFields, + ): Omit>, 'output'>; + output( + fields: SelectedFieldsFlat = this.table[Table.Symbol.Columns], + ) { + this.config.output = orderSelectedFields(fields); + return this as any; + } +} + +export type MsSqlInsertWithout = + TDynamic extends true ? T + : Omit< + MsSqlInsertBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['output'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K + >; + +export type MsSqlInsertDynamic = MsSqlInsert< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['output'] +>; + +export type MsSqlInsertPrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + PreparedQueryConfig & { + execute: T['_']['output'] extends undefined ? QueryResultKind : T['_']['output'][]; + iterator: never; + } +>; + +export type MsSqlInsert< + TTable extends MsSqlTable = MsSqlTable, + TQueryResult extends QueryResultHKT = AnyQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TOutput extends Record | undefined = Record | undefined, +> = MsSqlInsertBase; + +export type AnyMsSqlInsert = MsSqlInsertBase; + +export interface MsSqlInsertBase< + TTable extends MsSqlTable, + TQueryResult extends QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TOutput extends Record | undefined = undefined, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise : TOutput[]>, SQLWrapper { + readonly _: { + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly output: TOutput; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class MsSqlInsertBase< + TTable extends MsSqlTable, + TQueryResult extends QueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TOutput extends Record | undefined = undefined, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise : TOutput[]> + implements SQLWrapper +{ + static override readonly [entityKind]: string = 'MsSqlInsert'; + + declare protected $table: TTable; + + private config: MsSqlInsertConfig; + + constructor( + table: TTable, + values: MsSqlInsertConfig['values'], + private session: MsSqlSession, + private dialect: MsSqlDialect, + output?: SelectedFieldsOrdered, + ) { + super(); + this.config = { table, values, output }; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildInsertQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): MsSqlInsertPrepare { + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + this.config.output, + ) as MsSqlInsertPrepare; + } + + override execute( + placeholderValues?: Record, + ): Promise : TOutput[]> { + return this.prepare().execute(placeholderValues) as any; + } + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + // $dynamic(): MsSqlInsertDynamic { + // return this as any; + // } +} diff --git a/drizzle-orm/src/mssql-core/query-builders/query-builder.ts b/drizzle-orm/src/mssql-core/query-builders/query-builder.ts new file mode 100644 index 0000000000..4e2e07f104 --- /dev/null +++ b/drizzle-orm/src/mssql-core/query-builders/query-builder.ts @@ -0,0 +1,109 @@ +import { entityKind, is } from '~/entity.ts'; +import { MsSqlDialect, type MsSqlDialectConfig } from '~/mssql-core/dialect.ts'; +import type { WithSubqueryWithSelection } from '~/mssql-core/subquery.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection } from '~/sql/sql.ts'; +import { WithSubquery } from '~/subquery.ts'; +import { MsSqlSelectBuilder } from './select.ts'; +import type { SelectedFields } from './select.types.ts'; + +export class QueryBuilder { + static readonly [entityKind]: string = 'MsSqlQueryBuilder'; + + private dialect: MsSqlDialect | undefined; + private dialectConfig: MsSqlDialectConfig | undefined; + + constructor(dialect?: MsSqlDialect | MsSqlDialectConfig) { + this.dialect = is(dialect, MsSqlDialect) ? dialect : undefined; + this.dialectConfig = is(dialect, MsSqlDialect) ? undefined : dialect; + } + + $with(alias: TAlias) { + const queryBuilder = this; + + return { + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection { + if (typeof qb === 'function') { + qb = qb(queryBuilder); + } + + return new Proxy( + new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as WithSubqueryWithSelection; + }, + }; + } + + with(...queries: WithSubquery[]) { + const self = this; + + function select(): MsSqlSelectBuilder; + function select( + fields: TSelection, + ): MsSqlSelectBuilder; + function select( + fields?: TSelection, + ): MsSqlSelectBuilder { + return new MsSqlSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: self.getDialect(), + withList: queries, + }); + } + + function selectDistinct(): MsSqlSelectBuilder; + function selectDistinct( + fields: TSelection, + ): MsSqlSelectBuilder; + function selectDistinct( + fields?: TSelection, + ): MsSqlSelectBuilder { + return new MsSqlSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: self.getDialect(), + withList: queries, + distinct: true, + }); + } + + return { select, selectDistinct }; + } + + select(): MsSqlSelectBuilder; + select(fields: TSelection): MsSqlSelectBuilder; + select( + fields?: TSelection, + ): MsSqlSelectBuilder { + return new MsSqlSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect() }); + } + + selectDistinct(): MsSqlSelectBuilder; + selectDistinct( + fields: TSelection, + ): MsSqlSelectBuilder; + selectDistinct( + fields?: TSelection, + ): MsSqlSelectBuilder { + return new MsSqlSelectBuilder({ + fields: fields ?? undefined, + session: undefined, + dialect: this.getDialect(), + distinct: true, + }); + } + + // Lazy load dialect to avoid circular dependency + private getDialect() { + if (!this.dialect) { + this.dialect = new MsSqlDialect(this.dialectConfig); + } + + return this.dialect; + } +} diff --git a/drizzle-orm/src/mssql-core/query-builders/query.ts b/drizzle-orm/src/mssql-core/query-builders/query.ts new file mode 100644 index 0000000000..3fffd3acc4 --- /dev/null +++ b/drizzle-orm/src/mssql-core/query-builders/query.ts @@ -0,0 +1,136 @@ +import { + type BuildQueryResult, + type BuildRelationalQueryResult, + type DBQueryConfig, + mapRelationalRowFromObj, + type TableRelationalConfig, + type TablesRelationalConfig, +} from '~/_relations.ts'; +import { entityKind } from '~/entity.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { Query, QueryWithTypings, SQL } from '~/sql/sql.ts'; +import type { KnownKeysOnly } from '~/utils.ts'; +import type { MsSqlDialect } from '../dialect.ts'; +import type { MsSqlSession, PreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind } from '../session.ts'; +import type { MsSqlTable } from '../table.ts'; + +export class RelationalQueryBuilder< + TPreparedQueryHKT extends PreparedQueryHKTBase, + TSchema extends TablesRelationalConfig, + TFields extends TableRelationalConfig, +> { + static readonly [entityKind]: string = 'MsSqlRelationalQueryBuilder'; + + constructor( + private fullSchema: Record, + private schema: TSchema, + private tableNamesMap: Record, + private table: MsSqlTable, + private tableConfig: TableRelationalConfig, + private dialect: MsSqlDialect, + private session: MsSqlSession, + ) {} + + findMany>( + config?: KnownKeysOnly>, + ): MsSqlRelationalQuery[]> { + return new MsSqlRelationalQuery( + this.fullSchema, + this.schema, + this.tableNamesMap, + this.table, + this.tableConfig, + this.dialect, + this.session, + config ? (config as DBQueryConfig<'many', true>) : {}, + 'many', + ); + } + + findFirst, 'limit'>>( + config?: KnownKeysOnly, 'limit'>>, + ): MsSqlRelationalQuery | undefined> { + return new MsSqlRelationalQuery( + this.fullSchema, + this.schema, + this.tableNamesMap, + this.table, + this.tableConfig, + this.dialect, + this.session, + config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, + 'first', + ); + } +} + +export class MsSqlRelationalQuery< + TPreparedQueryHKT extends PreparedQueryHKTBase, + TResult, +> extends QueryPromise { + static override readonly [entityKind]: string = 'MsSqlRelationalQuery'; + + declare protected $brand: 'MsSqlRelationalQuery'; + + constructor( + private fullSchema: Record, + private schema: TablesRelationalConfig, + private tableNamesMap: Record, + private table: MsSqlTable, + private tableConfig: TableRelationalConfig, + private dialect: MsSqlDialect, + private session: MsSqlSession, + private config: DBQueryConfig<'many', true> | true, + private queryMode: 'many' | 'first', + ) { + super(); + } + + prepare() { + const { query, builtQuery } = this._toSQL(); + return this.session.prepareQuery( + builtQuery, + undefined, + (rawRows) => { + const rows = rawRows.map((row) => mapRelationalRowFromObj(this.schema, this.tableConfig, row, query.selection)); + if (this.queryMode === 'first') { + return rows[0] as TResult; + } + return rows as TResult; + }, + ) as PreparedQueryKind; + } + + private _getQuery() { + return this.dialect.buildRelationalQuery({ + fullSchema: this.fullSchema, + schema: this.schema, + tableNamesMap: this.tableNamesMap, + table: this.table, + tableConfig: this.tableConfig, + queryConfig: this.config, + tableAlias: this.tableConfig.tsName, + }); + } + + private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: QueryWithTypings } { + const query = this._getQuery(); + + const builtQuery = this.dialect.sqlToQuery(query.sql as SQL); + + return { builtQuery, query }; + } + + /** @internal */ + getSQL(): SQL { + return this._getQuery().sql as SQL; + } + + toSQL(): Query { + return this._toSQL().builtQuery; + } + + override execute(): Promise { + return this.prepare().execute(); + } +} diff --git a/drizzle-orm/src/mssql-core/query-builders/select.ts b/drizzle-orm/src/mssql-core/query-builders/select.ts new file mode 100644 index 0000000000..35f111b37b --- /dev/null +++ b/drizzle-orm/src/mssql-core/query-builders/select.ts @@ -0,0 +1,1065 @@ +import { entityKind, is } from '~/entity.ts'; +import type { MsSqlColumn } from '~/mssql-core/columns/index.ts'; +import type { MsSqlDialect } from '~/mssql-core/dialect.ts'; +import type { MsSqlSession, PreparedQueryConfig, PreparedQueryHKTBase } from '~/mssql-core/session.ts'; +import type { SubqueryWithSelection } from '~/mssql-core/subquery.ts'; +import type { MsSqlTable } from '~/mssql-core/table.ts'; +import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + BuildSubquerySelection, + GetSelectTableName, + GetSelectTableSelection, + JoinNullability, + JoinType, + SelectMode, + SelectResult, + SetOperator, +} from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, Placeholder, Query } from '~/sql/sql.ts'; +import { SQL, View } from '~/sql/sql.ts'; +import { Subquery } from '~/subquery.ts'; +import { Table } from '~/table.ts'; +import { + applyMixins, + getTableColumns, + getTableLikeName, + haveSameKeys, + orderSelectedFields, + type ValueOrArray, +} from '~/utils.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import { MsSqlViewBase } from '../view-base.ts'; +import type { + AnyMsSqlSelect, + CreateMsSqlSelectFromBuilderMode, + GetMsSqlSetOperators, + MsSqlCreateSetOperatorFn, + MsSqlJoinFn, + MsSqlSelectConfig, + MsSqlSelectDynamic, + MsSqlSelectHKT, + MsSqlSelectHKTBase, + MsSqlSelectPrepare, + MsSqlSelectReplace, + MsSqlSelectWithout, + MsSqlSetOperatorExcludedMethods, + MsSqlSetOperatorWithResult, + SelectedFields, + SetOperatorRightSelect, +} from './select.types.ts'; + +// Shared base class for `from()` +class MsSqlSelectFromBuilderBase< + TSelection extends SelectedFields | undefined, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBuilderMode extends 'db' | 'qb', + TBranch extends 'from' | 'top', +> { + static readonly [entityKind]: string = 'MsSqlSelectFromBuilderBase'; + + protected fields: TSelection; + protected session: MsSqlSession | undefined; + protected dialect: MsSqlDialect; + protected withList: Subquery[] = []; + protected distinct: boolean | undefined; + protected topValue?: number | Placeholder; + + constructor(config: { + fields: TSelection; + session: MsSqlSession | undefined; + dialect: MsSqlDialect; + withList?: Subquery[]; + distinct?: boolean; + topValue?: number | Placeholder; + }) { + this.fields = config.fields; + this.session = config.session; + this.dialect = config.dialect; + if (config.withList) { + this.withList = config.withList; + } + this.distinct = config.distinct; + this.topValue = config.topValue; + } + + from( + source: TFrom, + ): Omit< + CreateMsSqlSelectFromBuilderMode< + TBuilderMode, + GetSelectTableName, + TSelection extends undefined ? GetSelectTableSelection : TSelection, + TSelection extends undefined ? 'single' : 'partial', + TPreparedQueryHKT, + TBranch + >, + 'fetch' | 'offset' + > { + const isPartialSelect = !!this.fields; + let fields: SelectedFields; + if (this.fields) { + fields = this.fields; + } else if (is(source, Subquery)) { + // This is required to use the proxy handler to get the correct field values from the subquery + fields = Object.fromEntries( + Object.keys(source._.selectedFields).map(( + key, + ) => [key, source[key as unknown as keyof typeof source] as unknown as SelectedFields[string]]), + ); + } else if (is(source, MsSqlViewBase)) { + fields = source[ViewBaseConfig].selectedFields as SelectedFields; + } else if (is(source, SQL)) { + fields = {}; + } else { + fields = getTableColumns(source); + } + + return new MsSqlSelectBase({ + table: source, + fields, + isPartialSelect, + session: this.session, + dialect: this.dialect, + withList: this.withList, + distinct: this.distinct, + topValue: this.topValue, + }) as any; + } +} + +export class MsSqlSelectBuilder< + TSelection extends SelectedFields | undefined, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBuilderMode extends 'db' | 'qb' = 'db', +> extends MsSqlSelectFromBuilderBase { + static override readonly [entityKind]: string = 'MsSqlSelectFromBuilderBase'; + + top(top: number | Placeholder): MsSqlSelectFromBuilderBase { + return new MsSqlSelectFromBuilderBase({ + fields: this.fields, + session: this.session, + dialect: this.dialect, + withList: this.withList, + distinct: this.distinct, + topValue: top, + }); + } +} + +export abstract class MsSqlSelectQueryBuilderBase< + THKT extends MsSqlSelectHKTBase, + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBranch extends 'from' | 'top', + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = 'offset' | 'fetch', + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> extends TypedQueryBuilder { + static override readonly [entityKind]: string = 'MsSqlSelectQueryBuilderBase'; + + override readonly _: { + readonly hkt: THKT; + readonly tableName: TTableName; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + readonly branch: TBranch; + }; + + protected config: MsSqlSelectConfig; + protected joinsNotNullableMap: Record; + private tableName: string | undefined; + private isPartialSelect: boolean; + /** @internal */ + readonly session: MsSqlSession | undefined; + protected dialect: MsSqlDialect; + + constructor( + { table, fields, isPartialSelect, session, dialect, withList, distinct, topValue }: { + table: MsSqlSelectConfig['table']; + fields: MsSqlSelectConfig['fields']; + isPartialSelect: boolean; + session: MsSqlSession | undefined; + dialect: MsSqlDialect; + withList: Subquery[]; + distinct: boolean | undefined; + topValue: number | undefined | Placeholder; + }, + ) { + super(); + this.config = { + withList, + table, + fields: { ...fields }, + distinct, + setOperators: [], + top: topValue, + }; + this.isPartialSelect = isPartialSelect; + this.session = session; + this.dialect = dialect; + this._ = { + selectedFields: fields as TSelectedFields, + } as this['_']; + this.tableName = getTableLikeName(table); + this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; + } + + private createJoin( + joinType: TJoinType, + ): MsSqlJoinFn { + return ( + table: MsSqlTable | Subquery | MsSqlViewBase | SQL, + on: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, + ) => { + const baseTableName = this.tableName; + const tableName = getTableLikeName(table); + + if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { + throw new Error(`Alias "${tableName}" is already used in this query`); + } + + if (!this.isPartialSelect) { + // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object + if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { + this.config.fields = { + [baseTableName]: this.config.fields, + }; + } + if (typeof tableName === 'string' && !is(table, SQL)) { + const selection = is(table, Subquery) + ? table._.selectedFields + : is(table, View) + ? table[ViewBaseConfig].selectedFields + : table[Table.Symbol.Columns]; + this.config.fields[tableName] = selection; + } + } + + if (typeof on === 'function') { + on = on( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + + if (!this.config.joins) { + this.config.joins = []; + } + + this.config.joins.push({ on, table, joinType, alias: tableName }); + + if (typeof tableName === 'string') { + switch (joinType) { + case 'left': { + this.joinsNotNullableMap[tableName] = false; + break; + } + case 'right': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'inner': { + this.joinsNotNullableMap[tableName] = true; + break; + } + case 'full': { + this.joinsNotNullableMap = Object.fromEntries( + Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), + ); + this.joinsNotNullableMap[tableName] = false; + break; + } + } + } + + return this as any; + }; + } + + /** + * Executes a `left join` operation by adding another table to the current query. + * + * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User; pets: Pet | null }[] = await db.select() + * .from(users) + * .leftJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number | null }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .leftJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + leftJoin = this.createJoin('left'); + + /** + * Executes a `right join` operation by adding another table to the current query. + * + * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User | null; pets: Pet }[] = await db.select() + * .from(users) + * .rightJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number | null; petId: number }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .rightJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + rightJoin = this.createJoin('right'); + + /** + * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. + * + * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User; pets: Pet }[] = await db.select() + * .from(users) + * .innerJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number; petId: number }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .innerJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + innerJoin = this.createJoin('inner'); + + /** + * Executes a `full join` operation by combining rows from two tables into a new table. + * + * Calling this method retrieves all rows from both main and joined tables, merging rows with matching values and filling in `null` for non-matching columns. + * + * See docs: {@link https://orm.drizzle.team/docs/joins#full-join} + * + * @param table the table to join. + * @param on the `on` clause. + * + * @example + * + * ```ts + * // Select all users and their pets + * const usersWithPets: { user: User | null; pets: Pet | null }[] = await db.select() + * .from(users) + * .fullJoin(pets, eq(users.id, pets.ownerId)) + * + * // Select userId and petId + * const usersIdsAndPetIds: { userId: number | null; petId: number | null }[] = await db.select({ + * userId: users.id, + * petId: pets.id, + * }) + * .from(users) + * .fullJoin(pets, eq(users.id, pets.ownerId)) + * ``` + */ + fullJoin = this.createJoin('full'); + + private createSetOperator( + type: SetOperator, + isAll: boolean, + ): >( + rightSelection: + | ((setOperators: GetMsSqlSetOperators) => SetOperatorRightSelect) + | SetOperatorRightSelect, + ) => MsSqlSelectWithout< + this, + TDynamic, + MsSqlSetOperatorExcludedMethods, + true + > { + return (rightSelection) => { + const rightSelect = (typeof rightSelection === 'function' + ? rightSelection(getMsSqlSetOperators()) + : rightSelection) as TypedQueryBuilder< + any, + TResult + >; + + if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { + throw new Error( + 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', + ); + } + + this.config.setOperators.push({ type, isAll, rightSelect }); + return this as any; + }; + } + + /** + * Adds `union` set operator to the query. + * + * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} + * + * @example + * + * ```ts + * // Select all unique names from customers and users tables + * await db.select({ name: users.name }) + * .from(users) + * .union( + * db.select({ name: customers.name }).from(customers) + * ); + * // or + * import { union } from 'drizzle-orm/mssql-core' + * + * await union( + * db.select({ name: users.name }).from(users), + * db.select({ name: customers.name }).from(customers) + * ); + * ``` + */ + union = this.createSetOperator('union', false); + + /** + * Adds `union all` set operator to the query. + * + * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} + * + * @example + * + * ```ts + * // Select all transaction ids from both online and in-store sales + * await db.select({ transaction: onlineSales.transactionId }) + * .from(onlineSales) + * .unionAll( + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * // or + * import { unionAll } from 'drizzle-orm/mssql-core' + * + * await unionAll( + * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * ``` + */ + unionAll = this.createSetOperator('union', true); + + /** + * Adds `intersect` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} + * + * @example + * + * ```ts + * // Select course names that are offered in both departments A and B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .intersect( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { intersect } from 'drizzle-orm/mssql-core' + * + * await intersect( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + intersect = this.createSetOperator('intersect', false); + + /** + * Adds `except` set operator to the query. + * + * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .except( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * import { except } from 'drizzle-orm/mssql-core' + * + * await except( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ + except = this.createSetOperator('except', false); + + /** @internal */ + addSetOperators(setOperators: MsSqlSelectConfig['setOperators']): MsSqlSelectWithout< + this, + TDynamic, + MsSqlSetOperatorExcludedMethods, + true + > { + this.config.setOperators.push(...setOperators); + return this as any; + } + + /** + * Adds a `where` clause to the query. + * + * Calling this method will select only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/select#filtering} + * + * @param where the `where` clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be selected. + * + * ```ts + * // Select all cars with green color + * await db.select().from(cars).where(eq(cars.color, 'green')); + * // or + * await db.select().from(cars).where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Select all BMW cars with a green color + * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Select all cars with the green or blue color + * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where( + where: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, + ): MsSqlSelectWithout { + if (typeof where === 'function') { + where = where( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + this.config.where = where; + return this as any; + } + + /** + * Adds a `having` clause to the query. + * + * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} + * + * @param having the `having` clause. + * + * @example + * + * ```ts + * // Select all brands with more than one car + * await db.select({ + * brand: cars.brand, + * count: sql`cast(count(${cars.id}) as int)`, + * }) + * .from(cars) + * .groupBy(cars.brand) + * .having(({ count }) => gt(count, 1)); + * ``` + */ + having( + having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, + ): MsSqlSelectWithout { + if (typeof having === 'function') { + having = having( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), + ) as TSelection, + ); + } + this.config.having = having; + return this as any; + } + + /** + * Adds a `group by` clause to the query. + * + * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. + * + * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} + * + * @example + * + * ```ts + * // Group and count people by their last names + * await db.select({ + * lastName: people.lastName, + * count: sql`cast(count(*) as int)` + * }) + * .from(people) + * .groupBy(people.lastName); + * ``` + */ + groupBy( + builder: (aliases: this['_']['selection']) => ValueOrArray, + ): MsSqlSelectWithout; + groupBy(...columns: (MsSqlColumn | SQL | SQL.Aliased)[]): MsSqlSelectWithout; + groupBy( + ...columns: + | [(aliases: this['_']['selection']) => ValueOrArray] + | (MsSqlColumn | SQL | SQL.Aliased)[] + ): MsSqlSelectWithout { + if (typeof columns[0] === 'function') { + const groupBy = columns[0]( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as TSelection, + ); + this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; + } else { + this.config.groupBy = columns as (MsSqlColumn | SQL | SQL.Aliased)[]; + } + return this as any; + } + + /** + * Adds an `order by` clause to the query. + * + * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. + * + * See docs: {@link https://orm.drizzle.team/docs/select#order-by} + * + * @example + * + * ``` + * // Select cars ordered by year + * await db.select().from(cars).orderBy(cars.year); + * ``` + * + * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. + * + * ```ts + * // Select cars ordered by year in descending order + * await db.select().from(cars).orderBy(desc(cars.year)); + * + * // Select cars ordered by year and price + * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); + * ``` + */ + orderBy( + builder: (aliases: this['_']['selection']) => ValueOrArray, + ): TBranch extends 'from' ? MsSqlSelectReplace + : MsSqlSelectWithout; + orderBy( + ...columns: (MsSqlColumn | SQL | SQL.Aliased)[] + ): TBranch extends 'from' ? MsSqlSelectReplace + : MsSqlSelectWithout; + orderBy( + ...columns: + | [(aliases: this['_']['selection']) => ValueOrArray] + | (MsSqlColumn | SQL | SQL.Aliased)[] + ): TBranch extends 'from' ? MsSqlSelectReplace + : MsSqlSelectWithout + { + if (typeof columns[0] === 'function') { + const orderBy = columns[0]( + new Proxy( + this.config.fields, + new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), + ) as TSelection, + ); + + const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; + + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.orderBy = orderByArray; + } else { + this.config.orderBy = orderByArray; + } + } else { + const orderByArray = columns as (MsSqlColumn | SQL | SQL.Aliased)[]; + + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.orderBy = orderByArray; + } else { + this.config.orderBy = orderByArray; + } + } + return this as any; + } + + /** + * Adds an `OFFSET` clause to the query. + * + * Calling this method will skip the first N rows of the result set. This is commonly used for pagination, often in combination with `FETCH NEXT` (e.g., `.fetch()`). + * + * * ⚠️ **Note:** This method can only be used after calling `.orderBy()`, as SQL Server requires `ORDER BY` to be present with `OFFSET`. + * + * @example + * + * ```ts + * // Skip the first 10 results + * await db.select().from(cars).orderBy(cars.year).offset(10); + * ``` + * + * `OFFSET` is zero-based — `offset(0)` will include all rows, while `offset(10)` will skip the first 10. + * + * Typically used with `.fetch()` to implement pagination: + * + * ```ts + * // Get 10 cars, skipping the first 20 + * await db.select().from(cars).orderBy(cars.year).offset(20).fetch(10); + * ``` + * + * @param offset The number of rows to skip + */ + offset(offset: number | Placeholder): MsSqlSelectReplace { + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.offset = offset; + } else { + this.config.offset = offset; + } + return this as any; + } + + /** + * Adds a `FETCH NEXT` clause to the query (commonly known as `LIMIT`). + * + * Limits the number of rows returned — used after `.offset()`. + * + * @example + * ```ts + * // Get only 10 rows, skipping 5 rows + * await db.select().from(cars).orderBy(cars.year).offset(5).fetch(10); + * ``` + * + * @example + * ```ts + * // Pagination: skip 20 cars, then fetch 10 + * await db.select().from(cars).orderBy(cars.year).offset(20).fetch(10); + * ``` + * + * @param fetch The number of rows to fetch + */ + fetch(fetch: number | Placeholder): MsSqlSelectWithout { + if (this.config.setOperators.length > 0) { + this.config.setOperators.at(-1)!.fetch = fetch; + } else { + this.config.fetch = fetch; + } + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildSelectQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + as( + alias: TAlias, + ): SubqueryWithSelection { + return new Proxy( + new Subquery(this.getSQL(), this.config.fields, alias), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as SubqueryWithSelection; + } + + /** @internal */ + override getSelectedFields(): this['_']['selectedFields'] { + return new Proxy( + this.config.fields, + new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as this['_']['selectedFields']; + } + + $dynamic(): MsSqlSelectDynamic { + return this as any; + } +} + +export interface MsSqlSelectBase< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBranch extends 'from' | 'top', + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = 'offset' | 'fetch', + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> extends + MsSqlSelectQueryBuilderBase< + MsSqlSelectHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TBranch, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + QueryPromise +{} + +export class MsSqlSelectBase< + TTableName extends string | undefined, + TSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBranch extends 'from' | 'top', + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = 'offset' | 'fetch', + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> extends MsSqlSelectQueryBuilderBase< + MsSqlSelectHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TBranch, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields +> { + static override readonly [entityKind]: string = 'MsSqlSelect'; + + prepare(): MsSqlSelectPrepare { + if (!this.session) { + throw new Error('Cannot execute a query on a query builder. Please use a database instance instead.'); + } + const fieldsList = orderSelectedFields(this.config.fields); + const query = this.session.prepareQuery< + PreparedQueryConfig & { execute: SelectResult[] }, + TPreparedQueryHKT + >(this.dialect.sqlToQuery(this.getSQL()), fieldsList); + query.joinsNotNullableMap = this.joinsNotNullableMap; + return query as MsSqlSelectPrepare; + } + + execute = ((placeholderValues) => { + return this.prepare().execute(placeholderValues); + }) as ReturnType['execute']; + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); +} + +applyMixins(MsSqlSelectBase, [QueryPromise]); + +function createSetOperator(type: SetOperator, isAll: boolean): MsSqlCreateSetOperatorFn { + return (leftSelect, rightSelect, ...restSelects) => { + const setOperators = [rightSelect, ...restSelects].map((select) => ({ + type, + isAll, + rightSelect: select as AnyMsSqlSelect, + })); + + for (const setOperator of setOperators) { + if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { + throw new Error( + 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', + ); + } + } + + return (leftSelect as AnyMsSqlSelect).addSetOperators(setOperators) as any; + }; +} + +const getMsSqlSetOperators = () => ({ + union, + unionAll, + intersect, + except, +}); + +/** + * Adds `union` set operator to the query. + * + * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} + * + * @example + * + * ```ts + * // Select all unique names from customers and users tables + * import { union } from 'drizzle-orm/mssql-core' + * + * await union( + * db.select({ name: users.name }).from(users), + * db.select({ name: customers.name }).from(customers) + * ); + * // or + * await db.select({ name: users.name }) + * .from(users) + * .union( + * db.select({ name: customers.name }).from(customers) + * ); + * ``` + */ +export const union = createSetOperator('union', false); + +/** + * Adds `union all` set operator to the query. + * + * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} + * + * @example + * + * ```ts + * // Select all transaction ids from both online and in-store sales + * import { unionAll } from 'drizzle-orm/mssql-core' + * + * await unionAll( + * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * // or + * await db.select({ transaction: onlineSales.transactionId }) + * .from(onlineSales) + * .unionAll( + * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) + * ); + * ``` + */ +export const unionAll = createSetOperator('union', true); + +/** + * Adds `intersect` set operator to the query. + * + * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} + * + * @example + * + * ```ts + * // Select course names that are offered in both departments A and B + * import { intersect } from 'drizzle-orm/mssql-core' + * + * await intersect( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .intersect( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const intersect = createSetOperator('intersect', false); + +/** + * Adds `except` set operator to the query. + * + * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. + * + * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} + * + * @example + * + * ```ts + * // Select all courses offered in department A but not in department B + * import { except } from 'drizzle-orm/mssql-core' + * + * await except( + * db.select({ courseName: depA.courseName }).from(depA), + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * // or + * await db.select({ courseName: depA.courseName }) + * .from(depA) + * .except( + * db.select({ courseName: depB.courseName }).from(depB) + * ); + * ``` + */ +export const except = createSetOperator('except', false); diff --git a/drizzle-orm/src/mssql-core/query-builders/select.types.ts b/drizzle-orm/src/mssql-core/query-builders/select.types.ts new file mode 100644 index 0000000000..f3afe7768f --- /dev/null +++ b/drizzle-orm/src/mssql-core/query-builders/select.types.ts @@ -0,0 +1,478 @@ +import type { MsSqlColumn } from '~/mssql-core/columns/index.ts'; +import type { MsSqlTable, MsSqlTableWithColumns } from '~/mssql-core/table.ts'; +import type { + SelectedFields as SelectedFieldsBase, + SelectedFieldsFlat as SelectedFieldsFlatBase, + SelectedFieldsOrdered as SelectedFieldsOrderedBase, +} from '~/operations.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { + AppendToNullabilityMap, + AppendToResult, + BuildSubquerySelection, + GetSelectTableName, + JoinNullability, + JoinType, + MapColumnsToTableAlias, + SelectMode, + SelectResult, + SetOperator, +} from '~/query-builders/select.types.ts'; +import type { ColumnsSelection, Placeholder, SQL, View } from '~/sql/sql.ts'; +import type { Subquery } from '~/subquery.ts'; +import type { Table, UpdateTableConfig } from '~/table.ts'; +import type { Assume, ValidateShape } from '~/utils.ts'; +import type { PreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind } from '../session.ts'; +import type { MsSqlViewBase } from '../view-base.ts'; +import type { MsSqlView, MsSqlViewWithSelection } from '../view.ts'; +import type { MsSqlSelectBase, MsSqlSelectQueryBuilderBase } from './select.ts'; + +export interface MsSqlSelectJoinConfig { + on: SQL | undefined; + table: MsSqlTable | Subquery | MsSqlViewBase | SQL; + alias: string | undefined; + joinType: JoinType; + lateral?: boolean; +} + +export type BuildAliasTable = TTable extends Table + ? MsSqlTableWithColumns< + UpdateTableConfig; + }> + > + : TTable extends View ? MsSqlViewWithSelection< + TAlias, + TTable['_']['existing'], + MapColumnsToTableAlias + > + : never; + +export interface MsSqlSelectConfig { + withList?: Subquery[]; + fields: Record; + fieldsFlat?: SelectedFieldsOrdered; + where?: SQL; + having?: SQL; + table: MsSqlTable | Subquery | MsSqlViewBase | SQL; + fetch?: number | Placeholder; + joins?: MsSqlSelectJoinConfig[]; + orderBy?: (MsSqlColumn | SQL | SQL.Aliased)[]; + groupBy?: (MsSqlColumn | SQL | SQL.Aliased)[]; + for?: { // this is not exposed. Just used internally for the RQB + mode: 'browse'; // TODO: implement in dialect + } | { + mode: 'xml'; // TODO: implement in dialect + } | { + mode: 'json'; + type: 'auto' | 'path'; + options?: { + root?: string; + includeNullValues?: true; + withoutArrayWrapper?: true; + }; + }; + top?: number | Placeholder; + offset?: number | Placeholder; + distinct?: boolean; + setOperators: { + rightSelect: TypedQueryBuilder; + type: SetOperator; + isAll: boolean; + orderBy?: (MsSqlColumn | SQL | SQL.Aliased)[]; + fetch?: number | Placeholder; + offset?: number | Placeholder; + }[]; +} + +export type MsSqlJoin< + T extends AnyMsSqlSelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, + TJoinedTable extends MsSqlTable | Subquery | MsSqlViewBase | SQL, + TJoinedName extends GetSelectTableName = GetSelectTableName, +> = T extends any ? MsSqlSelectWithout< + MsSqlSelectKind< + T['_']['hkt'], + T['_']['tableName'], + AppendToResult< + T['_']['tableName'], + T['_']['selection'], + TJoinedName, + TJoinedTable extends MsSqlTable ? TJoinedTable['_']['columns'] + : TJoinedTable extends Subquery ? Assume + : never, + T['_']['selectMode'] + >, + T['_']['selectMode'] extends 'partial' ? T['_']['selectMode'] : 'multiple', + T['_']['preparedQueryHKT'], + T['_']['branch'], + AppendToNullabilityMap, + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + T['_']['excludedMethods'] + > + : never; + +export type MsSqlJoinFn< + T extends AnyMsSqlSelectQueryBuilder, + TDynamic extends boolean, + TJoinType extends JoinType, +> = < + TJoinedTable extends MsSqlTable | Subquery | MsSqlViewBase | SQL, + TJoinedName extends GetSelectTableName = GetSelectTableName, +>( + table: TJoinedTable, + on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, +) => MsSqlJoin; + +export type SelectedFieldsFlat = SelectedFieldsFlatBase; + +export type SelectedFieldsFlatUpdate = { + inserted?: SelectedFieldsFlat | true; + deleted?: SelectedFieldsFlat | true; +}; + +export type SelectedFields = SelectedFieldsBase; + +export type SelectedFieldsOrdered = SelectedFieldsOrderedBase; + +export interface MsSqlSelectHKTBase { + tableName: string | undefined; + selection: unknown; + selectMode: SelectMode; + preparedQueryHKT: unknown; + branch: 'from' | 'top'; + nullabilityMap: unknown; + dynamic: boolean; + excludedMethods: string; + result: unknown; + selectedFields: unknown; + _type: unknown; +} + +export type MsSqlSelectKind< + T extends MsSqlSelectHKTBase, + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBranch extends 'from' | 'top', + TNullabilityMap extends Record, + TDynamic extends boolean, + TExcludedMethods extends string, + TResult = SelectResult[], + TSelectedFields = BuildSubquerySelection, +> = (T & { + tableName: TTableName; + selection: TSelection; + selectMode: TSelectMode; + preparedQueryHKT: TPreparedQueryHKT; + branch: TBranch; + nullabilityMap: TNullabilityMap; + dynamic: TDynamic; + excludedMethods: TExcludedMethods; + result: TResult; + selectedFields: TSelectedFields; +})['_type']; + +export interface MsSqlSelectQueryBuilderHKT extends MsSqlSelectHKTBase { + _type: MsSqlSelectQueryBuilderBase< + MsSqlSelectQueryBuilderHKT, + this['tableName'], + Assume, + this['selectMode'], + Assume, + this['branch'], + Assume>, + this['dynamic'], + this['excludedMethods'], + Assume, + Assume + >; +} + +export interface MsSqlSelectHKT extends MsSqlSelectHKTBase { + _type: MsSqlSelectBase< + this['tableName'], + Assume, + this['selectMode'], + Assume, + this['branch'], + Assume>, + this['dynamic'], + this['excludedMethods'], + Assume, + Assume + >; +} + +export type MsSqlSetOperatorExcludedMethods = + | 'where' + | 'having' + | 'groupBy' + | 'session' + | 'fetch' + | 'offset' + | 'leftJoin' + | 'rightJoin' + | 'innerJoin' + | 'fullJoin'; + +export type MsSqlSelectWithout< + T extends AnyMsSqlSelectQueryBuilder, + TDynamic extends boolean, + K extends keyof T & string, + TResetExcluded extends boolean = false, +> = TDynamic extends true ? T : Omit< + MsSqlSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['preparedQueryHKT'], + T['_']['branch'], + T['_']['nullabilityMap'], + TDynamic, + TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, + T['_']['result'], + T['_']['selectedFields'] + >, + TResetExcluded extends true ? K : T['_']['excludedMethods'] | K +>; + +export type MsSqlSelectReplace< + T extends AnyMsSqlSelectQueryBuilder, + TDynamic extends boolean, + NewExcluded extends string, + OldExcluded extends string, +> = TDynamic extends true ? T + : Omit< + MsSqlSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['preparedQueryHKT'], + T['_']['branch'], + T['_']['nullabilityMap'], + TDynamic, + Exclude | NewExcluded, + T['_']['result'], + T['_']['selectedFields'] + >, + NewExcluded | Exclude + >; + +export type MsSqlSelectPrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + PreparedQueryConfig & { + execute: T['_']['result']; + iterator: T['_']['result'][number]; + }, + true +>; + +export type MsSqlSelectDynamic = MsSqlSelectKind< + T['_']['hkt'], + T['_']['tableName'], + T['_']['selection'], + T['_']['selectMode'], + T['_']['preparedQueryHKT'], + T['_']['branch'], + T['_']['nullabilityMap'], + true, + never, + T['_']['result'], + T['_']['selectedFields'] +>; + +export type CreateMsSqlSelectFromBuilderMode< + TBuilderMode extends 'db' | 'qb', + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TBranch extends 'from' | 'top', +> = TBuilderMode extends 'db' ? MsSqlSelectBase + : MsSqlSelectQueryBuilderBase< + MsSqlSelectQueryBuilderHKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TBranch + >; + +export type AnyMsSqlSelectQueryBuilder = MsSqlSelectQueryBuilderBase; + +export type AnyMsSqlSetOperatorInterface = MsSqlSetOperatorInterface; + +export interface MsSqlSetOperatorInterface< + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +> { + _: { + readonly hkt: MsSqlSelectHKT; + readonly tableName: TTableName; + readonly selection: TSelection; + readonly selectMode: TSelectMode; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly nullabilityMap: TNullabilityMap; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + readonly result: TResult; + readonly selectedFields: TSelectedFields; + }; +} + +export type MsSqlSetOperatorWithResult = MsSqlSetOperatorInterface< + any, + any, + any, + any, + any, + any, + any, + TResult, + any +>; + +export type AnyMsSqlSelect = MsSqlSelectBase; + +export type SetOperatorRightSelect< + TValue extends MsSqlSetOperatorWithResult, + TResult extends any[], +> = TValue extends MsSqlSetOperatorInterface + ? ValidateShape< + TValueResult[number], + TResult[number], + TypedQueryBuilder + > + : TValue; + +export type MsSqlSelect< + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = Record, + TSelectMode extends SelectMode = SelectMode, + TNullabilityMap extends Record = Record, + TBranch extends 'from' | 'top' = 'from' | 'top', +> = MsSqlSelectBase; + +export type MsSqlSetOperator< + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = Record, + TSelectMode extends SelectMode = SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TBranch extends 'from' | 'top' = 'from' | 'top', + TNullabilityMap extends Record = Record, +> = MsSqlSelectBase< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TBranch, + TNullabilityMap, + true, + MsSqlSetOperatorExcludedMethods +>; + +export type SetOperatorRestSelect< + TValue extends readonly MsSqlSetOperatorWithResult[], + TResult extends any[], +> = TValue extends [infer First, ...infer Rest] + ? First extends MsSqlSetOperatorInterface + ? Rest extends AnyMsSqlSetOperatorInterface[] ? [ + ValidateShape>, + ...SetOperatorRestSelect, + ] + : ValidateShape[]> + : never + : TValue; + +export type MsSqlCreateSetOperatorFn = < + TTableName extends string | undefined, + TSelection extends ColumnsSelection, + TSelectMode extends SelectMode, + TValue extends MsSqlSetOperatorWithResult, + TRest extends MsSqlSetOperatorWithResult[], + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = TTableName extends string ? Record + : {}, + TDynamic extends boolean = false, + TExcludedMethods extends string = 'offset' | 'fetch', + TResult extends any[] = SelectResult[], + TSelectedFields extends ColumnsSelection = BuildSubquerySelection, +>( + leftSelect: MsSqlSetOperatorInterface< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + rightSelect: SetOperatorRightSelect, + ...restSelects: SetOperatorRestSelect +) => MsSqlSelectWithout< + MsSqlSelectBase< + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + 'from', + TNullabilityMap, + TDynamic, + TExcludedMethods, + TResult, + TSelectedFields + >, + false, + MsSqlSetOperatorExcludedMethods, + true +>; + +export type GetMsSqlSetOperators = { + union: MsSqlCreateSetOperatorFn; + intersect: MsSqlCreateSetOperatorFn; + except: MsSqlCreateSetOperatorFn; + unionAll: MsSqlCreateSetOperatorFn; +}; + +export type MsSqlSelectQueryBuilder< + THKT extends MsSqlSelectHKTBase = MsSqlSelectQueryBuilderHKT, + TTableName extends string | undefined = string | undefined, + TSelection extends ColumnsSelection = ColumnsSelection, + TSelectMode extends SelectMode = SelectMode, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TNullabilityMap extends Record = Record, + TResult extends any[] = unknown[], + TSelectedFields extends ColumnsSelection = ColumnsSelection, + TBranch extends 'from' | 'top' = 'from' | 'top', +> = MsSqlSelectQueryBuilderBase< + THKT, + TTableName, + TSelection, + TSelectMode, + TPreparedQueryHKT, + TBranch, + TNullabilityMap, + true, + never, + TResult, + TSelectedFields +>; diff --git a/drizzle-orm/src/mssql-core/query-builders/update.ts b/drizzle-orm/src/mssql-core/query-builders/update.ts new file mode 100644 index 0000000000..e185c2423d --- /dev/null +++ b/drizzle-orm/src/mssql-core/query-builders/update.ts @@ -0,0 +1,332 @@ +import type { GetColumnData } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { MsSqlDialect } from '~/mssql-core/dialect.ts'; +import type { + AnyQueryResultHKT, + MsSqlSession, + PreparedQueryConfig, + PreparedQueryHKTBase, + PreparedQueryKind, + QueryResultHKT, + QueryResultKind, +} from '~/mssql-core/session.ts'; +import type { MsSqlTable } from '~/mssql-core/table.ts'; +import type { SelectResultFields } from '~/query-builders/select.types.ts'; +import { QueryPromise } from '~/query-promise.ts'; +import type { ExtractObjectValues } from '~/relations.ts'; +import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { type InferInsertModel, Table } from '~/table.ts'; +import { mapUpdateSet, orderSelectedFields, type UpdateSet } from '~/utils.ts'; +import type { MsSqlColumn } from '../columns/common.ts'; +import type { SelectedFieldsFlatUpdate, SelectedFieldsOrdered } from './select.types.ts'; + +export interface MsSqlUpdateConfig { + where?: SQL | undefined; + set: UpdateSet; + table: MsSqlTable; + output?: { + inserted?: SelectedFieldsOrdered; + deleted?: SelectedFieldsOrdered; + }; +} + +export type MsSqlUpdateSetSource = + & { + [Key in keyof InferInsertModel]?: + | GetColumnData + | SQL; + } + & {}; + +export type NonUndefinedKeysOnly = + & ExtractObjectValues< + { + [K in keyof T as T[K] extends undefined ? never : K]: K; + } + > + & keyof T; + +export type FormSelection = { + [K in keyof T as T[K] extends undefined ? never : K]: T[K] extends true ? TTable['_']['columns'] : T[K]; +}; + +export type MsSqlUpdateReturning< + T extends AnyMsSqlUpdateBase, + TDynamic extends boolean, + SelectedFields extends SelectedFieldsFlatUpdate, +> = MsSqlUpdateWithout< + MsSqlUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + SelectResultFields>, + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + 'output' +>; + +export type MsSqlUpdateReturningAll = MsSqlUpdateWithout< + MsSqlUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['table']['$inferSelect'], + TDynamic, + T['_']['excludedMethods'] + >, + TDynamic, + 'output' +>; + +export class MsSqlUpdateBuilder< + TTable extends MsSqlTable, + TQueryResult extends QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, +> { + static readonly [entityKind]: string = 'MsSqlUpdateBuilder'; + + declare readonly _: { + readonly table: TTable; + }; + + constructor( + private table: TTable, + private session: MsSqlSession, + private dialect: MsSqlDialect, + ) {} + + set(values: MsSqlUpdateSetSource): MsSqlUpdateBase { + return new MsSqlUpdateBase(this.table, mapUpdateSet(this.table, values), this.session, this.dialect); + } +} + +export type MsSqlUpdateWithout< + T extends AnyMsSqlUpdateBase, + TDynamic extends boolean, + K extends keyof T & string, +> = TDynamic extends true ? T : Omit< + MsSqlUpdateBase< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['output'], + TDynamic, + T['_']['excludedMethods'] | K + >, + T['_']['excludedMethods'] | K +>; + +export type MsSqlUpdatePrepare = PreparedQueryKind< + T['_']['preparedQueryHKT'], + PreparedQueryConfig & { + execute: T['_']['output'] extends undefined ? QueryResultKind : T['_']['output'][]; + iterator: never; + } +>; + +export type MsSqlUpdateDynamic = MsSqlUpdate< + T['_']['table'], + T['_']['queryResult'], + T['_']['preparedQueryHKT'], + T['_']['output'] +>; + +export type MsSqlUpdate< + TTable extends MsSqlTable = MsSqlTable, + TQueryResult extends QueryResultHKT = AnyQueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TOutput extends Record | undefined = Record | undefined, +> = MsSqlUpdateBase; + +export type AnyMsSqlUpdateBase = MsSqlUpdateBase; + +export interface MsSqlUpdateBase< + TTable extends MsSqlTable, + TQueryResult extends QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TOutput extends Record | undefined = undefined, + TDynamic extends boolean = false, + TExcludedMethods extends string = never, +> extends QueryPromise : TOutput[]>, SQLWrapper { + readonly _: { + readonly table: TTable; + readonly queryResult: TQueryResult; + readonly preparedQueryHKT: TPreparedQueryHKT; + readonly output: TOutput; + readonly dynamic: TDynamic; + readonly excludedMethods: TExcludedMethods; + }; +} + +export class MsSqlUpdateBase< + TTable extends MsSqlTable, + TQueryResult extends QueryResultHKT, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TPreparedQueryHKT extends PreparedQueryHKTBase, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TOutput extends Record | undefined = undefined, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TDynamic extends boolean = false, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + TExcludedMethods extends string = never, +> extends QueryPromise : TOutput[]> + implements SQLWrapper +{ + static override readonly [entityKind]: string = 'MsSqlUpdate'; + + private config: MsSqlUpdateConfig; + + constructor( + table: TTable, + set: UpdateSet, + private session: MsSqlSession, + private dialect: MsSqlDialect, + ) { + super(); + this.config = { set, table }; + } + + /** + * Adds a 'where' clause to the query. + * + * Calling this method will update only those rows that fulfill a specified condition. + * + * See docs: {@link https://orm.drizzle.team/docs/update} + * + * @param where the 'where' clause. + * + * @example + * You can use conditional operators and `sql function` to filter the rows to be updated. + * + * ```ts + * // Update all cars with green color + * db.update(cars).set({ color: 'red' }) + * .where(eq(cars.color, 'green')); + * // or + * db.update(cars).set({ color: 'red' }) + * .where(sql`${cars.color} = 'green'`) + * ``` + * + * You can logically combine conditional operators with `and()` and `or()` operators: + * + * ```ts + * // Update all BMW cars with a green color + * db.update(cars).set({ color: 'red' }) + * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); + * + * // Update all cars with the green or blue color + * db.update(cars).set({ color: 'red' }) + * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); + * ``` + */ + where(where: SQL | undefined): MsSqlUpdateWithout { + this.config.where = where; + return this as any; + } + + /** + * Adds an `output` clause to the query. + * + * This method allows you to return values from the rows affected by the query. + * MSSQL supports returning `inserted` (new row values) and `deleted` (old row values) values. + * + * If no fields are specified, all `inserted` values will be returned by default. + * + * @example + * ```ts + * // Update cars and return all new values + * const updatedCars: Car[] = await db.update(cars) + * .set({ color: 'red' }) + * .output() + * .where(eq(cars.color, 'green')); + * + * // Update cars and return all old values + * const updatedCarsIds: { deleted: Car }[] = await db.update(cars) + * .set({ color: 'red' }) + * .output({ deleted: true }) + * .where(eq(cars.color, 'green')); + * + * // Update cars and return partial old and new values + * const beforeAndAfter: { deleted: { oldColor: string }, inserted: { newColor: string } }[] = await db.update(cars) + * .set({ color: 'red' }) + * .output({ + * deleted: { oldColor: cars.color }, + * inserted: { newColor: cars.color } + * }) + * .where(eq(cars.color, 'green')); + * ``` + */ + output(): MsSqlUpdateReturningAll; + output( + fields: TSelectedFields, + ): MsSqlUpdateReturning; + output( + fields?: SelectedFieldsFlatUpdate, + ): MsSqlUpdateWithout { + const columns = this.config.table[Table.Symbol.Columns]; + + if (fields) { + const output: typeof this.config.output = {}; + + if (fields.inserted) { + output.inserted = typeof fields.inserted === 'boolean' + ? orderSelectedFields(columns, ['inserted']) + : orderSelectedFields(fields.inserted, ['inserted']); + } + + if (fields.deleted) { + output.deleted = typeof fields.deleted === 'boolean' + ? orderSelectedFields(columns, ['deleted']) + : orderSelectedFields(fields.deleted, ['deleted']); + } + + this.config.output = output; + } else { + this.config.output = { + inserted: orderSelectedFields(columns), + }; + } + + return this as any; + } + + /** @internal */ + getSQL(): SQL { + return this.dialect.buildUpdateQuery(this.config); + } + + toSQL(): Query { + const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); + return rest; + } + + prepare(): MsSqlUpdatePrepare { + const output = [...(this.config.output?.inserted ?? []), ...(this.config.output?.deleted ?? [])]; + + return this.session.prepareQuery( + this.dialect.sqlToQuery(this.getSQL()), + output.length ? output : undefined, + ) as MsSqlUpdatePrepare; + } + + override execute( + placeholderValues?: Record, + ): Promise : TOutput[]> { + return this.prepare().execute(placeholderValues) as any; + } + + private createIterator = (): ReturnType['iterator'] => { + const self = this; + return async function*(placeholderValues) { + yield* self.prepare().iterator(placeholderValues); + }; + }; + + iterator = this.createIterator(); + + $dynamic(): MsSqlUpdateDynamic { + return this as any; + } +} diff --git a/drizzle-orm/src/mssql-core/schema.ts b/drizzle-orm/src/mssql-core/schema.ts new file mode 100644 index 0000000000..b4b799863c --- /dev/null +++ b/drizzle-orm/src/mssql-core/schema.ts @@ -0,0 +1,29 @@ +import { entityKind } from '~/entity.ts'; +import { type MsSqlTableFn, mssqlTableWithSchema } from './table.ts'; +import { type mssqlView, mssqlViewWithSchema } from './view.ts'; + +export class MsSqlSchema { + static readonly [entityKind]: string = 'MsSqlSchema'; + + isExisting: boolean = false; + constructor( + public readonly schemaName: TName, + ) {} + + table: MsSqlTableFn = (name, columns, extraConfig) => { + return mssqlTableWithSchema(name, columns, extraConfig, this.schemaName); + }; + + view = ((name, columns) => { + return mssqlViewWithSchema(name, columns, this.schemaName); + }) as typeof mssqlView; + + existing(): this { + this.isExisting = true; + return this; + } +} + +export function mssqlSchema(name: TName) { + return new MsSqlSchema(name); +} diff --git a/drizzle-orm/src/mssql-core/session.ts b/drizzle-orm/src/mssql-core/session.ts new file mode 100644 index 0000000000..73379717cf --- /dev/null +++ b/drizzle-orm/src/mssql-core/session.ts @@ -0,0 +1,131 @@ +import type * as V1 from '~/_relations.ts'; +import { entityKind } from '~/entity.ts'; +import { TransactionRollbackError } from '~/errors.ts'; +import { type Query, type SQL, sql } from '~/sql/sql.ts'; +import type { Assume, Equal } from '~/utils.ts'; +import { MsSqlDatabase } from './db.ts'; +import type { MsSqlDialect } from './dialect.ts'; +import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; + +export interface QueryResultHKT { + readonly $brand: 'MsSqlQueryRowHKT'; + readonly row: unknown; + readonly type: unknown; +} + +export interface AnyQueryResultHKT extends QueryResultHKT { + readonly type: any; +} + +export type QueryResultKind = (TKind & { + readonly row: TRow; +})['type']; + +export interface PreparedQueryConfig { + execute: unknown; + iterator: unknown; +} + +export interface PreparedQueryHKT { + readonly $brand: 'MsSqlPreparedQueryHKT'; + readonly config: unknown; + readonly type: unknown; +} + +export type PreparedQueryKind< + TKind extends PreparedQueryHKT, + TConfig extends PreparedQueryConfig, + TAssume extends boolean = false, +> = Equal extends true ? Assume<(TKind & { readonly config: TConfig })['type'], PreparedQuery> + : (TKind & { readonly config: TConfig })['type']; + +export abstract class PreparedQuery { + static readonly [entityKind]: string = 'MsSqlPreparedQuery'; + + /** @internal */ + joinsNotNullableMap?: Record; + + abstract execute(placeholderValues?: Record): Promise; + + abstract iterator(placeholderValues?: Record): AsyncGenerator; +} + +export interface MsSqlTransactionConfig { + isolationLevel: 'read uncommitted' | 'read committed' | 'repeatable read' | 'serializable' | 'snapshot'; +} + +export abstract class MsSqlSession< + TQueryResult extends QueryResultHKT = QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, + TFullSchema extends Record = Record, + TSchema extends V1.TablesRelationalConfig = Record, +> { + static readonly [entityKind]: string = 'MsSqlSession'; + + constructor(protected dialect: MsSqlDialect) {} + + abstract prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + customResultMapper?: (rows: unknown[][]) => T['execute'], + ): PreparedQueryKind; + + execute(query: SQL): Promise { + return this.prepareQuery( + this.dialect.sqlToQuery(query), + undefined, + ).execute(); + } + + abstract all(query: SQL): Promise; + + abstract transaction( + transaction: (tx: MsSqlTransaction) => Promise, + config?: MsSqlTransactionConfig, + ): Promise; + + protected getSetTransactionSQL(config: MsSqlTransactionConfig): SQL | undefined { + const parts: string[] = []; + + if (config.isolationLevel) { + parts.push(`isolation level ${config.isolationLevel}`); + } + + return parts.length ? sql.join(['set transaction ', parts.join(' ')]) : undefined; + } + + protected getStartTransactionSQL(_config: MsSqlTransactionConfig): SQL | undefined { + return sql`begin transaction`; + } +} + +export abstract class MsSqlTransaction< + TQueryResult extends QueryResultHKT, + TPreparedQueryHKT extends PreparedQueryHKTBase, + TFullSchema extends Record = Record, + TSchema extends V1.TablesRelationalConfig = Record, +> extends MsSqlDatabase { + static override readonly [entityKind]: string = 'MsSqlTransaction'; + + constructor( + dialect: MsSqlDialect, + session: MsSqlSession, + protected schema: V1.RelationalSchemaConfig | undefined, + protected readonly nestedIndex: number, + ) { + super(dialect, session, schema); + } + + rollback(): never { + throw new TransactionRollbackError(); + } + + /** Nested transactions (aka savepoints) only work with InnoDB engine. */ + abstract override transaction( + transaction: (tx: MsSqlTransaction) => Promise, + ): Promise; +} + +export interface PreparedQueryHKTBase extends PreparedQueryHKT { + type: PreparedQuery>; +} diff --git a/drizzle-orm/src/mssql-core/subquery.ts b/drizzle-orm/src/mssql-core/subquery.ts new file mode 100644 index 0000000000..329742b95d --- /dev/null +++ b/drizzle-orm/src/mssql-core/subquery.ts @@ -0,0 +1,35 @@ +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; +import type { QueryBuilder } from './query-builders/query-builder.ts'; + +export type SubqueryWithSelection< + TSelection extends ColumnsSelection, + TAlias extends string, +> = + & Subquery> + & AddAliasToSelection; + +export type WithSubqueryWithSelection< + TSelection extends ColumnsSelection, + TAlias extends string, +> = + & WithSubquery> + & AddAliasToSelection; + +export interface WithBuilder { + (alias: TAlias): { + as: { + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection; + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithoutSelection; + }; + }; + (alias: TAlias, selection: TSelection): { + as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; + }; +} diff --git a/drizzle-orm/src/mssql-core/table.ts b/drizzle-orm/src/mssql-core/table.ts new file mode 100644 index 0000000000..664844a8c2 --- /dev/null +++ b/drizzle-orm/src/mssql-core/table.ts @@ -0,0 +1,158 @@ +import type { BuildColumns, BuildExtraConfigColumns, ColumnBuilderBase } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import { + type InferTableColumnsModels, + Table, + type TableConfig as TableConfigBase, + type UpdateTableConfig, +} from '~/table.ts'; +import type { CheckBuilder } from './checks.ts'; +import { getMsSqlColumnBuilders, type MsSqlColumnBuilders } from './columns/all.ts'; +import type { MsSqlColumn, MsSqlColumnBuilder, MsSqlColumns } from './columns/common.ts'; +import type { ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; +import type { AnyIndexBuilder } from './indexes.ts'; +import type { PrimaryKeyBuilder } from './primary-keys.ts'; +import type { UniqueConstraintBuilder } from './unique-constraint.ts'; + +export type MsSqlTableExtraConfigValue = + | AnyIndexBuilder + | CheckBuilder + | ForeignKeyBuilder + | PrimaryKeyBuilder + | UniqueConstraintBuilder; + +export type MsSqlTableExtraConfig = Record< + string, + MsSqlTableExtraConfigValue +>; + +export type TableConfig = TableConfigBase; + +/** @internal */ +export const InlineForeignKeys = Symbol.for('drizzle:MsSqlInlineForeignKeys'); + +export class MsSqlTable extends Table { + static override readonly [entityKind]: string = 'MsSqlTable'; + + /** @internal */ + static override readonly Symbol = Object.assign({}, Table.Symbol, { + InlineForeignKeys: InlineForeignKeys as typeof InlineForeignKeys, + }); + + /** @internal */ + override [Table.Symbol.Columns]!: NonNullable; + + /** @internal */ + [InlineForeignKeys]: ForeignKey[] = []; + + /** @internal */ + override [Table.Symbol.ExtraConfigBuilder]: + | ((self: Record) => MsSqlTableExtraConfig) + | undefined = undefined; +} + +export type AnyMsSqlTable = {}> = MsSqlTable< + UpdateTableConfig +>; + +export type MsSqlTableWithColumns = + & MsSqlTable + & T['columns'] + & InferTableColumnsModels; + +export function mssqlTableWithSchema< + TTableName extends string, + TSchemaName extends string | undefined, + TColumnsMap extends Record, +>( + name: TTableName, + columns: TColumnsMap | ((columnTypes: MsSqlColumnBuilders) => TColumnsMap), + extraConfig: + | (( + self: BuildExtraConfigColumns, + ) => MsSqlTableExtraConfig | MsSqlTableExtraConfigValue[]) + | undefined, + schema: TSchemaName, + baseName = name, +): MsSqlTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'mssql'; +}> { + const rawTable = new MsSqlTable<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'mssql'; + }>(name, schema, baseName); + + const parsedColumns: TColumnsMap = typeof columns === 'function' ? columns(getMsSqlColumnBuilders()) : columns; + + const builtColumns = Object.fromEntries( + Object.entries(parsedColumns).map(([name, colBuilderBase]) => { + const colBuilder = colBuilderBase as MsSqlColumnBuilder; + colBuilder.setName(name); + const column = colBuilder.build(rawTable); + rawTable[InlineForeignKeys].push(...colBuilder.buildForeignKeys(column, rawTable)); + return [name, column]; + }), + ) as unknown as BuildColumns; + + const table = Object.assign(rawTable, builtColumns); + + table[Table.Symbol.Columns] = builtColumns; + table[Table.Symbol.ExtraConfigColumns] = builtColumns as unknown as BuildExtraConfigColumns< + TTableName, + TColumnsMap, + 'mssql' + >; + + if (extraConfig) { + table[MsSqlTable.Symbol.ExtraConfigBuilder] = extraConfig as unknown as ( + self: Record, + ) => MsSqlTableExtraConfig; + } + + return table as any; +} + +export interface MsSqlTableFn { + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: TColumnsMap, + extraConfig?: (self: BuildExtraConfigColumns) => MsSqlTableExtraConfigValue[], + ): MsSqlTableWithColumns<{ + name: TTableName; + schema: TSchema; + columns: BuildColumns; + dialect: 'mssql'; + }>; + + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: (columnTypes: MsSqlColumnBuilders) => TColumnsMap, + extraConfig?: (self: BuildExtraConfigColumns) => MsSqlTableExtraConfigValue[], + ): MsSqlTableWithColumns<{ + name: TTableName; + schema: TSchema; + columns: BuildColumns; + dialect: 'mssql'; + }>; +} + +export const mssqlTable: MsSqlTableFn = (name, columns, extraConfig) => { + return mssqlTableWithSchema(name, columns, extraConfig, undefined, name); +}; + +export function mssqlTableCreator(customizeTableName: (name: string) => string): MsSqlTableFn { + return (name, columns, extraConfig) => { + return mssqlTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); + }; +} diff --git a/drizzle-orm/src/mssql-core/unique-constraint.ts b/drizzle-orm/src/mssql-core/unique-constraint.ts new file mode 100644 index 0000000000..98f3cb1e8e --- /dev/null +++ b/drizzle-orm/src/mssql-core/unique-constraint.ts @@ -0,0 +1,62 @@ +import { entityKind } from '~/entity.ts'; +import type { MsSqlColumn } from './columns/index.ts'; +import type { MsSqlTable } from './table.ts'; + +export function unique(name?: string): UniqueOnConstraintBuilder { + return new UniqueOnConstraintBuilder(name); +} + +export class UniqueConstraintBuilder { + static readonly [entityKind]: string = 'MsSqlUniqueConstraintBuilder'; + + /** @internal */ + columns: MsSqlColumn[]; + + constructor( + columns: MsSqlColumn[], + private name?: string, + ) { + this.columns = columns; + } + + /** @internal */ + build(table: MsSqlTable): UniqueConstraint { + return new UniqueConstraint(table, this.columns, this.name); + } +} + +export class UniqueOnConstraintBuilder { + static readonly [entityKind]: string = 'MsSqlUniqueOnConstraintBuilder'; + + /** @internal */ + name?: string; + + constructor( + name?: string, + ) { + this.name = name; + } + + on(...columns: [MsSqlColumn, ...MsSqlColumn[]]) { + return new UniqueConstraintBuilder(columns, this.name); + } +} + +export class UniqueConstraint { + static readonly [entityKind]: string = 'MsSqlUniqueConstraint'; + + readonly columns: MsSqlColumn[]; + readonly name?: string; + readonly nullsNotDistinct: boolean = false; + readonly isNameExplicit: boolean; + + constructor(readonly table: MsSqlTable, columns: MsSqlColumn[], name?: string) { + this.columns = columns; + this.name = name; + this.isNameExplicit = !!name; + } + + getName() { + return this.name; + } +} diff --git a/drizzle-orm/src/mssql-core/utils.ts b/drizzle-orm/src/mssql-core/utils.ts new file mode 100644 index 0000000000..4cc73afadf --- /dev/null +++ b/drizzle-orm/src/mssql-core/utils.ts @@ -0,0 +1,68 @@ +import { is } from '~/entity.ts'; +import { Table } from '~/table.ts'; +import { ViewBaseConfig } from '~/view-common.ts'; +import type { Check } from './checks.ts'; +import { CheckBuilder } from './checks.ts'; +import type { ForeignKey } from './foreign-keys.ts'; +import { ForeignKeyBuilder } from './foreign-keys.ts'; +import type { Index } from './indexes.ts'; +import { IndexBuilder } from './indexes.ts'; +import type { PrimaryKey } from './primary-keys.ts'; +import { PrimaryKeyBuilder } from './primary-keys.ts'; +import { MsSqlTable } from './table.ts'; +import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; +import { MsSqlViewConfig } from './view-common.ts'; +import type { MsSqlView } from './view.ts'; + +export function getTableConfig(table: MsSqlTable) { + const columns = Object.values(table[MsSqlTable.Symbol.Columns]); + const indexes: Index[] = []; + const checks: Check[] = []; + const primaryKeys: PrimaryKey[] = []; + const uniqueConstraints: UniqueConstraint[] = []; + const foreignKeys: ForeignKey[] = Object.values(table[MsSqlTable.Symbol.InlineForeignKeys]); + const name = table[Table.Symbol.Name]; + const schema = table[Table.Symbol.Schema]; + const baseName = table[Table.Symbol.BaseName]; + + const extraConfigBuilder = table[MsSqlTable.Symbol.ExtraConfigBuilder]; + + if (extraConfigBuilder !== undefined) { + const extraConfig = extraConfigBuilder(table[MsSqlTable.Symbol.Columns]); + for (const builder of Object.values(extraConfig)) { + if (is(builder, IndexBuilder)) { + indexes.push(builder.build(table)); + } else if (is(builder, CheckBuilder)) { + checks.push(builder.build(table)); + } else if (is(builder, UniqueConstraintBuilder)) { + uniqueConstraints.push(builder.build(table)); + } else if (is(builder, PrimaryKeyBuilder)) { + primaryKeys.push(builder.build(table)); + } else if (is(builder, ForeignKeyBuilder)) { + foreignKeys.push(builder.build(table)); + } + } + } + + return { + columns, + indexes, + foreignKeys, + checks, + primaryKeys, + uniqueConstraints, + name, + schema, + baseName, + }; +} + +export function getViewConfig< + TName extends string = string, + TExisting extends boolean = boolean, +>(view: MsSqlView) { + return { + ...view[ViewBaseConfig], + ...view[MsSqlViewConfig], + }; +} diff --git a/drizzle-orm/src/mssql-core/view-base.ts b/drizzle-orm/src/mssql-core/view-base.ts new file mode 100644 index 0000000000..5ad76153d0 --- /dev/null +++ b/drizzle-orm/src/mssql-core/view-base.ts @@ -0,0 +1,15 @@ +import { entityKind } from '~/entity.ts'; +import type { ColumnsSelection } from '~/sql/sql.ts'; +import { View } from '~/sql/sql.ts'; + +export abstract class MsSqlViewBase< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends View { + static override readonly [entityKind]: string = 'MsSqlViewBase'; + + declare readonly _: View['_'] & { + readonly viewBrand: 'MsSqlViewBase'; + }; +} diff --git a/drizzle-orm/src/mssql-core/view-common.ts b/drizzle-orm/src/mssql-core/view-common.ts new file mode 100644 index 0000000000..fb97254b0f --- /dev/null +++ b/drizzle-orm/src/mssql-core/view-common.ts @@ -0,0 +1 @@ +export const MsSqlViewConfig = Symbol.for('drizzle:MsSqlViewConfig'); diff --git a/drizzle-orm/src/mssql-core/view.ts b/drizzle-orm/src/mssql-core/view.ts new file mode 100644 index 0000000000..755a27370e --- /dev/null +++ b/drizzle-orm/src/mssql-core/view.ts @@ -0,0 +1,194 @@ +import type { BuildColumns, ColumnBuilderBase } from '~/column-builder.ts'; +import { entityKind } from '~/entity.ts'; +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; +import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import { getTableColumns } from '~/utils.ts'; +import type { MsSqlColumn } from './columns/index.ts'; +import { QueryBuilder } from './query-builders/query-builder.ts'; +import type { SelectedFields } from './query-builders/select.types.ts'; +import { mssqlTable } from './table.ts'; +import { MsSqlViewBase } from './view-base.ts'; +import { MsSqlViewConfig } from './view-common.ts'; + +export interface ViewBuilderConfig { + encryption?: boolean; + schemaBinding?: boolean; + viewMetadata?: boolean; + checkOption?: boolean; +} + +export class ViewBuilderCore { + static readonly [entityKind]: string = 'MsSqlViewBuilder'; + + declare readonly _: { + readonly name: TConfig['name']; + readonly columns: TConfig['columns']; + }; + + constructor( + protected name: TConfig['name'], + protected schema: string | undefined, + ) {} + + protected config: ViewBuilderConfig = { + encryption: false, + schemaBinding: false, + viewMetadata: false, + }; + + with( + config?: ViewBuilderConfig, + ): this { + this.config.encryption = config?.encryption; + this.config.schemaBinding = config?.schemaBinding; + this.config.viewMetadata = config?.viewMetadata; + this.config.checkOption = config?.checkOption; + return this; + } +} + +export class ViewBuilder extends ViewBuilderCore<{ name: TName }> { + static override readonly [entityKind]: string = 'MsSqlViewBuilder'; + + as( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): MsSqlViewWithSelection> { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder()); + } + const selectionProxy = new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }); + const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); + return new Proxy( + new MsSqlView({ + mssqlConfig: this.config, + config: { + name: this.name, + schema: this.schema, + selectedFields: aliasedSelection, + query: qb.getSQL().inlineParams(), + }, + }), + selectionProxy as any, + ) as MsSqlViewWithSelection>; + } +} + +export class ManualViewBuilder< + TName extends string = string, + TColumns extends Record = Record, +> extends ViewBuilderCore<{ name: TName; columns: TColumns }> { + static override readonly [entityKind]: string = 'MsSqlManualViewBuilder'; + + private columns: Record; + + constructor( + name: TName, + columns: TColumns, + schema: string | undefined, + ) { + super(name, schema); + this.columns = getTableColumns(mssqlTable(name, columns)) as BuildColumns; + } + + existing(): MsSqlViewWithSelection> { + return new Proxy( + new MsSqlView({ + mssqlConfig: undefined, + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: undefined, + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as MsSqlViewWithSelection>; + } + + as(query: SQL): MsSqlViewWithSelection> { + return new Proxy( + new MsSqlView({ + mssqlConfig: this.config, + config: { + name: this.name, + schema: this.schema, + selectedFields: this.columns, + query: query.inlineParams(), + }, + }), + new SelectionProxyHandler({ + alias: this.name, + sqlBehavior: 'error', + sqlAliasedBehavior: 'alias', + replaceOriginalName: true, + }), + ) as MsSqlViewWithSelection>; + } +} + +export class MsSqlView< + TName extends string = string, + TExisting extends boolean = boolean, + TSelectedFields extends ColumnsSelection = ColumnsSelection, +> extends MsSqlViewBase { + static override readonly [entityKind]: string = 'MsSqlView'; + + declare protected $MsSqlViewBrand: 'MsSqlView'; + + [MsSqlViewConfig]: ViewBuilderConfig | undefined; + + constructor({ mssqlConfig, config }: { + mssqlConfig: ViewBuilderConfig | undefined; + config: { + name: TName; + schema: string | undefined; + selectedFields: SelectedFields; + query: SQL | undefined; + }; + }) { + super(config); + this[MsSqlViewConfig] = mssqlConfig; + } +} + +export type MsSqlViewWithSelection< + TName extends string, + TExisting extends boolean, + TSelectedFields extends ColumnsSelection, +> = MsSqlView & TSelectedFields; + +/** @internal */ +export function mssqlViewWithSchema( + name: string, + selection: Record | undefined, + schema: string | undefined, +): ViewBuilder | ManualViewBuilder { + if (selection) { + return new ManualViewBuilder(name, selection, schema); + } + return new ViewBuilder(name, schema); +} + +export function mssqlView(name: TName): ViewBuilder; +export function mssqlView>( + name: TName, + columns: TColumns, +): ManualViewBuilder; +export function mssqlView( + name: string, + selection?: Record, +): ViewBuilder | ManualViewBuilder { + return mssqlViewWithSchema(name, selection, undefined); +} diff --git a/drizzle-orm/src/mysql-core/columns/all.ts b/drizzle-orm/src/mysql-core/columns/all.ts index 44c03eff0c..23e3014d1b 100644 --- a/drizzle-orm/src/mysql-core/columns/all.ts +++ b/drizzle-orm/src/mysql-core/columns/all.ts @@ -1,5 +1,6 @@ import { bigint } from './bigint.ts'; import { binary } from './binary.ts'; +import { blob, longblob, mediumblob, tinyblob } from './blob.ts'; import { boolean } from './boolean.ts'; import { char } from './char.ts'; import { customType } from './custom.ts'; @@ -52,6 +53,10 @@ export function getMySqlColumnBuilders() { longtext, mediumtext, tinytext, + blob, + longblob, + mediumblob, + tinyblob, }; } diff --git a/drizzle-orm/src/mysql-core/columns/bigint.ts b/drizzle-orm/src/mysql-core/columns/bigint.ts index 887bb67638..831ce0b966 100644 --- a/drizzle-orm/src/mysql-core/columns/bigint.ts +++ b/drizzle-orm/src/mysql-core/columns/bigint.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export class MySqlBigInt53Builder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint53' : 'number int53'; data: number; driverParam: number | string; @@ -44,7 +43,6 @@ export class MySqlBigInt53 extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'bigint uint64' : 'bigint int64'; data: bigint; driverParam: string; @@ -88,7 +86,6 @@ export class MySqlBigInt64 extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'string uint64' : 'string int64'; data: string; driverParam: number | string; diff --git a/drizzle-orm/src/mysql-core/columns/binary.ts b/drizzle-orm/src/mysql-core/columns/binary.ts index 099042d77f..6915aae6c4 100644 --- a/drizzle-orm/src/mysql-core/columns/binary.ts +++ b/drizzle-orm/src/mysql-core/columns/binary.ts @@ -6,12 +6,11 @@ import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export class MySqlBinaryBuilder extends MySqlColumnBuilder< { - name: string; dataType: 'string binary'; data: string; driverParam: string; }, - MySqlBinaryConfig & { setLength: boolean; isLengthExact: true } + MySqlBinaryConfig & { setLength: boolean } > { static override readonly [entityKind]: string = 'MySqlBinaryBuilder'; @@ -19,7 +18,6 @@ export class MySqlBinaryBuilder extends MySqlColumnBuilder< super(name, 'string binary', 'MySqlBinary'); this.config.length = length ?? 1; this.config.setLength = length !== undefined; - this.config.isLengthExact = true; } /** @internal */ diff --git a/drizzle-orm/src/mysql-core/columns/blob.ts b/drizzle-orm/src/mysql-core/columns/blob.ts new file mode 100644 index 0000000000..a2df39da3c --- /dev/null +++ b/drizzle-orm/src/mysql-core/columns/blob.ts @@ -0,0 +1,206 @@ +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import type { MySqlTable } from '~/mysql-core/table.ts'; +import { type Equal, getColumnNameAndConfig, textDecoder } from '~/utils.ts'; +import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; + +export type MySqlBlobColumnType = 'tinyblob' | 'blob' | 'mediumblob' | 'longblob'; + +export class MySqlStringBlobBuilder extends MySqlColumnBuilder< + { + dataType: 'string'; + data: string; + driverParam: string; + }, + { blobType: MySqlBlobColumnType; length: number } +> { + static override readonly [entityKind]: string = 'MySqlBlobBuilder'; + + constructor(name: string, blobType: MySqlBlobColumnType) { + super(name, 'string', 'MySqlBlob'); + this.config.blobType = blobType; + switch (blobType) { + case 'tinyblob': { + this.config.length = 255; + break; + } + case 'blob': { + this.config.length = 65535; + break; + } + case 'mediumblob': { + this.config.length = 16777215; + break; + } + case 'longblob': { + this.config.length = 4294967295; + break; + } + } + } + + /** @internal */ + override build(table: MySqlTable) { + return new MySqlStringBlob(table, this.config as any); + } +} + +export class MySqlStringBlob> + extends MySqlColumn +{ + static override readonly [entityKind]: string = 'MySqlBlob'; + + readonly blobType: MySqlBlobColumnType = this.config.blobType; + + getSQLType(): string { + return this.blobType; + } + + override mapFromDriverValue(value: Buffer | Uint8Array | ArrayBuffer): T['data'] { + if (typeof Buffer !== 'undefined' && Buffer.from) { + const buf = Buffer.isBuffer(value) + ? value + // oxlint-disable-next-line drizzle-internal/no-instanceof + : value instanceof ArrayBuffer + ? Buffer.from(value) + : value.buffer + ? Buffer.from(value.buffer, value.byteOffset, value.byteLength) + : Buffer.from(value); + return buf.toString('utf8'); + } + + return textDecoder!.decode(value as ArrayBuffer); + } +} + +export class MySqlBufferBlobBuilder extends MySqlColumnBuilder< + { + dataType: 'string'; + data: Buffer; + driverParam: string; + }, + { blobType: MySqlBlobColumnType; length: number } +> { + static override readonly [entityKind]: string = 'MySqlBlobBuilder'; + + constructor(name: string, blobType: MySqlBlobColumnType) { + super(name, 'string', 'MySqlBlob'); + this.config.blobType = blobType; + switch (blobType) { + case 'tinyblob': { + this.config.length = 255; + break; + } + case 'blob': { + this.config.length = 65535; + break; + } + case 'mediumblob': { + this.config.length = 16777215; + break; + } + case 'longblob': { + this.config.length = 4294967295; + break; + } + } + } + + /** @internal */ + override build(table: MySqlTable) { + return new MySqlBufferBlob(table, this.config as any); + } +} +export class MySqlBufferBlob> + extends MySqlColumn +{ + static override readonly [entityKind]: string = 'MySqlBlob'; + + readonly blobType: MySqlBlobColumnType = this.config.blobType; + + getSQLType(): string { + return this.blobType; + } + + override mapFromDriverValue(value: Buffer | Uint8Array | ArrayBuffer): T['data'] { + if (Buffer.isBuffer(value)) { + return value; + } + + return Buffer.from(value as Uint8Array); + } +} + +export interface MySqlBlobConfig< + TMode extends 'buffer' | 'string' = 'buffer' | 'string', +> { + mode?: TMode; +} + +export function blob( + config?: MySqlBlobConfig, +): Equal extends true ? MySqlStringBlobBuilder + : MySqlBufferBlobBuilder; +export function blob( + name: string, + config?: MySqlBlobConfig, +): Equal extends true ? MySqlStringBlobBuilder + : MySqlBufferBlobBuilder; +export function blob(a?: string | MySqlBlobConfig, b: MySqlBlobConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new MySqlStringBlobBuilder(name, 'blob'); + } + return new MySqlBufferBlobBuilder(name, 'blob'); +} + +export function tinyblob( + config?: MySqlBlobConfig, +): Equal extends true ? MySqlStringBlobBuilder + : MySqlBufferBlobBuilder; +export function tinyblob( + name: string, + config?: MySqlBlobConfig, +): Equal extends true ? MySqlStringBlobBuilder + : MySqlBufferBlobBuilder; +export function tinyblob(a?: string | MySqlBlobConfig, b: MySqlBlobConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new MySqlStringBlobBuilder(name, 'tinyblob'); + } + return new MySqlBufferBlobBuilder(name, 'tinyblob'); +} + +export function mediumblob( + config?: MySqlBlobConfig, +): Equal extends true ? MySqlStringBlobBuilder + : MySqlBufferBlobBuilder; +export function mediumblob( + name: string, + config?: MySqlBlobConfig, +): Equal extends true ? MySqlStringBlobBuilder + : MySqlBufferBlobBuilder; +export function mediumblob(a?: string | MySqlBlobConfig, b: MySqlBlobConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new MySqlStringBlobBuilder(name, 'mediumblob'); + } + return new MySqlBufferBlobBuilder(name, 'mediumblob'); +} + +export function longblob( + config?: MySqlBlobConfig, +): Equal extends true ? MySqlStringBlobBuilder + : MySqlBufferBlobBuilder; +export function longblob( + name: string, + config?: MySqlBlobConfig, +): Equal extends true ? MySqlStringBlobBuilder + : MySqlBufferBlobBuilder; +export function longblob(a?: string | MySqlBlobConfig, b: MySqlBlobConfig = {}): any { + const { name, config } = getColumnNameAndConfig(a, b); + if (config?.mode === 'string') { + return new MySqlStringBlobBuilder(name, 'longblob'); + } + return new MySqlBufferBlobBuilder(name, 'longblob'); +} diff --git a/drizzle-orm/src/mysql-core/columns/boolean.ts b/drizzle-orm/src/mysql-core/columns/boolean.ts index 489ac918a0..442b2ab2be 100644 --- a/drizzle-orm/src/mysql-core/columns/boolean.ts +++ b/drizzle-orm/src/mysql-core/columns/boolean.ts @@ -4,7 +4,6 @@ import type { MySqlTable } from '~/mysql-core/table.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export class MySqlBooleanBuilder extends MySqlColumnBuilder<{ - name: string; dataType: 'boolean'; data: boolean; driverParam: number | boolean; diff --git a/drizzle-orm/src/mysql-core/columns/char.ts b/drizzle-orm/src/mysql-core/columns/char.ts index f16f3f4378..6bb5ac5c22 100644 --- a/drizzle-orm/src/mysql-core/columns/char.ts +++ b/drizzle-orm/src/mysql-core/columns/char.ts @@ -2,19 +2,18 @@ import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; -import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; +import { MySqlStringBaseColumn, MySqlStringColumnBaseBuilder } from './string.common.ts'; export class MySqlCharBuilder< TEnum extends [string, ...string[]], -> extends MySqlColumnBuilder< +> extends MySqlStringColumnBaseBuilder< { - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; driverParam: number | string; enumValues: TEnum; }, - { enum?: TEnum; length: number; setLength: boolean; isLengthExact: true } + { enum?: TEnum; length: number; setLength: boolean } > { static override readonly [entityKind]: string = 'MySqlCharBuilder'; @@ -23,7 +22,6 @@ export class MySqlCharBuilder< this.config.length = (config.length ?? 1) as number; this.config.setLength = config.length !== undefined; this.config.enum = config.enum; - this.config.isLengthExact = true; } /** @internal */ @@ -36,7 +34,7 @@ export class MySqlCharBuilder< } export class MySqlChar> - extends MySqlColumn + extends MySqlStringBaseColumn { static override readonly [entityKind]: string = 'MySqlChar'; diff --git a/drizzle-orm/src/mysql-core/columns/common.ts b/drizzle-orm/src/mysql-core/columns/common.ts index be624a9fd0..7c78f8281e 100644 --- a/drizzle-orm/src/mysql-core/columns/common.ts +++ b/drizzle-orm/src/mysql-core/columns/common.ts @@ -16,7 +16,6 @@ import { ForeignKeyBuilder } from '~/mysql-core/foreign-keys.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import type { Update } from '~/utils.ts'; -import { uniqueKeyName } from '../unique-constraint.ts'; export type MySqlColumns = Record>; @@ -102,9 +101,6 @@ export abstract class MySqlColumn< table: MySqlTable, config: ColumnBuilderRuntimeConfig & TRuntimeConfig, ) { - if (!config.uniqueName) { - config.uniqueName = uniqueKeyName(table, [config.name]); - } super(table, config); this.table = table; } @@ -125,7 +121,7 @@ export abstract class MySqlColumnBuilderWithAutoIncrement< > extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlColumnBuilderWithAutoIncrement'; - constructor(name: NonNullable, dataType: T['dataType'], columnType: string) { + constructor(name: string, dataType: T['dataType'], columnType: string) { super(name, dataType, columnType); this.config.autoIncrement = false; } diff --git a/drizzle-orm/src/mysql-core/columns/custom.ts b/drizzle-orm/src/mysql-core/columns/custom.ts index 49a517d80e..ed77b0119a 100644 --- a/drizzle-orm/src/mysql-core/columns/custom.ts +++ b/drizzle-orm/src/mysql-core/columns/custom.ts @@ -8,7 +8,6 @@ import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { - name: string; dataType: 'custom'; data: T['data']; driverParam: T['driverData']; @@ -363,6 +362,6 @@ export function customType( b?: T['config'], ): MySqlCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); - return new MySqlCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); + return new MySqlCustomColumnBuilder(name, config, customTypeParams); }; } diff --git a/drizzle-orm/src/mysql-core/columns/date.common.ts b/drizzle-orm/src/mysql-core/columns/date.common.ts index fdf92ebaa9..3c4bfae8e7 100644 --- a/drizzle-orm/src/mysql-core/columns/date.common.ts +++ b/drizzle-orm/src/mysql-core/columns/date.common.ts @@ -3,9 +3,11 @@ import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import { sql } from '~/sql/sql.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; +import type { TimestampFsp } from './timestamp.ts'; export interface MySqlDateColumnBaseConfig { hasOnUpdateNow: boolean; + onUpdateNowFsp: TimestampFsp | undefined; } export abstract class MySqlDateColumnBaseBuilder< @@ -20,8 +22,9 @@ export abstract class MySqlDateColumnBaseBuilder< } // "on update now" also adds an implicit default value to the column - https://dev.mysql.com/doc/refman/8.0/en/timestamp-ization.html - onUpdateNow(): HasDefault { + onUpdateNow(config?: { fsp: TimestampFsp }): HasDefault { this.config.hasOnUpdateNow = true; + this.config.onUpdateNowFsp = config?.fsp; this.config.hasDefault = true; return this as HasDefault; } @@ -34,4 +37,5 @@ export abstract class MySqlDateBaseColumn< static override readonly [entityKind]: string = 'MySqlDateColumn'; readonly hasOnUpdateNow: boolean = this.config.hasOnUpdateNow; + readonly onUpdateNowFsp: TimestampFsp | undefined = this.config.onUpdateNowFsp; } diff --git a/drizzle-orm/src/mysql-core/columns/date.ts b/drizzle-orm/src/mysql-core/columns/date.ts index a40dfcaab7..75018a4887 100644 --- a/drizzle-orm/src/mysql-core/columns/date.ts +++ b/drizzle-orm/src/mysql-core/columns/date.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export class MySqlDateBuilder extends MySqlColumnBuilder<{ - name: string; dataType: 'object date'; data: Date; driverParam: string | number; @@ -42,7 +41,6 @@ export class MySqlDate> extends MySqlC } export class MySqlDateStringBuilder extends MySqlColumnBuilder<{ - name: string; dataType: 'string date'; data: string; driverParam: string | number; diff --git a/drizzle-orm/src/mysql-core/columns/datetime.ts b/drizzle-orm/src/mysql-core/columns/datetime.ts index 0d04a6b010..e45f0de89e 100644 --- a/drizzle-orm/src/mysql-core/columns/datetime.ts +++ b/drizzle-orm/src/mysql-core/columns/datetime.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export class MySqlDateTimeBuilder extends MySqlColumnBuilder<{ - name: string; dataType: 'object date'; data: Date; driverParam: string | number; @@ -56,7 +55,6 @@ export class MySqlDateTime> extends My } export class MySqlDateTimeStringBuilder extends MySqlColumnBuilder<{ - name: string; dataType: 'string datetime'; data: string; driverParam: string | number; diff --git a/drizzle-orm/src/mysql-core/columns/decimal.ts b/drizzle-orm/src/mysql-core/columns/decimal.ts index 60057c360d..64bedca703 100644 --- a/drizzle-orm/src/mysql-core/columns/decimal.ts +++ b/drizzle-orm/src/mysql-core/columns/decimal.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export class MySqlDecimalBuilder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'string unumeric' : 'string numeric'; data: string; driverParam: string; @@ -59,7 +58,6 @@ export class MySqlDecimal extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number unsigned' : 'number'; data: number; driverParam: string; @@ -115,7 +113,6 @@ export class MySqlDecimalNumber extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'bigint uint64' : 'bigint int64'; data: bigint; driverParam: string; diff --git a/drizzle-orm/src/mysql-core/columns/double.ts b/drizzle-orm/src/mysql-core/columns/double.ts index 6299c343ec..086d332545 100644 --- a/drizzle-orm/src/mysql-core/columns/double.ts +++ b/drizzle-orm/src/mysql-core/columns/double.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export class MySqlDoubleBuilder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number udouble' : 'number double'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/mysql-core/columns/enum.ts b/drizzle-orm/src/mysql-core/columns/enum.ts index 5fd2ee2b5e..c215140cde 100644 --- a/drizzle-orm/src/mysql-core/columns/enum.ts +++ b/drizzle-orm/src/mysql-core/columns/enum.ts @@ -2,10 +2,9 @@ import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import type { NonArray, Writable } from '~/utils.ts'; -import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; +import { MySqlStringBaseColumn, MySqlStringColumnBaseBuilder } from './string.common.ts'; -export class MySqlEnumColumnBuilder extends MySqlColumnBuilder<{ - name: string; +export class MySqlEnumColumnBuilder extends MySqlStringColumnBaseBuilder<{ dataType: 'string enum'; data: TEnum[number]; driverParam: string; @@ -28,7 +27,7 @@ export class MySqlEnumColumnBuilder extends } export class MySqlEnumColumn> - extends MySqlColumn + extends MySqlStringBaseColumn { static override readonly [entityKind]: string = 'MySqlEnumColumn'; @@ -40,8 +39,7 @@ export class MySqlEnumColumn> } // enum as ts enum -export class MySqlEnumObjectColumnBuilder extends MySqlColumnBuilder<{ - name: string; +export class MySqlEnumObjectColumnBuilder extends MySqlStringColumnBaseBuilder<{ dataType: 'string enum'; data: TEnum[keyof TEnum]; driverParam: string; @@ -64,7 +62,7 @@ export class MySqlEnumObjectColumnBuilder extends MySqlCol } export class MySqlEnumObjectColumn> - extends MySqlColumn + extends MySqlStringBaseColumn { static override readonly [entityKind]: string = 'MySqlEnumObjectColumn'; diff --git a/drizzle-orm/src/mysql-core/columns/float.ts b/drizzle-orm/src/mysql-core/columns/float.ts index 738411cf54..3cdf32e6c0 100644 --- a/drizzle-orm/src/mysql-core/columns/float.ts +++ b/drizzle-orm/src/mysql-core/columns/float.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export class MySqlFloatBuilder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number ufloat' : 'number float'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/mysql-core/columns/index.ts b/drizzle-orm/src/mysql-core/columns/index.ts index b51f0fac48..1c3ba71f37 100644 --- a/drizzle-orm/src/mysql-core/columns/index.ts +++ b/drizzle-orm/src/mysql-core/columns/index.ts @@ -1,5 +1,6 @@ export * from './bigint.ts'; export * from './binary.ts'; +export * from './blob.ts'; export * from './boolean.ts'; export * from './char.ts'; export * from './common.ts'; diff --git a/drizzle-orm/src/mysql-core/columns/int.ts b/drizzle-orm/src/mysql-core/columns/int.ts index f6c41e3a96..08141b291f 100644 --- a/drizzle-orm/src/mysql-core/columns/int.ts +++ b/drizzle-orm/src/mysql-core/columns/int.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export class MySqlIntBuilder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint32' : 'number int32'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/mysql-core/columns/json.ts b/drizzle-orm/src/mysql-core/columns/json.ts index db55719feb..8a65a0eb6e 100644 --- a/drizzle-orm/src/mysql-core/columns/json.ts +++ b/drizzle-orm/src/mysql-core/columns/json.ts @@ -4,7 +4,6 @@ import type { MySqlTable } from '~/mysql-core/table.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export class MySqlJsonBuilder extends MySqlColumnBuilder<{ - name: string; dataType: 'object json'; data: unknown; driverParam: string; diff --git a/drizzle-orm/src/mysql-core/columns/mediumint.ts b/drizzle-orm/src/mysql-core/columns/mediumint.ts index 8b41020831..89c6f03f7c 100644 --- a/drizzle-orm/src/mysql-core/columns/mediumint.ts +++ b/drizzle-orm/src/mysql-core/columns/mediumint.ts @@ -6,7 +6,6 @@ import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } fro import type { MySqlIntConfig } from './int.ts'; export class MySqlMediumIntBuilder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint24' : 'number int24'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/mysql-core/columns/real.ts b/drizzle-orm/src/mysql-core/columns/real.ts index 58f39290ba..6cf2716e79 100644 --- a/drizzle-orm/src/mysql-core/columns/real.ts +++ b/drizzle-orm/src/mysql-core/columns/real.ts @@ -6,7 +6,6 @@ import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } fro export class MySqlRealBuilder extends MySqlColumnBuilderWithAutoIncrement< { - name: string; dataType: 'number double'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/mysql-core/columns/serial.ts b/drizzle-orm/src/mysql-core/columns/serial.ts index af06577566..b7fc2b5126 100644 --- a/drizzle-orm/src/mysql-core/columns/serial.ts +++ b/drizzle-orm/src/mysql-core/columns/serial.ts @@ -4,7 +4,6 @@ import type { MySqlTable } from '~/mysql-core/table.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export class MySqlSerialBuilder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: 'number uint53'; data: number; driverParam: number; diff --git a/drizzle-orm/src/mysql-core/columns/smallint.ts b/drizzle-orm/src/mysql-core/columns/smallint.ts index 020aa93be9..8c2c6c956c 100644 --- a/drizzle-orm/src/mysql-core/columns/smallint.ts +++ b/drizzle-orm/src/mysql-core/columns/smallint.ts @@ -6,7 +6,6 @@ import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } fro import type { MySqlIntConfig } from './int.ts'; export class MySqlSmallIntBuilder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint16' : 'number int16'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/mysql-core/columns/string.common.ts b/drizzle-orm/src/mysql-core/columns/string.common.ts new file mode 100644 index 0000000000..d2d0774778 --- /dev/null +++ b/drizzle-orm/src/mysql-core/columns/string.common.ts @@ -0,0 +1,369 @@ +import type { ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnType } from '~/column-builder.ts'; +import type { ColumnBaseConfig } from '~/column.ts'; +import { entityKind } from '~/entity.ts'; +import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; + +export interface MySqlStringColumnBaseConfig { + charSet: CharSet; + collation: Collation; +} + +export abstract class MySqlStringColumnBaseBuilder< + T extends ColumnBuilderBaseConfig, + TRuntimeConfig extends object = object, + TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, +> extends MySqlColumnBuilder { + static override readonly [entityKind]: string = 'MySqlStringColumnBuilder'; + + charSet(charSet: CharSet): Omit { + this.config.charSet = charSet; + return this; + } + + collate(collation: Collation): Omit { + this.config.collation = collation; + return this; + } +} + +export abstract class MySqlStringBaseColumn< + T extends ColumnBaseConfig, + TRuntimeConfig extends object = object, +> extends MySqlColumn { + static override readonly [entityKind]: string = 'MySqlStringColumn'; + + readonly charSet: CharSet = this.config.charSet; + readonly collation: Collation = this.config.collation; +} + +type CharSet = + | 'ascii' + | 'big5' + | 'binary' + | 'cp1250' + | 'cp1251' + | 'cp1256' + | 'cp1257' + | 'cp850' + | 'cp852' + | 'cp866' + | 'cp932' + | 'dec8' + | 'eucjpms' + | 'euckr' + | 'gb18030' + | 'gb2312' + | 'gbk' + | 'geostd8' + | 'greek' + | 'hebrew' + | 'hp8' + | 'keybcs2' + | 'koi8r' + | 'koi8u' + | 'latin1' + | 'latin2' + | 'latin5' + | 'latin7' + | 'macce' + | 'macroman' + | 'sjis' + | 'swe7' + | 'tis620' + | 'ucs2' + | 'ujis' + | 'utf16' + | 'utf16le' + | 'utf32' + | 'utf8mb3' + | 'utf8mb4' + | (string & {}); + +type Collation = + | 'armscii8_bin' + | 'armscii8_general_ci' + | 'ascii_bin' + | 'ascii_general_ci' + | 'big5_bin' + | 'big5_chinese_ci' + | 'binary' + | 'cp1250_bin' + | 'cp1250_croatian_ci' + | 'cp1250_czech_cs' + | 'cp1250_general_ci' + | 'cp1250_polish_ci' + | 'cp1251_bin' + | 'cp1251_bulgarian_ci' + | 'cp1251_general_ci' + | 'cp1251_general_cs' + | 'cp1251_ukrainian_ci' + | 'cp1256_bin' + | 'cp1256_general_ci' + | 'cp1257_bin' + | 'cp1257_general_ci' + | 'cp1257_lithuanian_ci' + | 'cp850_bin' + | 'cp850_general_ci' + | 'cp852_bin' + | 'cp852_general_ci' + | 'cp866_bin' + | 'cp866_general_ci' + | 'cp932_bin' + | 'cp932_japanese_ci' + | 'dec8_bin' + | 'dec8_swedish_ci' + | 'eucjpms_bin' + | 'eucjpms_japanese_ci' + | 'euckr_bin' + | 'euckr_korean_ci' + | 'gb18030_bin' + | 'gb18030_chinese_ci' + | 'gb18030_unicode_520_ci' + | 'gb2312_bin' + | 'gb2312_chinese_ci' + | 'gbk_bin' + | 'gbk_chinese_ci' + | 'geostd8_bin' + | 'geostd8_general_ci' + | 'greek_bin' + | 'greek_general_ci' + | 'hebrew_bin' + | 'hebrew_general_ci' + | 'hp8_bin' + | 'hp8_english_ci' + | 'keybcs2_bin' + | 'keybcs2_general_ci' + | 'koi8r_bin' + | 'koi8r_general_ci' + | 'koi8u_bin' + | 'koi8u_general_ci' + | 'latin1_bin' + | 'latin1_danish_ci' + | 'latin1_general_ci' + | 'latin1_general_cs' + | 'latin1_german1_ci' + | 'latin1_german2_ci' + | 'latin1_spanish_ci' + | 'latin1_swedish_ci' + | 'latin2_bin' + | 'latin2_croatian_ci' + | 'latin2_czech_cs' + | 'latin2_general_ci' + | 'latin2_hungarian_ci' + | 'latin5_bin' + | 'latin5_turkish_ci' + | 'latin7_bin' + | 'latin7_estonian_cs' + | 'latin7_general_ci' + | 'latin7_general_cs' + | 'macce_bin' + | 'macce_general_ci' + | 'macroman_bin' + | 'macroman_general_ci' + | 'sjis_bin' + | 'sjis_japanese_ci' + | 'swe7_bin' + | 'swe7_swedish_ci' + | 'tis620_bin' + | 'tis620_thai_ci' + | 'ucs2_bin' + | 'ucs2_croatian_ci' + | 'ucs2_czech_ci' + | 'ucs2_danish_ci' + | 'ucs2_esperanto_ci' + | 'ucs2_estonian_ci' + | 'ucs2_general_ci' + | 'ucs2_general_mysql500_ci' + | 'ucs2_german2_ci' + | 'ucs2_hungarian_ci' + | 'ucs2_icelandic_ci' + | 'ucs2_latvian_ci' + | 'ucs2_lithuanian_ci' + | 'ucs2_persian_ci' + | 'ucs2_polish_ci' + | 'ucs2_romanian_ci' + | 'ucs2_roman_ci' + | 'ucs2_sinhala_ci' + | 'ucs2_slovak_ci' + | 'ucs2_slovenian_ci' + | 'ucs2_spanish2_ci' + | 'ucs2_spanish_ci' + | 'ucs2_swedish_ci' + | 'ucs2_turkish_ci' + | 'ucs2_unicode_520_ci' + | 'ucs2_unicode_ci' + | 'ucs2_vietnamese_ci' + | 'ujis_bin' + | 'ujis_japanese_ci' + | 'utf16_bin' + | 'utf16_croatian_ci' + | 'utf16_czech_ci' + | 'utf16_danish_ci' + | 'utf16_esperanto_ci' + | 'utf16_estonian_ci' + | 'utf16_general_ci' + | 'utf16_german2_ci' + | 'utf16_hungarian_ci' + | 'utf16_icelandic_ci' + | 'utf16_latvian_ci' + | 'utf16_lithuanian_ci' + | 'utf16_persian_ci' + | 'utf16_polish_ci' + | 'utf16_romanian_ci' + | 'utf16_roman_ci' + | 'utf16_sinhala_ci' + | 'utf16_slovak_ci' + | 'utf16_slovenian_ci' + | 'utf16_spanish2_ci' + | 'utf16_spanish_ci' + | 'utf16_swedish_ci' + | 'utf16_turkish_ci' + | 'utf16_unicode_520_ci' + | 'utf16_unicode_ci' + | 'utf16_vietnamese_ci' + | 'utf16le_bin' + | 'utf16le_general_ci' + | 'utf32_bin' + | 'utf32_croatian_ci' + | 'utf32_czech_ci' + | 'utf32_danish_ci' + | 'utf32_esperanto_ci' + | 'utf32_estonian_ci' + | 'utf32_general_ci' + | 'utf32_german2_ci' + | 'utf32_hungarian_ci' + | 'utf32_icelandic_ci' + | 'utf32_latvian_ci' + | 'utf32_lithuanian_ci' + | 'utf32_persian_ci' + | 'utf32_polish_ci' + | 'utf32_romanian_ci' + | 'utf32_roman_ci' + | 'utf32_sinhala_ci' + | 'utf32_slovak_ci' + | 'utf32_slovenian_ci' + | 'utf32_spanish2_ci' + | 'utf32_spanish_ci' + | 'utf32_swedish_ci' + | 'utf32_turkish_ci' + | 'utf32_unicode_520_ci' + | 'utf32_unicode_ci' + | 'utf32_vietnamese_ci' + | 'utf8mb3_bin' + | 'utf8mb3_croatian_ci' + | 'utf8mb3_czech_ci' + | 'utf8mb3_danish_ci' + | 'utf8mb3_esperanto_ci' + | 'utf8mb3_estonian_ci' + | 'utf8mb3_general_ci' + | 'utf8mb3_general_mysql500_ci' + | 'utf8mb3_german2_ci' + | 'utf8mb3_hungarian_ci' + | 'utf8mb3_icelandic_ci' + | 'utf8mb3_latvian_ci' + | 'utf8mb3_lithuanian_ci' + | 'utf8mb3_persian_ci' + | 'utf8mb3_polish_ci' + | 'utf8mb3_romanian_ci' + | 'utf8mb3_roman_ci' + | 'utf8mb3_sinhala_ci' + | 'utf8mb3_slovak_ci' + | 'utf8mb3_slovenian_ci' + | 'utf8mb3_spanish2_ci' + | 'utf8mb3_spanish_ci' + | 'utf8mb3_swedish_ci' + | 'utf8mb3_tolower_ci' + | 'utf8mb3_turkish_ci' + | 'utf8mb3_unicode_520_ci' + | 'utf8mb3_unicode_ci' + | 'utf8mb3_vietnamese_ci' + | 'utf8mb4_0900_ai_ci' + | 'utf8mb4_0900_as_ci' + | 'utf8mb4_0900_as_cs' + | 'utf8mb4_0900_bin' + | 'utf8mb4_bg_0900_ai_ci' + | 'utf8mb4_bg_0900_as_cs' + | 'utf8mb4_bin' + | 'utf8mb4_bs_0900_ai_ci' + | 'utf8mb4_bs_0900_as_cs' + | 'utf8mb4_croatian_ci' + | 'utf8mb4_cs_0900_ai_ci' + | 'utf8mb4_cs_0900_as_cs' + | 'utf8mb4_czech_ci' + | 'utf8mb4_danish_ci' + | 'utf8mb4_da_0900_ai_ci' + | 'utf8mb4_da_0900_as_cs' + | 'utf8mb4_de_pb_0900_ai_ci' + | 'utf8mb4_de_pb_0900_as_cs' + | 'utf8mb4_eo_0900_ai_ci' + | 'utf8mb4_eo_0900_as_cs' + | 'utf8mb4_esperanto_ci' + | 'utf8mb4_estonian_ci' + | 'utf8mb4_es_0900_ai_ci' + | 'utf8mb4_es_0900_as_cs' + | 'utf8mb4_es_trad_0900_ai_ci' + | 'utf8mb4_es_trad_0900_as_cs' + | 'utf8mb4_et_0900_ai_ci' + | 'utf8mb4_et_0900_as_cs' + | 'utf8mb4_general_ci' + | 'utf8mb4_german2_ci' + | 'utf8mb4_gl_0900_ai_ci' + | 'utf8mb4_gl_0900_as_cs' + | 'utf8mb4_hr_0900_ai_ci' + | 'utf8mb4_hr_0900_as_cs' + | 'utf8mb4_hungarian_ci' + | 'utf8mb4_hu_0900_ai_ci' + | 'utf8mb4_hu_0900_as_cs' + | 'utf8mb4_icelandic_ci' + | 'utf8mb4_is_0900_ai_ci' + | 'utf8mb4_is_0900_as_cs' + | 'utf8mb4_ja_0900_as_cs' + | 'utf8mb4_ja_0900_as_cs_ks' + | 'utf8mb4_latvian_ci' + | 'utf8mb4_la_0900_ai_ci' + | 'utf8mb4_la_0900_as_cs' + | 'utf8mb4_lithuanian_ci' + | 'utf8mb4_lt_0900_ai_ci' + | 'utf8mb4_lt_0900_as_cs' + | 'utf8mb4_lv_0900_ai_ci' + | 'utf8mb4_lv_0900_as_cs' + | 'utf8mb4_mn_cyrl_0900_ai_ci' + | 'utf8mb4_mn_cyrl_0900_as_cs' + | 'utf8mb4_nb_0900_ai_ci' + | 'utf8mb4_nb_0900_as_cs' + | 'utf8mb4_nn_0900_ai_ci' + | 'utf8mb4_nn_0900_as_cs' + | 'utf8mb4_persian_ci' + | 'utf8mb4_pl_0900_ai_ci' + | 'utf8mb4_pl_0900_as_cs' + | 'utf8mb4_polish_ci' + | 'utf8mb4_romanian_ci' + | 'utf8mb4_roman_ci' + | 'utf8mb4_ro_0900_ai_ci' + | 'utf8mb4_ro_0900_as_cs' + | 'utf8mb4_ru_0900_ai_ci' + | 'utf8mb4_ru_0900_as_cs' + | 'utf8mb4_sinhala_ci' + | 'utf8mb4_sk_0900_ai_ci' + | 'utf8mb4_sk_0900_as_cs' + | 'utf8mb4_slovak_ci' + | 'utf8mb4_slovenian_ci' + | 'utf8mb4_sl_0900_ai_ci' + | 'utf8mb4_sl_0900_as_cs' + | 'utf8mb4_spanish2_ci' + | 'utf8mb4_spanish_ci' + | 'utf8mb4_sr_latn_0900_ai_ci' + | 'utf8mb4_sr_latn_0900_as_cs' + | 'utf8mb4_sv_0900_ai_ci' + | 'utf8mb4_sv_0900_as_cs' + | 'utf8mb4_swedish_ci' + | 'utf8mb4_tr_0900_ai_ci' + | 'utf8mb4_tr_0900_as_cs' + | 'utf8mb4_turkish_ci' + | 'utf8mb4_unicode_520_ci' + | 'utf8mb4_unicode_ci' + | 'utf8mb4_vietnamese_ci' + | 'utf8mb4_vi_0900_ai_ci' + | 'utf8mb4_vi_0900_as_cs' + | 'utf8mb4_zh_0900_as_cs' + | (string & {}); diff --git a/drizzle-orm/src/mysql-core/columns/text.ts b/drizzle-orm/src/mysql-core/columns/text.ts index b3d0a3a86d..d755d4dfa6 100644 --- a/drizzle-orm/src/mysql-core/columns/text.ts +++ b/drizzle-orm/src/mysql-core/columns/text.ts @@ -2,13 +2,12 @@ import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; -import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; +import { MySqlStringBaseColumn, MySqlStringColumnBaseBuilder } from './string.common.ts'; export type MySqlTextColumnType = 'tinytext' | 'text' | 'mediumtext' | 'longtext'; -export class MySqlTextBuilder extends MySqlColumnBuilder< +export class MySqlTextBuilder extends MySqlStringColumnBaseBuilder< { - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; driverParam: string; @@ -49,7 +48,7 @@ export class MySqlTextBuilder extends MySql } export class MySqlText> - extends MySqlColumn + extends MySqlStringBaseColumn { static override readonly [entityKind]: string = 'MySqlText'; diff --git a/drizzle-orm/src/mysql-core/columns/time.ts b/drizzle-orm/src/mysql-core/columns/time.ts index cd9c57edcd..0d487f0035 100644 --- a/drizzle-orm/src/mysql-core/columns/time.ts +++ b/drizzle-orm/src/mysql-core/columns/time.ts @@ -6,7 +6,6 @@ import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export class MySqlTimeBuilder extends MySqlColumnBuilder< { - name: string; dataType: 'string time'; data: string; driverParam: string | number; diff --git a/drizzle-orm/src/mysql-core/columns/timestamp.ts b/drizzle-orm/src/mysql-core/columns/timestamp.ts index fa602050cd..7fdfcbdc94 100644 --- a/drizzle-orm/src/mysql-core/columns/timestamp.ts +++ b/drizzle-orm/src/mysql-core/columns/timestamp.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlDateBaseColumn, MySqlDateColumnBaseBuilder } from './date.common.ts'; export class MySqlTimestampBuilder extends MySqlDateColumnBaseBuilder<{ - name: string; dataType: 'object date'; data: Date; driverParam: string | number; @@ -50,7 +49,6 @@ export class MySqlTimestamp> } export class MySqlTimestampStringBuilder extends MySqlDateColumnBaseBuilder<{ - name: string; dataType: 'string timestamp'; data: string; driverParam: string | number; diff --git a/drizzle-orm/src/mysql-core/columns/tinyint.ts b/drizzle-orm/src/mysql-core/columns/tinyint.ts index 63082c6703..e5cb160965 100644 --- a/drizzle-orm/src/mysql-core/columns/tinyint.ts +++ b/drizzle-orm/src/mysql-core/columns/tinyint.ts @@ -6,7 +6,6 @@ import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } fro import type { MySqlIntConfig } from './int.ts'; export class MySqlTinyIntBuilder extends MySqlColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint8' : 'number int8'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/mysql-core/columns/varbinary.ts b/drizzle-orm/src/mysql-core/columns/varbinary.ts index fa87328b5d..e4a0d13911 100644 --- a/drizzle-orm/src/mysql-core/columns/varbinary.ts +++ b/drizzle-orm/src/mysql-core/columns/varbinary.ts @@ -5,7 +5,6 @@ import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export class MySqlVarBinaryBuilder extends MySqlColumnBuilder<{ - name: string; dataType: 'string binary'; data: string; driverParam: string; diff --git a/drizzle-orm/src/mysql-core/columns/varchar.ts b/drizzle-orm/src/mysql-core/columns/varchar.ts index 78a147ad40..5a55243511 100644 --- a/drizzle-orm/src/mysql-core/columns/varchar.ts +++ b/drizzle-orm/src/mysql-core/columns/varchar.ts @@ -2,13 +2,12 @@ import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; -import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; +import { MySqlStringBaseColumn, MySqlStringColumnBaseBuilder } from './string.common.ts'; export class MySqlVarCharBuilder< TEnum extends [string, ...string[]], -> extends MySqlColumnBuilder<{ - name: string; - dataType: Equal extends true ? 'string text' : 'string enum'; +> extends MySqlStringColumnBaseBuilder<{ + dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; driverParam: number | string; enumValues: TEnum; @@ -17,7 +16,7 @@ export class MySqlVarCharBuilder< /** @internal */ constructor(name: string, config: MySqlVarCharConfig) { - super(name, config.enum?.length ? 'string enum' : 'string text', 'MySqlVarChar'); + super(name, config.enum?.length ? 'string enum' : 'string', 'MySqlVarChar'); this.config.length = config.length; this.config.enum = config.enum; } @@ -33,7 +32,7 @@ export class MySqlVarCharBuilder< export class MySqlVarChar< T extends ColumnBaseConfig<'string' | 'string enum'> & { length: number }, -> extends MySqlColumn> { +> extends MySqlStringBaseColumn> { static override readonly [entityKind]: string = 'MySqlVarChar'; override readonly enumValues = this.config.enum; diff --git a/drizzle-orm/src/mysql-core/columns/year.ts b/drizzle-orm/src/mysql-core/columns/year.ts index 2a309c1a4a..b11ffe9a56 100644 --- a/drizzle-orm/src/mysql-core/columns/year.ts +++ b/drizzle-orm/src/mysql-core/columns/year.ts @@ -4,7 +4,6 @@ import type { MySqlTable } from '~/mysql-core/table.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export class MySqlYearBuilder extends MySqlColumnBuilder<{ - name: string; dataType: 'number year'; data: number; driverParam: number; diff --git a/drizzle-orm/src/mysql-core/dialect.ts b/drizzle-orm/src/mysql-core/dialect.ts index 339579ec84..c263ae9207 100644 --- a/drizzle-orm/src/mysql-core/dialect.ts +++ b/drizzle-orm/src/mysql-core/dialect.ts @@ -1,5 +1,11 @@ import * as V1 from '~/_relations.ts'; -import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; +import { + aliasedTable, + aliasedTableColumn, + getOriginalColumnFromAlias, + mapColumnsInAliasedSQLToAlias, + mapColumnsInSQLToAlias, +} from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; @@ -49,6 +55,7 @@ import type { MySqlView } from './view.ts'; export interface MySqlDialectConfig { casing?: Casing; + escapeParam?: (num: number) => string; } export class MySqlDialect { @@ -59,6 +66,10 @@ export class MySqlDialect { constructor(config?: MySqlDialectConfig) { this.casing = new CasingCache(config?.casing); + + if (config?.escapeParam) { + this.escapeParam = config.escapeParam; + } } async migrate( @@ -250,9 +261,13 @@ export class MySqlDialect { } } else if (is(field, Column)) { if (isSingleTable) { - chunk.push(sql.identifier(this.casing.getColumnCasing(field))); + chunk.push( + field.isAlias + ? sql`${sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field)))} as ${field}` + : sql.identifier(this.casing.getColumnCasing(field)), + ); } else { - chunk.push(field); + chunk.push(field.isAlias ? sql`${getOriginalColumnFromAlias(field)} as ${field}` : field); } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; diff --git a/drizzle-orm/src/mysql-core/foreign-keys.ts b/drizzle-orm/src/mysql-core/foreign-keys.ts index c8c34d6fd4..1a78ddfdf3 100644 --- a/drizzle-orm/src/mysql-core/foreign-keys.ts +++ b/drizzle-orm/src/mysql-core/foreign-keys.ts @@ -88,6 +88,10 @@ export class ForeignKey { ]; return name ?? `${chunks.join('_')}_fk`; } + + isNameExplicit(): boolean { + return this.reference().name ? true : false; + } } type ColumnsWithTable< diff --git a/drizzle-orm/src/mysql-core/indexes.ts b/drizzle-orm/src/mysql-core/indexes.ts index 39a803df74..000542be70 100644 --- a/drizzle-orm/src/mysql-core/indexes.ts +++ b/drizzle-orm/src/mysql-core/indexes.ts @@ -87,9 +87,11 @@ export class Index { static readonly [entityKind]: string = 'MySqlIndex'; readonly config: IndexConfig & { table: MySqlTable }; + readonly isNameExplicit: boolean; constructor(config: IndexConfig, table: MySqlTable) { this.config = { ...config, table }; + this.isNameExplicit = !!config.name; } } diff --git a/drizzle-orm/src/mysql-core/primary-keys.ts b/drizzle-orm/src/mysql-core/primary-keys.ts index 014cbd8c0b..a3e60993cd 100644 --- a/drizzle-orm/src/mysql-core/primary-keys.ts +++ b/drizzle-orm/src/mysql-core/primary-keys.ts @@ -1,12 +1,12 @@ import { entityKind } from '~/entity.ts'; import type { AnyMySqlColumn, MySqlColumn } from './columns/index.ts'; -import { MySqlTable } from './table.ts'; +import type { MySqlTable } from './table.ts'; export function primaryKey< TTableName extends string, TColumn extends AnyMySqlColumn<{ tableName: TTableName }>, TColumns extends AnyMySqlColumn<{ tableName: TTableName }>[], ->(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; +>(config: { columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; /** * @deprecated: Please use primaryKey({ columns: [] }) instead of this function * @param columns @@ -17,7 +17,7 @@ export function primaryKey< >(...columns: TColumns): PrimaryKeyBuilder; export function primaryKey(...config: any) { if (config[0].columns) { - return new PrimaryKeyBuilder(config[0].columns, config[0].name); + return new PrimaryKeyBuilder(config[0].columns); } return new PrimaryKeyBuilder(config); } @@ -28,20 +28,15 @@ export class PrimaryKeyBuilder { /** @internal */ columns: MySqlColumn[]; - /** @internal */ - name?: string; - constructor( columns: MySqlColumn[], - name?: string, ) { this.columns = columns; - this.name = name; } /** @internal */ build(table: MySqlTable): PrimaryKey { - return new PrimaryKey(table, this.columns, this.name); + return new PrimaryKey(table, this.columns); } } @@ -49,15 +44,8 @@ export class PrimaryKey { static readonly [entityKind]: string = 'MySqlPrimaryKey'; readonly columns: MySqlColumn[]; - readonly name?: string; - constructor(readonly table: MySqlTable, columns: MySqlColumn[], name?: string) { + constructor(readonly table: MySqlTable, columns: MySqlColumn[]) { this.columns = columns; - this.name = name; - } - - getName(): string { - return this.name - ?? `${this.table[MySqlTable.Symbol.Name]}_${this.columns.map((column) => column.name).join('_')}_pk`; } } diff --git a/drizzle-orm/src/mysql-core/query-builders/count.ts b/drizzle-orm/src/mysql-core/query-builders/count.ts index fd60c4bef3..545cc10351 100644 --- a/drizzle-orm/src/mysql-core/query-builders/count.ts +++ b/drizzle-orm/src/mysql-core/query-builders/count.ts @@ -9,7 +9,7 @@ export class MySqlCountBuilder< > extends SQL implements Promise, SQLWrapper { private sql: SQL; - static override readonly [entityKind] = 'MySqlCountBuilder'; + static override readonly [entityKind]: string = 'MySqlCountBuilder'; [Symbol.toStringTag] = 'MySqlCountBuilder'; private session: TSession; diff --git a/drizzle-orm/src/mysql-core/query-builders/insert.ts b/drizzle-orm/src/mysql-core/query-builders/insert.ts index 5c45342b6f..f41fc40f5e 100644 --- a/drizzle-orm/src/mysql-core/query-builders/insert.ts +++ b/drizzle-orm/src/mysql-core/query-builders/insert.ts @@ -38,7 +38,7 @@ export type AnyMySqlInsertConfig = MySqlInsertConfig; export type MySqlInsertValue< TTable extends MySqlTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = & { [Key in keyof TModel]: TModel[Key] | SQL | Placeholder; @@ -47,7 +47,7 @@ export type MySqlInsertValue< export type MySqlInsertSelectQueryBuilder< TTable extends MySqlTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = TypedQueryBuilder< { [K in keyof TModel]: AnyMySqlColumn | SQL | SQL.Aliased | TModel[K] } >; diff --git a/drizzle-orm/src/mysql-core/query-builders/update.ts b/drizzle-orm/src/mysql-core/query-builders/update.ts index 0f043618fa..67e3e45b2a 100644 --- a/drizzle-orm/src/mysql-core/query-builders/update.ts +++ b/drizzle-orm/src/mysql-core/query-builders/update.ts @@ -34,7 +34,7 @@ export interface MySqlUpdateConfig { export type MySqlUpdateSetSource< TTable extends MySqlTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = & { [Key in keyof TModel & string]?: diff --git a/drizzle-orm/src/mysql-core/unique-constraint.ts b/drizzle-orm/src/mysql-core/unique-constraint.ts index 01a3c36c28..bd76768421 100644 --- a/drizzle-orm/src/mysql-core/unique-constraint.ts +++ b/drizzle-orm/src/mysql-core/unique-constraint.ts @@ -51,11 +51,13 @@ export class UniqueConstraint { static readonly [entityKind]: string = 'MySqlUniqueConstraint'; readonly columns: MySqlColumn[]; - readonly name?: string; + readonly name: string; + readonly isNameExplicit: boolean; readonly nullsNotDistinct: boolean = false; constructor(readonly table: MySqlTable, columns: MySqlColumn[], name?: string) { this.columns = columns; + this.isNameExplicit = !!name; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); } diff --git a/drizzle-orm/src/mysql-proxy/driver.ts b/drizzle-orm/src/mysql-proxy/driver.ts index 56f1fdd7e6..761e267d9b 100644 --- a/drizzle-orm/src/mysql-proxy/driver.ts +++ b/drizzle-orm/src/mysql-proxy/driver.ts @@ -26,8 +26,9 @@ export function drizzle< >( callback: RemoteCallback, config: DrizzleConfig = {}, + _dialect: () => MySqlDialect = () => new MySqlDialect({ casing: config.casing }), ): MySqlRemoteDatabase { - const dialect = new MySqlDialect({ casing: config.casing }); + const dialect = _dialect(); let logger; if (config.logger === true) { logger = new DefaultLogger(); diff --git a/drizzle-orm/src/mysql2/driver.ts b/drizzle-orm/src/mysql2/driver.ts index 4c917e016c..a2922a122e 100644 --- a/drizzle-orm/src/mysql2/driver.ts +++ b/drizzle-orm/src/mysql2/driver.ts @@ -9,7 +9,7 @@ import { MySqlDatabase } from '~/mysql-core/db.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { Mode } from '~/mysql-core/session.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { DrizzleError } from '../errors.ts'; import type { MySql2Client, MySql2PreparedQueryHKT, MySql2QueryResultHKT } from './session.ts'; import { MySql2Session } from './session.ts'; @@ -135,9 +135,9 @@ export function drizzle< TClient extends AnyMySql2Connection = CallbackPool, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, MySql2DrizzleConfig, ] | [ ( @@ -161,25 +161,21 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: PoolOptions | string; client?: TClient } - & MySql2DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: PoolOptions | string; client?: TClient } + & MySql2DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' - ? createPool({ - uri: connection, - supportBigNumbers: true, - }) - : createPool(connection!); - const db = construct(instance, drizzleConfig); - - return db as any; - } + const instance = typeof connection === 'string' + ? createPool({ + uri: connection, + supportBigNumbers: true, + }) + : createPool(connection!); + const db = construct(instance, drizzleConfig); - return construct(params[0] as TClient, params[1] as MySql2DrizzleConfig | undefined) as any; + return db as any; } export namespace drizzle { diff --git a/drizzle-orm/src/mysql2/session.ts b/drizzle-orm/src/mysql2/session.ts index 5ffa3ef646..5a17273ead 100644 --- a/drizzle-orm/src/mysql2/session.ts +++ b/drizzle-orm/src/mysql2/session.ts @@ -192,7 +192,7 @@ export class MySql2PreparedQuery stream.once('data', resolve))]); if (row === undefined || (Array.isArray(row) && row.length === 0)) { break; - } else if (row instanceof Error) { // eslint-disable-line no-instanceof/no-instanceof + } else if (row instanceof Error) { // oxlint-disable-line drizzle-internal/no-instanceof throw row; } else { if (hasRowsMapper) { diff --git a/drizzle-orm/src/neon-http/driver.ts b/drizzle-orm/src/neon-http/driver.ts index f9bb97f455..ca418d368b 100644 --- a/drizzle-orm/src/neon-http/driver.ts +++ b/drizzle-orm/src/neon-http/driver.ts @@ -9,7 +9,7 @@ import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { type NeonHttpClient, type NeonHttpQueryResultHKT, NeonHttpSession } from './session.ts'; export interface NeonDriverOptions { @@ -185,9 +185,9 @@ export function drizzle< TClient extends NeonQueryFunction = NeonQueryFunction, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -207,34 +207,30 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { - connection?: - | ({ - connectionString: string; - } & HTTPTransactionOptions) - | string; - client?: TClient; - } - & DrizzleConfig; - - if (client) return construct(client, drizzleConfig); - - if (typeof connection === 'object') { - const { connectionString, ...options } = connection; + const { connection, client, ...drizzleConfig } = params[0] as + & { + connection?: + | ({ + connectionString: string; + } & HTTPTransactionOptions) + | string; + client?: TClient; + } + & DrizzleConfig; - const instance = neon(connectionString, options); + if (client) return construct(client, drizzleConfig); - return construct(instance, drizzleConfig) as any; - } + if (typeof connection === 'object') { + const { connectionString, ...options } = connection; - const instance = neon(connection!); + const instance = neon(connectionString, options); return construct(instance, drizzleConfig) as any; } - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + const instance = neon(connection!); + + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/neon-http/session.ts b/drizzle-orm/src/neon-http/session.ts index 109905aef7..a48c691afc 100644 --- a/drizzle-orm/src/neon-http/session.ts +++ b/drizzle-orm/src/neon-http/session.ts @@ -10,7 +10,7 @@ import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; -import { PgPreparedQuery as PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; +import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { AnyRelations } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; import { fillPlaceholders, type Query, type SQL } from '~/sql/sql.ts'; diff --git a/drizzle-orm/src/neon-serverless/driver.ts b/drizzle-orm/src/neon-serverless/driver.ts index a43b09060a..94f0d33961 100644 --- a/drizzle-orm/src/neon-serverless/driver.ts +++ b/drizzle-orm/src/neon-serverless/driver.ts @@ -7,7 +7,7 @@ import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { NeonClient, NeonQueryResultHKT } from './session.ts'; import { NeonSession } from './session.ts'; @@ -96,9 +96,9 @@ export function drizzle< TClient extends NeonClient = Pool, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -124,29 +124,25 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ws, ...drizzleConfig } = params[0] as { - connection?: PoolConfig | string; - ws?: any; - client?: TClient; - } & DrizzleConfig; + const { connection, client, ws, ...drizzleConfig } = params[0] as { + connection?: PoolConfig | string; + ws?: any; + client?: TClient; + } & DrizzleConfig; - if (ws) { - neonConfig.webSocketConstructor = ws; - } - - if (client) return construct(client, drizzleConfig); + if (ws) { + neonConfig.webSocketConstructor = ws; + } - const instance = typeof connection === 'string' - ? new Pool({ - connectionString: connection, - }) - : new Pool(connection); + if (client) return construct(client, drizzleConfig); - return construct(instance, drizzleConfig) as any; - } + const instance = typeof connection === 'string' + ? new Pool({ + connectionString: connection, + }) + : new Pool(connection); - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/neon-serverless/session.ts b/drizzle-orm/src/neon-serverless/session.ts index 07dda6252f..21fff061bd 100644 --- a/drizzle-orm/src/neon-serverless/session.ts +++ b/drizzle-orm/src/neon-serverless/session.ts @@ -305,7 +305,7 @@ export class NeonSession< transaction: (tx: NeonTransaction) => Promise, config: PgTransactionConfig = {}, ): Promise { - const session = this.client instanceof Pool // eslint-disable-line no-instanceof/no-instanceof + const session = this.client instanceof Pool // oxlint-disable-line drizzle-internal/no-instanceof ? new NeonSession(await this.client.connect(), this.dialect, this.relations, this.schema, this.options) : this; const tx = new NeonTransaction( @@ -323,7 +323,7 @@ export class NeonSession< await tx.execute(sql`rollback`); throw error; } finally { - if (this.client instanceof Pool) { // eslint-disable-line no-instanceof/no-instanceof + if (this.client instanceof Pool) { // oxlint-disable-line drizzle-internal/no-instanceof (session.client as PoolClient).release(); } } diff --git a/drizzle-orm/src/node-mssql/driver.ts b/drizzle-orm/src/node-mssql/driver.ts new file mode 100644 index 0000000000..9690d26f2b --- /dev/null +++ b/drizzle-orm/src/node-mssql/driver.ts @@ -0,0 +1,165 @@ +import type mssql from 'mssql'; +import * as V1 from '~/_relations.ts'; +import { entityKind } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { DefaultLogger } from '~/logger.ts'; +import { MsSqlDatabase } from '~/mssql-core/db.ts'; +import { MsSqlDialect } from '~/mssql-core/dialect.ts'; +import type { DrizzleConfig, Equal } from '~/utils.ts'; +import { AutoPool } from './pool.ts'; +import type { NodeMsSqlClient, NodeMsSqlPreparedQueryHKT, NodeMsSqlQueryResultHKT } from './session.ts'; +import { NodeMsSqlSession } from './session.ts'; + +export interface MsSqlDriverOptions { + logger?: Logger; +} + +export class NodeMsSqlDriver { + static readonly [entityKind]: string = 'NodeMsSqlDriver'; + + constructor( + private client: NodeMsSqlClient, + private dialect: MsSqlDialect, + private options: MsSqlDriverOptions = {}, + ) { + } + + createSession( + schema: V1.RelationalSchemaConfig | undefined, + ): NodeMsSqlSession, V1.TablesRelationalConfig> { + return new NodeMsSqlSession(this.client, this.dialect, schema, { logger: this.options.logger }); + } +} + +export { MsSqlDatabase } from '~/mssql-core/db.ts'; + +export type NodeMsSqlDatabase< + TSchema extends Record = Record, +> = MsSqlDatabase; + +export type NodeMsSqlDrizzleConfig = Record> = + & Omit, 'schema'> + & ({ schema: TSchema } | { schema?: undefined }); + +function construct< + TSchema extends Record = Record, + TClient extends NodeMsSqlClient = NodeMsSqlClient, +>( + client: TClient, + config: DrizzleConfig = {}, +): NodeMsSqlDatabase & { + $client: Equal extends true ? AutoPool : TClient; +} { + const dialect = new MsSqlDialect({ casing: config.casing }); + let logger; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + if (isCallbackClient(client)) { + client = client.promise() as any; + } + + let schema: V1.RelationalSchemaConfig | undefined; + if (config.schema) { + const tablesConfig = V1.extractTablesRelationalConfig( + config.schema, + V1.createTableRelationsHelpers, + ); + schema = { + fullSchema: config.schema, + schema: tablesConfig.tables, + tableNamesMap: tablesConfig.tableNamesMap, + }; + } + + const driver = new NodeMsSqlDriver(client as NodeMsSqlClient, dialect, { logger }); + const session = driver.createSession(schema); + const db = new MsSqlDatabase(dialect, session, schema) as NodeMsSqlDatabase; + ( db).$client = client; + + return db as any; +} + +export function getMsSqlConnectionParams(connectionString: string): mssql.config | string { + try { + const url = new URL(connectionString); + return { + user: url.username, + password: url.password, + server: url.hostname, + port: Number.parseInt(url.port, 10), + database: url.pathname.replace(/^\//, ''), + options: { + encrypt: url.searchParams.get('encrypt') === 'true', + trustServerCertificate: url.searchParams.get('trustServerCertificate') === 'true', + }, + }; + } catch { + return connectionString; + } +} + +export function drizzle< + TSchema extends Record = Record, + TClient extends NodeMsSqlClient = AutoPool, +>( + ...params: + | [ + string, + ] + | [ + string, + DrizzleConfig, + ] + | [ + ( + & DrizzleConfig + & ({ + connection: string; + } | { + client: TClient; + }) + ), + ] +): NodeMsSqlDatabase & { + $client: Equal extends true ? AutoPool : TClient; +} { + if (typeof params[0] === 'string') { + const instance = new AutoPool(getMsSqlConnectionParams(params[0])); + + return construct(instance, params[1] as DrizzleConfig | undefined) as any; + } + + const { connection, client, ...drizzleConfig } = params[0] as ( + & ({ connection?: mssql.config | string; client?: TClient }) + & DrizzleConfig + ); + + if (client) return construct(client, drizzleConfig); + + const instance = typeof connection === 'string' + ? new AutoPool(getMsSqlConnectionParams(connection)) + : new AutoPool(connection!); + + return construct(instance, drizzleConfig) as any; +} + +interface CallbackClient { + promise(): NodeMsSqlClient; +} + +function isCallbackClient(client: any): client is CallbackClient { + return typeof client.promise === 'function'; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): NodeMsSqlDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({} as any, config) as any; + } +} diff --git a/drizzle-orm/src/node-mssql/index.ts b/drizzle-orm/src/node-mssql/index.ts new file mode 100644 index 0000000000..b1b6a52e71 --- /dev/null +++ b/drizzle-orm/src/node-mssql/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/node-mssql/migrator.ts b/drizzle-orm/src/node-mssql/migrator.ts new file mode 100644 index 0000000000..51db049e8e --- /dev/null +++ b/drizzle-orm/src/node-mssql/migrator.ts @@ -0,0 +1,11 @@ +import type { MigrationConfig } from '~/migrator.ts'; +import { readMigrationFiles } from '~/migrator.ts'; +import type { NodeMsSqlDatabase } from './driver.ts'; + +export async function migrate>( + db: NodeMsSqlDatabase, + config: MigrationConfig, +) { + const migrations = readMigrationFiles(config); + return await db.dialect.migrate(migrations, db.session, config); +} diff --git a/drizzle-orm/src/node-mssql/pool.ts b/drizzle-orm/src/node-mssql/pool.ts new file mode 100644 index 0000000000..f269085b22 --- /dev/null +++ b/drizzle-orm/src/node-mssql/pool.ts @@ -0,0 +1,20 @@ +import mssql from 'mssql'; +import { entityKind } from '~/entity.ts'; + +export class AutoPool { + static readonly [entityKind]: string = 'AutoPool'; + + private pool: mssql.ConnectionPool; + + constructor(config: string | mssql.config) { + this.pool = new mssql.ConnectionPool(config as any); + } + + async $instance() { + await this.pool.connect().catch((err) => { + console.error('❌ AutoPool failed to connect:', err); + throw err; + }); + return this.pool; + } +} diff --git a/drizzle-orm/src/node-mssql/session.ts b/drizzle-orm/src/node-mssql/session.ts new file mode 100644 index 0000000000..e8457709ec --- /dev/null +++ b/drizzle-orm/src/node-mssql/session.ts @@ -0,0 +1,328 @@ +import type { ConnectionPool, IResult, Request } from 'mssql'; +import mssql from 'mssql'; +import { once } from 'node:events'; +import type * as V1 from '~/_relations.ts'; +import { entityKind, is } from '~/entity.ts'; +import type { Logger } from '~/logger.ts'; +import { NoopLogger } from '~/logger.ts'; +import type { MsSqlDialect } from '~/mssql-core/dialect.ts'; +import type { SelectedFieldsOrdered } from '~/mssql-core/query-builders/select.types.ts'; +import { + MsSqlSession, + MsSqlTransaction, + type MsSqlTransactionConfig, + PreparedQuery, + type PreparedQueryConfig, + type PreparedQueryHKT, + type PreparedQueryKind, + type QueryResultHKT, +} from '~/mssql-core/session.ts'; +import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; +import { type Assume, mapResultRow } from '~/utils.ts'; +import { AutoPool } from './pool.ts'; + +export type NodeMsSqlClient = Pick | AutoPool; + +export type MsSqlQueryResult = IResult; + +export class NodeMsSqlPreparedQuery< + T extends PreparedQueryConfig, +> extends PreparedQuery { + static override readonly [entityKind]: string = 'NodeMsSqlPreparedQuery'; + + private rawQuery: { + sql: string; + parameters: unknown[]; + }; + + constructor( + private client: NodeMsSqlClient, + queryString: string, + private params: unknown[], + private logger: Logger, + private fields: SelectedFieldsOrdered | undefined, + private customResultMapper?: (rows: unknown[][]) => T['execute'], + ) { + super(); + this.rawQuery = { + sql: queryString, + parameters: params, + }; + } + + async execute( + placeholderValues: Record = {}, + ): Promise { + const params = fillPlaceholders(this.params, placeholderValues); + + this.logger.logQuery(this.rawQuery.sql, params); + + const { + fields, + client, + rawQuery, + joinsNotNullableMap, + customResultMapper, + } = this; + let queryClient = client as ConnectionPool; + if (is(client, AutoPool)) { + queryClient = await client.$instance(); + } + const request = queryClient.request() as Request & { arrayRowMode: boolean }; + for (const [index, param] of params.entries()) { + request.input(`par${index}`, param); + } + + if (!fields && !customResultMapper) { + return request.query(rawQuery.sql) as Promise; + } + + request.arrayRowMode = true; + const rows = await request.query(rawQuery.sql); + + if (customResultMapper) { + return customResultMapper(rows.recordset); + } + + return rows.recordset.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); + } + + async *iterator( + placeholderValues: Record = {}, + ): AsyncGenerator< + T['execute'] extends any[] ? T['execute'][number] : T['execute'] + > { + const params = fillPlaceholders(this.params, placeholderValues); + + const { + fields, + rawQuery, + joinsNotNullableMap, + client, + customResultMapper, + } = this; + let queryClient = client as ConnectionPool; + if (is(client, AutoPool)) { + queryClient = await client.$instance(); + } + const request = queryClient.request() as Request & { arrayRowMode: boolean }; + request.stream = true; + const hasRowsMapper = Boolean(fields || customResultMapper); + + if (hasRowsMapper) { + request.arrayRowMode = true; + } + + for (const [index, param] of params.entries()) { + request.input(`par${index}`, param); + } + + const stream = request.toReadableStream(); + + request.query(rawQuery.sql); + + function dataListener() { + stream.pause(); + } + + stream.on('data', dataListener); + + try { + const onEnd = once(stream, 'end'); + const onError = once(stream, 'error'); + + while (true) { + stream.resume(); + const row = await Promise.race([ + onEnd, + onError, + new Promise((resolve) => stream.once('data', resolve)), + ]); + if (row === undefined || (Array.isArray(row) && row.length === 0)) { + break; + // oxlint-disable-next-line drizzle-internal/no-instanceof + } else if (row instanceof Error) { + throw row; + } else { + if (hasRowsMapper) { + if (customResultMapper) { + const mappedRow = customResultMapper([row as unknown[]]); + yield Array.isArray(mappedRow) ? mappedRow[0] : mappedRow; + } else { + yield mapResultRow( + fields!, + row as unknown[], + joinsNotNullableMap, + ); + } + } else { + yield row as T['execute']; + } + } + } + } finally { + stream.off('data', dataListener); + request.cancel(); + } + } +} + +export interface NodeMsSqlSessionOptions { + logger?: Logger; +} + +export class NodeMsSqlSession< + TFullSchema extends Record, + TSchema extends V1.TablesRelationalConfig, +> extends MsSqlSession< + NodeMsSqlQueryResultHKT, + NodeMsSqlPreparedQueryHKT, + TFullSchema, + TSchema +> { + static override readonly [entityKind]: string = 'NodeMsSqlSession'; + + private logger: Logger; + + constructor( + private client: NodeMsSqlClient, + dialect: MsSqlDialect, + private schema: V1.RelationalSchemaConfig | undefined, + private options: NodeMsSqlSessionOptions, + ) { + super(dialect); + this.logger = options.logger ?? new NoopLogger(); + } + + prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + customResultMapper?: (rows: unknown[][]) => T['execute'], + ): PreparedQueryKind { + return new NodeMsSqlPreparedQuery( + this.client, + query.sql, + query.params, + this.logger, + fields, + customResultMapper, + ) as PreparedQueryKind; + } + + /** + * @internal + * What is its purpose? + */ + async query(query: string, params: unknown[]): Promise { + this.logger.logQuery(query, params); + + let queryClient = this.client as ConnectionPool; + if (is(this.client, AutoPool)) { + queryClient = await this.client.$instance(); + } + const request = queryClient.request() as Request & { + arrayRowMode: boolean; + }; + request.arrayRowMode = true; + + for (const [index, param] of params.entries()) { + request.input(`par${index}`, param); + } + + return request.query(query); + } + + override async all(query: SQL): Promise { + const querySql = this.dialect.sqlToQuery(query); + this.logger.logQuery(querySql.sql, querySql.params); + return await this.query(querySql.sql, querySql.params).then( + (result) => result.recordset, + ); + } + + override async transaction( + transaction: (tx: NodeMsSqlTransaction) => Promise, + config?: MsSqlTransactionConfig, + ): Promise { + const mssqlTransaction = (this.client as ConnectionPool).transaction(); + const session = new NodeMsSqlSession( + mssqlTransaction, + this.dialect, + this.schema, + this.options, + ); + const tx = new NodeMsSqlTransaction( + this.dialect, + session as MsSqlSession, + this.schema, + 0, + ); + + await mssqlTransaction.begin( + config?.isolationLevel + ? isolationLevelMap[config.isolationLevel] + : undefined, + ); + + try { + const result = await transaction(tx); + await mssqlTransaction.commit(); + return result; + } catch (err) { + await mssqlTransaction.rollback(); + throw err; + } + } +} + +export class NodeMsSqlTransaction< + TFullSchema extends Record, + TSchema extends V1.TablesRelationalConfig, +> extends MsSqlTransaction< + NodeMsSqlQueryResultHKT, + NodeMsSqlPreparedQueryHKT, + TFullSchema, + TSchema +> { + static override readonly [entityKind]: string = 'NodeMsSqlTransaction'; + + override async transaction( + transaction: (tx: NodeMsSqlTransaction) => Promise, + ): Promise { + const savepointName = `sp${this.nestedIndex + 1}`; + const tx = new NodeMsSqlTransaction( + this.dialect, + this.session, + this.schema, + this.nestedIndex + 1, + ); + + await tx.execute(sql.raw(`save transaction ${savepointName}`)); + try { + const result = await transaction(tx); + return result; + } catch (err) { + await tx.execute(sql.raw(`rollback transaction ${savepointName}`)); + throw err; + } + } +} + +const isolationLevelMap: Record< + MsSqlTransactionConfig['isolationLevel'], + (typeof mssql.ISOLATION_LEVEL)[keyof (typeof mssql)['ISOLATION_LEVEL']] +> = { + 'read uncommitted': mssql.ISOLATION_LEVEL.READ_UNCOMMITTED, + 'read committed': mssql.ISOLATION_LEVEL.READ_COMMITTED, + 'repeatable read': mssql.ISOLATION_LEVEL.REPEATABLE_READ, + serializable: mssql.ISOLATION_LEVEL.SERIALIZABLE, + snapshot: mssql.ISOLATION_LEVEL.SNAPSHOT, +}; + +export interface NodeMsSqlQueryResultHKT extends QueryResultHKT { + type: MsSqlQueryResult; +} + +export interface NodeMsSqlPreparedQueryHKT extends PreparedQueryHKT { + type: NodeMsSqlPreparedQuery>; +} diff --git a/drizzle-orm/src/node-postgres/driver.ts b/drizzle-orm/src/node-postgres/driver.ts index f72271a1c2..ea8e8a4843 100644 --- a/drizzle-orm/src/node-postgres/driver.ts +++ b/drizzle-orm/src/node-postgres/driver.ts @@ -7,7 +7,7 @@ import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { NodePgClient, NodePgQueryResultHKT } from './session.ts'; import { NodePgSession } from './session.ts'; @@ -100,10 +100,10 @@ export function drizzle< >( ...params: | [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ @@ -125,24 +125,20 @@ export function drizzle< return construct(instance, params[1] as DrizzleConfig | undefined) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as ( - & ({ connection?: PoolConfig | string; client?: TClient }) - & DrizzleConfig - ); + const { connection, client, ...drizzleConfig } = params[0] as ( + & ({ connection?: PoolConfig | string; client?: TClient }) + & DrizzleConfig + ); - if (client) return construct(client, drizzleConfig); + if (client) return construct(client, drizzleConfig); - const instance = typeof connection === 'string' - ? new pg.Pool({ - connectionString: connection, - }) - : new pg.Pool(connection!); - - return construct(instance, drizzleConfig) as any; - } + const instance = typeof connection === 'string' + ? new pg.Pool({ + connectionString: connection, + }) + : new pg.Pool(connection!); - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/node-postgres/session.ts b/drizzle-orm/src/node-postgres/session.ts index fb1e20b734..0ac03a24b0 100644 --- a/drizzle-orm/src/node-postgres/session.ts +++ b/drizzle-orm/src/node-postgres/session.ts @@ -305,7 +305,7 @@ export class NodePgSession< transaction: (tx: NodePgTransaction) => Promise, config?: PgTransactionConfig | undefined, ): Promise { - const session = (this.client instanceof Pool || (NativePool && this.client instanceof NativePool)) // eslint-disable-line no-instanceof/no-instanceof + const session = (this.client instanceof Pool || (NativePool && this.client instanceof NativePool)) // oxlint-disable-line drizzle-internal/no-instanceof ? new NodePgSession(await this.client.connect(), this.dialect, this.relations, this.schema, this.options) : this; const tx = new NodePgTransaction( @@ -323,7 +323,7 @@ export class NodePgSession< await tx.execute(sql`rollback`); throw error; } finally { - if (this.client instanceof Pool || (NativePool && this.client instanceof NativePool)) { // eslint-disable-line no-instanceof/no-instanceof + if (this.client instanceof Pool || (NativePool && this.client instanceof NativePool)) { // oxlint-disable-line drizzle-internal/no-instanceof (session.client as PoolClient).release(); } } diff --git a/drizzle-orm/src/op-sqlite/migrator.ts b/drizzle-orm/src/op-sqlite/migrator.ts index f8cb202ef7..861dfbfedc 100644 --- a/drizzle-orm/src/op-sqlite/migrator.ts +++ b/drizzle-orm/src/op-sqlite/migrator.ts @@ -1,23 +1,21 @@ import { useEffect, useReducer } from 'react'; -import type { MigrationMeta } from '~/migrator.ts'; +import { formatToMillis, type MigrationMeta } from '~/migrator.ts'; import type { AnyRelations } from '~/relations.ts'; import type { OPSQLiteDatabase } from './driver.ts'; interface MigrationConfig { - journal: { - entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; - }; migrations: Record; } -async function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise { +async function readMigrationFiles({ migrations }: MigrationConfig): Promise { const migrationQueries: MigrationMeta[] = []; - for await (const journalEntry of journal.entries) { - const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; + const sortedMigrations = Object.keys(migrations).sort(); + for (const key of sortedMigrations) { + const query = migrations[key]; if (!query) { - throw new Error(`Missing migration: ${journalEntry.tag}`); + throw new Error(`Missing migration: ${key}`); } try { @@ -25,14 +23,16 @@ async function readMigrationFiles({ journal, migrations }: MigrationConfig): Pro return it; }); + const migrationDate = formatToMillis(key.slice(0, 14)); + migrationQueries.push({ sql: result, - bps: journalEntry.breakpoints, - folderMillis: journalEntry.when, + bps: true, + folderMillis: migrationDate, hash: '', }); } catch { - throw new Error(`Failed to parse migration: ${journalEntry.tag}`); + throw new Error(`Failed to parse migration: ${key}`); } } @@ -58,9 +58,6 @@ type Action = | { type: 'error'; payload: Error }; export const useMigrations = (db: OPSQLiteDatabase, migrations: { - journal: { - entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; - }; migrations: Record; }): State => { const initialState: State = { diff --git a/drizzle-orm/src/pg-core/columns/bigint.ts b/drizzle-orm/src/pg-core/columns/bigint.ts index 4146fc235b..85100c38ef 100644 --- a/drizzle-orm/src/pg-core/columns/bigint.ts +++ b/drizzle-orm/src/pg-core/columns/bigint.ts @@ -6,7 +6,6 @@ import { PgColumn } from './common.ts'; import { PgIntColumnBaseBuilder } from './int.common.ts'; export class PgBigInt53Builder extends PgIntColumnBaseBuilder<{ - name: string; dataType: 'number int53'; data: number; driverParam: number | string; @@ -39,7 +38,6 @@ export class PgBigInt53> extends PgCo } export class PgBigInt64Builder extends PgIntColumnBaseBuilder<{ - name: string; dataType: 'bigint int64'; data: bigint; driverParam: string; @@ -70,7 +68,6 @@ export class PgBigInt64> extends PgCo } export class PgBigIntStringBuilder extends PgIntColumnBaseBuilder<{ - name: string; dataType: 'string int64'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/bigserial.ts b/drizzle-orm/src/pg-core/columns/bigserial.ts index 2c1ef40418..afe1947208 100644 --- a/drizzle-orm/src/pg-core/columns/bigserial.ts +++ b/drizzle-orm/src/pg-core/columns/bigserial.ts @@ -5,7 +5,6 @@ import type { PgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgBigSerial53Builder extends PgColumnBuilder<{ - name: string; dataType: 'number int53'; data: number; driverParam: number; @@ -46,7 +45,6 @@ export class PgBigSerial53> extends P } export class PgBigSerial64Builder extends PgColumnBuilder<{ - name: string; dataType: 'bigint int64'; data: bigint; driverParam: string; @@ -58,6 +56,7 @@ export class PgBigSerial64Builder extends PgColumnBuilder<{ constructor(name: string) { super(name, 'bigint int64', 'PgBigSerial64'); this.config.hasDefault = true; + this.config.notNull = true; } /** @internal */ diff --git a/drizzle-orm/src/pg-core/columns/boolean.ts b/drizzle-orm/src/pg-core/columns/boolean.ts index 59fae0e094..c688081f43 100644 --- a/drizzle-orm/src/pg-core/columns/boolean.ts +++ b/drizzle-orm/src/pg-core/columns/boolean.ts @@ -4,7 +4,6 @@ import type { PgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgBooleanBuilder extends PgColumnBuilder<{ - name: string; dataType: 'boolean'; data: boolean; driverParam: boolean; diff --git a/drizzle-orm/src/pg-core/columns/bytea.ts b/drizzle-orm/src/pg-core/columns/bytea.ts index 7ecbc379c2..ab0f8df422 100644 --- a/drizzle-orm/src/pg-core/columns/bytea.ts +++ b/drizzle-orm/src/pg-core/columns/bytea.ts @@ -4,7 +4,6 @@ import type { PgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgByteaBuilder extends PgColumnBuilder<{ - name: string; dataType: 'object buffer'; data: Buffer; driverParam: Buffer; diff --git a/drizzle-orm/src/pg-core/columns/char.ts b/drizzle-orm/src/pg-core/columns/char.ts index b135e82425..05551e6d80 100644 --- a/drizzle-orm/src/pg-core/columns/char.ts +++ b/drizzle-orm/src/pg-core/columns/char.ts @@ -7,12 +7,11 @@ import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgCharBuilder< TEnum extends [string, ...string[]], > extends PgColumnBuilder<{ - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; enumValues: TEnum; driverParam: string; -}, { enumValues?: TEnum; length: number; setLength: boolean; isLengthExact: true }> { +}, { enumValues?: TEnum; length: number; setLength: boolean }> { static override readonly [entityKind]: string = 'PgCharBuilder'; constructor(name: string, config: PgCharConfig) { @@ -20,7 +19,6 @@ export class PgCharBuilder< this.config.length = config.length ?? 1; this.config.setLength = config.length !== undefined; this.config.enumValues = config.enum; - this.config.isLengthExact = true; } /** @internal */ diff --git a/drizzle-orm/src/pg-core/columns/cidr.ts b/drizzle-orm/src/pg-core/columns/cidr.ts index a251aae694..cbdc45d5be 100644 --- a/drizzle-orm/src/pg-core/columns/cidr.ts +++ b/drizzle-orm/src/pg-core/columns/cidr.ts @@ -4,7 +4,6 @@ import type { PgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgCidrBuilder extends PgColumnBuilder<{ - name: string; dataType: 'string cidr'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/common.ts b/drizzle-orm/src/pg-core/columns/common.ts index 8adefe68cb..825e3e885f 100644 --- a/drizzle-orm/src/pg-core/columns/common.ts +++ b/drizzle-orm/src/pg-core/columns/common.ts @@ -16,14 +16,14 @@ import type { SQL } from '~/sql/sql.ts'; import { iife } from '~/tracing-utils.ts'; import type { Update } from '~/utils.ts'; import type { PgIndexOpClass } from '../indexes.ts'; -import { uniqueKeyName } from '../unique-constraint.ts'; import { makePgArray, parsePgArray } from '../utils/array.ts'; export type PgColumns = Record>; export interface ReferenceConfig { ref: () => PgColumn; - actions: { + config: { + name?: string; onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; }; @@ -54,9 +54,9 @@ export abstract class PgColumnBuilder< references( ref: ReferenceConfig['ref'], - actions: ReferenceConfig['actions'] = {}, + config: ReferenceConfig['config'] = {}, ): this { - this.foreignKeyConfigs.push({ ref, actions }); + this.foreignKeyConfigs.push({ ref, config }); return this; } @@ -85,23 +85,23 @@ export abstract class PgColumnBuilder< /** @internal */ buildForeignKeys(column: PgColumn, table: PgTable): ForeignKey[] { - return this.foreignKeyConfigs.map(({ ref, actions }) => { + return this.foreignKeyConfigs.map(({ ref, config }) => { return iife( - (ref, actions) => { + (ref, config) => { const builder = new ForeignKeyBuilder(() => { const foreignColumn = ref(); - return { columns: [column], foreignColumns: [foreignColumn] }; + return { name: config.name, columns: [column], foreignColumns: [foreignColumn] }; }); - if (actions.onUpdate) { - builder.onUpdate(actions.onUpdate); + if (config.onUpdate) { + builder.onUpdate(config.onUpdate); } - if (actions.onDelete) { - builder.onDelete(actions.onDelete); + if (config.onDelete) { + builder.onDelete(config.onDelete); } return builder.build(table); }, ref, - actions, + config, ); }); } @@ -131,9 +131,6 @@ export abstract class PgColumn< table: PgTable, config: ColumnBuilderRuntimeConfig & TRuntimeConfig, ) { - if (!config.uniqueName) { - config.uniqueName = uniqueKeyName(table, [config.name]); - } super(table, config); this.table = table; } @@ -266,7 +263,7 @@ export class PgArrayBuilder< length: number | undefined; } > { - static override readonly [entityKind] = 'PgArrayBuilder'; + static override readonly [entityKind]: string = 'PgArrayBuilder'; constructor( name: string, diff --git a/drizzle-orm/src/pg-core/columns/custom.ts b/drizzle-orm/src/pg-core/columns/custom.ts index 81335e4933..a7faa1fa5b 100644 --- a/drizzle-orm/src/pg-core/columns/custom.ts +++ b/drizzle-orm/src/pg-core/columns/custom.ts @@ -8,7 +8,6 @@ import { PgColumn, PgColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { - name: string; dataType: 'custom'; data: T['data']; driverParam: T['driverData']; @@ -361,6 +360,6 @@ export function customType( b?: T['config'], ): PgCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); - return new PgCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); + return new PgCustomColumnBuilder(name, config, customTypeParams); }; } diff --git a/drizzle-orm/src/pg-core/columns/date.ts b/drizzle-orm/src/pg-core/columns/date.ts index ef2fbd5e70..17a2e0a1fc 100644 --- a/drizzle-orm/src/pg-core/columns/date.ts +++ b/drizzle-orm/src/pg-core/columns/date.ts @@ -6,7 +6,6 @@ import { PgColumn } from './common.ts'; import { PgDateColumnBaseBuilder } from './date.common.ts'; export class PgDateBuilder extends PgDateColumnBaseBuilder<{ - name: string; dataType: 'object date'; data: Date; driverParam: string; @@ -42,7 +41,6 @@ export class PgDate> extends PgColumn< } export class PgDateStringBuilder extends PgDateColumnBaseBuilder<{ - name: string; dataType: 'string date'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/double-precision.ts b/drizzle-orm/src/pg-core/columns/double-precision.ts index 07e31066b7..b2e1d67b93 100644 --- a/drizzle-orm/src/pg-core/columns/double-precision.ts +++ b/drizzle-orm/src/pg-core/columns/double-precision.ts @@ -4,7 +4,6 @@ import type { PgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgDoublePrecisionBuilder extends PgColumnBuilder<{ - name: string; dataType: 'number double'; data: number; driverParam: string | number; diff --git a/drizzle-orm/src/pg-core/columns/enum.ts b/drizzle-orm/src/pg-core/columns/enum.ts index 8aa5720663..fd49cbaeb3 100644 --- a/drizzle-orm/src/pg-core/columns/enum.ts +++ b/drizzle-orm/src/pg-core/columns/enum.ts @@ -18,7 +18,6 @@ export interface PgEnumObject { export class PgEnumObjectColumnBuilder< TValues extends object, > extends PgColumnBuilder<{ - name: string; dataType: 'string enum'; data: TValues[keyof TValues]; enumValues: string[]; @@ -81,7 +80,6 @@ export function isPgEnum(obj: unknown): obj is PgEnum<[string, ...string[]]> { export class PgEnumColumnBuilder< TValues extends [string, ...string[]], > extends PgColumnBuilder<{ - name: string; dataType: 'string enum'; data: TValues[number]; enumValues: TValues; diff --git a/drizzle-orm/src/pg-core/columns/inet.ts b/drizzle-orm/src/pg-core/columns/inet.ts index de6bb27a79..fe0d1fe819 100644 --- a/drizzle-orm/src/pg-core/columns/inet.ts +++ b/drizzle-orm/src/pg-core/columns/inet.ts @@ -4,7 +4,6 @@ import type { PgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgInetBuilder extends PgColumnBuilder<{ - name: string; dataType: 'string inet'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/integer.ts b/drizzle-orm/src/pg-core/columns/integer.ts index 3d4d95c163..6b5ce5202c 100644 --- a/drizzle-orm/src/pg-core/columns/integer.ts +++ b/drizzle-orm/src/pg-core/columns/integer.ts @@ -5,7 +5,6 @@ import { PgColumn } from './common.ts'; import { PgIntColumnBaseBuilder } from './int.common.ts'; export class PgIntegerBuilder extends PgIntColumnBaseBuilder<{ - name: string; dataType: 'number int32'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/pg-core/columns/interval.ts b/drizzle-orm/src/pg-core/columns/interval.ts index c61e9e9c93..9a3959c2a3 100644 --- a/drizzle-orm/src/pg-core/columns/interval.ts +++ b/drizzle-orm/src/pg-core/columns/interval.ts @@ -6,7 +6,6 @@ import { PgColumn, PgColumnBuilder } from './common.ts'; import type { Precision } from './timestamp.ts'; export class PgIntervalBuilder extends PgColumnBuilder<{ - name: string; dataType: 'string interval'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/json.ts b/drizzle-orm/src/pg-core/columns/json.ts index 3ffd71a171..8678904156 100644 --- a/drizzle-orm/src/pg-core/columns/json.ts +++ b/drizzle-orm/src/pg-core/columns/json.ts @@ -5,7 +5,6 @@ import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgJsonBuilder extends PgColumnBuilder< { - name: string; dataType: 'object json'; data: unknown; driverParam: unknown; diff --git a/drizzle-orm/src/pg-core/columns/jsonb.ts b/drizzle-orm/src/pg-core/columns/jsonb.ts index 016988d327..a4dc9988e6 100644 --- a/drizzle-orm/src/pg-core/columns/jsonb.ts +++ b/drizzle-orm/src/pg-core/columns/jsonb.ts @@ -4,7 +4,6 @@ import type { PgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgJsonbBuilder extends PgColumnBuilder<{ - name: string; dataType: 'object json'; data: unknown; driverParam: unknown; diff --git a/drizzle-orm/src/pg-core/columns/line.ts b/drizzle-orm/src/pg-core/columns/line.ts index 4f48313248..31057fa8c8 100644 --- a/drizzle-orm/src/pg-core/columns/line.ts +++ b/drizzle-orm/src/pg-core/columns/line.ts @@ -1,12 +1,10 @@ import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { PgTable } from '~/pg-core/table.ts'; - import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgLineBuilder extends PgColumnBuilder<{ - name: string; dataType: 'array line'; data: [number, number, number]; driverParam: number | string; @@ -29,6 +27,8 @@ export class PgLineBuilder extends PgColumnBuilder<{ export class PgLineTuple> extends PgColumn { static override readonly [entityKind]: string = 'PgLine'; + readonly mode = 'tuple'; + getSQLType(): string { return 'line'; } @@ -44,7 +44,6 @@ export class PgLineTuple> extends PgCol } export class PgLineABCBuilder extends PgColumnBuilder<{ - name: string; dataType: 'object line'; data: { a: number; b: number; c: number }; driverParam: string; @@ -67,6 +66,8 @@ export class PgLineABCBuilder extends PgColumnBuilder<{ export class PgLineABC> extends PgColumn { static override readonly [entityKind]: string = 'PgLineABC'; + readonly mode = 'abc'; + getSQLType(): string { return 'line'; } diff --git a/drizzle-orm/src/pg-core/columns/macaddr.ts b/drizzle-orm/src/pg-core/columns/macaddr.ts index 85ea3b8b70..5e43c4dc97 100644 --- a/drizzle-orm/src/pg-core/columns/macaddr.ts +++ b/drizzle-orm/src/pg-core/columns/macaddr.ts @@ -4,7 +4,6 @@ import type { PgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgMacaddrBuilder extends PgColumnBuilder<{ - name: string; dataType: 'string macaddr'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/macaddr8.ts b/drizzle-orm/src/pg-core/columns/macaddr8.ts index 5ddf004e86..87f041e6c6 100644 --- a/drizzle-orm/src/pg-core/columns/macaddr8.ts +++ b/drizzle-orm/src/pg-core/columns/macaddr8.ts @@ -4,7 +4,6 @@ import type { PgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgMacaddr8Builder extends PgColumnBuilder<{ - name: string; dataType: 'string macaddr8'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/numeric.ts b/drizzle-orm/src/pg-core/columns/numeric.ts index f4fb75c23b..6b7b60769e 100644 --- a/drizzle-orm/src/pg-core/columns/numeric.ts +++ b/drizzle-orm/src/pg-core/columns/numeric.ts @@ -6,7 +6,6 @@ import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgNumericBuilder extends PgColumnBuilder< { - name: string; dataType: 'string numeric'; data: string; driverParam: string; @@ -61,7 +60,6 @@ export class PgNumeric> extends PgC export class PgNumericNumberBuilder extends PgColumnBuilder< { - name: string; dataType: 'number'; data: number; driverParam: string; @@ -121,7 +119,6 @@ export class PgNumericNumber> extends PgCol export class PgNumericBigIntBuilder extends PgColumnBuilder< { - name: string; dataType: 'bigint int64'; data: bigint; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/point.ts b/drizzle-orm/src/pg-core/columns/point.ts index a1f06a9de3..5e1c9eb719 100644 --- a/drizzle-orm/src/pg-core/columns/point.ts +++ b/drizzle-orm/src/pg-core/columns/point.ts @@ -1,12 +1,10 @@ import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { PgTable } from '~/pg-core/table.ts'; - import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgPointTupleBuilder extends PgColumnBuilder<{ - name: string; dataType: 'array point'; data: [number, number]; driverParam: number | string; @@ -29,6 +27,8 @@ export class PgPointTupleBuilder extends PgColumnBuilder<{ export class PgPointTuple> extends PgColumn { static override readonly [entityKind]: string = 'PgPointTuple'; + readonly mode = 'tuple'; + getSQLType(): string { return 'point'; } @@ -47,7 +47,6 @@ export class PgPointTuple> extends PgC } export class PgPointObjectBuilder extends PgColumnBuilder<{ - name: string; dataType: 'object point'; data: { x: number; y: number }; driverParam: string; @@ -70,6 +69,8 @@ export class PgPointObjectBuilder extends PgColumnBuilder<{ export class PgPointObject> extends PgColumn { static override readonly [entityKind]: string = 'PgPointObject'; + readonly mode = 'xy'; + getSQLType(): string { return 'point'; } diff --git a/drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts b/drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts index f5a2516981..63bc6f72ec 100644 --- a/drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts +++ b/drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts @@ -1,21 +1,20 @@ import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { PgTable } from '~/pg-core/table.ts'; - import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from '../common.ts'; import { parseEWKB } from './utils.ts'; export class PgGeometryBuilder extends PgColumnBuilder<{ - name: string; dataType: 'array geometry'; data: [number, number]; driverParam: string; -}> { +}, { srid: number | undefined }> { static override readonly [entityKind]: string = 'PgGeometryBuilder'; - constructor(name: string) { + constructor(name: string, srid?: number) { super(name, 'array geometry', 'PgGeometry'); + this.config.srid = srid; } /** @internal */ @@ -27,17 +26,22 @@ export class PgGeometryBuilder extends PgColumnBuilder<{ } } -export class PgGeometry> extends PgColumn { +export class PgGeometry> + extends PgColumn +{ static override readonly [entityKind]: string = 'PgGeometry'; + readonly srid = this.config.srid; + readonly mode = 'tuple'; + getSQLType(): string { - return 'geometry(point)'; + return `geometry(point${this.srid === undefined ? '' : `,${this.srid}`})`; } override mapFromDriverValue(value: string | [number, number]): [number, number] { if (typeof value !== 'string') return value as [number, number]; - return parseEWKB(value); + return parseEWKB(value).point; } override mapToDriverValue(value: [number, number]): string { @@ -46,15 +50,15 @@ export class PgGeometry> extends Pg } export class PgGeometryObjectBuilder extends PgColumnBuilder<{ - name: string; dataType: 'object geometry'; data: { x: number; y: number }; driverParam: string; -}> { +}, { srid?: number }> { static override readonly [entityKind]: string = 'PgGeometryObjectBuilder'; - constructor(name: string) { + constructor(name: string, srid: number | undefined) { super(name, 'object geometry', 'PgGeometryObject'); + this.config.srid = srid; } /** @internal */ @@ -66,16 +70,21 @@ export class PgGeometryObjectBuilder extends PgColumnBuilder<{ } } -export class PgGeometryObject> extends PgColumn { +export class PgGeometryObject> + extends PgColumn +{ static override readonly [entityKind]: string = 'PgGeometryObject'; + readonly srid = this.config.srid; + readonly mode = 'object'; + getSQLType(): string { - return 'geometry(point)'; + return `geometry(point${this.srid === undefined ? '' : `,${this.srid}`})`; } override mapFromDriverValue(value: string): { x: number; y: number } { const parsed = parseEWKB(value); - return { x: parsed[0], y: parsed[1] }; + return { x: parsed.point[0], y: parsed.point[1] }; } override mapToDriverValue(value: { x: number; y: number }): string { @@ -99,7 +108,7 @@ export function geometry( export function geometry(a?: string | PgGeometryConfig, b?: PgGeometryConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (!config?.mode || config.mode === 'tuple') { - return new PgGeometryBuilder(name); + return new PgGeometryBuilder(name, config?.srid); } - return new PgGeometryObjectBuilder(name); + return new PgGeometryObjectBuilder(name, config?.srid); } diff --git a/drizzle-orm/src/pg-core/columns/postgis_extension/utils.ts b/drizzle-orm/src/pg-core/columns/postgis_extension/utils.ts index 8b5d9a7865..18a48315bb 100644 --- a/drizzle-orm/src/pg-core/columns/postgis_extension/utils.ts +++ b/drizzle-orm/src/pg-core/columns/postgis_extension/utils.ts @@ -15,7 +15,7 @@ function bytesToFloat64(bytes: Uint8Array, offset: number): number { return view.getFloat64(0, true); } -export function parseEWKB(hex: string): [number, number] { +export function parseEWKB(hex: string): { srid: number | undefined; point: [number, number] } { const bytes = hexToBytes(hex); let offset = 0; @@ -28,9 +28,9 @@ export function parseEWKB(hex: string): [number, number] { const geomType = view.getUint32(offset, byteOrder === 1); offset += 4; - let _srid: number | undefined; + let srid: number | undefined; if (geomType & 0x20000000) { // SRID flag - _srid = view.getUint32(offset, byteOrder === 1); + srid = view.getUint32(offset, byteOrder === 1); offset += 4; } @@ -40,7 +40,7 @@ export function parseEWKB(hex: string): [number, number] { const y = bytesToFloat64(bytes, offset); offset += 8; - return [x, y]; + return { srid, point: [x, y] }; } throw new Error('Unsupported geometry type'); diff --git a/drizzle-orm/src/pg-core/columns/real.ts b/drizzle-orm/src/pg-core/columns/real.ts index d46659d400..27cb820582 100644 --- a/drizzle-orm/src/pg-core/columns/real.ts +++ b/drizzle-orm/src/pg-core/columns/real.ts @@ -5,7 +5,6 @@ import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgRealBuilder extends PgColumnBuilder< { - name: string; dataType: 'number float'; data: number; driverParam: string | number; diff --git a/drizzle-orm/src/pg-core/columns/serial.ts b/drizzle-orm/src/pg-core/columns/serial.ts index 9a3433ea2e..d7cfd166e5 100644 --- a/drizzle-orm/src/pg-core/columns/serial.ts +++ b/drizzle-orm/src/pg-core/columns/serial.ts @@ -4,7 +4,6 @@ import type { PgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgSerialBuilder extends PgColumnBuilder<{ - name: string; dataType: 'number int32'; data: number; driverParam: number; diff --git a/drizzle-orm/src/pg-core/columns/smallint.ts b/drizzle-orm/src/pg-core/columns/smallint.ts index a5f4b6b9f1..b35ee2b46a 100644 --- a/drizzle-orm/src/pg-core/columns/smallint.ts +++ b/drizzle-orm/src/pg-core/columns/smallint.ts @@ -5,7 +5,6 @@ import { PgColumn } from './common.ts'; import { PgIntColumnBaseBuilder } from './int.common.ts'; export class PgSmallIntBuilder extends PgIntColumnBaseBuilder<{ - name: string; dataType: 'number int16'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/pg-core/columns/smallserial.ts b/drizzle-orm/src/pg-core/columns/smallserial.ts index b50706372e..153d3d0194 100644 --- a/drizzle-orm/src/pg-core/columns/smallserial.ts +++ b/drizzle-orm/src/pg-core/columns/smallserial.ts @@ -4,7 +4,6 @@ import type { PgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgSmallSerialBuilder extends PgColumnBuilder<{ - name: string; dataType: 'number int16'; data: number; driverParam: number; diff --git a/drizzle-orm/src/pg-core/columns/text.ts b/drizzle-orm/src/pg-core/columns/text.ts index e17dd3e2c8..4194538ee9 100644 --- a/drizzle-orm/src/pg-core/columns/text.ts +++ b/drizzle-orm/src/pg-core/columns/text.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgTextBuilder extends PgColumnBuilder<{ - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; enumValues: TEnum; diff --git a/drizzle-orm/src/pg-core/columns/time.ts b/drizzle-orm/src/pg-core/columns/time.ts index 7cfa3c7a40..2962118eaf 100644 --- a/drizzle-orm/src/pg-core/columns/time.ts +++ b/drizzle-orm/src/pg-core/columns/time.ts @@ -8,7 +8,6 @@ import type { Precision } from './timestamp.ts'; export class PgTimeBuilder extends PgDateColumnBaseBuilder< { - name: string; dataType: 'string time'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/timestamp.ts b/drizzle-orm/src/pg-core/columns/timestamp.ts index 2fab6e2ff0..0944165aa3 100644 --- a/drizzle-orm/src/pg-core/columns/timestamp.ts +++ b/drizzle-orm/src/pg-core/columns/timestamp.ts @@ -7,7 +7,6 @@ import { PgDateColumnBaseBuilder } from './date.common.ts'; export class PgTimestampBuilder extends PgDateColumnBaseBuilder< { - name: string; dataType: 'object date'; data: Date; driverParam: string; @@ -62,7 +61,6 @@ export class PgTimestamp> extends PgCo export class PgTimestampStringBuilder extends PgDateColumnBaseBuilder< { - name: string; dataType: 'string timestamp'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/uuid.ts b/drizzle-orm/src/pg-core/columns/uuid.ts index 47b0001848..f4baa8c2c2 100644 --- a/drizzle-orm/src/pg-core/columns/uuid.ts +++ b/drizzle-orm/src/pg-core/columns/uuid.ts @@ -5,7 +5,6 @@ import { sql } from '~/sql/sql.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export class PgUUIDBuilder extends PgColumnBuilder<{ - name: string; dataType: 'string uuid'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/varchar.ts b/drizzle-orm/src/pg-core/columns/varchar.ts index 05b8f29c5f..f0e9ba4890 100644 --- a/drizzle-orm/src/pg-core/columns/varchar.ts +++ b/drizzle-orm/src/pg-core/columns/varchar.ts @@ -8,7 +8,6 @@ export class PgVarcharBuilder< TEnum extends [string, ...string[]], > extends PgColumnBuilder< { - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/vector_extension/bit.ts b/drizzle-orm/src/pg-core/columns/vector_extension/bit.ts index f775e4d834..a3e1c7c4cb 100644 --- a/drizzle-orm/src/pg-core/columns/vector_extension/bit.ts +++ b/drizzle-orm/src/pg-core/columns/vector_extension/bit.ts @@ -6,7 +6,6 @@ import { PgColumn, PgColumnBuilder } from '../common.ts'; export class PgBinaryVectorBuilder extends PgColumnBuilder< { - name: string; dataType: 'string binary'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts b/drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts index b45471bd58..a8bc2019f2 100644 --- a/drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts +++ b/drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts @@ -6,7 +6,6 @@ import { PgColumn, PgColumnBuilder } from '../common.ts'; export class PgHalfVectorBuilder extends PgColumnBuilder< { - name: string; dataType: 'array halfvector'; data: number[]; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts b/drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts index f8b239f419..9c7dbfd2e2 100644 --- a/drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts +++ b/drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts @@ -6,7 +6,6 @@ import { PgColumn, PgColumnBuilder } from '../common.ts'; export class PgSparseVectorBuilder extends PgColumnBuilder< { - name: string; dataType: 'string sparsevec'; data: string; driverParam: string; diff --git a/drizzle-orm/src/pg-core/columns/vector_extension/vector.ts b/drizzle-orm/src/pg-core/columns/vector_extension/vector.ts index 0f881811ec..293070f505 100644 --- a/drizzle-orm/src/pg-core/columns/vector_extension/vector.ts +++ b/drizzle-orm/src/pg-core/columns/vector_extension/vector.ts @@ -6,7 +6,6 @@ import { PgColumn, PgColumnBuilder } from '../common.ts'; export class PgVectorBuilder extends PgColumnBuilder< { - name: string; dataType: 'array vector'; data: number[]; driverParam: string; diff --git a/drizzle-orm/src/pg-core/dialect.ts b/drizzle-orm/src/pg-core/dialect.ts index 31d4841b26..84e3898948 100644 --- a/drizzle-orm/src/pg-core/dialect.ts +++ b/drizzle-orm/src/pg-core/dialect.ts @@ -1,5 +1,11 @@ import * as V1 from '~/_relations.ts'; -import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; +import { + aliasedTable, + aliasedTableColumn, + getOriginalColumnFromAlias, + mapColumnsInAliasedSQLToAlias, + mapColumnsInSQLToAlias, +} from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; @@ -276,9 +282,13 @@ export class PgDialect { } } else if (is(field, Column)) { if (isSingleTable) { - chunk.push(sql.identifier(this.casing.getColumnCasing(field))); + chunk.push( + field.isAlias + ? sql`${sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field)))} as ${field}` + : sql.identifier(this.casing.getColumnCasing(field)), + ); } else { - chunk.push(field); + chunk.push(field.isAlias ? sql`${getOriginalColumnFromAlias(field)} as ${field}` : field); } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; diff --git a/drizzle-orm/src/pg-core/foreign-keys.ts b/drizzle-orm/src/pg-core/foreign-keys.ts index f8ba0b8623..3ba0e0d85e 100644 --- a/drizzle-orm/src/pg-core/foreign-keys.ts +++ b/drizzle-orm/src/pg-core/foreign-keys.ts @@ -69,6 +69,7 @@ export class ForeignKey { readonly reference: Reference; readonly onUpdate: UpdateDeleteAction | undefined; readonly onDelete: UpdateDeleteAction | undefined; + readonly name?: string; constructor(readonly table: PgTable, builder: ForeignKeyBuilder) { this.reference = builder.reference; @@ -88,6 +89,10 @@ export class ForeignKey { ]; return name ?? `${chunks.join('_')}_fk`; } + + isNameExplicit(): boolean { + return !!this.reference().name; + } } type ColumnsWithTable< diff --git a/drizzle-orm/src/pg-core/indexes.ts b/drizzle-orm/src/pg-core/indexes.ts index bf53a56cee..49c6df54dc 100644 --- a/drizzle-orm/src/pg-core/indexes.ts +++ b/drizzle-orm/src/pg-core/indexes.ts @@ -232,9 +232,11 @@ export class Index { static readonly [entityKind]: string = 'PgIndex'; readonly config: IndexConfig & { table: PgTable }; + readonly isNameExplicit: boolean; constructor(config: IndexConfig, table: PgTable) { this.config = { ...config, table }; + this.isNameExplicit = !!config.name; } } diff --git a/drizzle-orm/src/pg-core/primary-keys.ts b/drizzle-orm/src/pg-core/primary-keys.ts index 98d7d3e794..452138df95 100644 --- a/drizzle-orm/src/pg-core/primary-keys.ts +++ b/drizzle-orm/src/pg-core/primary-keys.ts @@ -50,10 +50,12 @@ export class PrimaryKey { readonly columns: AnyPgColumn<{}>[]; readonly name?: string; + readonly isNameExplicit: boolean; constructor(readonly table: PgTable, columns: AnyPgColumn<{}>[], name?: string) { this.columns = columns; this.name = name; + this.isNameExplicit = !!name; } getName(): string { diff --git a/drizzle-orm/src/pg-core/query-builders/count.ts b/drizzle-orm/src/pg-core/query-builders/count.ts index 5f62b5536d..d86a072413 100644 --- a/drizzle-orm/src/pg-core/query-builders/count.ts +++ b/drizzle-orm/src/pg-core/query-builders/count.ts @@ -3,6 +3,7 @@ import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { NeonAuthToken } from '~/utils.ts'; import type { PgSession } from '../session.ts'; import type { PgTable } from '../table.ts'; +import type { PgViewBase } from '../view-base.ts'; export class PgCountBuilder< TSession extends PgSession, @@ -10,20 +11,20 @@ export class PgCountBuilder< private sql: SQL; private token?: NeonAuthToken; - static override readonly [entityKind] = 'PgCountBuilder'; + static override readonly [entityKind]: string = 'PgCountBuilder'; [Symbol.toStringTag] = 'PgCountBuilder'; private session: TSession; private static buildEmbeddedCount( - source: PgTable | SQL | SQLWrapper, + source: PgTable | PgViewBase | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; } private static buildCount( - source: PgTable | SQL | SQLWrapper, + source: PgTable | PgViewBase | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`select count(*) as count from ${source}${sql.raw(' where ').if(filters)}${filters};`; @@ -31,7 +32,7 @@ export class PgCountBuilder< constructor( readonly params: { - source: PgTable | SQL | SQLWrapper; + source: PgTable | PgViewBase | SQL | SQLWrapper; filters?: SQL; session: TSession; }, diff --git a/drizzle-orm/src/pg-core/query-builders/insert.ts b/drizzle-orm/src/pg-core/query-builders/insert.ts index 9f0db13fa7..da02592357 100644 --- a/drizzle-orm/src/pg-core/query-builders/insert.ts +++ b/drizzle-orm/src/pg-core/query-builders/insert.ts @@ -39,10 +39,14 @@ export interface PgInsertConfig { overridingSystemValue_?: boolean; } -export type PgInsertValue, OverrideT extends boolean = false> = +export type PgInsertValue< + TTable extends PgTable, + OverrideT extends boolean = false, + TModel extends Record = InferInsertModel, +> = & { - [Key in keyof InferInsertModel]: - | InferInsertModel[Key] + [Key in keyof TModel]: + | TModel[Key] | SQL | Placeholder; } @@ -50,7 +54,7 @@ export type PgInsertValue, OverrideT extends export type PgInsertSelectQueryBuilder< TTable extends PgTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = TypedQueryBuilder< { [K in keyof TModel]: AnyPgColumn | SQL | SQL.Aliased | TModel[K] } >; diff --git a/drizzle-orm/src/pg-core/query-builders/select.ts b/drizzle-orm/src/pg-core/query-builders/select.ts index dafdb963da..94d97f9fc0 100644 --- a/drizzle-orm/src/pg-core/query-builders/select.ts +++ b/drizzle-orm/src/pg-core/query-builders/select.ts @@ -32,9 +32,9 @@ import { getTableLikeName, haveSameKeys, type NeonAuthToken, + orderSelectedFields, type ValueOrArray, } from '~/utils.ts'; -import { orderSelectedFields } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { extractUsedTable } from '../utils.ts'; import type { diff --git a/drizzle-orm/src/pg-core/query-builders/update.ts b/drizzle-orm/src/pg-core/query-builders/update.ts index a4d9823390..3bad90b0e6 100644 --- a/drizzle-orm/src/pg-core/query-builders/update.ts +++ b/drizzle-orm/src/pg-core/query-builders/update.ts @@ -62,7 +62,7 @@ export interface PgUpdateConfig { export type PgUpdateSetSource< TTable extends PgTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = & { [Key in keyof TModel & string]?: diff --git a/drizzle-orm/src/pg-core/schema.ts b/drizzle-orm/src/pg-core/schema.ts index 828cef1ca0..95dcffcfcd 100644 --- a/drizzle-orm/src/pg-core/schema.ts +++ b/drizzle-orm/src/pg-core/schema.ts @@ -3,18 +3,29 @@ import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { NonArray, Writable } from '~/utils.ts'; import { type PgEnum, type PgEnumObject, pgEnumObjectWithSchema, pgEnumWithSchema } from './columns/enum.ts'; import { type pgSequence, pgSequenceWithSchema } from './sequence.ts'; -import { type PgTableFn, pgTableWithSchema } from './table.ts'; +import { EnableRLS, type PgTableFn, type PgTableFnInternal, pgTableWithSchema } from './table.ts'; import { type pgMaterializedView, pgMaterializedViewWithSchema, type pgView, pgViewWithSchema } from './view.ts'; export class PgSchema implements SQLWrapper { static readonly [entityKind]: string = 'PgSchema'; + + isExisting: boolean = false; constructor( public readonly schemaName: TName, - ) {} + ) { + this.table = Object.assign(this.table, { + withRLS: ((name, columns, extraConfig) => { + const table = pgTableWithSchema(name, columns, extraConfig, this.schemaName); + table[EnableRLS] = true; + + return table; + }) as PgTableFnInternal, + }); + } table: PgTableFn = ((name, columns, extraConfig) => { return pgTableWithSchema(name, columns, extraConfig, this.schemaName); - }); + }) as PgTableFn; view = ((name, columns) => { return pgViewWithSchema(name, columns, this.schemaName); @@ -55,6 +66,11 @@ export class PgSchema implements SQLWrapper { shouldOmitSQLParens(): boolean { return true; } + + existing(): this { + this.isExisting = true; + return this; + } } export function isPgSchema(obj: unknown): obj is PgSchema { diff --git a/drizzle-orm/src/pg-core/table.ts b/drizzle-orm/src/pg-core/table.ts index d47c4197a8..14d23e1ce8 100644 --- a/drizzle-orm/src/pg-core/table.ts +++ b/drizzle-orm/src/pg-core/table.ts @@ -67,6 +67,7 @@ export type PgTableWithColumns< & T['columns'] & InferTableColumnsModels & { + /** @deprecated use `pgTable.withRLS()` instead*/ enableRLS: () => Omit< PgTableWithColumns, 'enableRLS' @@ -142,7 +143,7 @@ export function pgTableWithSchema< }) as any; } -export interface PgTableFn { +export interface PgTableFnInternal { < TTableName extends string, TColumnsMap extends Record, @@ -247,12 +248,34 @@ export interface PgTableFn { }>; } -export const pgTable: PgTableFn = (name, columns, extraConfig) => { +export interface PgTableFn extends PgTableFnInternal { + withRLS: PgTableFnInternal; +} + +const pgTableInternal: PgTableFnInternal = (name, columns, extraConfig) => { return pgTableWithSchema(name, columns, extraConfig, undefined); }; +const pgTableWithRLS: PgTableFn['withRLS'] = (name, columns, extraConfig) => { + const table = pgTableWithSchema(name, columns, extraConfig, undefined); + table[EnableRLS] = true; + + return table; +}; + +export const pgTable: PgTableFn = Object.assign(pgTableInternal, { withRLS: pgTableWithRLS }); + export function pgTableCreator(customizeTableName: (name: string) => string): PgTableFn { - return (name, columns, extraConfig) => { + const fn: PgTableFnInternal = (name, columns, extraConfig) => { return pgTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); }; + + return Object.assign(fn, { + withRLS: ((name, columns, extraConfig) => { + const table = pgTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); + table[EnableRLS] = true; + + return table; + }) as PgTableFnInternal, + }); } diff --git a/drizzle-orm/src/pg-core/unique-constraint.ts b/drizzle-orm/src/pg-core/unique-constraint.ts index ceb860b6f8..3a4a874ae0 100644 --- a/drizzle-orm/src/pg-core/unique-constraint.ts +++ b/drizzle-orm/src/pg-core/unique-constraint.ts @@ -59,11 +59,13 @@ export class UniqueConstraint { readonly columns: PgColumn[]; readonly name?: string; + readonly isNameExplicit: boolean; readonly nullsNotDistinct: boolean = false; constructor(readonly table: PgTable, columns: PgColumn[], nullsNotDistinct: boolean, name?: string) { this.columns = columns; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); + this.isNameExplicit = !!name; this.nullsNotDistinct = nullsNotDistinct; } diff --git a/drizzle-orm/src/pg-core/utils.ts b/drizzle-orm/src/pg-core/utils.ts index dca095c99f..a8f5a328b6 100644 --- a/drizzle-orm/src/pg-core/utils.ts +++ b/drizzle-orm/src/pg-core/utils.ts @@ -13,8 +13,8 @@ import { PgPolicy } from './policies.ts'; import { type PrimaryKey, PrimaryKeyBuilder } from './primary-keys.ts'; import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; import type { PgViewBase } from './view-base.ts'; -import { PgViewConfig } from './view-common.ts'; -import { type PgMaterializedView, PgMaterializedViewConfig, type PgView } from './view.ts'; +import { PgMaterializedViewConfig, PgViewConfig } from './view-common.ts'; +import type { PgMaterializedView, PgView } from './view.ts'; export function getTableConfig(table: TTable) { const columns = Object.values(table[Table.Symbol.Columns]); diff --git a/drizzle-orm/src/pg-core/view-common.ts b/drizzle-orm/src/pg-core/view-common.ts index 01194c7f24..7d98d13723 100644 --- a/drizzle-orm/src/pg-core/view-common.ts +++ b/drizzle-orm/src/pg-core/view-common.ts @@ -1 +1,3 @@ export const PgViewConfig = Symbol.for('drizzle:PgViewConfig'); + +export const PgMaterializedViewConfig = Symbol.for('drizzle:PgMaterializedViewConfig'); diff --git a/drizzle-orm/src/pg-core/view.ts b/drizzle-orm/src/pg-core/view.ts index 1b96711f5b..f51628358d 100644 --- a/drizzle-orm/src/pg-core/view.ts +++ b/drizzle-orm/src/pg-core/view.ts @@ -10,7 +10,7 @@ import type { PgColumn } from './columns/common.ts'; import { QueryBuilder } from './query-builders/query-builder.ts'; import { pgTable } from './table.ts'; import { PgViewBase } from './view-base.ts'; -import { PgViewConfig } from './view-common.ts'; +import { PgMaterializedViewConfig, PgViewConfig } from './view-common.ts'; export type ViewWithConfig = RequireAtLeastOne<{ checkOption: 'local' | 'cascaded'; @@ -335,8 +335,6 @@ export type PgViewWithSelection< TSelectedFields extends ColumnsSelection = ColumnsSelection, > = PgView & TSelectedFields; -export const PgMaterializedViewConfig = Symbol.for('drizzle:PgMaterializedViewConfig'); - export class PgMaterializedView< TName extends string = string, TExisting extends boolean = boolean, diff --git a/drizzle-orm/src/pglite/driver.ts b/drizzle-orm/src/pglite/driver.ts index 9af48541bf..65b4575366 100644 --- a/drizzle-orm/src/pglite/driver.ts +++ b/drizzle-orm/src/pglite/driver.ts @@ -7,7 +7,7 @@ import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { PgliteClient, PgliteQueryResultHKT } from './session.ts'; import { PgliteSession } from './session.ts'; @@ -110,10 +110,10 @@ export function drizzle< ...params: | [] | [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ @@ -134,28 +134,24 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as { - connection?: PGliteOptions & { dataDir: string }; - client?: TClient; - } & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as { + connection?: PGliteOptions & { dataDir: string }; + client?: TClient; + } & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - if (typeof connection === 'object') { - const { dataDir, ...options } = connection; + if (typeof connection === 'object') { + const { dataDir, ...options } = connection; - const instance = new PGlite(dataDir, options); - - return construct(instance, drizzleConfig) as any; - } - - const instance = new PGlite(connection); + const instance = new PGlite(dataDir, options); return construct(instance, drizzleConfig) as any; } - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + const instance = new PGlite(connection); + + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/planetscale-serverless/driver.ts b/drizzle-orm/src/planetscale-serverless/driver.ts index ae7a9674b8..a52ef06f44 100644 --- a/drizzle-orm/src/planetscale-serverless/driver.ts +++ b/drizzle-orm/src/planetscale-serverless/driver.ts @@ -7,7 +7,7 @@ import { DefaultLogger } from '~/logger.ts'; import { MySqlDatabase } from '~/mysql-core/db.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from './session.ts'; import { PlanetscaleSession } from './session.ts'; @@ -34,7 +34,7 @@ function construct< $client: TClient; } { // Client is not Drizzle Object, so we can ignore this rule here - // eslint-disable-next-line no-instanceof/no-instanceof + // oxlint-disable-next-line drizzle-internal/no-instanceof if (!(client instanceof Client)) { throw new Error(`Warning: You need to pass an instance of Client: @@ -46,7 +46,7 @@ const client = new Client({ password: process.env["DATABASE_PASSWORD"], }); -const db = drizzle(client); +const db = drizzle({ client }); `); } @@ -98,9 +98,9 @@ export function drizzle< TClient extends Client = Client, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -123,25 +123,21 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config | string; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config | string; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' - ? new Client({ - url: connection, - }) - : new Client( - connection!, - ); - - return construct(instance, drizzleConfig) as any; - } + const instance = typeof connection === 'string' + ? new Client({ + url: connection, + }) + : new Client( + connection!, + ); - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/postgres-js/driver.ts b/drizzle-orm/src/postgres-js/driver.ts index 4f880a46be..791c47580a 100644 --- a/drizzle-orm/src/postgres-js/driver.ts +++ b/drizzle-orm/src/postgres-js/driver.ts @@ -5,7 +5,7 @@ import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { PostgresJsQueryResultHKT } from './session.ts'; import { PostgresJsSession } from './session.ts'; @@ -74,9 +74,9 @@ export function drizzle< TClient extends Sql = Sql, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -97,26 +97,22 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as { - connection?: { url?: string } & Options>; - client?: TClient; - } & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as { + connection?: { url?: string } & Options>; + client?: TClient; + } & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - if (typeof connection === 'object' && connection.url !== undefined) { - const { url, ...config } = connection; + if (typeof connection === 'object' && connection.url !== undefined) { + const { url, ...config } = connection; - const instance = pgClient(url, config); - return construct(instance, drizzleConfig) as any; - } - - const instance = pgClient(connection); + const instance = pgClient(url, config); return construct(instance, drizzleConfig) as any; } - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + const instance = pgClient(connection); + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/relations.ts b/drizzle-orm/src/relations.ts index cf5f7aae65..3f91839cbe 100644 --- a/drizzle-orm/src/relations.ts +++ b/drizzle-orm/src/relations.ts @@ -32,7 +32,7 @@ import { notLike, or, } from './sql/expressions/index.ts'; -import { Placeholder, SQL, sql, type SQLWrapper, View } from './sql/sql.ts'; +import { noopDecoder, Placeholder, SQL, sql, type SQLWrapper, View } from './sql/sql.ts'; import type { Assume, DrizzleTypeError, Equal, Simplify, ValueOrArray } from './utils.ts'; export type FilteredSchemaEntry = Table | View; @@ -722,7 +722,7 @@ export type BuildQueryResult< export interface BuildRelationalQueryResult { selection: { key: string; - field: Column | Table | SQL | SQL.Aliased | SQLWrapper | AggregatedField; + field: Column | Table | View | SQL | SQL.Aliased | SQLWrapper | AggregatedField; isArray?: boolean; selection?: BuildRelationalQueryResult['selection']; isOptional?: boolean; @@ -794,6 +794,8 @@ export function mapRelationalRow( decoder = field.decoder; } else if (is(field, SQL.Aliased)) { decoder = field.sql.decoder; + } else if (is(field, Table) || is(field, View)) { + decoder = noopDecoder; } else { decoder = field.getSQL().decoder; } @@ -1232,7 +1234,7 @@ export interface WithContainer { } export interface ColumnWithTSName { - column: Column | SQL | SQLWrapper | SQL.Aliased; + column: Table | View | Column | SQL | SQLWrapper | SQL.Aliased; tsName: string; } diff --git a/drizzle-orm/src/selection-proxy.ts b/drizzle-orm/src/selection-proxy.ts index 7cf46415a1..458dd3cca8 100644 --- a/drizzle-orm/src/selection-proxy.ts +++ b/drizzle-orm/src/selection-proxy.ts @@ -1,4 +1,4 @@ -import { ColumnAliasProxyHandler, TableAliasProxyHandler } from './alias.ts'; +import { ColumnTableAliasProxyHandler, TableAliasProxyHandler } from './alias.ts'; import { Column } from './column.ts'; import { entityKind, is } from './entity.ts'; import { SQL, View } from './sql/sql.ts'; @@ -101,11 +101,12 @@ export class SelectionProxyHandler if (this.config.alias) { return new Proxy( value, - new ColumnAliasProxyHandler( + new ColumnTableAliasProxyHandler( new Proxy( value.table, - new TableAliasProxyHandler(this.config.alias, this.config.replaceOriginalName ?? false), + new TableAliasProxyHandler(this.config.alias, this.config.replaceOriginalName ?? false, true), ), + true, ), ); } diff --git a/drizzle-orm/src/singlestore-core/columns/bigint.ts b/drizzle-orm/src/singlestore-core/columns/bigint.ts index cf816e42a8..eab36a3f24 100644 --- a/drizzle-orm/src/singlestore-core/columns/bigint.ts +++ b/drizzle-orm/src/singlestore-core/columns/bigint.ts @@ -6,7 +6,6 @@ import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoInc export class SingleStoreBigInt53Builder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint53' : 'number int53'; data: number; driverParam: number | string; @@ -47,7 +46,6 @@ export class SingleStoreBigInt53 extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'bigint uint64' : 'bigint int64'; data: bigint; driverParam: string; @@ -86,7 +84,6 @@ export class SingleStoreBigInt64 extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'string uint64' : 'string int64'; data: string; driverParam: string; diff --git a/drizzle-orm/src/singlestore-core/columns/binary.ts b/drizzle-orm/src/singlestore-core/columns/binary.ts index 2fd9a94231..680b9cfb33 100644 --- a/drizzle-orm/src/singlestore-core/columns/binary.ts +++ b/drizzle-orm/src/singlestore-core/columns/binary.ts @@ -6,12 +6,11 @@ import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export class SingleStoreBinaryBuilder extends SingleStoreColumnBuilder< { - name: string; dataType: 'string binary'; data: string; driverParam: string; }, - SingleStoreBinaryConfig & { setLength: boolean; isLengthExact: true } + SingleStoreBinaryConfig & { setLength: boolean } > { static override readonly [entityKind]: string = 'SingleStoreBinaryBuilder'; @@ -19,7 +18,6 @@ export class SingleStoreBinaryBuilder extends SingleStoreColumnBuilder< super(name, 'string binary', 'SingleStoreBinary'); this.config.length = length ?? 1; this.config.setLength = length !== undefined; - this.config.isLengthExact = true; } /** @internal */ diff --git a/drizzle-orm/src/singlestore-core/columns/boolean.ts b/drizzle-orm/src/singlestore-core/columns/boolean.ts index fc20d17d57..4f928f886b 100644 --- a/drizzle-orm/src/singlestore-core/columns/boolean.ts +++ b/drizzle-orm/src/singlestore-core/columns/boolean.ts @@ -4,7 +4,6 @@ import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export class SingleStoreBooleanBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'boolean'; data: boolean; driverParam: number | boolean; diff --git a/drizzle-orm/src/singlestore-core/columns/char.ts b/drizzle-orm/src/singlestore-core/columns/char.ts index 846ac8bc60..d0be993002 100644 --- a/drizzle-orm/src/singlestore-core/columns/char.ts +++ b/drizzle-orm/src/singlestore-core/columns/char.ts @@ -8,13 +8,12 @@ export class SingleStoreCharBuilder< TEnum extends [string, ...string[]], > extends SingleStoreColumnBuilder< { - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; driverParam: number | string; enumValues: TEnum; }, - { enum?: TEnum; length: number; setLength: boolean; isLengthExact: true } + { enum?: TEnum; length: number; setLength: boolean } > { static override readonly [entityKind]: string = 'SingleStoreCharBuilder'; @@ -23,7 +22,6 @@ export class SingleStoreCharBuilder< this.config.length = config.length ?? 1; this.config.setLength = config.length !== undefined; this.config.enum = config.enum; - this.config.isLengthExact = true; } /** @internal */ diff --git a/drizzle-orm/src/singlestore-core/columns/common.ts b/drizzle-orm/src/singlestore-core/columns/common.ts index 4a69e00aae..17fc93dbff 100644 --- a/drizzle-orm/src/singlestore-core/columns/common.ts +++ b/drizzle-orm/src/singlestore-core/columns/common.ts @@ -14,7 +14,6 @@ import { entityKind } from '~/entity.ts'; import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import type { Update } from '~/utils.ts'; -import { uniqueKeyName } from '../unique-constraint.ts'; export type SingleStoreColumns = Record>; @@ -68,9 +67,6 @@ export abstract class SingleStoreColumn< table: SingleStoreTable, config: ColumnBuilderRuntimeConfig & TRuntimeConfig, ) { - if (!config.uniqueName) { - config.uniqueName = uniqueKeyName(table, [config.name]); - } super(table, config); this.table = table; } @@ -91,7 +87,7 @@ export abstract class SingleStoreColumnBuilderWithAutoIncrement< > extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreColumnBuilderWithAutoIncrement'; - constructor(name: NonNullable, dataType: T['dataType'], columnType: string) { + constructor(name: string, dataType: T['dataType'], columnType: string) { super(name, dataType, columnType); this.config.autoIncrement = false; } diff --git a/drizzle-orm/src/singlestore-core/columns/custom.ts b/drizzle-orm/src/singlestore-core/columns/custom.ts index c7acc87357..018be06d81 100644 --- a/drizzle-orm/src/singlestore-core/columns/custom.ts +++ b/drizzle-orm/src/singlestore-core/columns/custom.ts @@ -8,7 +8,6 @@ import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { - name: string; dataType: 'custom'; data: T['data']; driverParam: T['driverData']; @@ -364,6 +363,6 @@ export function customType( b?: T['config'], ): SingleStoreCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); - return new SingleStoreCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); + return new SingleStoreCustomColumnBuilder(name, config, customTypeParams); }; } diff --git a/drizzle-orm/src/singlestore-core/columns/date.ts b/drizzle-orm/src/singlestore-core/columns/date.ts index b64757e3e0..bc663936e0 100644 --- a/drizzle-orm/src/singlestore-core/columns/date.ts +++ b/drizzle-orm/src/singlestore-core/columns/date.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export class SingleStoreDateBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'object date'; data: Date; driverParam: string | number; @@ -45,7 +44,6 @@ export class SingleStoreDate> extends } export class SingleStoreDateStringBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'string date'; data: string; driverParam: string | number; diff --git a/drizzle-orm/src/singlestore-core/columns/datetime.ts b/drizzle-orm/src/singlestore-core/columns/datetime.ts index bdf667449c..a275b09709 100644 --- a/drizzle-orm/src/singlestore-core/columns/datetime.ts +++ b/drizzle-orm/src/singlestore-core/columns/datetime.ts @@ -7,7 +7,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder, type SingleStoreGeneratedColumnConfig } from './common.ts'; export class SingleStoreDateTimeBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'object date'; data: Date; driverParam: string | number; @@ -60,7 +59,6 @@ export class SingleStoreDateTime> exte } export class SingleStoreDateTimeStringBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'string datetime'; data: string; driverParam: string | number; diff --git a/drizzle-orm/src/singlestore-core/columns/decimal.ts b/drizzle-orm/src/singlestore-core/columns/decimal.ts index 6c03436964..0c0394e5c8 100644 --- a/drizzle-orm/src/singlestore-core/columns/decimal.ts +++ b/drizzle-orm/src/singlestore-core/columns/decimal.ts @@ -6,7 +6,6 @@ import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoInc export class SingleStoreDecimalBuilder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'string unumeric' : 'string numeric'; data: string; driverParam: string; @@ -62,7 +61,6 @@ export class SingleStoreDecimal extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number unsigned' : 'number'; data: number; driverParam: string; @@ -119,7 +117,6 @@ export class SingleStoreDecimalNumber extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'bigint uint64' : 'bigint int64'; data: bigint; driverParam: string; diff --git a/drizzle-orm/src/singlestore-core/columns/double.ts b/drizzle-orm/src/singlestore-core/columns/double.ts index b50e0e557b..e0975d0233 100644 --- a/drizzle-orm/src/singlestore-core/columns/double.ts +++ b/drizzle-orm/src/singlestore-core/columns/double.ts @@ -6,7 +6,6 @@ import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoInc export class SingleStoreDoubleBuilder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number udouble' : 'number double'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/singlestore-core/columns/enum.ts b/drizzle-orm/src/singlestore-core/columns/enum.ts index cb89511046..d567ce931b 100644 --- a/drizzle-orm/src/singlestore-core/columns/enum.ts +++ b/drizzle-orm/src/singlestore-core/columns/enum.ts @@ -7,7 +7,6 @@ import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder, type SingleStoreGeneratedColumnConfig } from './common.ts'; export class SingleStoreEnumColumnBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'string enum'; data: TEnum[number]; driverParam: string; diff --git a/drizzle-orm/src/singlestore-core/columns/float.ts b/drizzle-orm/src/singlestore-core/columns/float.ts index 4f86145239..6cc1b27cb5 100644 --- a/drizzle-orm/src/singlestore-core/columns/float.ts +++ b/drizzle-orm/src/singlestore-core/columns/float.ts @@ -6,7 +6,6 @@ import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoInc export class SingleStoreFloatBuilder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number ufloat' : 'number float'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/singlestore-core/columns/int.ts b/drizzle-orm/src/singlestore-core/columns/int.ts index e790146794..af52c79448 100644 --- a/drizzle-orm/src/singlestore-core/columns/int.ts +++ b/drizzle-orm/src/singlestore-core/columns/int.ts @@ -6,7 +6,6 @@ import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoInc export class SingleStoreIntBuilder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint32' : 'number int32'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/singlestore-core/columns/json.ts b/drizzle-orm/src/singlestore-core/columns/json.ts index 4b606c3f41..306dcf353b 100644 --- a/drizzle-orm/src/singlestore-core/columns/json.ts +++ b/drizzle-orm/src/singlestore-core/columns/json.ts @@ -4,7 +4,6 @@ import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export class SingleStoreJsonBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'object json'; data: unknown; driverParam: string; diff --git a/drizzle-orm/src/singlestore-core/columns/mediumint.ts b/drizzle-orm/src/singlestore-core/columns/mediumint.ts index 161c0a0223..e6c026b443 100644 --- a/drizzle-orm/src/singlestore-core/columns/mediumint.ts +++ b/drizzle-orm/src/singlestore-core/columns/mediumint.ts @@ -7,7 +7,6 @@ import type { SingleStoreIntConfig } from './int.ts'; export class SingleStoreMediumIntBuilder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint24' : 'number int24'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/singlestore-core/columns/real.ts b/drizzle-orm/src/singlestore-core/columns/real.ts index 8d0023c7d9..6f1aa963f6 100644 --- a/drizzle-orm/src/singlestore-core/columns/real.ts +++ b/drizzle-orm/src/singlestore-core/columns/real.ts @@ -6,7 +6,6 @@ import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoInc export class SingleStoreRealBuilder extends SingleStoreColumnBuilderWithAutoIncrement< { - name: string; dataType: 'number double'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/singlestore-core/columns/serial.ts b/drizzle-orm/src/singlestore-core/columns/serial.ts index 80de0e4fdc..db1b6be604 100644 --- a/drizzle-orm/src/singlestore-core/columns/serial.ts +++ b/drizzle-orm/src/singlestore-core/columns/serial.ts @@ -4,7 +4,6 @@ import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; export class SingleStoreSerialBuilder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: 'number uint53'; data: number; driverParam: number; diff --git a/drizzle-orm/src/singlestore-core/columns/smallint.ts b/drizzle-orm/src/singlestore-core/columns/smallint.ts index ff8d6526a2..e02b115558 100644 --- a/drizzle-orm/src/singlestore-core/columns/smallint.ts +++ b/drizzle-orm/src/singlestore-core/columns/smallint.ts @@ -7,7 +7,6 @@ import type { SingleStoreIntConfig } from './int.ts'; export class SingleStoreSmallIntBuilder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint16' : 'number int16'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/singlestore-core/columns/text.ts b/drizzle-orm/src/singlestore-core/columns/text.ts index 1396b1bfc9..f274e76276 100644 --- a/drizzle-orm/src/singlestore-core/columns/text.ts +++ b/drizzle-orm/src/singlestore-core/columns/text.ts @@ -8,7 +8,6 @@ export type SingleStoreTextColumnType = 'tinytext' | 'text' | 'mediumtext' | 'lo export class SingleStoreTextBuilder extends SingleStoreColumnBuilder< { - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; driverParam: string; diff --git a/drizzle-orm/src/singlestore-core/columns/time.ts b/drizzle-orm/src/singlestore-core/columns/time.ts index 4360a9bf65..9b4e81ca5b 100644 --- a/drizzle-orm/src/singlestore-core/columns/time.ts +++ b/drizzle-orm/src/singlestore-core/columns/time.ts @@ -5,7 +5,6 @@ import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export class SingleStoreTimeBuilder extends SingleStoreColumnBuilder< { - name: string; dataType: 'string time'; data: string; driverParam: string | number; diff --git a/drizzle-orm/src/singlestore-core/columns/timestamp.ts b/drizzle-orm/src/singlestore-core/columns/timestamp.ts index 904bcecad6..4336d42de8 100644 --- a/drizzle-orm/src/singlestore-core/columns/timestamp.ts +++ b/drizzle-orm/src/singlestore-core/columns/timestamp.ts @@ -6,7 +6,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreDateBaseColumn, SingleStoreDateColumnBaseBuilder } from './date.common.ts'; export class SingleStoreTimestampBuilder extends SingleStoreDateColumnBaseBuilder<{ - name: string; dataType: 'object date'; data: Date; driverParam: string | number; @@ -49,7 +48,6 @@ export class SingleStoreTimestamp> } export class SingleStoreTimestampStringBuilder extends SingleStoreDateColumnBaseBuilder<{ - name: string; dataType: 'string timestamp'; data: string; driverParam: string | number; diff --git a/drizzle-orm/src/singlestore-core/columns/tinyint.ts b/drizzle-orm/src/singlestore-core/columns/tinyint.ts index b2b4b05cfa..d5df84cdd7 100644 --- a/drizzle-orm/src/singlestore-core/columns/tinyint.ts +++ b/drizzle-orm/src/singlestore-core/columns/tinyint.ts @@ -7,7 +7,6 @@ import type { SingleStoreIntConfig } from './int.ts'; export class SingleStoreTinyIntBuilder extends SingleStoreColumnBuilderWithAutoIncrement<{ - name: string; dataType: Equal extends true ? 'number uint8' : 'number int8'; data: number; driverParam: number | string; diff --git a/drizzle-orm/src/singlestore-core/columns/varbinary.ts b/drizzle-orm/src/singlestore-core/columns/varbinary.ts index a8cbe4d094..8bbb7c53de 100644 --- a/drizzle-orm/src/singlestore-core/columns/varbinary.ts +++ b/drizzle-orm/src/singlestore-core/columns/varbinary.ts @@ -5,7 +5,6 @@ import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export class SingleStoreVarBinaryBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'string binary'; data: string; driverParam: string; diff --git a/drizzle-orm/src/singlestore-core/columns/varchar.ts b/drizzle-orm/src/singlestore-core/columns/varchar.ts index 4f797c3135..7c07266b02 100644 --- a/drizzle-orm/src/singlestore-core/columns/varchar.ts +++ b/drizzle-orm/src/singlestore-core/columns/varchar.ts @@ -7,7 +7,6 @@ import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export class SingleStoreVarCharBuilder< TEnum extends [string, ...string[]], > extends SingleStoreColumnBuilder<{ - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; driverParam: number | string; diff --git a/drizzle-orm/src/singlestore-core/columns/vector.ts b/drizzle-orm/src/singlestore-core/columns/vector.ts index fce697341c..12d1f1e065 100644 --- a/drizzle-orm/src/singlestore-core/columns/vector.ts +++ b/drizzle-orm/src/singlestore-core/columns/vector.ts @@ -7,7 +7,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder, type SingleStoreGeneratedColumnConfig } from './common.ts'; export class SingleStoreVectorBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'array vector'; data: Array; driverParam: string | Buffer; @@ -95,7 +94,6 @@ export class SingleStoreVector> } export class SingleStoreBigIntVectorBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'array int64vector'; data: Array; driverParam: string | Buffer; diff --git a/drizzle-orm/src/singlestore-core/columns/year.ts b/drizzle-orm/src/singlestore-core/columns/year.ts index 31f718eed9..eca29a29ae 100644 --- a/drizzle-orm/src/singlestore-core/columns/year.ts +++ b/drizzle-orm/src/singlestore-core/columns/year.ts @@ -4,7 +4,6 @@ import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export class SingleStoreYearBuilder extends SingleStoreColumnBuilder<{ - name: string; dataType: 'number year'; data: number; driverParam: number; diff --git a/drizzle-orm/src/singlestore-core/dialect.ts b/drizzle-orm/src/singlestore-core/dialect.ts index 523eca90c6..8e427d9573 100644 --- a/drizzle-orm/src/singlestore-core/dialect.ts +++ b/drizzle-orm/src/singlestore-core/dialect.ts @@ -1,5 +1,11 @@ import * as V1 from '~/_relations.ts'; -import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; +import { + aliasedTable, + aliasedTableColumn, + getOriginalColumnFromAlias, + mapColumnsInAliasedSQLToAlias, + mapColumnsInSQLToAlias, +} from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; @@ -251,9 +257,13 @@ export class SingleStoreDialect { } } else if (is(field, Column)) { if (isSingleTable) { - chunk.push(sql.identifier(this.casing.getColumnCasing(field))); + chunk.push( + field.isAlias + ? sql`${sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field)))} as ${field}` + : sql.identifier(this.casing.getColumnCasing(field)), + ); } else { - chunk.push(field); + chunk.push(field.isAlias ? sql`${getOriginalColumnFromAlias(field)} as ${field}` : field); } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; diff --git a/drizzle-orm/src/singlestore-core/indexes.ts b/drizzle-orm/src/singlestore-core/indexes.ts index 00513cd80f..f5bda856ff 100644 --- a/drizzle-orm/src/singlestore-core/indexes.ts +++ b/drizzle-orm/src/singlestore-core/indexes.ts @@ -87,9 +87,11 @@ export class Index { static readonly [entityKind]: string = 'SingleStoreIndex'; readonly config: IndexConfig & { table: SingleStoreTable }; + readonly isNameExplicit: boolean; constructor(config: IndexConfig, table: SingleStoreTable) { this.config = { ...config, table }; + this.isNameExplicit = !!config.name; } } diff --git a/drizzle-orm/src/singlestore-core/primary-keys.ts b/drizzle-orm/src/singlestore-core/primary-keys.ts index 47dc0a19cc..36bedd1acf 100644 --- a/drizzle-orm/src/singlestore-core/primary-keys.ts +++ b/drizzle-orm/src/singlestore-core/primary-keys.ts @@ -50,10 +50,12 @@ export class PrimaryKey { readonly columns: SingleStoreColumn[]; readonly name?: string; + readonly isNameExplicit: boolean; constructor(readonly table: SingleStoreTable, columns: SingleStoreColumn[], name?: string) { this.columns = columns; this.name = name; + this.isNameExplicit = !!name; } getName(): string { diff --git a/drizzle-orm/src/singlestore-core/query-builders/count.ts b/drizzle-orm/src/singlestore-core/query-builders/count.ts index aba5b2f3f5..064b5cacf0 100644 --- a/drizzle-orm/src/singlestore-core/query-builders/count.ts +++ b/drizzle-orm/src/singlestore-core/query-builders/count.ts @@ -9,7 +9,7 @@ export class SingleStoreCountBuilder< > extends SQL implements Promise, SQLWrapper { private sql: SQL; - static override readonly [entityKind] = 'SingleStoreCountBuilder'; + static override readonly [entityKind]: string = 'SingleStoreCountBuilder'; [Symbol.toStringTag] = 'SingleStoreCountBuilder'; private session: TSession; diff --git a/drizzle-orm/src/singlestore-core/query-builders/insert.ts b/drizzle-orm/src/singlestore-core/query-builders/insert.ts index e1ce299cc5..e08661d2f4 100644 --- a/drizzle-orm/src/singlestore-core/query-builders/insert.ts +++ b/drizzle-orm/src/singlestore-core/query-builders/insert.ts @@ -34,7 +34,7 @@ export type AnySingleStoreInsertConfig = SingleStoreInsertConfig = InferInsertModel, + TModel extends Record = InferInsertModel, > = & { [Key in keyof TModel]: TModel[Key] | SQL | Placeholder; diff --git a/drizzle-orm/src/singlestore-core/query-builders/update.ts b/drizzle-orm/src/singlestore-core/query-builders/update.ts index 70fc3974e8..87b4ceea88 100644 --- a/drizzle-orm/src/singlestore-core/query-builders/update.ts +++ b/drizzle-orm/src/singlestore-core/query-builders/update.ts @@ -33,7 +33,7 @@ export interface SingleStoreUpdateConfig { export type SingleStoreUpdateSetSource< TTable extends SingleStoreTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = & { [Key in keyof TModel & string]?: diff --git a/drizzle-orm/src/singlestore-core/unique-constraint.ts b/drizzle-orm/src/singlestore-core/unique-constraint.ts index 511e466dc3..377ef065d6 100644 --- a/drizzle-orm/src/singlestore-core/unique-constraint.ts +++ b/drizzle-orm/src/singlestore-core/unique-constraint.ts @@ -51,12 +51,14 @@ export class UniqueConstraint { static readonly [entityKind]: string = 'SingleStoreUniqueConstraint'; readonly columns: SingleStoreColumn[]; - readonly name?: string; + readonly name: string; readonly nullsNotDistinct: boolean = false; + readonly isNameExplicit: boolean; constructor(readonly table: SingleStoreTable, columns: SingleStoreColumn[], name?: string) { this.columns = columns; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); + this.isNameExplicit = !!name; } getName() { diff --git a/drizzle-orm/src/singlestore/driver.ts b/drizzle-orm/src/singlestore/driver.ts index 99e0309d13..a6919ac577 100644 --- a/drizzle-orm/src/singlestore/driver.ts +++ b/drizzle-orm/src/singlestore/driver.ts @@ -13,7 +13,7 @@ import { DefaultLogger } from '~/logger.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; import { SingleStoreDatabase } from '~/singlestore-core/db.ts'; import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { npmVersion } from '~/version.ts'; import type { SingleStoreDriverClient, @@ -136,9 +136,9 @@ export function drizzle< TClient extends AnySingleStoreDriverConnection = CallbackPool, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, SingleStoreDriverDrizzleConfig, ] | [ ( @@ -163,38 +163,31 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: PoolOptions | string; client?: TClient } - & SingleStoreDriverDrizzleConfig; - - if (client) return construct(client, drizzleConfig) as any; - - let opts: PoolOptions = {}; - opts = typeof connection === 'string' - ? { - uri: connection, - supportBigNumbers: true, - connectAttributes: CONNECTION_ATTRS, - } - : { - ...connection, - connectAttributes: { - ...connection!.connectAttributes, - ...CONNECTION_ATTRS, - }, - }; - - const instance = createPool(opts); - const db = construct(instance, drizzleConfig); - - return db as any; - } + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: PoolOptions | string; client?: TClient } + & SingleStoreDriverDrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; - return construct( - params[0] as TClient, - params[1] as SingleStoreDriverDrizzleConfig | undefined, - ) as any; + let opts: PoolOptions = {}; + opts = typeof connection === 'string' + ? { + uri: connection, + supportBigNumbers: true, + connectAttributes: CONNECTION_ATTRS, + } + : { + ...connection, + connectAttributes: { + ...connection!.connectAttributes, + ...CONNECTION_ATTRS, + }, + }; + + const instance = createPool(opts); + const db = construct(instance, drizzleConfig); + + return db as any; } export namespace drizzle { diff --git a/drizzle-orm/src/singlestore/session.ts b/drizzle-orm/src/singlestore/session.ts index 6aaa9f3f8a..8aaf8e3b20 100644 --- a/drizzle-orm/src/singlestore/session.ts +++ b/drizzle-orm/src/singlestore/session.ts @@ -191,7 +191,7 @@ export class SingleStoreDriverPreparedQuery stream.once('data', resolve))]); if (row === undefined || (Array.isArray(row) && row.length === 0)) { break; - } else if (row instanceof Error) { // eslint-disable-line no-instanceof/no-instanceof + } else if (row instanceof Error) { // oxlint-disable-line drizzle-internal/no-instanceof throw row; } else { if (hasRowsMapper) { diff --git a/drizzle-orm/src/sql-js/session.ts b/drizzle-orm/src/sql-js/session.ts index ecb08f1129..c6be53d2e6 100644 --- a/drizzle-orm/src/sql-js/session.ts +++ b/drizzle-orm/src/sql-js/session.ts @@ -291,9 +291,9 @@ export class PreparedQuery) => QueryTypingsValue; paramStartIndex?: { value: number }; inlineParams?: boolean; - invokeSource?: 'indexes' | undefined; + invokeSource?: 'indexes' | 'mssql-check' | 'mssql-view-with-schemabinding' | undefined; } export type QueryTypingsValue = 'json' | 'decimal' | 'time' | 'timestamp' | 'uuid' | 'date' | 'none'; @@ -159,6 +159,7 @@ export class SQL implements SQLWrapper { prepareTyping, inlineParams, paramStartIndex, + invokeSource, } = config; return mergeQueries(chunks.map((chunk): QueryWithTypings => { @@ -196,6 +197,15 @@ export class SQL implements SQLWrapper { if (is(chunk, Table)) { const schemaName = chunk[Table.Symbol.Schema]; const tableName = chunk[Table.Symbol.Name]; + + if (invokeSource === 'mssql-view-with-schemabinding') { + return { + sql: (schemaName === undefined ? escapeName('dbo') : escapeName(schemaName)) + '.' + + escapeName(tableName), + params: [], + }; + } + return { sql: schemaName === undefined || chunk[IsAlias] ? escapeName(tableName) @@ -210,9 +220,9 @@ export class SQL implements SQLWrapper { return { sql: escapeName(columnName), params: [] }; } - const schemaName = chunk.table[Table.Symbol.Schema]; + const schemaName = invokeSource === 'mssql-check' ? undefined : chunk.table[Table.Symbol.Schema]; return { - sql: chunk.table[IsAlias] || schemaName === undefined + sql: chunk.isAlias ? escapeName(chunk.name) : chunk.table[IsAlias] || schemaName === undefined ? escapeName(chunk.table[Table.Symbol.Name]) + '.' + escapeName(columnName) : escapeName(schemaName) + '.' + escapeName(chunk.table[Table.Symbol.Name]) + '.' + escapeName(columnName), @@ -307,7 +317,7 @@ export class SQL implements SQLWrapper { if (chunk === null) { return 'null'; } - if (typeof chunk === 'number' || typeof chunk === 'boolean') { + if (typeof chunk === 'number' || typeof chunk === 'boolean' || typeof chunk === 'bigint') { return chunk.toString(); } if (typeof chunk === 'string') { @@ -642,7 +652,7 @@ export abstract class View< TName extends string = string, TExisting extends boolean = boolean, TSelection extends ColumnsSelection = ColumnsSelection, -> implements SQLWrapper { +> { static readonly [entityKind]: string = 'View'; declare _: { @@ -712,10 +722,6 @@ export abstract class View< isAlias: false, }; } - - getSQL(): SQL { - return new SQL([this]); - } } export function isView(view: unknown): view is View { @@ -738,13 +744,9 @@ export type InferSelectViewModel = Column.prototype.getSQL = function() { return new SQL([this]); }; - -// Defined separately from the Table class to resolve circular dependency -Table.prototype.getSQL = function() { - return new SQL([this]); -}; - // Defined separately from the Column class to resolve circular dependency Subquery.prototype.getSQL = function() { return new SQL([this]); }; + +export type SQLEntity = SQL | SQLWrapper | SQL.Aliased | Table | View; diff --git a/drizzle-orm/src/sqlite-cloud/driver.ts b/drizzle-orm/src/sqlite-cloud/driver.ts index edafeb903c..86ef3838b6 100644 --- a/drizzle-orm/src/sqlite-cloud/driver.ts +++ b/drizzle-orm/src/sqlite-cloud/driver.ts @@ -5,7 +5,7 @@ import { DefaultLogger } from '~/logger.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { SQLiteCloudSession } from './session.ts'; export type SQLiteCloudRunResult = unknown; @@ -92,9 +92,9 @@ export function drizzle< TClient extends Database = Database, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -115,21 +115,17 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: DatabaseOpts; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: DatabaseOpts; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' - ? new Database(connection) - : new Database(connection.path, connection); + const instance = typeof connection === 'string' + ? new Database(connection) + : new Database(connection.path, connection); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/sqlite-core/columns/blob.ts b/drizzle-orm/src/sqlite-core/columns/blob.ts index b89aea5e92..e5ec6af2e8 100644 --- a/drizzle-orm/src/sqlite-core/columns/blob.ts +++ b/drizzle-orm/src/sqlite-core/columns/blob.ts @@ -17,7 +17,6 @@ function hexToText(hexString: string) { } export class SQLiteBigIntBuilder extends SQLiteColumnBuilder<{ - name: string; dataType: 'bigint int64'; data: bigint; driverParam: Buffer; @@ -50,7 +49,7 @@ export class SQLiteBigInt> extends SQ if (typeof Buffer !== 'undefined' && Buffer.from) { const buf = Buffer.isBuffer(value) ? value - // eslint-disable-next-line no-instanceof/no-instanceof + // oxlint-disable-next-line drizzle-internal/no-instanceof : value instanceof ArrayBuffer ? Buffer.from(value) : value.buffer @@ -68,7 +67,6 @@ export class SQLiteBigInt> extends SQ } export class SQLiteBlobJsonBuilder extends SQLiteColumnBuilder<{ - name: string; dataType: 'object json'; data: unknown; driverParam: Buffer; @@ -104,7 +102,7 @@ export class SQLiteBlobJson> extends S if (typeof Buffer !== 'undefined' && Buffer.from) { const buf = Buffer.isBuffer(value) ? value - // eslint-disable-next-line no-instanceof/no-instanceof + // oxlint-disable-next-line drizzle-internal/no-instanceof : value instanceof ArrayBuffer ? Buffer.from(value) : value.buffer @@ -122,7 +120,6 @@ export class SQLiteBlobJson> extends S } export class SQLiteBlobBufferBuilder extends SQLiteColumnBuilder<{ - name: string; dataType: 'object buffer'; data: Buffer; driverParam: Buffer; diff --git a/drizzle-orm/src/sqlite-core/columns/common.ts b/drizzle-orm/src/sqlite-core/columns/common.ts index e76f765e55..155bd9b283 100644 --- a/drizzle-orm/src/sqlite-core/columns/common.ts +++ b/drizzle-orm/src/sqlite-core/columns/common.ts @@ -14,7 +14,6 @@ import type { ForeignKey, UpdateDeleteAction } from '~/sqlite-core/foreign-keys. import { ForeignKeyBuilder } from '~/sqlite-core/foreign-keys.ts'; import type { SQLiteTable } from '~/sqlite-core/table.ts'; import type { Update } from '~/utils.ts'; -import { uniqueKeyName } from '../unique-constraint.ts'; export type SQLiteColumns = Record>; @@ -103,9 +102,6 @@ export abstract class SQLiteColumn< table: SQLiteTable, config: ColumnBuilderRuntimeConfig & TRuntimeConfig, ) { - if (!config.uniqueName) { - config.uniqueName = uniqueKeyName(table, [config.name]); - } super(table, config); this.table = table; } diff --git a/drizzle-orm/src/sqlite-core/columns/custom.ts b/drizzle-orm/src/sqlite-core/columns/custom.ts index afa6edf6c0..3044c9a396 100644 --- a/drizzle-orm/src/sqlite-core/columns/custom.ts +++ b/drizzle-orm/src/sqlite-core/columns/custom.ts @@ -8,7 +8,6 @@ import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { - name: string; dataType: 'custom'; data: T['data']; driverParam: T['driverData']; @@ -362,7 +361,7 @@ export function customType( ): SQLiteCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); return new SQLiteCustomColumnBuilder( - name as ConvertCustomConfig['name'], + name, config, customTypeParams, ); diff --git a/drizzle-orm/src/sqlite-core/columns/integer.ts b/drizzle-orm/src/sqlite-core/columns/integer.ts index 9c15a6e519..2b2df85beb 100644 --- a/drizzle-orm/src/sqlite-core/columns/integer.ts +++ b/drizzle-orm/src/sqlite-core/columns/integer.ts @@ -50,7 +50,6 @@ export abstract class SQLiteBaseInteger< } export class SQLiteIntegerBuilder extends SQLiteBaseIntegerBuilder<{ - name: string; dataType: 'number int53'; data: number; driverParam: number; @@ -74,7 +73,6 @@ export class SQLiteInteger> extends S } export class SQLiteTimestampBuilder extends SQLiteBaseIntegerBuilder<{ - name: string; dataType: 'object date'; data: Date; driverParam: number; @@ -127,7 +125,6 @@ export class SQLiteTimestamp> } export class SQLiteBooleanBuilder extends SQLiteBaseIntegerBuilder<{ - name: string; dataType: 'boolean'; data: boolean; driverParam: number; diff --git a/drizzle-orm/src/sqlite-core/columns/numeric.ts b/drizzle-orm/src/sqlite-core/columns/numeric.ts index 4ec7f636f4..326b877f18 100644 --- a/drizzle-orm/src/sqlite-core/columns/numeric.ts +++ b/drizzle-orm/src/sqlite-core/columns/numeric.ts @@ -5,7 +5,6 @@ import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; export class SQLiteNumericBuilder extends SQLiteColumnBuilder<{ - name: string; dataType: 'string numeric'; data: string; driverParam: string; @@ -39,7 +38,6 @@ export class SQLiteNumeric> extends } } export class SQLiteNumericNumberBuilder extends SQLiteColumnBuilder<{ - name: string; dataType: 'number'; data: number; driverParam: string; @@ -76,7 +74,6 @@ export class SQLiteNumericNumber> extends S } export class SQLiteNumericBigIntBuilder extends SQLiteColumnBuilder<{ - name: string; dataType: 'bigint int64'; data: bigint; driverParam: string; diff --git a/drizzle-orm/src/sqlite-core/columns/real.ts b/drizzle-orm/src/sqlite-core/columns/real.ts index 0a6efe3523..5c7ec00f50 100644 --- a/drizzle-orm/src/sqlite-core/columns/real.ts +++ b/drizzle-orm/src/sqlite-core/columns/real.ts @@ -4,7 +4,6 @@ import type { SQLiteTable } from '../table.ts'; import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; export class SQLiteRealBuilder extends SQLiteColumnBuilder<{ - name: string; dataType: 'number double'; data: number; driverParam: number; diff --git a/drizzle-orm/src/sqlite-core/columns/text.ts b/drizzle-orm/src/sqlite-core/columns/text.ts index 0023df1a55..2405824014 100644 --- a/drizzle-orm/src/sqlite-core/columns/text.ts +++ b/drizzle-orm/src/sqlite-core/columns/text.ts @@ -8,7 +8,6 @@ export class SQLiteTextBuilder< TEnum extends [string, ...string[]], > extends SQLiteColumnBuilder< { - name: string; dataType: Equal extends true ? 'string' : 'string enum'; data: TEnum[number]; driverParam: string; @@ -53,7 +52,6 @@ export class SQLiteText> } export class SQLiteTextJsonBuilder extends SQLiteColumnBuilder<{ - name: string; dataType: 'object json'; data: unknown; driverParam: string; diff --git a/drizzle-orm/src/sqlite-core/dialect.ts b/drizzle-orm/src/sqlite-core/dialect.ts index fd6186bce3..66d8d55efd 100644 --- a/drizzle-orm/src/sqlite-core/dialect.ts +++ b/drizzle-orm/src/sqlite-core/dialect.ts @@ -1,5 +1,11 @@ import * as V1 from '~/_relations.ts'; -import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; +import { + aliasedTable, + aliasedTableColumn, + getOriginalColumnFromAlias, + mapColumnsInAliasedSQLToAlias, + mapColumnsInSQLToAlias, +} from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import type { AnyColumn } from '~/column.ts'; import { Column } from '~/column.ts'; @@ -193,20 +199,31 @@ export abstract class SQLiteDialect { chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); } } else if (is(field, Column)) { - const tableName = field.table[Table.Symbol.Name]; if (field.columnType === 'SQLiteNumericBigInt') { if (isSingleTable) { - chunk.push(sql`cast(${sql.identifier(this.casing.getColumnCasing(field))} as text)`); + chunk.push( + field.isAlias + ? sql`cast(${ + sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field))) + } as text) as ${field}` + : sql`cast(${sql.identifier(this.casing.getColumnCasing(field))} as text)`, + ); } else { chunk.push( - sql`cast(${sql.identifier(tableName)}.${sql.identifier(this.casing.getColumnCasing(field))} as text)`, + field.isAlias + ? sql`cast(${getOriginalColumnFromAlias(field)} as text) as ${field}` + : sql`cast(${field} as text)`, ); } } else { if (isSingleTable) { - chunk.push(sql.identifier(this.casing.getColumnCasing(field))); + chunk.push( + field.isAlias + ? sql`${sql.identifier(this.casing.getColumnCasing(getOriginalColumnFromAlias(field)))} as ${field}` + : sql.identifier(this.casing.getColumnCasing(field)), + ); } else { - chunk.push(sql`${sql.identifier(tableName)}.${sql.identifier(this.casing.getColumnCasing(field))}`); + chunk.push(field.isAlias ? sql`${getOriginalColumnFromAlias(field)} as ${field}` : field); } } } else if (is(field, Subquery)) { diff --git a/drizzle-orm/src/sqlite-core/foreign-keys.ts b/drizzle-orm/src/sqlite-core/foreign-keys.ts index 1c947f7f59..7134478176 100644 --- a/drizzle-orm/src/sqlite-core/foreign-keys.ts +++ b/drizzle-orm/src/sqlite-core/foreign-keys.ts @@ -91,6 +91,10 @@ export class ForeignKey { ]; return name ?? `${chunks.join('_')}_fk`; } + + isNameExplicit() { + return !!this.reference().name; + } } type ColumnsWithTable< @@ -98,22 +102,6 @@ type ColumnsWithTable< TColumns extends SQLiteColumn[], > = { [Key in keyof TColumns]: AnySQLiteColumn<{ tableName: TTableName }> }; -/** - * @deprecated please use `foreignKey({ columns: [], foreignColumns: [] })` syntax without callback - * @param config - * @returns - */ -export function foreignKey< - TTableName extends string, - TForeignTableName extends string, - TColumns extends [AnySQLiteColumn<{ tableName: TTableName }>, ...AnySQLiteColumn<{ tableName: TTableName }>[]], ->( - config: () => { - name?: string; - columns: TColumns; - foreignColumns: ColumnsWithTable; - }, -): ForeignKeyBuilder; export function foreignKey< TTableName extends string, TForeignTableName extends string, diff --git a/drizzle-orm/src/sqlite-core/indexes.ts b/drizzle-orm/src/sqlite-core/indexes.ts index 70ca62a8ca..6249a4f9f0 100644 --- a/drizzle-orm/src/sqlite-core/indexes.ts +++ b/drizzle-orm/src/sqlite-core/indexes.ts @@ -63,9 +63,11 @@ export class Index { }; readonly config: IndexConfig & { table: SQLiteTable }; + readonly isNameExplicit: boolean; constructor(config: IndexConfig, table: SQLiteTable) { this.config = { ...config, table }; + this.isNameExplicit = !!config.name; } } diff --git a/drizzle-orm/src/sqlite-core/primary-keys.ts b/drizzle-orm/src/sqlite-core/primary-keys.ts index ea2111c63a..96798880c6 100644 --- a/drizzle-orm/src/sqlite-core/primary-keys.ts +++ b/drizzle-orm/src/sqlite-core/primary-keys.ts @@ -53,10 +53,12 @@ export class PrimaryKey { readonly columns: SQLiteColumn[]; readonly name?: string; + readonly isNameExplicit: boolean; constructor(readonly table: SQLiteTable, columns: SQLiteColumn[], name?: string) { this.columns = columns; this.name = name; + this.isNameExplicit = !!name; } getName(): string { diff --git a/drizzle-orm/src/sqlite-core/query-builders/count.ts b/drizzle-orm/src/sqlite-core/query-builders/count.ts index 179c785559..3be2fc6b35 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/count.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/count.ts @@ -9,7 +9,7 @@ export class SQLiteCountBuilder< > extends SQL implements Promise, SQLWrapper { private sql: SQL; - static override readonly [entityKind] = 'SQLiteCountBuilderAsync'; + static override readonly [entityKind]: string = 'SQLiteCountBuilderAsync'; [Symbol.toStringTag] = 'SQLiteCountBuilderAsync'; private session: TSession; diff --git a/drizzle-orm/src/sqlite-core/query-builders/insert.ts b/drizzle-orm/src/sqlite-core/query-builders/insert.ts index 8816513628..6724c75801 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/insert.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/insert.ts @@ -29,7 +29,7 @@ export interface SQLiteInsertConfig { export type SQLiteInsertValue< TTable extends SQLiteTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = Simplify< { [Key in keyof TModel]: TModel[Key] | SQL | Placeholder; @@ -38,7 +38,7 @@ export type SQLiteInsertValue< export type SQLiteInsertSelectQueryBuilder< TTable extends SQLiteTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = TypedQueryBuilder< { [K in keyof TModel]: AnySQLiteColumn | SQL | SQL.Aliased | TModel[K] } >; diff --git a/drizzle-orm/src/sqlite-core/query-builders/update.ts b/drizzle-orm/src/sqlite-core/query-builders/update.ts index 026c093bf7..6fc60171d3 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/update.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/update.ts @@ -38,7 +38,7 @@ export interface SQLiteUpdateConfig { export type SQLiteUpdateSetSource< TTable extends SQLiteTable, - TModel extends InferInsertModel = InferInsertModel, + TModel extends Record = InferInsertModel, > = & { [Key in keyof TModel & string]?: diff --git a/drizzle-orm/src/sqlite-core/unique-constraint.ts b/drizzle-orm/src/sqlite-core/unique-constraint.ts index e9c47e7d3b..4153194889 100644 --- a/drizzle-orm/src/sqlite-core/unique-constraint.ts +++ b/drizzle-orm/src/sqlite-core/unique-constraint.ts @@ -51,10 +51,12 @@ export class UniqueConstraint { static readonly [entityKind]: string = 'SQLiteUniqueConstraint'; readonly columns: SQLiteColumn[]; - readonly name?: string; + readonly name: string; + readonly isNameExplicit: boolean; constructor(readonly table: SQLiteTable, columns: SQLiteColumn[], name?: string) { this.columns = columns; + this.isNameExplicit = !!name; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); } diff --git a/drizzle-orm/src/sqlite-proxy/session.ts b/drizzle-orm/src/sqlite-proxy/session.ts index a30d06163a..65e132575a 100644 --- a/drizzle-orm/src/sqlite-proxy/session.ts +++ b/drizzle-orm/src/sqlite-proxy/session.ts @@ -201,7 +201,6 @@ export class RemotePreparedQuery { readonly dialect: T['dialect']; } -export class Table implements SQLWrapper { +export class Table { static readonly [entityKind]: string = 'Table'; declare readonly _: TableTypeConfig; @@ -105,8 +105,6 @@ export class Table implements SQLWrapper { this[TableSchema] = schema; this[BaseName] = baseName; } - - getSQL = undefined as unknown as (() => SQL); } export function isTable(table: unknown): table is Table { diff --git a/drizzle-orm/src/tidb-serverless/driver.ts b/drizzle-orm/src/tidb-serverless/driver.ts index ba2e63d484..7a80c5d601 100644 --- a/drizzle-orm/src/tidb-serverless/driver.ts +++ b/drizzle-orm/src/tidb-serverless/driver.ts @@ -6,7 +6,7 @@ import { DefaultLogger } from '~/logger.ts'; import { MySqlDatabase } from '~/mysql-core/db.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import type { TiDBServerlessPreparedQueryHKT, TiDBServerlessQueryResultHKT } from './session.ts'; import { TiDBServerlessSession } from './session.ts'; @@ -79,9 +79,9 @@ export function drizzle< TClient extends Connection = Connection, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ & ({ @@ -102,23 +102,19 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: Config | string; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: Config | string; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' - ? connect({ - url: connection, - }) - : connect(connection!); - - return construct(instance, drizzleConfig) as any; - } + const instance = typeof connection === 'string' + ? connect({ + url: connection, + }) + : connect(connection!); - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/tracing.ts b/drizzle-orm/src/tracing.ts index 7d5fd165bc..e04067f2ed 100644 --- a/drizzle-orm/src/tracing.ts +++ b/drizzle-orm/src/tracing.ts @@ -2,7 +2,7 @@ import type { Span, Tracer } from '@opentelemetry/api'; import { iife } from '~/tracing-utils.ts'; import { npmVersion } from '~/version.ts'; -let otel: typeof import('@opentelemetry/api') | undefined; +let otel: typeof import('@opentelemetry/api') | undefined; // oxlint-disable-line let rawTracer: Tracer | undefined; // try { // otel = await import('@opentelemetry/api'); @@ -41,7 +41,7 @@ export const tracer = { } catch (e) { span.setStatus({ code: otel.SpanStatusCode.ERROR, - message: e instanceof Error ? e.message : 'Unknown error', // eslint-disable-line no-instanceof/no-instanceof + message: e instanceof Error ? e.message : 'Unknown error', // oxlint-disable-line drizzle-internal/no-instanceof }); throw e; } finally { diff --git a/drizzle-orm/src/tursodatabase/database.ts b/drizzle-orm/src/tursodatabase/database.ts index dad8283a60..15a0df8b4b 100644 --- a/drizzle-orm/src/tursodatabase/database.ts +++ b/drizzle-orm/src/tursodatabase/database.ts @@ -1,6 +1,6 @@ import { Database } from '@tursodatabase/database'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { construct, type TursoDatabaseDatabase } from './driver-core.ts'; export type DatabaseOpts = (Database extends { new(path: string, opts: infer D): any } ? D : any) & { @@ -13,9 +13,9 @@ export function drizzle< TClient extends Database = Database, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -36,21 +36,17 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: DatabaseOpts; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: DatabaseOpts; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' - ? new Database(connection) - : new Database(connection.path, connection); + const instance = typeof connection === 'string' + ? new Database(connection) + : new Database(connection.path, connection); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/tursodatabase/session.ts b/drizzle-orm/src/tursodatabase/session.ts index 35b11d181c..886af849f9 100644 --- a/drizzle-orm/src/tursodatabase/session.ts +++ b/drizzle-orm/src/tursodatabase/session.ts @@ -238,8 +238,6 @@ export class TursoDatabasePreparedQuery< private isRqbV2Query?: TIsRqbV2, ) { super('async', executeMethod, query, cache, queryMetadata, cacheConfig); - this.customResultMapper = customResultMapper; - this.fields = fields; } async run(placeholderValues?: Record): Promise { diff --git a/drizzle-orm/src/tursodatabase/wasm.ts b/drizzle-orm/src/tursodatabase/wasm.ts index 9f593dc4d6..7227d9a507 100644 --- a/drizzle-orm/src/tursodatabase/wasm.ts +++ b/drizzle-orm/src/tursodatabase/wasm.ts @@ -1,6 +1,6 @@ import { Database } from '@tursodatabase/database-wasm'; import type { AnyRelations, EmptyRelations } from '~/relations.ts'; -import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { DrizzleConfig } from '~/utils.ts'; import { construct, type TursoDatabaseDatabase } from './driver-core.ts'; export type DatabaseOpts = (Database extends { new(path: string, opts: infer D): any } ? D : any) & { @@ -13,9 +13,9 @@ export function drizzle< TClient extends Database = Database, >( ...params: [ - TClient | string, + string, ] | [ - TClient | string, + string, DrizzleConfig, ] | [ ( @@ -36,21 +36,17 @@ export function drizzle< return construct(instance, params[1]) as any; } - if (isConfig(params[0])) { - const { connection, client, ...drizzleConfig } = params[0] as - & { connection?: DatabaseOpts; client?: TClient } - & DrizzleConfig; + const { connection, client, ...drizzleConfig } = params[0] as + & { connection?: DatabaseOpts; client?: TClient } + & DrizzleConfig; - if (client) return construct(client, drizzleConfig) as any; + if (client) return construct(client, drizzleConfig) as any; - const instance = typeof connection === 'string' - ? new Database(connection) - : new Database(connection.path, connection); + const instance = typeof connection === 'string' + ? new Database(connection) + : new Database(connection.path, connection); - return construct(instance, drizzleConfig) as any; - } - - return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; + return construct(instance, drizzleConfig) as any; } export namespace drizzle { diff --git a/drizzle-orm/src/vercel-postgres/session.ts b/drizzle-orm/src/vercel-postgres/session.ts index 81535f6967..6004b323de 100644 --- a/drizzle-orm/src/vercel-postgres/session.ts +++ b/drizzle-orm/src/vercel-postgres/session.ts @@ -303,7 +303,7 @@ export class VercelPgSession< transaction: (tx: VercelPgTransaction) => Promise, config?: PgTransactionConfig | undefined, ): Promise { - const session = this.client instanceof VercelPool // eslint-disable-line no-instanceof/no-instanceof + const session = this.client instanceof VercelPool // oxlint-disable-line drizzle-internal/no-instanceof ? new VercelPgSession(await this.client.connect(), this.dialect, this.relations, this.schema, this.options) : this; const tx = new VercelPgTransaction( @@ -321,7 +321,7 @@ export class VercelPgSession< await tx.execute(sql`rollback`); throw error; } finally { - if (this.client instanceof VercelPool) { // eslint-disable-line no-instanceof/no-instanceof + if (this.client instanceof VercelPool) { // oxlint-disable-line drizzle-internal/no-instanceof (session.client as VercelPoolClient).release(); } } diff --git a/drizzle-orm/src/version.ts b/drizzle-orm/src/version.ts index 2dd5cc3e76..2a6804c350 100644 --- a/drizzle-orm/src/version.ts +++ b/drizzle-orm/src/version.ts @@ -1,4 +1,5 @@ // @ts-ignore - imported using Rollup json plugin export { version as npmVersion } from '../package.json'; // In version 7, we changed the PostgreSQL indexes API -export const compatibilityVersion = 11; +// In version 12, we changed the migration folder structure and migrate function +export const compatibilityVersion = 12; diff --git a/drizzle-orm/tests/casing/cockroach-to-camel.test.ts b/drizzle-orm/tests/casing/cockroach-to-camel.test.ts new file mode 100644 index 0000000000..757bb8b2d5 --- /dev/null +++ b/drizzle-orm/tests/casing/cockroach-to-camel.test.ts @@ -0,0 +1,309 @@ +import { beforeEach, describe, it } from 'vitest'; +import { relations } from '~/_relations'; +import { drizzle } from '~/cockroach'; +import { alias, boolean, cockroachSchema, cockroachTable, int4, text, union } from '~/cockroach-core'; +import { asc, eq, sql } from '~/sql'; + +const testSchema = cockroachSchema('test'); +const users = cockroachTable('users', { + id: int4().primaryKey().generatedByDefaultAsIdentity(), + first_name: text().notNull(), + last_name: text().notNull(), + // Test that custom aliases remain + age: int4('AGE'), +}); +const usersRelations = relations(users, ({ one }) => ({ + developers: one(developers), +})); +const developers = testSchema.table('developers', { + user_id: int4().primaryKey().generatedByDefaultAsIdentity().references(() => users.id), + uses_drizzle_orm: boolean().notNull(), +}); +const developersRelations = relations(developers, ({ one }) => ({ + user: one(users, { + fields: [developers.user_id], + references: [users.id], + }), +})); +const devs = alias(developers, 'devs'); +const schema = { users, usersRelations, developers, developersRelations }; + +const db = drizzle.mock({ schema, casing: 'camelCase' }); + +const usersCache = { + 'public.users.id': 'id', + 'public.users.first_name': 'firstName', + 'public.users.last_name': 'lastName', + 'public.users.AGE': 'age', +}; +const developersCache = { + 'test.developers.user_id': 'userId', + 'test.developers.uses_drizzle_orm': 'usesDrizzleOrm', +}; +const cache = { + ...usersCache, + ...developersCache, +}; + +const fullName = sql`${users.first_name} || ' ' || ${users.last_name}`.as('name'); + +describe('cockroach to camel case', () => { + beforeEach(() => { + db.dialect.casing.clearCache(); + }); + + it('select', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age }) + .from(users) + .leftJoin(developers, eq(users.id, developers.user_id)) + .orderBy(asc(users.first_name)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."firstName" || \' \' || "users"."lastName" as "name", "users"."AGE" from "users" left join "test"."developers" on "users"."id" = "test"."developers"."userId" order by "users"."firstName" asc', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('select (with alias)', ({ expect }) => { + const query = db + .select({ first_name: users.first_name }) + .from(users) + .leftJoin(devs, eq(users.id, devs.user_id)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."firstName" from "users" left join "test"."developers" "devs" on "users"."id" = "devs"."userId"', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('with CTE', ({ expect }) => { + const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); + const query = db.with(cte).select().from(cte); + + expect(query.toSQL()).toEqual({ + sql: 'with "cte" as (select "firstName" || \' \' || "lastName" as "name" from "users") select "name" from "cte"', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('with CTE (with query builder)', ({ expect }) => { + const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); + const query = db.with(cte).select().from(cte); + + expect(query.toSQL()).toEqual({ + sql: 'with "cte" as (select "firstName" || \' \' || "lastName" as "name" from "users") select "name" from "cte"', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('set operator', ({ expect }) => { + const query = db + .select({ first_name: users.first_name }) + .from(users) + .union(db.select({ first_name: users.first_name }).from(users)); + + expect(query.toSQL()).toEqual({ + sql: '(select "firstName" from "users") union (select "firstName" from "users")', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('set operator (function)', ({ expect }) => { + const query = union( + db.select({ first_name: users.first_name }).from(users), + db.select({ first_name: users.first_name }).from(users), + ); + + expect(query.toSQL()).toEqual({ + sql: '(select "firstName" from "users") union (select "firstName" from "users")', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('query (find first)', ({ expect }) => { + const query = db._query.users.findFirst({ + columns: { + id: true, + age: true, + }, + extras: { + fullName, + }, + where: eq(users.id, 1), + with: { + developers: { + columns: { + uses_drizzle_orm: true, + }, + }, + }, + }); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."id", "users"."AGE", "users"."firstName" || \' \' || "users"."lastName" as "name", "users_developers"."data" as "developers" from "users" "users" left join lateral (select json_build_array("users_developers"."usesDrizzleOrm") as "data" from (select * from "test"."developers" "users_developers" where "users_developers"."userId" = "users"."id" limit $1) "users_developers") "users_developers" on true where "users"."id" = $2 limit $3', + params: [1, 1, 1], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('query (find many)', ({ expect }) => { + const query = db._query.users.findMany({ + columns: { + id: true, + age: true, + }, + extras: { + fullName, + }, + where: eq(users.id, 1), + with: { + developers: { + columns: { + uses_drizzle_orm: true, + }, + }, + }, + }); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."id", "users"."AGE", "users"."firstName" || \' \' || "users"."lastName" as "name", "users_developers"."data" as "developers" from "users" "users" left join lateral (select json_build_array("users_developers"."usesDrizzleOrm") as "data" from (select * from "test"."developers" "users_developers" where "users_developers"."userId" = "users"."id" limit $1) "users_developers") "users_developers" on true where "users"."id" = $2', + params: [1, 1], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('insert (on conflict do nothing)', ({ expect }) => { + const query = db + .insert(users) + .values({ first_name: 'John', last_name: 'Doe', age: 30 }) + .onConflictDoNothing({ target: users.first_name }) + .returning({ first_name: users.first_name, age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "firstName", "lastName", "AGE") values (default, $1, $2, $3) on conflict ("firstName") do nothing returning "firstName", "AGE"', + params: ['John', 'Doe', 30], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('insert (on conflict do update)', ({ expect }) => { + const query = db + .insert(users) + .values({ first_name: 'John', last_name: 'Doe', age: 30 }) + .onConflictDoUpdate({ target: users.first_name, set: { age: 31 } }) + .returning({ first_name: users.first_name, age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "firstName", "lastName", "AGE") values (default, $1, $2, $3) on conflict ("firstName") do update set "AGE" = $4 returning "firstName", "AGE"', + params: ['John', 'Doe', 30, 31], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('update', ({ expect }) => { + const query = db + .update(users) + .set({ first_name: 'John', last_name: 'Doe', age: 30 }) + .where(eq(users.id, 1)) + .returning({ first_name: users.first_name, age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'update "users" set "firstName" = $1, "lastName" = $2, "AGE" = $3 where "users"."id" = $4 returning "firstName", "AGE"', + params: ['John', 'Doe', 30, 1], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('delete', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)) + .returning({ first_name: users.first_name, age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: 'delete from "users" where "users"."id" = $1 returning "firstName", "AGE"', + params: [1], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select "AGE" as "ageOfUser", "id" as "userId" from "users" order by "userId" asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.user_id)) + .orderBy(asc(users.first_name)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."firstName" || \' \' || "users"."lastName" as "name", "users"."AGE" as "ageOfUser", "users"."id" as "userId" from "users" left join "test"."developers" on "userId" = "test"."developers"."userId" order by "users"."firstName" asc', + params: [], + }); + }); + + it('insert (on conflict do update) returning as', ({ expect }) => { + const query = db + .insert(users) + .values({ first_name: 'John', last_name: 'Doe', age: 30 }) + .onConflictDoUpdate({ target: users.first_name.as('userFirstName'), set: { age: 31 } }) + .returning({ firstName: users.first_name, age: users.age.as('userAge') }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "firstName", "lastName", "AGE") values (default, $1, $2, $3) on conflict ("userFirstName") do update set "AGE" = $4 returning "firstName", "AGE" as "userAge"', + params: ['John', 'Doe', 30, 31], + }); + }); + + it('update returning as', ({ expect }) => { + const query = db + .update(users) + .set({ first_name: 'John', last_name: 'Doe', age: 30 }) + .where(eq(users.id, 1)) + .returning({ firstName: users.first_name.as('usersName'), age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'update "users" set "firstName" = $1, "lastName" = $2, "AGE" = $3 where "users"."id" = $4 returning "firstName" as "usersName", "AGE"', + params: ['John', 'Doe', 30, 1], + }); + }); + + it('delete returning as', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)) + .returning({ firstName: users.first_name, age: users.age.as('usersAge') }); + + expect(query.toSQL()).toEqual({ + sql: 'delete from "users" where "users"."id" = $1 returning "firstName", "AGE" as "usersAge"', + params: [1], + }); + }); +}); diff --git a/drizzle-orm/tests/casing/cockroach-to-snake.test.ts b/drizzle-orm/tests/casing/cockroach-to-snake.test.ts new file mode 100644 index 0000000000..b136697ad8 --- /dev/null +++ b/drizzle-orm/tests/casing/cockroach-to-snake.test.ts @@ -0,0 +1,311 @@ +import { beforeEach, describe, it } from 'vitest'; +import { relations } from '~/_relations'; +import { drizzle } from '~/cockroach'; +import { alias, boolean, cockroachSchema, cockroachTable, int4, text, union } from '~/cockroach-core'; +import { asc, eq, sql } from '~/sql'; + +const testSchema = cockroachSchema('test'); +const users = cockroachTable('users', { + id: int4().primaryKey().generatedByDefaultAsIdentity(), + firstName: text().notNull(), + lastName: text().notNull(), + // Test that custom aliases remain + age: int4('AGE'), +}); +const usersRelations = relations(users, ({ one }) => ({ + developers: one(developers), +})); +const developers = testSchema.table('developers', { + userId: int4().primaryKey().generatedByDefaultAsIdentity().references(() => users.id), + usesDrizzleORM: boolean().notNull(), +}); +const developersRelations = relations(developers, ({ one }) => ({ + user: one(users, { + fields: [developers.userId], + references: [users.id], + }), +})); +const devs = alias(developers, 'devs'); +const schema = { users, usersRelations, developers, developersRelations }; + +const db = drizzle.mock({ schema, casing: 'snake_case' }); + +const usersCache = { + 'public.users.id': 'id', + 'public.users.firstName': 'first_name', + 'public.users.lastName': 'last_name', + 'public.users.AGE': 'age', +}; +const developersCache = { + 'test.developers.userId': 'user_id', + 'test.developers.usesDrizzleORM': 'uses_drizzle_orm', +}; +const cache = { + ...usersCache, + ...developersCache, +}; + +const fullName = sql`${users.firstName} || ' ' || ${users.lastName}`.as('name'); + +describe('cockroach to snake case', () => { + beforeEach(() => { + db.dialect.casing.clearCache(); + }); + + it('select', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age }) + .from(users) + .leftJoin(developers, eq(users.id, developers.userId)) + .orderBy(asc(users.firstName)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."first_name" || \' \' || "users"."last_name" as "name", "users"."AGE" from "users" left join "test"."developers" on "users"."id" = "test"."developers"."user_id" order by "users"."first_name" asc', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('select (with alias)', ({ expect }) => { + const query = db + .select({ firstName: users.firstName }) + .from(users) + .leftJoin(devs, eq(users.id, devs.userId)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."first_name" from "users" left join "test"."developers" "devs" on "users"."id" = "devs"."user_id"', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('with CTE', ({ expect }) => { + const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); + const query = db.with(cte).select().from(cte); + + expect(query.toSQL()).toEqual({ + sql: + 'with "cte" as (select "first_name" || \' \' || "last_name" as "name" from "users") select "name" from "cte"', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('with CTE (with query builder)', ({ expect }) => { + const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); + const query = db.with(cte).select().from(cte); + + expect(query.toSQL()).toEqual({ + sql: + 'with "cte" as (select "first_name" || \' \' || "last_name" as "name" from "users") select "name" from "cte"', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('set operator', ({ expect }) => { + const query = db + .select({ firstName: users.firstName }) + .from(users) + .union(db.select({ firstName: users.firstName }).from(users)); + + expect(query.toSQL()).toEqual({ + sql: '(select "first_name" from "users") union (select "first_name" from "users")', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('set operator (function)', ({ expect }) => { + const query = union( + db.select({ firstName: users.firstName }).from(users), + db.select({ firstName: users.firstName }).from(users), + ); + + expect(query.toSQL()).toEqual({ + sql: '(select "first_name" from "users") union (select "first_name" from "users")', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('query (find first)', ({ expect }) => { + const query = db._query.users.findFirst({ + columns: { + id: true, + age: true, + }, + extras: { + fullName, + }, + where: eq(users.id, 1), + with: { + developers: { + columns: { + usesDrizzleORM: true, + }, + }, + }, + }); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."id", "users"."AGE", "users"."first_name" || \' \' || "users"."last_name" as "name", "users_developers"."data" as "developers" from "users" "users" left join lateral (select json_build_array("users_developers"."uses_drizzle_orm") as "data" from (select * from "test"."developers" "users_developers" where "users_developers"."user_id" = "users"."id" limit $1) "users_developers") "users_developers" on true where "users"."id" = $2 limit $3', + params: [1, 1, 1], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('query (find many)', ({ expect }) => { + const query = db._query.users.findMany({ + columns: { + id: true, + age: true, + }, + extras: { + fullName, + }, + where: eq(users.id, 1), + with: { + developers: { + columns: { + usesDrizzleORM: true, + }, + }, + }, + }); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."id", "users"."AGE", "users"."first_name" || \' \' || "users"."last_name" as "name", "users_developers"."data" as "developers" from "users" "users" left join lateral (select json_build_array("users_developers"."uses_drizzle_orm") as "data" from (select * from "test"."developers" "users_developers" where "users_developers"."user_id" = "users"."id" limit $1) "users_developers") "users_developers" on true where "users"."id" = $2', + params: [1, 1], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('insert (on conflict do nothing)', ({ expect }) => { + const query = db + .insert(users) + .values({ firstName: 'John', lastName: 'Doe', age: 30 }) + .onConflictDoNothing({ target: users.firstName }) + .returning({ firstName: users.firstName, age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "first_name", "last_name", "AGE") values (default, $1, $2, $3) on conflict ("first_name") do nothing returning "first_name", "AGE"', + params: ['John', 'Doe', 30], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('insert (on conflict do update)', ({ expect }) => { + const query = db + .insert(users) + .values({ firstName: 'John', lastName: 'Doe', age: 30 }) + .onConflictDoUpdate({ target: users.firstName, set: { age: 31 } }) + .returning({ firstName: users.firstName, age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "first_name", "last_name", "AGE") values (default, $1, $2, $3) on conflict ("first_name") do update set "AGE" = $4 returning "first_name", "AGE"', + params: ['John', 'Doe', 30, 31], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('update', ({ expect }) => { + const query = db + .update(users) + .set({ firstName: 'John', lastName: 'Doe', age: 30 }) + .where(eq(users.id, 1)) + .returning({ firstName: users.firstName, age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'update "users" set "first_name" = $1, "last_name" = $2, "AGE" = $3 where "users"."id" = $4 returning "first_name", "AGE"', + params: ['John', 'Doe', 30, 1], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('delete', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)) + .returning({ firstName: users.firstName, age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: 'delete from "users" where "users"."id" = $1 returning "first_name", "AGE"', + params: [1], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select "AGE" as "ageOfUser", "id" as "userId" from "users" order by "userId" asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.userId)) + .orderBy(asc(users.firstName)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."first_name" || \' \' || "users"."last_name" as "name", "users"."AGE" as "ageOfUser", "users"."id" as "userId" from "users" left join "test"."developers" on "userId" = "test"."developers"."user_id" order by "users"."first_name" asc', + params: [], + }); + }); + + it('insert (on conflict do update) returning as', ({ expect }) => { + const query = db + .insert(users) + .values({ firstName: 'John', lastName: 'Doe', age: 30 }) + .onConflictDoUpdate({ target: users.firstName.as('userFirstName'), set: { age: 31 } }) + .returning({ firstName: users.firstName, age: users.age.as('userAge') }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "first_name", "last_name", "AGE") values (default, $1, $2, $3) on conflict ("userFirstName") do update set "AGE" = $4 returning "first_name", "AGE" as "userAge"', + params: ['John', 'Doe', 30, 31], + }); + }); + + it('update returning as', ({ expect }) => { + const query = db + .update(users) + .set({ firstName: 'John', lastName: 'Doe', age: 30 }) + .where(eq(users.id, 1)) + .returning({ firstName: users.firstName.as('usersName'), age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'update "users" set "first_name" = $1, "last_name" = $2, "AGE" = $3 where "users"."id" = $4 returning "first_name" as "usersName", "AGE"', + params: ['John', 'Doe', 30, 1], + }); + }); + + it('delete returning as', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)) + .returning({ firstName: users.firstName, age: users.age.as('usersAge') }); + + expect(query.toSQL()).toEqual({ + sql: 'delete from "users" where "users"."id" = $1 returning "first_name", "AGE" as "usersAge"', + params: [1], + }); + }); +}); diff --git a/drizzle-orm/tests/casing/mssql-to-camel.test.ts b/drizzle-orm/tests/casing/mssql-to-camel.test.ts new file mode 100644 index 0000000000..88bda35ea9 --- /dev/null +++ b/drizzle-orm/tests/casing/mssql-to-camel.test.ts @@ -0,0 +1,260 @@ +import mssql from 'mssql'; +import { beforeEach, describe, it } from 'vitest'; +import { relations } from '~/_relations'; +import { alias, bit, int, mssqlSchema, mssqlTable, text, union } from '~/mssql-core'; +import { drizzle } from '~/node-mssql'; +import { asc, eq, sql } from '~/sql'; + +const testSchema = mssqlSchema('test'); +const users = mssqlTable('users', { + // TODO: Investigate reasons for existence of next commented line + // id: int().primaryKey().identity(1, 1), + id: int().primaryKey().identity({ + seed: 1, + increment: 1, + }), + first_name: text().notNull(), + last_name: text().notNull(), + // Test that custom aliases remain + age: int('AGE'), +}); +const usersRelations = relations(users, ({ one }) => ({ + developers: one(developers), +})); +const developers = testSchema.table('developers', { + // TODO: Investigate reasons for existence of next commented line + // user_id: int().primaryKey().primaryKey().references('name1', () => users.id), + user_id: int().primaryKey().primaryKey().references(() => users.id), + uses_drizzle_orm: bit().notNull(), +}); +const developersRelations = relations(developers, ({ one }) => ({ + user: one(users, { + fields: [developers.user_id], + references: [users.id], + }), +})); +const devs = alias(developers, 'devs'); +const schema = { users, usersRelations, developers, developersRelations }; + +const db = drizzle({ client: new mssql.ConnectionPool({ server: '' }), schema, casing: 'camelCase' }); + +const usersCache = { + 'public.users.id': 'id', + 'public.users.first_name': 'firstName', + 'public.users.last_name': 'lastName', + 'public.users.AGE': 'age', +}; +const developersCache = { + 'test.developers.user_id': 'userId', + 'test.developers.uses_drizzle_orm': 'usesDrizzleOrm', +}; +const cache = { + ...usersCache, + ...developersCache, +}; + +const fullName = sql`${users.first_name} || ' ' || ${users.last_name}`.as('name'); + +describe('mssql to camel case', () => { + beforeEach(() => { + db.dialect.casing.clearCache(); + }); + + it('select', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age }) + .from(users) + .leftJoin(developers, eq(users.id, developers.user_id)) + .orderBy(asc(users.first_name)); + + expect(query.toSQL()).toEqual({ + sql: + "select [users].[firstName] || ' ' || [users].[lastName] as [name], [users].[AGE] from [users] left join [test].[developers] on [users].[id] = [test].[developers].[userId] order by [users].[firstName] asc", + params: [], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('select #2', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age }) + .from(users) + .leftJoin(developers, eq(users.id, developers.user_id)) + .where(eq(users.id, 15)) + .orderBy(asc(users.first_name)); + + expect(query.toSQL()).toEqual({ + sql: + "select [users].[firstName] || ' ' || [users].[lastName] as [name], [users].[AGE] from [users] left join [test].[developers] on [users].[id] = [test].[developers].[userId] where [users].[id] = @par0 order by [users].[firstName] asc", + params: [15], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('select (with alias)', ({ expect }) => { + const query = db + .select({ firstName: users.first_name }) + .from(users) + .leftJoin(devs, eq(users.id, devs.user_id)); + + expect(query.toSQL()).toEqual({ + sql: + 'select [users].[firstName] from [users] left join [test].[developers] [devs] on [users].[id] = [devs].[userId]', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('with CTE', ({ expect }) => { + const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); + const query = db.with(cte).select().from(cte); + + expect(query.toSQL()).toEqual({ + sql: "with [cte] as (select [firstName] || ' ' || [lastName] as [name] from [users]) select [name] from [cte]", + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('with CTE (with query builder)', ({ expect }) => { + const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); + const query = db.with(cte).select().from(cte); + + expect(query.toSQL()).toEqual({ + sql: "with [cte] as (select [firstName] || ' ' || [lastName] as [name] from [users]) select [name] from [cte]", + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('set operator', ({ expect }) => { + const query = db + .select({ firstName: users.first_name }) + .from(users) + .union(db.select({ firstName: users.first_name }).from(users)); + + expect(query.toSQL()).toEqual({ + sql: '(select [firstName] from [users]) union (select [firstName] from [users])', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('set operator (function)', ({ expect }) => { + const query = union( + db.select({ firstName: users.first_name }).from(users), + db.select({ firstName: users.first_name }).from(users), + ); + + expect(query.toSQL()).toEqual({ + sql: '(select [firstName] from [users]) union (select [firstName] from [users])', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('insert', ({ expect }) => { + const query = db + .insert(users) + .values({ first_name: 'John', last_name: 'Doe', age: 30 }); + + expect(query.toSQL()).toEqual({ + sql: 'insert into [users] ([firstName], [lastName], [AGE]) values (@par0, @par1, @par2)', + params: ['John', 'Doe', 30], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('update', ({ expect }) => { + const query = db + .update(users) + .set({ first_name: 'John', last_name: 'Doe', age: 30 }) + .where(eq(users.id, 1)); + + expect(query.toSQL()).toEqual({ + sql: 'update [users] set [firstName] = @par0, [lastName] = @par1, [AGE] = @par2 where [users].[id] = @par3', + params: ['John', 'Doe', 30, 1], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('delete', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)); + + expect(query.toSQL()).toEqual({ + sql: 'delete from [users] where [users].[id] = @par0', + params: [1], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select [AGE] as [ageOfUser], [id] as [userId] from [users] order by [userId] asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.user_id)) + .orderBy(asc(users.first_name)); + + expect(query.toSQL()).toEqual({ + sql: + "select [users].[firstName] || ' ' || [users].[lastName] as [name], [users].[AGE] as [ageOfUser], [users].[id] as [userId] from [users] left join [test].[developers] on [userId] = [test].[developers].[userId] order by [users].[firstName] asc", + params: [], + }); + }); + + it('insert output as', ({ expect }) => { + const query = db + .insert(users) + .output({ firstName: users.first_name, age: users.age.as('userAge') }) + .values({ first_name: 'John', last_name: 'Doe', age: 30 }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into [users] ([firstName], [lastName], [AGE]) output INSERTED.[firstName], INSERTED.[AGE] as [userAge] values (@par0, @par1, @par2)', + params: ['John', 'Doe', 30], + }); + }); + + it('update output as', ({ expect }) => { + const query = db + .update(users) + .set({ first_name: 'John', last_name: 'Doe', age: 30 }) + .output({ + inserted: { firstName: users.first_name.as('usersNameIn'), age: users.age.as('ageIn') }, + deleted: { firstName: users.first_name.as('usersNameOut'), age: users.age.as('ageOut') }, + }) + .where(eq(users.id, 1)); + + expect(query.toSQL()).toEqual({ + sql: + 'update [users] set [firstName] = @par0, [lastName] = @par1, [AGE] = @par2 output INSERTED.[firstName] as [usersNameIn], INSERTED.[AGE] as [ageIn], DELETED.[firstName] as [usersNameOut], DELETED.[AGE] as [ageOut] where [users].[id] = @par3', + params: ['John', 'Doe', 30, 1], + }); + }); + + it('delete output as', ({ expect }) => { + const query = db + .delete(users) + .output({ firstName: users.first_name, age: users.age.as('usersAge') }) + .where(eq(users.id, 1)); + + expect(query.toSQL()).toEqual({ + sql: 'delete from [users] output DELETED.[firstName], DELETED.[AGE] as [usersAge] where [users].[id] = @par0', + params: [1], + }); + }); +}); diff --git a/drizzle-orm/tests/casing/mssql-to-snake.test.ts b/drizzle-orm/tests/casing/mssql-to-snake.test.ts new file mode 100644 index 0000000000..abe0e36470 --- /dev/null +++ b/drizzle-orm/tests/casing/mssql-to-snake.test.ts @@ -0,0 +1,244 @@ +import mssql from 'mssql'; +import { beforeEach, describe, it } from 'vitest'; +import { relations } from '~/_relations'; +import { alias, bit, int, mssqlSchema, mssqlTable, text, union } from '~/mssql-core'; +import { drizzle } from '~/node-mssql'; +import { asc, eq, sql } from '~/sql'; + +const testSchema = mssqlSchema('test'); +const users = mssqlTable('users', { + // TODO: Investigate reasons for existence of next commented line + // id: int().primaryKey().identity(1, 1), + id: int().primaryKey().identity({ + seed: 1, + increment: 1, + }), + firstName: text().notNull(), + lastName: text().notNull(), + // Test that custom aliases remain + age: int('AGE'), +}); +const usersRelations = relations(users, ({ one }) => ({ + developers: one(developers), +})); +const developers = testSchema.table('developers', { + // TODO: Investigate reasons for existence of next commented line + // userId: int().primaryKey().references('name1', () => users.id), + userId: int().primaryKey().references(() => users.id), + usesDrizzleORM: bit().notNull(), +}); +const developersRelations = relations(developers, ({ one }) => ({ + user: one(users, { + fields: [developers.userId], + references: [users.id], + }), +})); +const devs = alias(developers, 'devs'); +const schema = { users, usersRelations, developers, developersRelations }; + +const db = drizzle({ client: new mssql.ConnectionPool({ server: '' }), schema, casing: 'snake_case' }); + +const usersCache = { + 'public.users.id': 'id', + 'public.users.firstName': 'first_name', + 'public.users.lastName': 'last_name', + 'public.users.AGE': 'age', +}; +const developersCache = { + 'test.developers.userId': 'user_id', + 'test.developers.usesDrizzleORM': 'uses_drizzle_orm', +}; +const cache = { + ...usersCache, + ...developersCache, +}; + +const fullName = sql`${users.firstName} || ' ' || ${users.lastName}`.as('name'); + +describe('mssql to snake case', () => { + beforeEach(() => { + db.dialect.casing.clearCache(); + }); + + it('select', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age }) + .from(users) + .leftJoin(developers, eq(users.id, developers.userId)) + .orderBy(asc(users.firstName)); + + expect(query.toSQL()).toEqual({ + sql: + "select [users].[first_name] || ' ' || [users].[last_name] as [name], [users].[AGE] from [users] left join [test].[developers] on [users].[id] = [test].[developers].[user_id] order by [users].[first_name] asc", + params: [], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('select (with alias)', ({ expect }) => { + const query = db + .select({ firstName: users.firstName }) + .from(users) + .leftJoin(devs, eq(users.id, devs.userId)); + + expect(query.toSQL()).toEqual({ + sql: + 'select [users].[first_name] from [users] left join [test].[developers] [devs] on [users].[id] = [devs].[user_id]', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(cache); + }); + + it('with CTE', ({ expect }) => { + const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); + const query = db.with(cte).select().from(cte); + + expect(query.toSQL()).toEqual({ + sql: "with [cte] as (select [first_name] || ' ' || [last_name] as [name] from [users]) select [name] from [cte]", + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('with CTE (with query builder)', ({ expect }) => { + const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); + const query = db.with(cte).select().from(cte); + + expect(query.toSQL()).toEqual({ + sql: "with [cte] as (select [first_name] || ' ' || [last_name] as [name] from [users]) select [name] from [cte]", + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('set operator', ({ expect }) => { + const query = db + .select({ firstName: users.firstName }) + .from(users) + .union(db.select({ firstName: users.firstName }).from(users)); + + expect(query.toSQL()).toEqual({ + sql: '(select [first_name] from [users]) union (select [first_name] from [users])', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('set operator (function)', ({ expect }) => { + const query = union( + db.select({ firstName: users.firstName }).from(users), + db.select({ firstName: users.firstName }).from(users), + ); + + expect(query.toSQL()).toEqual({ + sql: '(select [first_name] from [users]) union (select [first_name] from [users])', + params: [], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('insert', ({ expect }) => { + const query = db + .insert(users) + .values({ firstName: 'John', lastName: 'Doe', age: 30 }); + + expect(query.toSQL()).toEqual({ + sql: 'insert into [users] ([first_name], [last_name], [AGE]) values (@par0, @par1, @par2)', + params: ['John', 'Doe', 30], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('update', ({ expect }) => { + const query = db + .update(users) + .set({ firstName: 'John', lastName: 'Doe', age: 30 }) + .where(eq(users.id, 1)); + + expect(query.toSQL()).toEqual({ + sql: 'update [users] set [first_name] = @par0, [last_name] = @par1, [AGE] = @par2 where [users].[id] = @par3', + params: ['John', 'Doe', 30, 1], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('delete', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)); + + expect(query.toSQL()).toEqual({ + sql: 'delete from [users] where [users].[id] = @par0', + params: [1], + }); + expect(db.dialect.casing.cache).toEqual(usersCache); + }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select [AGE] as [ageOfUser], [id] as [userId] from [users] order by [userId] asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.userId)) + .orderBy(asc(users.firstName)); + + expect(query.toSQL()).toEqual({ + sql: + "select [users].[first_name] || ' ' || [users].[last_name] as [name], [users].[AGE] as [ageOfUser], [users].[id] as [userId] from [users] left join [test].[developers] on [userId] = [test].[developers].[user_id] order by [users].[first_name] asc", + params: [], + }); + }); + + it('insert output as', ({ expect }) => { + const query = db + .insert(users) + .output({ firstName: users.firstName, age: users.age.as('userAge') }) + .values({ firstName: 'John', lastName: 'Doe', age: 30 }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into [users] ([first_name], [last_name], [AGE]) output INSERTED.[first_name], INSERTED.[AGE] as [userAge] values (@par0, @par1, @par2)', + params: ['John', 'Doe', 30], + }); + }); + + it('update output as', ({ expect }) => { + const query = db + .update(users) + .set({ firstName: 'John', lastName: 'Doe', age: 30 }) + .output({ + inserted: { firstName: users.firstName.as('usersNameIn'), age: users.age.as('ageIn') }, + deleted: { firstName: users.firstName.as('usersNameOut'), age: users.age.as('ageOut') }, + }) + .where(eq(users.id, 1)); + + expect(query.toSQL()).toEqual({ + sql: + 'update [users] set [first_name] = @par0, [last_name] = @par1, [AGE] = @par2 output INSERTED.[first_name] as [usersNameIn], INSERTED.[AGE] as [ageIn], DELETED.[first_name] as [usersNameOut], DELETED.[AGE] as [ageOut] where [users].[id] = @par3', + params: ['John', 'Doe', 30, 1], + }); + }); + + it('delete output as', ({ expect }) => { + const query = db + .delete(users) + .output({ firstName: users.firstName, age: users.age.as('usersAge') }) + .where(eq(users.id, 1)); + + expect(query.toSQL()).toEqual({ + sql: 'delete from [users] output DELETED.[first_name], DELETED.[AGE] as [usersAge] where [users].[id] = @par0', + params: [1], + }); + }); +}); diff --git a/drizzle-orm/tests/casing/mysql-to-camel.test.ts b/drizzle-orm/tests/casing/mysql-to-camel.test.ts index 36fb633db6..353eac91a1 100644 --- a/drizzle-orm/tests/casing/mysql-to-camel.test.ts +++ b/drizzle-orm/tests/casing/mysql-to-camel.test.ts @@ -31,8 +31,8 @@ const developersRelations = relations(developers, ({ one }) => ({ const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; -const db = mysql(connect({}), { schema, casing: 'camelCase' }); -const ps = planetscale(new Client({}), { schema, casing: 'camelCase' }); +const db = mysql({ client: connect({}), schema, casing: 'camelCase' }); +const ps = planetscale({ client: new Client({}), schema, casing: 'camelCase' }); const usersCache = { 'public.users.id': 'id', @@ -296,4 +296,30 @@ describe('mysql to snake case', () => { }); expect(db.dialect.casing.cache).toEqual(usersCache); }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select `AGE` as `ageOfUser`, `id` as `userId` from `users` order by `userId` asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.user_id)) + .orderBy(asc(users.first_name)); + + expect(query.toSQL()).toEqual({ + sql: + "select `users`.`firstName` || ' ' || `users`.`lastName` as `name`, `users`.`AGE` as `ageOfUser`, `users`.`id` as `userId` from `users` left join `test`.`developers` on `userId` = `test`.`developers`.`userId` order by `users`.`firstName` asc", + params: [], + }); + }); }); diff --git a/drizzle-orm/tests/casing/mysql-to-snake.test.ts b/drizzle-orm/tests/casing/mysql-to-snake.test.ts index fece67f0b9..6878a4a711 100644 --- a/drizzle-orm/tests/casing/mysql-to-snake.test.ts +++ b/drizzle-orm/tests/casing/mysql-to-snake.test.ts @@ -31,8 +31,8 @@ const developersRelations = relations(developers, ({ one }) => ({ const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; -const db = mysql(connect({}), { schema, casing: 'snake_case' }); -const ps = planetscale(new Client({}), { schema, casing: 'snake_case' }); +const db = mysql({ client: connect({}), schema, casing: 'snake_case' }); +const ps = planetscale({ client: new Client({}), schema, casing: 'snake_case' }); const usersCache = { 'public.users.id': 'id', @@ -296,4 +296,30 @@ describe('mysql to snake case', () => { }); expect(db.dialect.casing.cache).toEqual(usersCache); }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select `AGE` as `ageOfUser`, `id` as `userId` from `users` order by `userId` asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.userId)) + .orderBy(asc(users.firstName)); + + expect(query.toSQL()).toEqual({ + sql: + "select `users`.`first_name` || ' ' || `users`.`last_name` as `name`, `users`.`AGE` as `ageOfUser`, `users`.`id` as `userId` from `users` left join `test`.`developers` on `userId` = `test`.`developers`.`user_id` order by `users`.`first_name` asc", + params: [], + }); + }); }); diff --git a/drizzle-orm/tests/casing/pg-to-camel.test.ts b/drizzle-orm/tests/casing/pg-to-camel.test.ts index 5a760210fd..8a6baaad4f 100644 --- a/drizzle-orm/tests/casing/pg-to-camel.test.ts +++ b/drizzle-orm/tests/casing/pg-to-camel.test.ts @@ -29,7 +29,7 @@ const developersRelations = relations(developers, ({ one }) => ({ const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; -const db = drizzle(postgres(''), { schema, casing: 'camelCase' }); +const db = drizzle({ client: postgres(''), schema, casing: 'camelCase' }); const usersCache = { 'public.users.id': 'id', @@ -243,4 +243,70 @@ describe('postgres to camel case', () => { }); expect(db.dialect.casing.cache).toEqual(usersCache); }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select "AGE" as "ageOfUser", "id" as "userId" from "users" order by "userId" asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.user_id)) + .orderBy(asc(users.first_name)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."firstName" || \' \' || "users"."lastName" as "name", "users"."AGE" as "ageOfUser", "users"."id" as "userId" from "users" left join "test"."developers" on "userId" = "test"."developers"."userId" order by "users"."firstName" asc', + params: [], + }); + }); + + it('insert (on conflict do update) returning as', ({ expect }) => { + const query = db + .insert(users) + .values({ first_name: 'John', last_name: 'Doe', age: 30 }) + .onConflictDoUpdate({ target: users.first_name.as('userFirstName'), set: { age: 31 } }) + .returning({ firstName: users.first_name, age: users.age.as('userAge') }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "firstName", "lastName", "AGE") values (default, $1, $2, $3) on conflict ("userFirstName") do update set "AGE" = $4 returning "firstName", "AGE" as "userAge"', + params: ['John', 'Doe', 30, 31], + }); + }); + + it('update returning as', ({ expect }) => { + const query = db + .update(users) + .set({ first_name: 'John', last_name: 'Doe', age: 30 }) + .where(eq(users.id, 1)) + .returning({ firstName: users.first_name.as('usersName'), age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'update "users" set "firstName" = $1, "lastName" = $2, "AGE" = $3 where "users"."id" = $4 returning "firstName" as "usersName", "AGE"', + params: ['John', 'Doe', 30, 1], + }); + }); + + it('delete returning as', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)) + .returning({ firstName: users.first_name, age: users.age.as('usersAge') }); + + expect(query.toSQL()).toEqual({ + sql: 'delete from "users" where "users"."id" = $1 returning "firstName", "AGE" as "usersAge"', + params: [1], + }); + }); }); diff --git a/drizzle-orm/tests/casing/pg-to-snake.test.ts b/drizzle-orm/tests/casing/pg-to-snake.test.ts index 729c634107..cba5b1b881 100644 --- a/drizzle-orm/tests/casing/pg-to-snake.test.ts +++ b/drizzle-orm/tests/casing/pg-to-snake.test.ts @@ -29,7 +29,7 @@ const developersRelations = relations(developers, ({ one }) => ({ const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; -const db = drizzle(postgres(''), { schema, casing: 'snake_case' }); +const db = drizzle({ client: postgres(''), schema, casing: 'snake_case' }); const usersCache = { 'public.users.id': 'id', @@ -245,4 +245,70 @@ describe('postgres to snake case', () => { }); expect(db.dialect.casing.cache).toEqual(usersCache); }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select "AGE" as "ageOfUser", "id" as "userId" from "users" order by "userId" asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.userId)) + .orderBy(asc(users.firstName)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."first_name" || \' \' || "users"."last_name" as "name", "users"."AGE" as "ageOfUser", "users"."id" as "userId" from "users" left join "test"."developers" on "userId" = "test"."developers"."user_id" order by "users"."first_name" asc', + params: [], + }); + }); + + it('insert (on conflict do update) returning as', ({ expect }) => { + const query = db + .insert(users) + .values({ firstName: 'John', lastName: 'Doe', age: 30 }) + .onConflictDoUpdate({ target: users.firstName.as('userFirstName'), set: { age: 31 } }) + .returning({ firstName: users.firstName, age: users.age.as('userAge') }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "first_name", "last_name", "AGE") values (default, $1, $2, $3) on conflict ("userFirstName") do update set "AGE" = $4 returning "first_name", "AGE" as "userAge"', + params: ['John', 'Doe', 30, 31], + }); + }); + + it('update returning as', ({ expect }) => { + const query = db + .update(users) + .set({ firstName: 'John', lastName: 'Doe', age: 30 }) + .where(eq(users.id, 1)) + .returning({ firstName: users.firstName.as('usersName'), age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'update "users" set "first_name" = $1, "last_name" = $2, "AGE" = $3 where "users"."id" = $4 returning "first_name" as "usersName", "AGE"', + params: ['John', 'Doe', 30, 1], + }); + }); + + it('delete returning as', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)) + .returning({ firstName: users.firstName, age: users.age.as('usersAge') }); + + expect(query.toSQL()).toEqual({ + sql: 'delete from "users" where "users"."id" = $1 returning "first_name", "AGE" as "usersAge"', + params: [1], + }); + }); }); diff --git a/drizzle-orm/tests/casing/sqlite-to-camel.test.ts b/drizzle-orm/tests/casing/sqlite-to-camel.test.ts index 3a57a77c36..b2ffc4b652 100644 --- a/drizzle-orm/tests/casing/sqlite-to-camel.test.ts +++ b/drizzle-orm/tests/casing/sqlite-to-camel.test.ts @@ -28,7 +28,7 @@ const developersRelations = relations(developers, ({ one }) => ({ const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; -const db = drizzle(new Database(':memory:'), { schema, casing: 'camelCase' }); +const db = drizzle({ client: new Database(':memory:'), schema, casing: 'camelCase' }); const usersCache = { 'public.users.id': 'id', @@ -241,4 +241,70 @@ describe('sqlite to camel case', () => { }); expect(db.dialect.casing.cache).toEqual(usersCache); }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select "AGE" as "ageOfUser", "id" as "userId" from "users" order by "userId" asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.user_id)) + .orderBy(asc(users.first_name)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."firstName" || \' \' || "users"."lastName" as "name", "users"."AGE" as "ageOfUser", "users"."id" as "userId" from "users" left join "developers" on "userId" = "developers"."userId" order by "users"."firstName" asc', + params: [], + }); + }); + + it('insert (on conflict do update) returning as', ({ expect }) => { + const query = db + .insert(users) + .values({ first_name: 'John', last_name: 'Doe', age: 30 }) + .onConflictDoUpdate({ target: users.first_name.as('userFirstName'), set: { age: 31 } }) + .returning({ firstName: users.first_name, age: users.age.as('userAge') }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "firstName", "lastName", "AGE") values (null, ?, ?, ?) on conflict ("userFirstName") do update set "AGE" = ? returning "firstName", "AGE" as "userAge"', + params: ['John', 'Doe', 30, 31], + }); + }); + + it('update returning as', ({ expect }) => { + const query = db + .update(users) + .set({ first_name: 'John', last_name: 'Doe', age: 30 }) + .where(eq(users.id, 1)) + .returning({ firstName: users.first_name.as('usersName'), age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'update "users" set "firstName" = ?, "lastName" = ?, "AGE" = ? where "users"."id" = ? returning "firstName" as "usersName", "AGE"', + params: ['John', 'Doe', 30, 1], + }); + }); + + it('delete returning as', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)) + .returning({ firstName: users.first_name, age: users.age.as('usersAge') }); + + expect(query.toSQL()).toEqual({ + sql: 'delete from "users" where "users"."id" = ? returning "firstName", "AGE" as "usersAge"', + params: [1], + }); + }); }); diff --git a/drizzle-orm/tests/casing/sqlite-to-snake.test.ts b/drizzle-orm/tests/casing/sqlite-to-snake.test.ts index 1d5b570f7e..0c41dcd29f 100644 --- a/drizzle-orm/tests/casing/sqlite-to-snake.test.ts +++ b/drizzle-orm/tests/casing/sqlite-to-snake.test.ts @@ -28,7 +28,7 @@ const developersRelations = relations(developers, ({ one }) => ({ const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; -const db = drizzle(new Database(':memory:'), { schema, casing: 'snake_case' }); +const db = drizzle({ client: new Database(':memory:'), schema, casing: 'snake_case' }); const usersCache = { 'public.users.id': 'id', @@ -47,7 +47,7 @@ const cache = { const fullName = sql`${users.firstName} || ' ' || ${users.lastName}`.as('name'); -describe('sqlite to camel case', () => { +describe('sqlite to snake case', () => { beforeEach(() => { db.dialect.casing.clearCache(); }); @@ -243,4 +243,70 @@ describe('sqlite to camel case', () => { }); expect(db.dialect.casing.cache).toEqual(usersCache); }); + + it('select columns as', ({ expect }) => { + const query = db + .select({ age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .orderBy(asc(users.id.as('userId'))); + + expect(query.toSQL()).toEqual({ + sql: 'select "AGE" as "ageOfUser", "id" as "userId" from "users" order by "userId" asc', + params: [], + }); + }); + + it('select join columns as', ({ expect }) => { + const query = db + .select({ name: fullName, age: users.age.as('ageOfUser'), id: users.id.as('userId') }) + .from(users) + .leftJoin(developers, eq(users.id.as('userId'), developers.userId)) + .orderBy(asc(users.firstName)); + + expect(query.toSQL()).toEqual({ + sql: + 'select "users"."first_name" || \' \' || "users"."last_name" as "name", "users"."AGE" as "ageOfUser", "users"."id" as "userId" from "users" left join "developers" on "userId" = "developers"."user_id" order by "users"."first_name" asc', + params: [], + }); + }); + + it('insert (on conflict do update) returning as', ({ expect }) => { + const query = db + .insert(users) + .values({ firstName: 'John', lastName: 'Doe', age: 30 }) + .onConflictDoUpdate({ target: users.firstName.as('userFirstName'), set: { age: 31 } }) + .returning({ firstName: users.firstName, age: users.age.as('userAge') }); + + expect(query.toSQL()).toEqual({ + sql: + 'insert into "users" ("id", "first_name", "last_name", "AGE") values (null, ?, ?, ?) on conflict ("userFirstName") do update set "AGE" = ? returning "first_name", "AGE" as "userAge"', + params: ['John', 'Doe', 30, 31], + }); + }); + + it('update returning as', ({ expect }) => { + const query = db + .update(users) + .set({ firstName: 'John', lastName: 'Doe', age: 30 }) + .where(eq(users.id, 1)) + .returning({ firstName: users.firstName.as('usersName'), age: users.age }); + + expect(query.toSQL()).toEqual({ + sql: + 'update "users" set "first_name" = ?, "last_name" = ?, "AGE" = ? where "users"."id" = ? returning "first_name" as "usersName", "AGE"', + params: ['John', 'Doe', 30, 1], + }); + }); + + it('delete returning as', ({ expect }) => { + const query = db + .delete(users) + .where(eq(users.id, 1)) + .returning({ firstName: users.firstName, age: users.age.as('usersAge') }); + + expect(query.toSQL()).toEqual({ + sql: 'delete from "users" where "users"."id" = ? returning "first_name", "AGE" as "usersAge"', + params: [1], + }); + }); }); diff --git a/drizzle-orm/tests/relation.test.ts b/drizzle-orm/tests/relation.test.ts index 16853a1289..19b5fa1c2a 100644 --- a/drizzle-orm/tests/relation.test.ts +++ b/drizzle-orm/tests/relation.test.ts @@ -14,20 +14,18 @@ test('tables with same name in different schemas', () => { }, }; - const relationalSchema = { - ...Object.fromEntries( - Object.entries(schema) - .flatMap(([key, val]) => { - // have unique keys across schemas + const relationalSchema = Object.fromEntries( + Object.entries(schema) + .flatMap(([key, val]) => { + // have unique keys across schemas - const mappedTableEntries = Object.entries(val).map((tableEntry) => { - return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; - }); + const mappedTableEntries = Object.entries(val).map((tableEntry) => { + return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; + }); - return mappedTableEntries; - }), - ), - }; + return mappedTableEntries; + }), + ); const relationsConfig = extractTablesRelationalConfig( relationalSchema, diff --git a/drizzle-orm/tests/type-hints.test.ts b/drizzle-orm/tests/type-hints.test.ts index 40b40b9586..904e8c92f0 100644 --- a/drizzle-orm/tests/type-hints.test.ts +++ b/drizzle-orm/tests/type-hints.test.ts @@ -6,7 +6,8 @@ import { drizzle } from '~/aws-data-api/pg'; import { customType, json, PgDialect, pgTable, text, timestamp, uuid, varchar } from '~/pg-core'; import { sql } from '~/sql/sql'; -const db = drizzle(new RDSDataClient(), { +const db = drizzle({ + client: new RDSDataClient(), database: '', resourceArn: '', secretArn: '', diff --git a/drizzle-orm/tsconfig.json b/drizzle-orm/tsconfig.json index 3177a915f9..42af07e2ed 100644 --- a/drizzle-orm/tsconfig.json +++ b/drizzle-orm/tsconfig.json @@ -7,11 +7,8 @@ }, "declaration": true, "outDir": "dist", - "noEmit": true + "noEmit": true, + "allowImportingTsExtensions": true }, - "include": [ - "src", - "scripts", - "types-bench.ts" - ] + "include": ["src", "scripts", "types-bench.ts"] } diff --git a/drizzle-orm/type-tests/cockroach/1-to-1-fk.ts b/drizzle-orm/type-tests/cockroach/1-to-1-fk.ts new file mode 100644 index 0000000000..8e5248865a --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/1-to-1-fk.ts @@ -0,0 +1,28 @@ +import { type CockroachColumn, int4 } from '~/cockroach-core/columns/index.ts'; +import { cockroachTable } from '~/cockroach-core/table.ts'; + +{ + const test1 = cockroachTable('test1_table', { + id: int4('id').primaryKey(), + test2Id: int4('test2_id').references(() => test2.id), + }); + + const test1Id = int4('test1_id').references(() => test1.id); + + const test2 = cockroachTable('test2_table', { + id: int4('id').primaryKey(), + test1Id, + }); +} + +{ + const test1 = cockroachTable('test1_table', { + id: int4('id').primaryKey(), + test2Id: int4('test2_id').references((): CockroachColumn => test2.id), + }); + + const test2 = cockroachTable('test2_table', { + id: int4('id').primaryKey(), + test1Id: int4('test1_id').references(() => test1.id), + }); +} diff --git a/drizzle-orm/type-tests/cockroach/array.ts b/drizzle-orm/type-tests/cockroach/array.ts new file mode 100644 index 0000000000..9bc171084a --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/array.ts @@ -0,0 +1,36 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import { cockroachTable, int4 } from '~/cockroach-core/index.ts'; +import type { Column } from '~/column.ts'; + +{ + const table = cockroachTable('table', { + a: int4('a').array().notNull(), + }); + Expect< + Equal< + // @ts-ignore - TODO: Remake type checks for new columns + Column< + { + name: 'a'; + tableName: 'table'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: false; + hasDefault: false; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }, + {}, + {} + >, + typeof table['a']['_']['baseColumn'] + > + >; +} diff --git a/drizzle-orm/type-tests/cockroach/count.ts b/drizzle-orm/type-tests/cockroach/count.ts new file mode 100644 index 0000000000..70c6f696af --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/count.ts @@ -0,0 +1,61 @@ +import { Expect } from 'type-tests/utils.ts'; +import { cockroachTable, int4, text } from '~/cockroach-core/index.ts'; +import { and, gt, ne } from '~/sql/expressions/index.ts'; +import type { Equal } from '~/utils.ts'; +import { db } from './db.ts'; + +const names = cockroachTable('names', { + id: int4('id').primaryKey(), + name: text('name'), + authorId: int4('author_id'), +}); + +const separate = await db.$count(names); + +const separateFilters = await db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))); + +const embedded = await db + .select({ + id: names.id, + name: names.name, + authorId: names.authorId, + count1: db.$count(names).as('count1'), + }) + .from(names); + +const embeddedFilters = await db + .select({ + id: names.id, + name: names.name, + authorId: names.authorId, + count1: db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))).as('count1'), + }) + .from(names); + +Expect>; + +Expect>; + +Expect< + Equal< + { + id: number; + name: string | null; + authorId: number | null; + count1: number; + }[], + typeof embedded + > +>; + +Expect< + Equal< + { + id: number; + name: string | null; + authorId: number | null; + count1: number; + }[], + typeof embeddedFilters + > +>; diff --git a/drizzle-orm/type-tests/cockroach/db-rel.ts b/drizzle-orm/type-tests/cockroach/db-rel.ts new file mode 100644 index 0000000000..66b90be6d1 --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/db-rel.ts @@ -0,0 +1,122 @@ +import pg from 'pg'; +import { type Equal, Expect } from 'type-tests/utils.ts'; +import { drizzle } from '~/cockroach/index.ts'; +import { sql } from '~/sql/sql.ts'; +import * as schema from './tables-rel.ts'; + +const { Pool } = pg; + +const pdb = new Pool({ connectionString: process.env['COCKROACH_CONNECTION_STRING'] }); +const db = drizzle({ client: pdb, schema }); + +{ + const result = await db._query.users.findMany({ + where: (users, { sql }) => sql`char_length(${users.name} > 1)`, + limit: sql.placeholder('l'), + orderBy: (users, { asc, desc }) => [asc(users.name), desc(users.id)], + with: { + posts: { + where: (posts, { sql }) => sql`char_length(${posts.title} > 1)`, + limit: sql.placeholder('l'), + columns: { + id: false, + title: undefined, + }, + with: { + author: true, + comments: { + where: (comments, { sql }) => sql`char_length(${comments.text} > 1)`, + limit: sql.placeholder('l'), + columns: { + text: true, + }, + with: { + author: { + columns: { + id: undefined, + }, + with: { + city: { + with: { + users: true, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }); + + Expect< + Equal<{ + id: number; + name: string; + cityId: number; + homeCityId: number | null; + createdAt: Date; + posts: { + title: string; + authorId: number | null; + comments: { + text: string; + author: { + city: { + id: number; + name: string; + users: { + id: number; + name: string; + cityId: number; + homeCityId: number | null; + createdAt: Date; + }[]; + }; + } | null; + }[]; + author: { + id: number; + name: string; + cityId: number; + homeCityId: number | null; + createdAt: Date; + } | null; + }[]; + }[], typeof result> + >; +} + +{ + const result = await db._query.users.findMany({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + authorId: true, + }, + extras: { + lower: sql`lower(${schema.posts.title})`.as('lower_name'), + }, + }, + }, + }); + + Expect< + Equal< + { + id: number; + name: string; + posts: { + authorId: number | null; + lower: string; + }[]; + }[], + typeof result + > + >; +} diff --git a/drizzle-orm/type-tests/cockroach/db.ts b/drizzle-orm/type-tests/cockroach/db.ts new file mode 100644 index 0000000000..e75d78a57f --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/db.ts @@ -0,0 +1,6 @@ +import pg from 'pg'; +import { drizzle } from '~/cockroach/index.ts'; + +const { Client } = pg; + +export const db = drizzle({ client: new Client() }); diff --git a/drizzle-orm/type-tests/cockroach/delete.ts b/drizzle-orm/type-tests/cockroach/delete.ts new file mode 100644 index 0000000000..36509ff6b5 --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/delete.ts @@ -0,0 +1,78 @@ +import type { QueryResult } from 'pg'; +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import type { CockroachDelete } from '~/cockroach-core/index.ts'; +import { eq } from '~/sql/expressions/index.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +const deleteAll = await db.delete(users); +Expect, typeof deleteAll>>; + +const deleteAllStmt = db.delete(users).prepare('deleteAllStmt'); +const deleteAllPrepared = await deleteAllStmt.execute(); +Expect, typeof deleteAllPrepared>>; + +const deleteWhere = await db.delete(users).where(eq(users.id, 1)); +Expect, typeof deleteWhere>>; + +const deleteWhereStmt = db.delete(users).where(eq(users.id, 1)).prepare('deleteWhereStmt'); +const deleteWherePrepared = await deleteWhereStmt.execute(); +Expect, typeof deleteWherePrepared>>; + +const deleteReturningAll = await db.delete(users).returning(); +Expect>; + +const deleteReturningAllStmt = db.delete(users).returning().prepare('deleteReturningAllStmt'); +const deleteReturningAllPrepared = await deleteReturningAllStmt.execute(); +Expect>; + +const deleteReturningPartial = await db.delete(users).returning({ + myId: users.id, + myHomeCity: users.homeCity, +}); +Expect>; + +const deleteReturningPartialStmt = db.delete(users).returning({ + myId: users.id, + myHomeCity: users.homeCity, +}).prepare('deleteReturningPartialStmt'); +const deleteReturningPartialPrepared = await deleteReturningPartialStmt.execute(); +Expect>; + +{ + function dynamic(qb: T) { + return qb.where(sql``).returning(); + } + + const qbBase = db.delete(users).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + function withReturning(qb: T) { + return qb.returning(); + } + + const qbBase = db.delete(users).$dynamic(); + const qb = withReturning(qbBase); + const result = await qb; + Expect>; +} + +{ + db + .delete(users) + .where(sql``) + // @ts-expect-error method was already called + .where(sql``); + + db + .delete(users) + .returning() + // @ts-expect-error method was already called + .returning(); +} diff --git a/drizzle-orm/type-tests/cockroach/generated-columns.ts b/drizzle-orm/type-tests/cockroach/generated-columns.ts new file mode 100644 index 0000000000..9450494a97 --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/generated-columns.ts @@ -0,0 +1,220 @@ +import { type Equal, Expect } from 'type-tests/utils'; +import { cockroachTable, int4, text, varchar } from '~/cockroach-core'; +import { type InferInsertModel, type InferSelectModel, sql } from '~/index'; +import { drizzle } from '~/node-postgres'; +import { db } from './db'; + +const users = cockroachTable( + 'users', + { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + firstName: varchar('first_name', { length: 255 }), + lastName: varchar('last_name', { length: 255 }), + email: text('email').notNull(), + fullName: text('full_name').generatedAlwaysAs(sql`concat_ws(first_name, ' ', last_name)`).notNull(), + upperName: text('upper_name').generatedAlwaysAs( + sql` case when first_name is null then null else upper(first_name) end `, + ), + }, +); +{ + type User = typeof users.$inferSelect; + type NewUser = typeof users.$inferInsert; + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string; + upperName: string | null; + }, + User + > + >(); + + Expect< + Equal< + { + email: string; + firstName?: string | null | undefined; + lastName?: string | null | undefined; + }, + NewUser + > + >(); +} + +{ + type User = InferSelectModel; + type NewUser = InferInsertModel; + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string; + upperName: string | null; + }, + User + > + >(); + + Expect< + Equal< + { + email: string; + firstName?: string | null | undefined; + lastName?: string | null | undefined; + }, + NewUser + > + >(); +} + +{ + const dbUsers = await db.select().from(users); + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string; + upperName: string | null; + }[], + typeof dbUsers + > + >(); +} + +{ + const db = drizzle({} as any, { schema: { users } }); + + const dbUser = await db._query.users.findFirst(); + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string; + upperName: string | null; + } | undefined, + typeof dbUser + > + >(); +} + +{ + const db = drizzle({} as any, { schema: { users } }); + + const dbUser = await db._query.users.findMany(); + + Expect< + Equal< + { + id: number; + firstName: string | null; + lastName: string | null; + email: string; + fullName: string; + upperName: string | null; + }[], + typeof dbUser + > + >(); +} + +{ + // @ts-expect-error - Can't use the fullName because it's a generated column + await db.insert(users).values({ + firstName: 'test', + lastName: 'test', + email: 'test', + fullName: 'test', + }); +} + +{ + await db.update(users).set({ + firstName: 'test', + lastName: 'test', + email: 'test', + // @ts-expect-error - Can't use the fullName because it's a generated column + fullName: 'test', + }); +} + +const users2 = cockroachTable( + 'users', + { + id: int4('id').generatedByDefaultAsIdentity(), + id2: int4('id').generatedAlwaysAsIdentity(), + }, +); + +{ + type User = typeof users2.$inferSelect; + type NewUser = typeof users2.$inferInsert; + + Expect< + Equal< + { + id: number; + id2: number; + }, + User + > + >(); + + Expect< + Equal< + { + id?: number | undefined; + }, + NewUser + > + >(); +} + +const usersSeq = cockroachTable( + 'users', + { + id: int4('id').generatedByDefaultAsIdentity(), + id2: int4('id').generatedAlwaysAsIdentity(), + }, +); + +{ + type User = typeof usersSeq.$inferSelect; + type NewUser = typeof usersSeq.$inferInsert; + + Expect< + Equal< + { + id: number; + id2: number; + }, + User + > + >(); + + Expect< + Equal< + { + id?: number | undefined; + }, + NewUser + > + >(); +} diff --git a/drizzle-orm/type-tests/cockroach/insert.ts b/drizzle-orm/type-tests/cockroach/insert.ts new file mode 100644 index 0000000000..30c99c28cc --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/insert.ts @@ -0,0 +1,295 @@ +import type { QueryResult } from 'pg'; +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { bool, cockroachTable, int4, QueryBuilder, text } from '~/cockroach-core/index.ts'; +import type { CockroachInsert } from '~/cockroach-core/query-builders/insert.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { identityColumnsTable, users } from './tables.ts'; + +const insert = await db + .insert(users) + .values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', + arrayCol: [''], + }); +Expect, typeof insert>>; + +const insertStmt = db + .insert(users) + .values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', + arrayCol: [''], + }) + .prepare('insertStmt'); +const insertPrepared = await insertStmt.execute(); +Expect, typeof insertPrepared>>; + +const insertSql = await db.insert(users).values({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, + arrayCol: [''], +}); +Expect, typeof insertSql>>; + +const insertSqlStmt = db + .insert(users) + .values({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, + arrayCol: [''], + }) + .prepare('insertSqlStmt'); +const insertSqlPrepared = await insertSqlStmt.execute(); +Expect, typeof insertSqlPrepared>>; + +const insertReturning = await db + .insert(users) + .values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', + arrayCol: [''], + }) + .returning(); +Expect>; + +const insertReturningStmt = db + .insert(users) + .values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', + arrayCol: [''], + }) + .returning() + .prepare('insertReturningStmt'); +const insertReturningPrepared = await insertReturningStmt.execute(); +Expect>; + +const insertReturningPartial = await db + .insert(users) + .values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', + arrayCol: [''], + }) + .returning({ + id: users.id, + homeCity: users.homeCity, + mySubclass: users.subClass, + }); +Expect< + Equal<{ + id: number; + homeCity: number; + mySubclass: 'B' | 'D' | null; + }[], typeof insertReturningPartial> +>; + +const insertReturningPartialStmt = db + .insert(users) + .values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', + arrayCol: [''], + }) + .returning({ + id: users.id, + homeCity: users.homeCity, + mySubclass: users.subClass, + }) + .prepare('insertReturningPartialStmt'); +const insertReturningPartialPrepared = await insertReturningPartialStmt.execute(); +Expect< + Equal<{ + id: number; + homeCity: number; + mySubclass: 'B' | 'D' | null; + }[], typeof insertReturningPartialPrepared> +>; + +const insertReturningSql = await db + .insert(users) + .values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', + arrayCol: [''], + }) + .returning({ + id: users.id, + homeCity: users.homeCity, + subclassLower: sql`lower(${users.subClass})`, + classLower: sql`lower(${users.class})`, + }); +Expect< + Equal<{ + id: number; + homeCity: number; + subclassLower: unknown; + classLower: string; + }[], typeof insertReturningSql> +>; + +const insertReturningSqlStmt = db + .insert(users) + .values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', + arrayCol: [''], + }) + .returning({ + id: users.id, + homeCity: users.homeCity, + subclassLower: sql`lower(${users.subClass})`, + classLower: sql`lower(${users.class})`, + }) + .prepare('insertReturningSqlStmt'); +const insertReturningSqlPrepared = await insertReturningSqlStmt.execute(); +Expect< + Equal<{ + id: number; + homeCity: number; + subclassLower: unknown; + classLower: string; + }[], typeof insertReturningSqlPrepared> +>; + +{ + function dynamic(qb: T) { + return qb.returning().onConflictDoNothing().onConflictDoUpdate({ set: {}, target: users.id, where: sql`` }); + } + + const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, arrayCol: [] }).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + function withReturning(qb: T) { + return qb.returning(); + } + + const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, arrayCol: [] }).$dynamic(); + const qb = withReturning(qbBase); + const result = await qb; + Expect>; +} + +{ + db + .insert(users) + .values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, arrayCol: [] }) + .returning() + // @ts-expect-error method was already called + .returning(); +} + +{ + const users1 = cockroachTable('users1', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + name: text('name').notNull(), + admin: bool('admin').notNull().default(false), + }); + const users2 = cockroachTable('users2', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + firstName: text('first_name').notNull(), + lastName: text('last_name').notNull(), + admin: bool('admin').notNull().default(false), + phoneNumber: text('phone_number'), + }); + + const qb = new QueryBuilder(); + + db.insert(users1).select(sql`select * from users1`); + db.insert(users1).select(() => sql`select * from users1`); + + db + .insert(users1) + .select( + qb.select({ + name: users2.firstName, + admin: users2.admin, + }).from(users2), + ); + + db + .insert(users1) + .select( + qb.select({ + name: users2.firstName, + admin: users2.admin, + }).from(users2).where(sql``), + ); + + db + .insert(users2) + .select( + qb.select({ + firstName: users2.firstName, + lastName: users2.lastName, + admin: users2.admin, + }).from(users2), + ); + + db + .insert(users1) + .select( + qb.select({ + name: sql`${users2.firstName} || ' ' || ${users2.lastName}`.as('name'), + admin: users2.admin, + }).from(users2), + ); + + db + .insert(users1) + .select( + // @ts-expect-error name is undefined + qb.select({ admin: users1.admin }).from(users1), + ); + + db.insert(users1).select(db.select().from(users1)); + db.insert(users1).select(() => db.select().from(users1)); + db.insert(users1).select((qb) => qb.select().from(users1)); + // @ts-expect-error tables have different keys + db.insert(users1).select(db.select().from(users2)); + // @ts-expect-error tables have different keys + db.insert(users1).select(() => db.select().from(users2)); +} + +{ + db.insert(identityColumnsTable).values([ + { byDefaultAsIdentity: 4, name: 'fdf' }, + ]); + + // @ts-expect-error + db.insert(identityColumnsTable).values([ + { alwaysAsIdentity: 2 }, + ]); + + // @ts-expect-error + db.insert(identityColumnsTable).values([ + { generatedCol: 2 }, + ]); +} diff --git a/drizzle-orm/type-tests/cockroach/no-strict-null-checks/test.ts b/drizzle-orm/type-tests/cockroach/no-strict-null-checks/test.ts new file mode 100644 index 0000000000..8f701aeebf --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/no-strict-null-checks/test.ts @@ -0,0 +1,106 @@ +import { drizzle } from '~/cockroach'; +import { cockroachTable, int4, text } from '~/cockroach-core'; + +export const test = cockroachTable( + 'test', + { + id: text('id') + .primaryKey() + .generatedAlwaysAs('genstr'), + intId: int4('int_id') + .primaryKey() + .generatedAlwaysAsIdentity(), + int2Id: int4('int2_id').generatedByDefaultAsIdentity(), + name: text('name').$defaultFn(() => '' as string), + title: text('title').notNull(), + description: text('description'), + dbdef: text('dbdef').default('dbdefval'), + }, +); + +const db = drizzle.mock(); + +db.update(test) + .set({ + // @ts-expect-error + id: '1', + name: 'name', + title: 'title', + description: 'desc', + dbdef: 'upddef', + }); + +db.update(test) + .set({ + // @ts-expect-error + intId: 1, + name: 'name', + title: 'title', + description: 'desc', + dbdef: 'upddef', + }); + +db.update(test) + .set({ + int2Id: 1, + name: 'name', + title: 'title', + description: 'desc', + dbdef: 'upddef', + }); + +db.update(test) + .set({ + name: 'name', + title: 'title', + description: 'desc', + dbdef: 'upddef', + }); + +db.insert(test).values({ + // @ts-expect-error + id: '1', + name: 'name', + title: 'title', + description: 'desc', + dbdef: 'upddef', +}); + +db.insert(test).values({ + // @ts-expect-error + intId: 1, + name: 'name', + title: 'title', + description: 'desc', + dbdef: 'upddef', +}); + +db.insert(test).values({ + int2Id: 1, + name: 'name', + title: 'title', + description: 'desc', + dbdef: 'upddef', +}); + +db.insert(test).values({ + name: 'name', + title: 'title', + description: 'desc', + dbdef: 'upddef', +}); + +db.insert(test).values({ + title: 'title', + description: 'desc', + dbdef: 'upddef', +}); + +db.insert(test).values({ + title: 'title', + description: 'desc', +}); + +db.insert(test).values({ + title: 'title', +}); diff --git a/drizzle-orm/type-tests/cockroach/no-strict-null-checks/tsconfig.json b/drizzle-orm/type-tests/cockroach/no-strict-null-checks/tsconfig.json new file mode 100644 index 0000000000..6d5a4b7c0c --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/no-strict-null-checks/tsconfig.json @@ -0,0 +1,10 @@ +{ + "extends": "../../tsconfig.json", + "compilerOptions": { + "noEmit": true, + "strictNullChecks": false, + "strictPropertyInitialization": false, + "exactOptionalPropertyTypes": false + }, + "include": ["./test.ts"] +} diff --git a/drizzle-orm/type-tests/cockroach/other.ts b/drizzle-orm/type-tests/cockroach/other.ts new file mode 100644 index 0000000000..82d6b5d8da --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/other.ts @@ -0,0 +1,16 @@ +import type { QueryResult } from 'pg'; +import { eq, inArray } from '~/sql/expressions/index.ts'; +import { sql } from '~/sql/sql.ts'; + +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +const rawQuery = await db.execute( + sql`select ${users.id}, ${users.class} from ${users} where ${inArray(users.id, [1, 2, 3])} and ${ + eq(users.class, 'A') + }`, +); + +Expect>, typeof rawQuery>>; diff --git a/drizzle-orm/type-tests/cockroach/select.ts b/drizzle-orm/type-tests/cockroach/select.ts new file mode 100644 index 0000000000..448dc89b10 --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/select.ts @@ -0,0 +1,1457 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; + +import { alias } from '~/cockroach-core/alias.ts'; +import { + bool, + cockroachMaterializedView, + type CockroachSelect, + type CockroachSelectQueryBuilder, + cockroachTable, + cockroachView, + int4, + QueryBuilder, + text, +} from '~/cockroach-core/index.ts'; +import { + and, + arrayContained, + arrayContains, + arrayOverlaps, + between, + eq, + exists, + gt, + gte, + ilike, + inArray, + isNotNull, + isNull, + like, + lt, + lte, + ne, + not, + notBetween, + notExists, + notIlike, + notInArray, + notLike, + or, +} from '~/sql/expressions/index.ts'; +import { type InferSelectViewModel, type SQL, sql } from '~/sql/sql.ts'; + +import { db } from './db.ts'; +import { cities, classes, newYorkers, newYorkers2, users } from './tables.ts'; + +const city = alias(cities, 'city'); +const city1 = alias(cities, 'city1'); + +const leftJoinFull = await db.select().from(users).leftJoin(city, eq(users.id, city.id)); + +Expect< + Equal< + { + users_table: typeof users.$inferSelect; + city: typeof cities.$inferSelect | null; + }[], + typeof leftJoinFull + > +>; + +const rightJoinFull = await db.select().from(users).rightJoin(city, eq(users.id, city.id)); + +Expect< + Equal< + { + users_table: typeof users.$inferSelect | null; + city: typeof city.$inferSelect; + }[], + typeof rightJoinFull + > +>; + +const innerJoinFull = await db.select().from(users).innerJoin(city, eq(users.id, city.id)); + +Expect< + Equal< + { + users_table: typeof users.$inferSelect; + city: typeof city.$inferSelect; + }[], + typeof innerJoinFull + > +>; + +const fullJoinFull = await db.select().from(users).fullJoin(city, eq(users.id, city.id)); + +Expect< + Equal< + { + users_table: typeof users.$inferSelect | null; + city: typeof city.$inferSelect | null; + }[], + typeof fullJoinFull + > +>; + +const crossJoinFull = await db.select().from(users).crossJoin(city); + +Expect< + Equal< + { + users_table: typeof users.$inferSelect; + city: typeof city.$inferSelect; + }[], + typeof crossJoinFull + > +>; + +const leftJoinFlat = await db + .select({ + userId: users.id, + userText: users.text, + cityId: city.id, + cityName: city.name, + }) + .from(users) + .leftJoin(city, eq(users.id, city.id)); + +Expect< + Equal<{ + userId: number; + userText: string | null; + cityId: number | null; + cityName: string | null; + }[], typeof leftJoinFlat> +>; + +const rightJoinFlat = await db + .select({ + userId: users.id, + userText: users.text, + cityId: city.id, + cityName: city.name, + }) + .from(users) + .rightJoin(city, eq(users.id, city.id)); + +Expect< + Equal<{ + userId: number | null; + userText: string | null; + cityId: number; + cityName: string; + }[], typeof rightJoinFlat> +>; + +const innerJoinFlat = await db + .select({ + userId: users.id, + userText: users.text, + cityId: city.id, + cityName: city.name, + }) + .from(users) + .innerJoin(city, eq(users.id, city.id)); + +Expect< + Equal<{ + userId: number; + userText: string | null; + cityId: number; + cityName: string; + }[], typeof innerJoinFlat> +>; + +const fullJoinFlat = await db + .select({ + userId: users.id, + userText: users.text, + cityId: city.id, + cityName: city.name, + }) + .from(users) + .fullJoin(city, eq(users.id, city.id)); + +Expect< + Equal<{ + userId: number | null; + userText: string | null; + cityId: number | null; + cityName: string | null; + }[], typeof fullJoinFlat> +>; + +const crossJoinFlat = await db + .select({ + userId: users.id, + userText: users.text, + cityId: city.id, + cityName: city.name, + }) + .from(users) + .crossJoin(city); + +Expect< + Equal<{ + userId: number; + userText: string | null; + cityId: number; + cityName: string; + }[], typeof crossJoinFlat> +>; + +const leftJoinMixed = await db + .select({ + id: users.id, + text: users.text, + textUpper: sql`upper(${users.text})`, + idComplex: sql`${users.id}::text || ${city.id}::text`, + city: { + id: city.id, + name: city.name, + }, + }) + .from(users) + .leftJoin(city, eq(users.id, city.id)); + +Expect< + Equal< + { + id: number; + text: string | null; + textUpper: string | null; + idComplex: string | null; + city: { + id: number; + name: string; + } | null; + }[], + typeof leftJoinMixed + > +>; + +const leftJoinMixed2 = await db + .select({ + id: users.id, + text: users.text, + foo: { + bar: users.uuid, + baz: cities.id, + }, + }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)); + +Expect< + Equal< + { + id: number; + text: string | null; + foo: { + bar: string; + baz: number | null; + }; + }[], + typeof leftJoinMixed2 + > +>; + +const join1 = await db + .select({ + user: { + id: users.id, + text: users.text, + }, + city: { + id: city.id, + name: city.name, + nameUpper: sql`upper(${city.name})`, + }, + }) + .from(users) + .leftJoin(city, eq(users.id, city.id)); + +Expect< + Equal<{ + user: { + id: number; + text: string | null; + }; + city: { + id: number; + name: string; + nameUpper: string; + } | null; + }[], typeof join1> +>; + +const join = await db + .select({ + users, + cities, + city, + city1: { + id: city1.id, + }, + }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)) + .rightJoin(city, eq(city.id, users.id)) + .rightJoin(city1, eq(city1.id, users.id)); + +Expect< + Equal< + { + users: { + id: number; + uuid: string; + homeCity: number; + currentCity: number | null; + int4Nullable: number | null; + int4NotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + arrayCol: string[]; + } | null; + cities: { + id: number; + name: string; + population: number | null; + } | null; + city: { + id: number; + name: string; + population: number | null; + } | null; + city1: { + id: number; + }; + }[], + typeof join + > +>; + +const join2 = await db + .select({ + user: { + id: users.id, + }, + city: { + id: cities.id, + }, + }) + .from(users) + .fullJoin(cities, eq(users.id, cities.id)); + +Expect< + Equal< + { + user: { + id: number; + } | null; + city: { + id: number; + } | null; + }[], + typeof join2 + > +>; + +const join3 = await db + .select({ + user: { + id: users.id, + }, + city: { + id: cities.id, + }, + class: { + id: classes.id, + }, + }) + .from(users) + .fullJoin(cities, eq(users.id, cities.id)) + .rightJoin(classes, eq(users.id, classes.id)); + +Expect< + Equal< + { + user: { + id: number; + } | null; + city: { + id: number; + } | null; + class: { + id: number; + }; + }[], + typeof join3 + > +>; + +db.select() + .from(users) + .where(exists(db.select().from(cities).where(eq(users.homeCity, cities.id)))); + +function mapFunkyFuncResult(valueFromDriver: unknown) { + return { + foo: (valueFromDriver as Record)['foo'], + }; +} + +const age = 1; + +const allOperators = await db + .select({ + col2: sql`5 - ${users.id} + 1`, // unknown + col3: sql`${users.id} + 1`, // number + col33: sql`${users.id} + 1`.mapWith(users.id), // number + col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number + col4: sql`one_or_another(${users.id}, ${users.class})`, // string | number + col5: sql`true`, // unknown + col6: sql`true`, // boolean + col7: sql`random()`, // number + col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } + col9: sql`greatest(${users.createdAt}, ${sql.param(new Date(), users.createdAt)})`, // unknown + col10: sql`date_or_false(${users.createdAt}, ${ + sql.param( + new Date(), + users.createdAt, + ) + })`, // Date | boolean + col11: sql`${users.age1} + ${age}`, // unknown + col12: sql`${users.age1} + ${sql.param(age, users.age1)}`, // unknown + col13: sql`lower(${users.class})`, // unknown + col14: sql`length(${users.class})`, // number + count: sql`count(*)::int`, // number + }) + .from(users) + .where( + and( + eq(users.id, 1), + ne(users.id, 1), + or(eq(users.id, 1), ne(users.id, 1)), + not(eq(users.id, 1)), + gt(users.id, 1), + gte(users.id, 1), + lt(users.id, 1), + lte(users.id, 1), + inArray(users.id, [1, 2, 3]), + inArray(users.id, db.select({ id: users.id }).from(users)), + inArray(users.id, sql`select id from ${users}`), + notInArray(users.id, [1, 2, 3]), + notInArray(users.id, db.select({ id: users.id }).from(users)), + notInArray(users.id, sql`select id from ${users}`), + isNull(users.subClass), + isNotNull(users.id), + exists(db.select({ id: users.id }).from(users)), + exists(sql`select id from ${users}`), + notExists(db.select({ id: users.id }).from(users)), + notExists(sql`select id from ${users}`), + between(users.id, 1, 2), + notBetween(users.id, 1, 2), + like(users.id, '%1%'), + notLike(users.id, '%1%'), + ilike(users.id, '%1%'), + notIlike(users.id, '%1%'), + arrayContains(users.arrayCol, ['abc']), + arrayContains(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), + arrayContains(users.arrayCol, sql`select array_col from ${users}`), + arrayContained(users.arrayCol, ['abc']), + arrayContained(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), + arrayContained(users.arrayCol, sql`select array_col from ${users}`), + arrayOverlaps(users.arrayCol, ['abc']), + arrayOverlaps(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), + arrayOverlaps(users.arrayCol, sql`select array_col from ${users}`), + ), + ); + +Expect< + Equal<{ + col2: unknown; + col3: number; + col33: number; + col34: { foo: any }; + col4: string | number; + col5: unknown; + col6: boolean; + col7: number; + col8: { + foo: any; + }; + col9: unknown; + col10: boolean | Date; + col11: unknown; + col12: unknown; + col13: unknown; + col14: number; + count: number; + }[], typeof allOperators> +>; + +const textSelect = await db + .select({ + t: users.text, + }) + .from(users); + +Expect>; + +const homeCity = alias(cities, 'homeCity'); +const c = alias(classes, 'c'); +const otherClass = alias(classes, 'otherClass'); +const anotherClass = alias(classes, 'anotherClass'); +const friend = alias(users, 'friend'); +const currentCity = alias(cities, 'currentCity'); +const subscriber = alias(users, 'subscriber'); +const closestCity = alias(cities, 'closestCity'); +const closestCity2 = alias(cities, 'closestCity2'); +const closestCity3 = alias(cities, 'closestCity3'); +const closestCity4 = alias(cities, 'closestCity4'); +const closestCity5 = alias(cities, 'closestCity5'); +const closestCity6 = alias(cities, 'closestCity6'); +const closestCity7 = alias(cities, 'closestCity7'); + +const megaJoin = await db + .select({ + user: { + id: users.id, + maxAge: sql`max(${users.age1})`, + }, + city: { + id: cities.id, + }, + homeCity, + c, + otherClass, + anotherClass, + friend, + currentCity, + subscriber, + closestCity, + }) + .from(users) + .innerJoin(cities, sql`${users.id} = ${cities.id}`) + .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) + .innerJoin(c, eq(c.id, users.class)) + .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) + .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) + .innerJoin(friend, sql`${users.id} = ${friend.id}`) + .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) + .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) + .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) + .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) + .limit(1) + .offset(1); + +Expect< + Equal< + { + user: { + id: number; + maxAge: unknown; + }; + city: { + id: number; + }; + homeCity: { + id: number; + name: string; + population: number | null; + }; + c: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + otherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + anotherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + friend: { + id: number; + uuid: string; + homeCity: number; + currentCity: number | null; + int4Nullable: number | null; + int4NotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + arrayCol: string[]; + }; + currentCity: { + id: number; + name: string; + population: number | null; + }; + subscriber: { + id: number; + uuid: string; + homeCity: number; + currentCity: number | null; + int4Nullable: number | null; + int4NotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + arrayCol: string[]; + }; + closestCity: { + id: number; + name: string; + population: number | null; + }; + }[], + typeof megaJoin + > +>; + +const megaLeftJoin = await db + .select({ + user: { + id: users.id, + maxAge: sql`max(${users.age1})`, + }, + city: { + id: cities.id, + }, + homeCity, + c, + otherClass, + anotherClass, + friend, + currentCity, + subscriber, + closestCity, + closestCity2, + closestCity3, + closestCity4, + closestCity5, + closestCity6, + closestCity7, + }) + .from(users) + .leftJoin(cities, sql`${users.id} = ${cities.id}`) + .leftJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) + .leftJoin(c, eq(c.id, users.class)) + .leftJoin(otherClass, sql`${c.id} = ${otherClass.id}`) + .leftJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) + .leftJoin(friend, sql`${users.id} = ${friend.id}`) + .leftJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) + .leftJoin(subscriber, sql`${users.class} = ${subscriber.id}`) + .leftJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) + .leftJoin(closestCity2, sql`${users.currentCity} = ${closestCity.id}`) + .leftJoin(closestCity3, sql`${users.currentCity} = ${closestCity.id}`) + .leftJoin(closestCity4, sql`${users.currentCity} = ${closestCity.id}`) + .leftJoin(closestCity5, sql`${users.currentCity} = ${closestCity.id}`) + .leftJoin(closestCity6, sql`${users.currentCity} = ${closestCity.id}`) + .leftJoin(closestCity7, sql`${users.currentCity} = ${closestCity.id}`) + .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) + .limit(1) + .offset(1); + +Expect< + Equal< + { + user: { + id: number; + maxAge: unknown; + }; + city: { + id: number; + } | null; + homeCity: { + id: number; + name: string; + population: number | null; + } | null; + c: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + } | null; + otherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + } | null; + anotherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + } | null; + friend: { + id: number; + uuid: string; + homeCity: number; + currentCity: number | null; + int4Nullable: number | null; + int4NotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + arrayCol: string[]; + } | null; + currentCity: { + id: number; + name: string; + population: number | null; + } | null; + subscriber: { + id: number; + uuid: string; + homeCity: number; + currentCity: number | null; + int4Nullable: number | null; + int4NotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + arrayCol: string[]; + } | null; + closestCity: { + id: number; + name: string; + population: number | null; + } | null; + closestCity2: { + id: number; + name: string; + population: number | null; + } | null; + closestCity3: { + id: number; + name: string; + population: number | null; + } | null; + closestCity4: { + id: number; + name: string; + population: number | null; + } | null; + closestCity5: { + id: number; + name: string; + population: number | null; + } | null; + closestCity6: { + id: number; + name: string; + population: number | null; + } | null; + closestCity7: { + id: number; + name: string; + population: number | null; + } | null; + }[], + typeof megaLeftJoin + > +>; + +await db + .select({ + user: { + id: users.id, + maxAge: sql`max(${users.age1})`, + }, + city: { + id: cities.id, + }, + homeCity, + c, + otherClass, + anotherClass, + friend, + currentCity, + subscriber, + closestCity, + closestCity2, + closestCity3, + closestCity4, + closestCity5, + closestCity6, + closestCity7, + }) + .from(users) + .fullJoin(cities, sql`${users.id} = ${cities.id}`) + .fullJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) + .fullJoin(c, eq(c.id, users.class)) + .fullJoin(otherClass, sql`${c.id} = ${otherClass.id}`) + .fullJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) + .fullJoin(friend, sql`${users.id} = ${friend.id}`) + .fullJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) + .fullJoin(subscriber, sql`${users.class} = ${subscriber.id}`) + .fullJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) + .fullJoin(closestCity2, sql`${users.currentCity} = ${closestCity.id}`) + .fullJoin(closestCity3, sql`${users.currentCity} = ${closestCity.id}`) + .fullJoin(closestCity4, sql`${users.currentCity} = ${closestCity.id}`) + .fullJoin(closestCity5, sql`${users.currentCity} = ${closestCity.id}`) + .fullJoin(closestCity6, sql`${users.currentCity} = ${closestCity.id}`) + .fullJoin(closestCity7, sql`${users.currentCity} = ${closestCity.id}`) + .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) + .limit(1) + .offset(1); + +const friends = alias(users, 'friends'); + +const join4 = await db + .select({ + user: { + id: users.id, + }, + city: { + id: cities.id, + }, + class: classes, + friend: friends, + }) + .from(users) + .innerJoin(cities, sql`${users.id} = ${cities.id}`) + .innerJoin(classes, sql`${cities.id} = ${classes.id}`) + .innerJoin(friends, sql`${friends.id} = ${users.id}`) + .where(sql`${users.age1} > 0`); + +Expect< + Equal<{ + user: { + id: number; + }; + city: { + id: number; + }; + class: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + friend: { + id: number; + uuid: string; + homeCity: number; + currentCity: number | null; + int4Nullable: number | null; + int4NotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + arrayCol: string[]; + }; + }[], typeof join4> +>; + +{ + const authenticated = false as boolean; + + const result = await db + .select({ + id: users.id, + ...(authenticated ? { city: users.homeCity } : {}), + }) + .from(users); + + Expect< + Equal< + { + id: number; + city?: number; + }[], + typeof result + > + >; +} + +await db + .select() + .from(users) + .for('update'); + +await db + .select() + .from(users) + .for('no key update', { of: users }); + +await db + .select() + .from(users) + .for('no key update', { of: users, skipLocked: true }); + +await db + .select() + .from(users) + .for('share', { of: users, noWait: true }); + +await db + .select() + .from(users) + // @ts-expect-error - can't use both skipLocked and noWait + .for('share', { of: users, noWait: true, skipLocked: true }); + +await db + .select({ + id: cities.id, + name: sql`upper(${cities.name})`.as('name'), + usersCount: sql`count(${users.id})`.as('users'), + }) + .from(cities) + .leftJoin(users, eq(users.homeCity, cities.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(cities.id) + .having(({ usersCount }) => sql`${usersCount} > 0`); + +{ + const result = await db.select().from(newYorkers); + Expect< + Equal< + { + userId: number; + cityId: number | null; + }[], + typeof result + > + >; +} + +{ + const result = await db.select({ userId: newYorkers.userId }).from(newYorkers); + Expect< + Equal< + { + userId: number; + }[], + typeof result + > + >; +} + +{ + const result = await db.select().from(newYorkers2); + Expect< + Equal< + { + userId: number; + cityId: number | null; + }[], + typeof result + > + >; +} + +{ + const result = await db.select({ userId: newYorkers.userId }).from(newYorkers2); + Expect< + Equal< + { + userId: number; + }[], + typeof result + > + >; +} + +{ + db + .select() + .from(users) + .where(eq(users.id, 1)); + + db + .select() + .from(users) + .where(eq(users.id, 1)) + // @ts-expect-error - can't use where twice + .where(eq(users.id, 1)); + + db + .select() + .from(users) + .where(eq(users.id, 1)) + .limit(10) + // @ts-expect-error - can't use where twice + .where(eq(users.id, 1)); +} + +{ + function withFriends(qb: T) { + const friends = alias(users, 'friends'); + const friends2 = alias(users, 'friends2'); + const friends3 = alias(users, 'friends3'); + const friends4 = alias(users, 'friends4'); + const friends5 = alias(users, 'friends5'); + return qb + .leftJoin(friends, sql`true`) + .leftJoin(friends2, sql`true`) + .leftJoin(friends3, sql`true`) + .leftJoin(friends4, sql`true`) + .leftJoin(friends5, sql`true`); + } + + const qb = db.select().from(users).$dynamic(); + const result = await withFriends(qb); + Expect< + Equal + >; +} + +{ + function withFriends(qb: T) { + const friends = alias(users, 'friends'); + const friends2 = alias(users, 'friends2'); + const friends3 = alias(users, 'friends3'); + const friends4 = alias(users, 'friends4'); + const friends5 = alias(users, 'friends5'); + return qb + .leftJoin(friends, sql`true`) + .leftJoin(friends2, sql`true`) + .leftJoin(friends3, sql`true`) + .leftJoin(friends4, sql`true`) + .leftJoin(friends5, sql`true`); + } + + const qb = db.select().from(users).$dynamic(); + const result = await withFriends(qb); + Expect< + Equal + >; +} + +{ + function dynamic(qb: T) { + return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); + } + + const qb = db.select().from(users).$dynamic(); + const result = await dynamic(qb); + Expect>; +} + +{ + // TODO: add to docs + function dynamic(qb: T) { + return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); + } + + const query = new QueryBuilder().select().from(users).$dynamic(); + dynamic(query); +} + +{ + // TODO: add to docs + function paginated(qb: T, page: number) { + return qb.limit(10).offset((page - 1) * 10); + } + + const qb = db.select().from(users).$dynamic(); + const result = await paginated(qb, 1); + + Expect>; +} + +{ + db + .select() + .from(users) + .where(sql``) + .limit(10) + // @ts-expect-error method was already called + .where(sql``); + + db + .select() + .from(users) + .having(sql``) + .limit(10) + // @ts-expect-error method was already called + .having(sql``); + + db + .select() + .from(users) + .groupBy(sql``) + .limit(10) + // @ts-expect-error method was already called + .groupBy(sql``); + + db + .select() + .from(users) + .orderBy(sql``) + .limit(10) + // @ts-expect-error method was already called + .orderBy(sql``); + + db + .select() + .from(users) + .limit(10) + .where(sql``) + // @ts-expect-error method was already called + .limit(10); + + db + .select() + .from(users) + .offset(10) + .limit(10) + // @ts-expect-error method was already called + .offset(10); + + db + .select() + .from(users) + .for('update') + .limit(10) + // @ts-expect-error method was already called + .for('update'); +} + +{ + const users = cockroachTable('users', { + developer: bool('developer'), + application: text('application', { enum: ['pending', 'approved'] }), + }); + + const startIt = (whereCallback: (condition: SQL) => SQL | undefined = (c) => c) => { + return db.select().from(users).where(whereCallback(eq(users.developer, true))); + }; + + startIt((c) => and(c, eq(users.application, 'approved'))); +} + +{ + const school = cockroachTable('school', { + faculty: int4('faculty'), + studentid: int4('studentid'), + }); + + const student = cockroachTable('student', { + id: int4('id'), + email: text('email'), + }); + + await db + .select() + .from(school) + .where( + and( + eq(school.faculty, 2), + eq( + school.studentid, + db.select({ id: student.id }).from(student).where(eq(student.email, 'foo@demo.com')), + ), + ), + ); +} + +{ + const table1 = cockroachTable('table1', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const table2 = cockroachTable('table2', { + id: int4().primaryKey(), + age: int4().notNull(), + }); + const table3 = cockroachTable('table3', { + id: int4().primaryKey(), + phone: text().notNull(), + }); + const view = cockroachView('view').as((qb) => + qb.select({ + table: table1, + column: table2.age, + nested: { + column: table3.phone, + }, + }).from(table1).innerJoin(table2, sql``).leftJoin(table3, sql``) + ); + const result = await db.select().from(view); + + Expect< + Equal + >; + Expect>; + Expect[]>>; +} + +{ + const table1 = cockroachTable('table1', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const table2 = cockroachTable('table2', { + id: int4().primaryKey(), + age: int4().notNull(), + }); + const table3 = cockroachTable('table3', { + id: int4().primaryKey(), + phone: text().notNull(), + }); + const view = cockroachMaterializedView('view').as((qb) => + qb.select({ + table: table1, + column: table2.age, + nested: { + column: table3.phone, + }, + }).from(table1).innerJoin(table2, sql``).leftJoin(table3, sql``) + ); + const result = await db.select().from(view); + + Expect< + Equal + >; + Expect>; + Expect[]>>; +} + +{ + const table1 = cockroachTable('table1', { + id: int4().primaryKey(), + name: text().notNull(), + }); + + const table2 = cockroachTable('table2', { + id: int4().primaryKey(), + age: int4().notNull(), + table1Id: int4().references(() => table1.id).notNull(), + }); + + const view = cockroachView('view').as((qb) => qb.select().from(table2)); + + const leftLateralRawRes = await db.select({ + table1, + sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), + }).from(table1).leftJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); + + Expect< + Equal + >; + + const leftLateralSubRes = await db.select().from(table1).leftJoinLateral( + db.select().from(table2).as('sub'), + sql`true`, + ); + + Expect< + Equal + >; + + const sqLeftLateral = db.select().from(table2).as('sub'); + + const leftLateralSubSelectionRes = await db.select( + { + id: table1.id, + sId: sqLeftLateral.id, + }, + ).from(table1).leftJoinLateral( + sqLeftLateral, + sql`true`, + ); + + Expect< + Equal + >; + + await db.select().from(table1) + // @ts-expect-error + .leftJoinLateral(table2, sql`true`); + + await db.select().from(table1) + // @ts-expect-error + .leftJoinLateral(view, sql`true`); + + const innerLateralRawRes = await db.select({ + table1, + sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), + }).from(table1).innerJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); + + Expect< + Equal + >; + + const innerLateralSubRes = await db.select().from(table1).innerJoinLateral( + db.select().from(table2).as('sub'), + sql`true`, + ); + + Expect< + Equal + >; + + const sqInnerLateral = db.select().from(table2).as('sub'); + + const innerLateralSubSelectionRes = await db.select( + { + id: table1.id, + sId: sqLeftLateral.id, + }, + ).from(table1).innerJoinLateral( + sqInnerLateral, + sql`true`, + ); + + Expect< + Equal + >; + + await db.select().from(table1) + // @ts-expect-error + .innerJoinLateral(table2, sql`true`); + + await db.select().from(table1) + // @ts-expect-error + .innerJoinLateral(view, sql`true`); + + const crossLateralRawRes = await db.select({ + table1, + sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), + }).from(table1).crossJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`); + + Expect< + Equal + >; + + const crossLateralSubRes = await db.select().from(table1).crossJoinLateral( + db.select().from(table2).as('sub'), + ); + + Expect< + Equal + >; + + const sqCrossLateral = db.select().from(table2).as('sub'); + + const crossLateralSubSelectionRes = await db.select( + { + id: table1.id, + sId: sqCrossLateral.id, + }, + ).from(table1).crossJoinLateral( + sqInnerLateral, + ); + + Expect< + Equal + >; + + await db.select().from(table1) + // @ts-expect-error + .crossJoinLateral(table2); + + await db.select().from(table1) + // @ts-expect-error + .crossJoinLateral(view); +} diff --git a/drizzle-orm/type-tests/cockroach/set-operators.ts b/drizzle-orm/type-tests/cockroach/set-operators.ts new file mode 100644 index 0000000000..c14de2ab2f --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/set-operators.ts @@ -0,0 +1,288 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import { + type CockroachSetOperator, + except, + exceptAll, + intersect, + intersectAll, + union, + unionAll, +} from '~/cockroach-core/index.ts'; +import { eq } from '~/sql/expressions/index.ts'; +import { desc, sql } from '~/sql/index.ts'; +import { db } from './db.ts'; +import { cities, classes, newYorkers, users } from './tables.ts'; + +const unionTest = await db + .select({ id: users.id }) + .from(users) + .union( + db + .select({ id: users.id }) + .from(users), + ); + +Expect>; + +const unionAllTest = await db + .select({ id: users.id, age: users.age1 }) + .from(users) + .unionAll( + db.select({ id: users.id, age: users.age1 }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)), + ); + +Expect>; + +const intersectTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .intersect(({ intersect }) => + intersect( + db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users), + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ) + ); + +Expect>; + +const intersectAllTest = await db + .select({ id: users.id, homeCity: users.class }) + .from(users) + .intersect( + db + .select({ id: users.id, homeCity: users.class }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)), + ); + +Expect>; + +const exceptTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .except( + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ); + +Expect>; + +const exceptAllTest = await db + .select({ id: users.id, homeCity: users.class }) + .from(users) + .except( + db + .select({ id: users.id, homeCity: sql<'A' | 'C'>`${users.class}` }) + .from(users), + ); + +Expect>; + +const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); + +Expect>; + +const unionAll2Test = await unionAll( + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select().from(cities), +); + +Expect>; + +const intersect2Test = await intersect( + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), +); + +Expect>; + +const intersectAll2Test = await intersectAll( + union( + db.select({ + id: cities.id, + }).from(cities), + db.select({ + id: cities.id, + }) + .from(cities).where(sql``), + ), + db.select({ + id: cities.id, + }) + .from(cities), +).orderBy(desc(cities.id)).limit(23); + +Expect>; + +const except2Test = await except( + db.select({ + userId: newYorkers.userId, + }) + .from(newYorkers), + db.select({ + userId: newYorkers.userId, + }).from(newYorkers), +); + +Expect>; + +const exceptAll2Test = await exceptAll( + db.select({ + userId: newYorkers.userId, + cityId: newYorkers.cityId, + }) + .from(newYorkers).where(sql``), + db.select({ + userId: newYorkers.userId, + cityId: newYorkers.cityId, + }).from(newYorkers).leftJoin(users, sql``), +); + +Expect>; + +const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).limit(1).offset(2); + +Expect< + Equal<{ + id: number; + uuid: string; + homeCity: number; + currentCity: number | null; + int4Nullable: number | null; + int4NotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + arrayCol: string[]; + }[], typeof unionfull> +>; + +union(db.select().from(users), db.select().from(users)) + .orderBy(sql``) + // @ts-expect-error - method was already called + .orderBy(sql``); + +union(db.select().from(users), db.select().from(users)) + .offset(1) + // @ts-expect-error - method was already called + .offset(2); + +union(db.select().from(users), db.select().from(users)) + .orderBy(sql``) + // @ts-expect-error - method was already called + .orderBy(sql``); + +{ + function dynamic(qb: T) { + return qb.orderBy(sql``).limit(1).offset(2); + } + + const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); + const result = await dynamic(qb); + Expect>; +} + +await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); + +union( + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), + db.select({ id: cities.id, name: cities.name }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select().from(cities), +); + +union( + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).limit(3).$dynamic(), + db.select({ id: cities.id, name: cities.name }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: newYorkers.userId }).from(newYorkers), + db.select({ id: cities.id }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities).where(sql``), + db.select({ id: sql`${cities.id}` }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), +); diff --git a/drizzle-orm/type-tests/cockroach/subquery.ts b/drizzle-orm/type-tests/cockroach/subquery.ts new file mode 100644 index 0000000000..31aff14a8d --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/subquery.ts @@ -0,0 +1,97 @@ +import { Expect } from 'type-tests/utils.ts'; +import { alias, cockroachTable, int4, text } from '~/cockroach-core/index.ts'; +import { and, eq } from '~/sql/expressions/index.ts'; +import { sql } from '~/sql/sql.ts'; +import type { DrizzleTypeError, Equal } from '~/utils.ts'; +import { db } from './db.ts'; + +const names = cockroachTable('names', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + name: text('name'), + authorId: int4('author_id'), +}); + +const n1 = db + .select({ + id: names.id, + name: names.name, + authorId: names.authorId, + count1: sql`count(1)::int`.as('count1'), + }) + .from(names) + .groupBy(names.id, names.name, names.authorId) + .as('n1'); + +const n2 = db + .select({ + id: names.id, + authorId: names.authorId, + totalCount: sql`count(1)::int`.as('totalCount'), + }) + .from(names) + .groupBy(names.id, names.authorId) + .as('n2'); + +const result = await db + .select({ + name: n1.name, + authorId: n1.authorId, + count1: n1.count1, + totalCount: n2.totalCount, + }) + .from(n1) + .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))); + +Expect< + Equal< + { + name: string | null; + authorId: number | null; + count1: number; + totalCount: number; + }[], + typeof result + > +>; + +const names2 = alias(names, 'names2'); + +const sq1 = db + .select({ + id: names.id, + name: names.name, + id2: names2.id, + }) + .from(names) + .leftJoin(names2, eq(names.name, names2.name)) + .as('sq1'); + +const res = await db.select().from(sq1); + +Expect< + Equal< + { + id: number; + name: string | null; + id2: number | null; + }[], + typeof res + > +>; + +{ + const sq = db.select({ count: sql`count(1)::int` }).from(names).as('sq'); + Expect ? true : false>; +} + +const sqUnion = db.select().from(names).union(db.select().from(names2)).as('sqUnion'); + +const resUnion = await db.select().from(sqUnion); + +Expect< + Equal<{ + id: number; + name: string | null; + authorId: number | null; + }[], typeof resUnion> +>; diff --git a/drizzle-orm/type-tests/cockroach/tables-rel.ts b/drizzle-orm/type-tests/cockroach/tables-rel.ts new file mode 100644 index 0000000000..e5bce4870c --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/tables-rel.ts @@ -0,0 +1,79 @@ +import { relations } from '~/_relations.ts'; +import { cockroachTable, foreignKey, int4, text, timestamp } from '~/cockroach-core/index.ts'; + +export const users = cockroachTable('users', { + id: int4('id').primaryKey(), + name: text('name').notNull(), + cityId: int4('city_id').references(() => cities.id).notNull(), + homeCityId: int4('home_city_id').references(() => cities.id), + createdAt: timestamp('created_at', { withTimezone: true }).notNull(), +}); +export const usersConfig = relations(users, ({ one, many }) => ({ + city: one(cities, { relationName: 'UsersInCity', fields: [users.cityId], references: [cities.id] }), + homeCity: one(cities, { fields: [users.homeCityId], references: [cities.id] }), + posts: many(posts), + comments: many(comments), +})); + +export const cities = cockroachTable('cities', { + id: int4('id').primaryKey(), + name: text('name').notNull(), +}); +export const citiesConfig = relations(cities, ({ many }) => ({ + users: many(users, { relationName: 'UsersInCity' }), +})); + +export const posts = cockroachTable('posts', { + id: int4('id').primaryKey(), + title: text('title').notNull(), + authorId: int4('author_id').references(() => users.id), +}); +export const postsConfig = relations(posts, ({ one, many }) => ({ + author: one(users, { fields: [posts.authorId], references: [users.id] }), + comments: many(comments), +})); + +export const comments = cockroachTable('comments', { + id: int4('id').primaryKey(), + postId: int4('post_id').references(() => posts.id).notNull(), + authorId: int4('author_id').references(() => users.id), + text: text('text').notNull(), +}); +export const commentsConfig = relations(comments, ({ one }) => ({ + post: one(posts, { fields: [comments.postId], references: [posts.id] }), + author: one(users, { fields: [comments.authorId], references: [users.id] }), +})); + +export const books = cockroachTable('books', { + id: int4('id').primaryKey(), + name: text('name').notNull(), +}); +export const booksConfig = relations(books, ({ many }) => ({ + authors: many(bookAuthors), +})); + +export const bookAuthors = cockroachTable('book_authors', { + bookId: int4('book_id').references(() => books.id).notNull(), + authorId: int4('author_id').references(() => users.id).notNull(), + role: text('role').notNull(), +}); +export const bookAuthorsConfig = relations(bookAuthors, ({ one }) => ({ + book: one(books, { fields: [bookAuthors.bookId], references: [books.id] }), + author: one(users, { fields: [bookAuthors.authorId], references: [users.id] }), +})); + +export const node = cockroachTable('node', { + id: int4('id').primaryKey(), + parentId: int4('parent_id'), + leftId: int4('left_id'), + rightId: int4('right_id'), +}, (node) => [ + foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), +]); +export const nodeRelations = relations(node, ({ one }) => ({ + parent: one(node, { fields: [node.parentId], references: [node.id] }), + left: one(node, { fields: [node.leftId], references: [node.id] }), + right: one(node, { fields: [node.rightId], references: [node.id] }), +})); diff --git a/drizzle-orm/type-tests/cockroach/tables.ts b/drizzle-orm/type-tests/cockroach/tables.ts new file mode 100644 index 0000000000..39526ef28a --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/tables.ts @@ -0,0 +1,1423 @@ +import crypto from 'node:crypto'; +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { z } from 'zod'; +import { + bigint, + bit, + bool, + char, + check, + type CockroachColumn, + cockroachEnum, + cockroachTable, + type CockroachTableWithColumns, + customType, + date, + decimal, + doublePrecision, + foreignKey, + geometry, + index, + inet, + int2, + int4, + int8, + jsonb, + numeric, + primaryKey, + real, + smallint, + string, + text, + time, + timestamp, + uniqueIndex, + uuid, + varchar, + vector, +} from '~/cockroach-core/index.ts'; +import { cockroachSchema } from '~/cockroach-core/schema.ts'; +import { + cockroachMaterializedView, + type CockroachMaterializedViewWithSelection, + cockroachView, + type CockroachViewWithSelection, +} from '~/cockroach-core/view.ts'; +import { eq, gt } from '~/sql/expressions/index.ts'; +import { sql } from '~/sql/sql.ts'; +import type { InferInsertModel, InferSelectModel } from '~/table.ts'; +import type { Simplify } from '~/utils.ts'; +import { db } from './db.ts'; + +export const myEnum = cockroachEnum('my_enum', ['a', 'b', 'c']); + +export const identityColumnsTable = cockroachTable('identity_columns_table', { + generatedCol: int4('generated_col').generatedAlwaysAs(1), + alwaysAsIdentity: int4('always_as_identity').generatedAlwaysAsIdentity(), + byDefaultAsIdentity: int4('by_default_as_identity').generatedByDefaultAsIdentity(), + name: text('name'), +}); + +Expect, typeof identityColumnsTable['$inferSelect']>>; +Expect, typeof identityColumnsTable['$inferInsert']>>; +Expect< + Equal< + InferInsertModel, + Simplify + > +>; + +export const users = cockroachTable( + 'users_table', + { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + uuid: uuid('uuid').defaultRandom().notNull(), + homeCity: int4('home_city') + .notNull() + .references(() => cities.id), + currentCity: int4('current_city').references(() => cities.id), + int4Nullable: int4('int41'), + int4NotNull: int4('int42').generatedAlwaysAsIdentity(), + class: text('class', { enum: ['A', 'C'] }).notNull(), + subClass: text('sub_class', { enum: ['B', 'D'] }), + text: text('text'), + age1: int4('age1').notNull(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + enumCol: myEnum('enum_col').notNull(), + arrayCol: text('array_col').array().notNull(), + }, + (users) => [ + uniqueIndex('usersAge1Idx').on(users.class.asc(), sql``), + index('usersAge2Idx').on(sql``), + uniqueIndex('uniqueClass') + .using('btree', users.class.desc(), users.subClass) + .where(sql`${users.class} is not null`), + check('legalAge', sql`${users.age1} > 18`), + foreignKey({ columns: [users.subClass], foreignColumns: [classes.subClass] }) + .onUpdate('cascade') + .onDelete('cascade'), + foreignKey({ + columns: [users.class, users.subClass], + foreignColumns: [classes.class, classes.subClass], + }), + primaryKey({ columns: [users.age1, users.class] }), + ], +); + +Expect, typeof users['$inferSelect']>>; +Expect, typeof users['$inferInsert']>>; + +export const cities = cockroachTable('cities_table', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + name: text('name').notNull(), + population: int4('population').default(0), +}, (cities) => [index().on(cities.id)]); + +export const smallintTest = cockroachTable('cities_table', { + id: smallint('id').primaryKey(), + name: text('name').notNull(), + population: int4('population').default(0), +}); + +Expect< + Equal<{ + id: number; + name: string; + population?: number | null; + }, typeof smallintTest.$inferInsert> +>; + +export const classes = cockroachTable('classes_table', { + id: int4('id').primaryKey(), + class: text('class', { enum: ['A', 'C'] }), + subClass: text('sub_class', { enum: ['B', 'D'] }).notNull(), +}); + +Expect< + Equal<{ + id: number; + class?: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }, typeof classes.$inferInsert> +>; + +export const network = cockroachTable('network_table', { + inet: inet('inet').notNull(), +}); + +Expect< + Equal<{ + inet: string; + }, typeof network.$inferSelect> +>; + +export const salEmp = cockroachTable('sal_emp', { + name: text('name').notNull(), + payByQuarter: int4('pay_by_quarter').array().notNull(), + schedule: text('schedule').array().notNull(), +}); + +export const customSchema = cockroachSchema('custom'); + +export const citiesCustom = customSchema.table('cities_table', { + id: int4('id').primaryKey(), + name: text('name').notNull(), + population: int4('population').default(0), +}, (cities) => [index().on(cities.id)]); + +export const newYorkers = cockroachView('new_yorkers') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + +Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachViewWithSelection<'new_yorkers', false, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: true; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: 'always'; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: false; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: 'always'; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > +>; + +{ + const newYorkers = customSchema.view('new_yorkers') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachViewWithSelection<'new_yorkers', false, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: true; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: 'always'; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: false; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: 'always'; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > + >; +} + +{ + const newYorkers = cockroachView('new_yorkers', { + userId: int4('user_id').notNull(), + cityId: int4('city_id'), + }) + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachViewWithSelection<'new_yorkers', false, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'user_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'city_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + notNull: false; + hasDefault: false; + data: number; + driverParam: string | number; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > + >; +} + +{ + const newYorkers = customSchema.view('new_yorkers', { + userId: int4('user_id').notNull(), + cityId: int4('city_id'), + }) + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachViewWithSelection<'new_yorkers', false, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'user_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'city_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + notNull: false; + hasDefault: false; + data: number; + driverParam: string | number; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > + >; +} + +{ + const newYorkers = cockroachView('new_yorkers', { + userId: int4('user_id').notNull(), + cityId: int4('city_id'), + }).existing(); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachViewWithSelection<'new_yorkers', true, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'user_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'city_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + notNull: false; + hasDefault: false; + data: number; + driverParam: string | number; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > + >; +} + +{ + const newYorkers = customSchema.view('new_yorkers', { + userId: int4('user_id').notNull(), + cityId: int4('city_id'), + }).existing(); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachViewWithSelection<'new_yorkers', true, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'user_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'city_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + notNull: false; + hasDefault: false; + data: number; + driverParam: string | number; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > + >; +} + +export const newYorkers2 = cockroachMaterializedView('new_yorkers') + .withNoData() + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + +Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachMaterializedViewWithSelection<'new_yorkers', false, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: true; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: 'always'; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: false; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: 'always'; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers2 + > +>; + +{ + const newYorkers2 = customSchema.materializedView('new_yorkers') + .withNoData() + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachMaterializedViewWithSelection<'new_yorkers', false, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: true; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: 'always'; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: false; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: 'always'; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers2 + > + >; +} + +{ + const newYorkers2 = cockroachMaterializedView('new_yorkers', { + userId: int4('user_id').notNull(), + cityId: int4('city_id'), + }) + .withNoData() + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachMaterializedViewWithSelection<'new_yorkers', false, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'user_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'city_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + notNull: false; + hasDefault: false; + data: number; + driverParam: string | number; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers2 + > + >; +} + +{ + const newYorkers2 = customSchema.materializedView('new_yorkers', { + userId: int4('user_id').notNull(), + cityId: int4('city_id'), + }) + .withNoData() + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachMaterializedViewWithSelection<'new_yorkers', false, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'user_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'city_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + notNull: false; + hasDefault: false; + data: number; + driverParam: string | number; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers2 + > + >; +} + +{ + const newYorkers2 = cockroachMaterializedView('new_yorkers', { + userId: int4('user_id').notNull(), + cityId: int4('city_id'), + }).existing(); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachMaterializedViewWithSelection<'new_yorkers', true, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'user_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'city_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + notNull: false; + hasDefault: false; + data: number; + driverParam: string | number; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers2 + > + >; +} + +{ + const newYorkers2 = customSchema.materializedView('new_yorkers', { + userId: int4('user_id').notNull(), + cityId: int4('city_id'), + }).existing(); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + CockroachMaterializedViewWithSelection<'new_yorkers', true, { + userId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'user_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + cityId: CockroachColumn<{ + tableName: 'new_yorkers'; + name: 'city_id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + notNull: false; + hasDefault: false; + data: number; + driverParam: string | number; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers2 + > + >; +} + +await db.refreshMaterializedView(newYorkers2).concurrently(); +await db.refreshMaterializedView(newYorkers2).withNoData(); +await db.refreshMaterializedView(newYorkers2).concurrently().withNoData(); +await db.refreshMaterializedView(newYorkers2).withNoData().concurrently(); + +// await migrate(db, { +// migrationsFolder: './drizzle/cockroach', +// onMigrationError(error) { +// if (['0001_drizli_klaud', '0002_beep_boop'].includes(error.migration.name)) { +// return; +// } +// throw error; +// }, +// }); + +{ + const customTextRequired = customType<{ + data: string; + driverData: string; + config: { length: number }; + configRequired: true; + }>({ + dataType(config) { + Expect>; + return `varchar(${config.length})`; + }, + + toDriver(value) { + Expect>(); + return value; + }, + + fromDriver(value) { + Expect>(); + return value; + }, + }); + + customTextRequired('t', { length: 10 }); + customTextRequired({ length: 10 }); + // @ts-expect-error - config is required + customTextRequired('t'); + // @ts-expect-error - config is required + customTextRequired(); +} + +{ + const customTextOptional = customType<{ + data: string; + driverData: string; + config: { length: number }; + }>({ + dataType(config) { + Expect>; + return config ? `varchar(${config.length})` : `text`; + }, + + toDriver(value) { + Expect>(); + return value; + }, + + fromDriver(value) { + Expect>(); + return value; + }, + }); + + customTextOptional('t', { length: 10 }); + customTextOptional('t'); + customTextOptional({ length: 10 }); + customTextOptional(); +} + +{ + const cities1 = cockroachTable('cities_table', { + id: int4('id').primaryKey(), + name: text('name').notNull().primaryKey(), + role: text('role', { enum: ['admin', 'user'] }).default('user').notNull(), + role1: string('role1', { enum: ['admin', 'user'], length: 200 }).default('user').notNull(), + population: int4('population').default(0), + }); + const cities2 = cockroachTable('cities_table', ({ int4, text }) => ({ + id: int4('id').primaryKey(), + name: text('name').notNull().primaryKey(), + role: text('role', { enum: ['admin', 'user'] }).default('user').notNull(), + role1: string('role1', { enum: ['admin', 'user'], length: 200 }).default('user').notNull(), + population: int4('population').default(0), + })); + + type Expected = CockroachTableWithColumns<{ + name: 'cities_table'; + schema: undefined; + dialect: 'cockroach'; + columns: { + id: CockroachColumn<{ + tableName: 'cities_table'; + name: 'id'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + hasDefault: false; + notNull: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + name: CockroachColumn< + { + tableName: 'cities_table'; + name: 'name'; + dataType: 'string'; + columnType: 'CockroachString'; + data: string; + driverParam: string; + hasDefault: false; + enumValues: [string, ...string[]]; + notNull: true; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + } + >; + role: CockroachColumn< + { + tableName: 'cities_table'; + name: 'role'; + dataType: 'string'; + columnType: 'CockroachString'; + data: 'admin' | 'user'; + driverParam: string; + hasDefault: true; + enumValues: ['admin', 'user']; + notNull: true; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + } + >; + role1: CockroachColumn< + { + tableName: 'cities_table'; + name: 'role1'; + dataType: 'string'; + columnType: 'CockroachString'; + data: 'admin' | 'user'; + driverParam: string; + hasDefault: true; + enumValues: ['admin', 'user']; + notNull: true; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + } + >; + population: CockroachColumn<{ + tableName: 'cities_table'; + name: 'population'; + dataType: 'number'; + columnType: 'CockroachInteger'; + data: number; + driverParam: string | number; + notNull: false; + hasDefault: true; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + }>; + }; + }>; + + // @ts-ignore - TODO: Remake type checks for new columns + Expect>; + // @ts-ignore - TODO: Remake type checks for new columns + Expect>; +} + +{ + cockroachTable('test', { + bigint: bigint('bigint', { mode: 'bigint' }).default(BigInt(10)), + bigintNumber: bigint('bigintNumber', { mode: 'number' }), + timestamp: timestamp('timestamp').default(new Date()), + timestamp2: timestamp('timestamp2', { mode: 'date' }).default(new Date()), + timestamp3: timestamp('timestamp3', { mode: undefined }).default(new Date()), + timestamp4: timestamp('timestamp4', { mode: 'string' }).default('2020-01-01'), + }); +} + +{ + const test = cockroachTable('test', { + col1: decimal('col1', { precision: 10, scale: 2 }).notNull().default('10.2'), + }); + Expect>; +} + +{ + const a = ['a', 'b', 'c'] as const; + const b = cockroachEnum('test', a); + z.enum(b.enumValues); +} + +{ + const b = cockroachEnum('test', ['a', 'b', 'c']); + z.enum(b.enumValues); +} + +{ + const getUsersTable = (schemaName: TSchema) => { + return cockroachSchema(schemaName).table('users', { + id: int4('id').primaryKey(), + name: text('name').notNull(), + }); + }; + + const users1 = getUsersTable('id1'); + Expect>; + + const users2 = getUsersTable('id2'); + Expect>; +} + +{ + const internalStaff = cockroachTable('internal_staff', { + userId: int4('user_id').notNull(), + }); + + const customUser = cockroachTable('custom_user', { + id: int4('id').notNull(), + }); + + const ticket = cockroachTable('ticket', { + staffId: int4('staff_id').notNull(), + }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin( + customUser, + eq(internalStaff.userId, customUser.id), + ).as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + Expect< + Equal<{ + internal_staff: { + internal_staff: { + userId: number; + }; + custom_user: { + id: number | null; + }; + } | null; + ticket: { + staffId: number; + }; + }[], typeof mainQuery> + >; +} + +{ + const newYorkers = cockroachView('new_yorkers') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + await db.select().from(newYorkers).leftJoin(newYorkers, eq(newYorkers.userId, newYorkers.userId)); +} + +{ + const testSchema = cockroachSchema('test'); + + const e1 = cockroachEnum('test', ['a', 'b', 'c']); + const e2 = cockroachEnum('test', ['a', 'b', 'c'] as const); + const e3 = testSchema.enum('test', ['a', 'b', 'c']); + const e4 = testSchema.enum('test', ['a', 'b', 'c'] as const); + + const test = cockroachTable('test', { + col1: char('col1', { enum: ['a', 'b', 'c'] as const }), + col2: char('col2', { enum: ['a', 'b', 'c'] }), + col3: char('col3'), + col4: e1('col4'), + col5: e2('col5'), + col6: text('col6', { enum: ['a', 'b', 'c'] as const }), + col7: text('col7', { enum: ['a', 'b', 'c'] }), + col8: text('col8'), + col9: varchar('col9', { enum: ['a', 'b', 'c'] as const }), + col10: varchar('col10', { enum: ['a', 'b', 'c'] }), + col11: varchar('col11'), + col12: e3('col4'), + col13: e4('col5'), + }); + + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; +} + +{ + const testSchema = cockroachSchema('test'); + + const e1 = cockroachEnum('test', ['a', 'b', 'c']); + const e2 = cockroachEnum('test', ['a', 'b', 'c'] as const); + const e3 = testSchema.enum('test', ['a', 'b', 'c']); + const e4 = testSchema.enum('test', ['a', 'b', 'c'] as const); + + const test = cockroachTable('test', { + col1: char('col1', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + col2: char('col2', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + col3: char('col3').generatedAlwaysAs(sql``), + col4: e1('col4').generatedAlwaysAs(sql``), + col5: e2('col5').generatedAlwaysAs(sql``), + col6: text('col6', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + col7: text('col7', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + col8: text('col8').generatedAlwaysAs(sql``), + col9: varchar('col9', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), + col10: varchar('col10', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), + col11: varchar('col11').generatedAlwaysAs(sql``), + col12: e3('col4').generatedAlwaysAs(sql``), + col13: e4('col5').generatedAlwaysAs(sql``), + }); + + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; +} + +{ + const test = cockroachTable('test', { + id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), + }); + + Expect< + Equal<{ + id?: string; + }, typeof test.$inferInsert> + >; +} + +{ + cockroachTable('test', { + id: int4('id').$default(() => 1), + id2: int4('id').$defaultFn(() => 1), + // @ts-expect-error - should be number + id3: int4('id').$default(() => '1'), + // @ts-expect-error - should be number + id4: int4('id').$defaultFn(() => '1'), + }); +} + +{ + const enum_ = cockroachEnum('enum', ['a', 'b', 'c']); + + cockroachTable('all_columns', { + enum: enum_('enum'), + enumdef: enum_('enumdef').default('a'), + sm: smallint('smallint'), // same as int2 + smdef: smallint('smallint_def').default(10), // same as int2 + int2col: int2('int2col'), + int2colDef: int2('int2col_dev').default(10), + int: int4('int4'), + intdef: int4('int4_def').default(10), + numeric: numeric('numeric'), + numeric2: numeric('numeric2', { precision: 5 }), + numeric3: numeric('numeric3', { scale: 2 }), + numeric4: numeric('numeric4', { precision: 5, scale: 2 }), + numericdef: numeric('numeridef').default('100'), + bigint: bigint('bigint', { mode: 'number' }), + bigintdef: bigint('bigintdef', { mode: 'number' }).default(100), + bool: bool('boolean'), + booldef: bool('boolean_def').default(true), + text: text('text'), + textdef: text('textdef').default('text'), + varchar: varchar('varchar'), + varchardef: varchar('varchardef').default('text'), + int4: int4('int4'), + decimal: decimal('decimal', { precision: 100, scale: 2 }), + decimaldef: decimal('decimaldef', { precision: 100, scale: 2 }).default('100.0'), + doublePrecision: doublePrecision('doublePrecision'), + doublePrecisiondef: doublePrecision('doublePrecisiondef').default(100), + real: real('real'), + realdef: real('realdef').default(100), + jsonb: jsonb('jsonb').$type<{ attr: string }>(), + jsonbdef: jsonb('jsonbdef').$type<{ attr: string }>().default({ attr: 'value' }), + time: time('time'), + time2: time('time2', { precision: 6, withTimezone: true }), + timedef: time('timedef').default('00:00:00'), + timestamp: timestamp('timestamp'), + timestamp2: timestamp('timestamp2', { precision: 6, withTimezone: true }), + timestamp3: timestamp('timestamp3', { withTimezone: true }), + timestamp4: timestamp('timestamp4', { precision: 4 }), + timestampdef: timestamp('timestampdef').default(new Date()), + date: date('date', { mode: 'date' }), + datedef: date('datedef').default('2024-01-01'), + datedefnow: date('datedefnow').defaultNow(), + }); + + cockroachTable('all_postgis_columns', { + geometry: geometry('geometry'), + geometry2: geometry('geometry2', { srid: 2, mode: 'xy' }), + geometry3: geometry('geometry3', { srid: 3, mode: 'tuple' }), + geometry4: geometry('geometry4', { mode: 'tuple' }), + geometrydef: geometry('geometrydef').default([1, 2]), + }); + + cockroachTable('all_vector_columns', { + bit: bit('bit', { length: 1 }), + bitdef: bit('bitdef', { length: 1 }).default('1'), + vector: vector('vector', { dimensions: 1 }), + vectordef: vector('vectordef', { dimensions: 1 }).default([1]), + }); +} + +{ + const keysAsColumnNames = cockroachTable('test', { + id: int4(), + name: text(), + }); + + Expect>; + Expect>; +} + +{ + const enum_ = cockroachEnum('enum', ['a', 'b', 'c']); + + cockroachTable('all_columns_without_name', { + enum: enum_(), + enumdef: enum_().default('a'), + sm: smallint(), + smdef: smallint().default(10), + int: int4(), + intdef: int4().default(10), + numeric: numeric(), + numeric2: numeric({ precision: 5 }), + numeric3: numeric({ scale: 2 }), + numeric4: numeric({ precision: 5, scale: 2 }), + numericdef: numeric().default('100'), + bigint: bigint({ mode: 'number' }), + bigintdef: bigint({ mode: 'number' }).default(100), + int8column: int8({ mode: 'number' }), + int8columndef: int8({ mode: 'number' }).default(100), + bool: bool(), + booldef: bool().default(true), + text: text(), + textdef: text().default('text'), + varchar: varchar(), + varchardef: varchar().default('text'), + int4: int4(), + decimal: decimal({ precision: 100, scale: 2 }), + decimaldef: decimal({ precision: 100, scale: 2 }).default('100.0'), + doublePrecision: doublePrecision(), + doublePrecisiondef: doublePrecision().default(100), + real: real(), + realdef: real().default(100), + jsonb: jsonb().$type<{ attr: string }>(), + jsonbdef: jsonb().$type<{ attr: string }>().default({ attr: 'value' }), + time: time(), + time2: time({ precision: 6, withTimezone: true }), + timedef: time().default('00:00:00'), + timedefnow: time(), + timestamp: timestamp(), + timestamp2: timestamp({ precision: 6, withTimezone: true }), + timestamp3: timestamp({ withTimezone: true }), + timestamp4: timestamp({ precision: 4 }), + timestampdef: timestamp().default(new Date()), + date: date({ mode: 'date' }), + datedef: date().default('2024-01-01'), + datedefnow: date().defaultNow(), + }); + + cockroachTable('all_postgis_columns', { + geometry: geometry(), + geometry2: geometry({ srid: 2, mode: 'xy' }), + geometry3: geometry({ srid: 3, mode: 'tuple' }), + geometry4: geometry({ mode: 'tuple' }), + geometrydef: geometry().default([1, 2]), + }); + + cockroachTable('all_vector_columns', { + bit: bit({ length: 1 }), + bitdef: bit({ length: 1 }).default('1'), + vector: vector({ dimensions: 1 }), + vectordef: vector({ dimensions: 1 }).default([1]), + }); +} + +// ts enums test +{ + enum Role { + admin = 'admin', + user = 'user', + guest = 'guest', + } + + const role = cockroachEnum('role', Role); + + enum RoleNonString { + admin, + user, + guest, + } + + // @ts-expect-error + cockroachEnum('role', RoleNonString); + + enum RolePartiallyString { + admin, + user = 'user', + guest = 'guest', + } + + // @ts-expect-error + cockroachEnum('role', RolePartiallyString); + + const table = cockroachTable('table', { + enum: role('enum'), + }); + + const res = await db.select().from(table); + + Expect>; + + const mySchema = cockroachSchema('my_schema'); + + const schemaRole = mySchema.enum('role', Role); + + // @ts-expect-error + mySchema.enum('role', RoleNonString); + + // @ts-expect-error + mySchema.enum('role', RolePartiallyString); + + const schemaTable = mySchema.table('table', { + enum: schemaRole('enum'), + }); + + const schemaRes = await db.select().from(schemaTable); + + Expect>; +} diff --git a/drizzle-orm/type-tests/cockroach/update.ts b/drizzle-orm/type-tests/cockroach/update.ts new file mode 100644 index 0000000000..25d5e6e776 --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/update.ts @@ -0,0 +1,278 @@ +import type { QueryResult } from 'pg'; +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import type { CockroachUpdate } from '~/cockroach-core/index.ts'; +import { eq } from '~/sql/expressions/index.ts'; +import { sql } from '~/sql/sql.ts'; +import type { Simplify } from '~/utils.ts'; +import { db } from './db.ts'; +import { cities, salEmp, users } from './tables.ts'; + +const update = await db.update(users) + .set({ + text: 'John', + age1: 30, + }) + .where(eq(users.id, 1)); +Expect, typeof update>>; + +const updateStmt = db.update(users) + .set({ + text: 'John', + age1: 30, + }) + .where(eq(users.id, 1)) + .prepare('updateStmt'); +const updatePrepared = await updateStmt.execute(); +Expect, typeof updatePrepared>>; + +const updateReturning = await db.update(users) + .set({ + text: 'John', + age1: 30, + }) + .where(eq(users.id, 1)) + .returning({ + text: users.text, + }); +Expect>; + +const updateReturningStmt = db.update(users) + .set({ + text: 'John', + age1: 30, + }) + .where(eq(users.id, 1)) + .returning({ + text: users.text, + }) + .prepare('updateReturningStmt'); +const updateReturningPrepared = await updateReturningStmt.execute(); +Expect>; + +{ + function dynamic(qb: T) { + return qb.where(sql``).returning(); + } + + const qbBase = db.update(users).set({}).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + function withReturning(qb: T) { + return qb.returning(); + } + + const qbBase = db.update(users).set({}).$dynamic(); + const qb = withReturning(qbBase); + const result = await qb; + Expect>; +} + +{ + db + .update(users) + .set({}) + .returning() + // @ts-expect-error method was already called + .returning(); + + db + .update(users) + .set({}) + .where(sql``) + // @ts-expect-error method was already called + .where(sql``); +} + +{ + db + .update(users) + .set({}) + .from(sql``) + .leftJoin(sql``, (table, from) => { + Expect>; + Expect>; + return sql``; + }); + + db + .update(users) + .set({}) + .from(cities) + .leftJoin(sql``, (table, from) => { + Expect>; + Expect>; + return sql``; + }); + + const citiesSq = db.$with('cities_sq').as(db.select({ id: cities.id }).from(cities)); + + db + .with(citiesSq) + .update(users) + .set({}) + .from(citiesSq) + .leftJoin(sql``, (table, from) => { + Expect>; + Expect>; + return sql``; + }); + + db + .with(citiesSq) + .update(users) + .set({ + homeCity: citiesSq.id, + }) + .from(citiesSq); +} + +{ + const result = await db.update(users).set({}).from(cities).returning(); + Expect< + Equal[], typeof result> + >; +} + +{ + const result1 = await db.update(users).set({}).from(cities).leftJoin(salEmp, sql``).returning(); + Expect< + Equal[], typeof result1> + >; + + const result2 = await db.update(users).set({}).from(cities).rightJoin(salEmp, sql``).returning(); + Expect< + Equal[], typeof result2> + >; + + const result3 = await db.update(users).set({}).from(cities).innerJoin(salEmp, sql``).returning(); + Expect< + Equal[], typeof result3> + >; + + const result4 = await db.update(users).set({}).from(cities).fullJoin(salEmp, sql``).returning(); + Expect< + Equal[], typeof result4> + >; +} + +{ + const result = await db.update(users).set({}).from(cities).returning({ + id: users.id, + cities: cities, + cityName: cities.name, + }); + Expect< + Equal[], typeof result> + >; +} + +{ + const result1 = await db.update(users).set({}).from(cities).leftJoin(salEmp, sql``).returning({ + id: users.id, + cities: cities, + cityName: cities.name, + salEmp: salEmp, + salEmpName: salEmp.name, + }); + Expect< + Equal[], typeof result1> + >; + + const result2 = await db.update(users).set({}).from(cities).rightJoin(salEmp, sql``).returning({ + id: users.id, + cities: cities, + cityName: cities.name, + salEmp: salEmp, + salEmpName: salEmp.name, + }); + Expect< + Equal[], typeof result2> + >; + + const result3 = await db.update(users).set({}).from(cities).innerJoin(salEmp, sql``).returning({ + id: users.id, + cities: cities, + cityName: cities.name, + salEmp: salEmp, + salEmpName: salEmp.name, + }); + Expect< + Equal[], typeof result3> + >; + + const result4 = await db.update(users).set({}).from(cities).fullJoin(salEmp, sql``).returning({ + id: users.id, + cities: cities, + cityName: cities.name, + salEmp: salEmp, + salEmpName: salEmp.name, + }); + Expect< + Equal[], typeof result4> + >; +} + +{ + await db + .update(users) + .set({}) + // @ts-expect-error can't use joins before from + .fullJoin(salEmp, sql``); +} diff --git a/drizzle-orm/type-tests/cockroach/with.ts b/drizzle-orm/type-tests/cockroach/with.ts new file mode 100644 index 0000000000..5a962d5851 --- /dev/null +++ b/drizzle-orm/type-tests/cockroach/with.ts @@ -0,0 +1,329 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { cockroachTable, int4, text } from '~/cockroach-core/index.ts'; +import { gt, inArray, like } from '~/sql/expressions/index.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; + +{ + const orders = cockroachTable('orders', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: int4('amount').notNull(), + quantity: int4('quantity').notNull(), + generated: text('generatedText').generatedAlwaysAs(sql``), + }); + + const regionalSales = db + .$with('regional_sales') + .as((qb) => + qb + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region) + ); + + const topRegions = db + .$with('top_regions') + .as((qb) => + qb + .select({ + region: orders.region, + totalSales: orders.amount, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ) + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})`, + productSales: sql`sum(${orders.amount})`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))); + + Expect< + Equal<{ + region: string; + product: string; + productUnits: number; + productSales: number; + }[], typeof result> + >; + + const allOrdersWith = db.$with('all_orders_with').as(db.select().from(orders)); + const allFromWith = await db.with(allOrdersWith).select().from(allOrdersWith); + + Expect< + Equal<{ + id: number; + region: string; + product: string; + amount: number; + quantity: number; + generated: string | null; + }[], typeof allFromWith> + >; + + const regionalSalesWith = db.$with('regional_sales_with').as(db.select().from(regionalSales)); + db.with(regionalSalesWith).select().from(regionalSalesWith).where(like(regionalSalesWith.totalSales, 'abc')); +} + +{ + const providers = cockroachTable('providers', { + id: int4().primaryKey().generatedAlwaysAsIdentity(), + providerName: text().notNull(), + }); + const products = cockroachTable('products', { + id: int4().primaryKey().generatedAlwaysAsIdentity(), + productName: text().notNull(), + }); + + const sq1 = db.$with('inserted_products').as( + db.insert(products).values({ productName: sql`` }), + ); + const sq2 = db.$with('inserted_products').as( + db.insert(products).values({ productName: sql`` }).returning(), + ); + const sq3 = db.$with('inserted_products').as( + db.insert(products).values({ productName: sql`` }).returning({ productName: products.productName }), + ); + + // @ts-expect-error + db.with(sq1).select().from(sq1); + // @ts-expect-error + db.with(sq1).select().from(providers).leftJoin(sq1, sql``); + + const q3 = await db.with(sq2).select().from(sq2); + Expect< + Equal + >; + const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); + Expect< + Equal + >; + + const q5 = await db.with(sq3).select().from(sq3); + Expect>; + const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); + Expect< + Equal< + typeof q6, + { providers: { id: number; providerName: string }; inserted_products: { productName: string } | null }[] + > + >; +} + +{ + const providers = cockroachTable('providers', { + id: int4().primaryKey(), + providerName: text().notNull(), + }); + const products = cockroachTable('products', { + id: int4().primaryKey(), + productName: text().notNull(), + }); + const otherProducts = cockroachTable('other_products', { + id: int4().primaryKey(), + productName: text().notNull(), + }); + + const sq1 = db.$with('updated_products').as( + db.update(products).set({ productName: sql`` }), + ); + const sq2 = db.$with('updated_products').as( + db.update(products).set({ productName: sql`` }).returning(), + ); + const sq3 = db.$with('updated_products').as( + db.update(products).set({ productName: sql`` }).returning({ productName: products.productName }), + ); + const sq4 = db.$with('updated_products').as( + db.update(products).set({ productName: sql`` }).from(otherProducts).returning(), + ); + + // @ts-expect-error + db.with(sq1).select().from(sq1); + // @ts-expect-error + db.with(sq1).select().from(providers).leftJoin(sq1, sql``); + + const q3 = await db.with(sq2).select().from(sq2); + Expect< + Equal + >; + const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); + Expect< + Equal + >; + + const q5 = await db.with(sq3).select().from(sq3); + Expect< + Equal + >; + const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); + Expect< + Equal + >; + + const q7 = await db.with(sq4).select().from(sq4); + Expect< + Equal + >; + const q8 = await db.with(sq4).select().from(providers).leftJoin(sq4, sql``); + Expect< + Equal + >; +} + +{ + const providers = cockroachTable('providers', { + id: int4().primaryKey(), + providerName: text().notNull(), + }); + const products = cockroachTable('products', { + id: int4().primaryKey(), + productName: text().notNull(), + }); + + const sq1 = db.$with('inserted_products').as( + db.delete(products), + ); + const sq2 = db.$with('inserted_products').as( + db.delete(products).returning(), + ); + const sq3 = db.$with('inserted_products').as( + db.delete(products).returning({ productName: products.productName }), + ); + + // @ts-expect-error + db.with(sq1).select().from(sq1); + // @ts-expect-error + db.with(sq1).select().from(providers).leftJoin(sq1, sql``); + + const q3 = await db.with(sq2).select().from(sq2); + Expect< + Equal + >; + const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); + Expect< + Equal + >; + + const q5 = await db.with(sq3).select().from(sq3); + Expect>; + const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); + Expect< + Equal< + typeof q6, + { providers: { id: number; providerName: string }; inserted_products: { productName: string } | null }[] + > + >; +} + +{ + const providers = cockroachTable('providers', { + id: int4().primaryKey(), + providerName: text().notNull(), + }); + + const sq1 = db.$with('providers_sq', { + name: providers.providerName, + }).as(sql`select provider_name as name from providers`); + const q1 = await db.with(sq1).select().from(sq1); + Expect>; + + const sq2 = db.$with('providers_sq', { + nested: { + id: providers.id, + }, + }).as(() => sql`select id from providers`); + const q2 = await db.with(sq2).select().from(sq2); + Expect>; + + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); +} diff --git a/drizzle-orm/type-tests/common/aliased-table.ts b/drizzle-orm/type-tests/common/aliased-table.ts index 9c2be8c5fa..3c21bd8dbf 100644 --- a/drizzle-orm/type-tests/common/aliased-table.ts +++ b/drizzle-orm/type-tests/common/aliased-table.ts @@ -1,4 +1,7 @@ import { type Equal, Expect } from 'type-tests/utils.ts'; +import { alias as cockroachAliasFn } from '~/cockroach-core/alias.ts'; +import { cockroachView } from '~/cockroach-core/view.ts'; +import { drizzle as cockroachd } from '~/cockroach/index.ts'; import { eq } from '~/index.ts'; import { drizzle as sqlited } from '~/libsql/index.ts'; import { alias as mysqlAliasFn } from '~/mysql-core/alias.ts'; @@ -9,6 +12,7 @@ import { pgView } from '~/pg-core/view.ts'; import { drizzle as pgd } from '~/postgres-js/index.ts'; import { alias as sqliteAliasFn } from '~/sqlite-core/alias.ts'; import { sqliteView } from '~/sqlite-core/view.ts'; +import { users as cockroachUsers } from '../cockroach/tables.ts'; import { users as mysqlUsers } from '../mysql/tables.ts'; import { users as pgUsers } from '../pg/tables.ts'; import { users as sqliteUsers } from '../sqlite/tables.ts'; @@ -16,24 +20,36 @@ import { users as sqliteUsers } from '../sqlite/tables.ts'; const pg = pgd.mock(); const sqlite = sqlited.mock(); const mysql = mysqld.mock(); +const cockroach = cockroachd.mock(); const pgvUsers = pgView('users_view').as((qb) => qb.select().from(pgUsers)); +const cockroachvUsers = cockroachView('users_view').as((qb) => qb.select().from(cockroachUsers)); const sqlitevUsers = sqliteView('users_view').as((qb) => qb.select().from(sqliteUsers)); const mysqlvUsers = mysqlView('users_view').as((qb) => qb.select().from(mysqlUsers)); const pgAlias = pgAliasFn(pgUsers, 'usersAlias'); +const cockroachAlias = cockroachAliasFn(cockroachUsers, 'usersAlias'); const sqliteAlias = sqliteAliasFn(sqliteUsers, 'usersAlias'); const mysqlAlias = mysqlAliasFn(mysqlUsers, 'usersAlias'); const pgvAlias = pgAliasFn(pgvUsers, 'usersvAlias'); +const cockroachvAlias = cockroachAliasFn(cockroachvUsers, 'usersvAlias'); const sqlitevAlias = sqliteAliasFn(sqlitevUsers, 'usersvAlias'); const mysqlvAlias = mysqlAliasFn(mysqlvUsers, 'usersvAlias'); const pgRes = await pg.select().from(pgUsers).leftJoin(pgAlias, eq(pgAlias.id, pgUsers.id)); +const cockroachRes = await cockroach.select().from(cockroachUsers).leftJoin( + cockroachAlias, + eq(pgAlias.id, pgUsers.id), +); const sqliteRes = await sqlite.select().from(sqliteUsers).leftJoin(sqliteAlias, eq(sqliteAlias.id, sqliteUsers.id)); const mysqlRes = await mysql.select().from(mysqlUsers).leftJoin(mysqlAlias, eq(mysqlAlias.id, mysqlUsers.id)); const pgvRes = await pg.select().from(pgUsers).leftJoin(pgvAlias, eq(pgvAlias.id, pgUsers.id)); +const cockroachvRes = await cockroach.select().from(cockroachUsers).leftJoin( + cockroachvAlias, + eq(cockroachvAlias.id, cockroachUsers.id), +); const sqlitevRes = await sqlite.select().from(sqliteUsers).leftJoin(sqlitevAlias, eq(sqlitevAlias.id, sqliteUsers.id)); const mysqlvRes = await mysql.select().from(mysqlUsers).leftJoin(mysqlvAlias, eq(mysqlvAlias.id, mysqlUsers.id)); @@ -72,6 +88,41 @@ Expect< }[]> >; +Expect< + Equal +>; + Expect< Equal >; +Expect< + Equal +>; + Expect< Equal ({ - fk1: foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), - fk2: foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), - fk3: foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), -})); +}, (node) => [ + foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), +]); export const nodeRelations = relations(node, ({ one }) => ({ parent: one(node, { fields: [node.parentId], references: [node.id] }), left: one(node, { fields: [node.leftId], references: [node.id] }), diff --git a/drizzle-orm/type-tests/geldb/tables.ts b/drizzle-orm/type-tests/geldb/tables.ts index 0a64d80f39..c29dcd71c4 100644 --- a/drizzle-orm/type-tests/geldb/tables.ts +++ b/drizzle-orm/type-tests/geldb/tables.ts @@ -106,9 +106,7 @@ export const cities = gelTable('cities_table', { id: integer('id').primaryKey(), name: text('name').notNull(), population: integer('population').default(0), -}, (cities) => ({ - citiesNameIdx: index().on(cities.id), -})); +}, (cities) => [index().on(cities.id)]); export const classes = gelTable('classes_table', { id: integer('id').primaryKey(), diff --git a/drizzle-orm/type-tests/mssql/1-to-1-fk.ts b/drizzle-orm/type-tests/mssql/1-to-1-fk.ts new file mode 100644 index 0000000000..8a712ad2ec --- /dev/null +++ b/drizzle-orm/type-tests/mssql/1-to-1-fk.ts @@ -0,0 +1,14 @@ +import { int } from '~/mssql-core/columns/index.ts'; +import { mssqlTable } from '~/mssql-core/table.ts'; + +const test1 = mssqlTable('test1_table', { + id: int('id').identity().primaryKey(), + test2Id: int('test2_id').references(() => test2.id), +}); + +const test1Id = int('test1_id').references(() => test1.id); + +const test2 = mssqlTable('test2_table', { + id: int('id').identity().primaryKey(), + test1Id, +}); diff --git a/drizzle-orm/type-tests/mssql/1000columns.ts b/drizzle-orm/type-tests/mssql/1000columns.ts new file mode 100644 index 0000000000..5fc3db821f --- /dev/null +++ b/drizzle-orm/type-tests/mssql/1000columns.ts @@ -0,0 +1,904 @@ +import { bigint, int, mssqlTable, varchar } from '~/mssql-core/index.ts'; + +mssqlTable('test', { + col0: int('col1').primaryKey().identity(), + col1: int('col1').primaryKey().identity(), + col2: int('col1').primaryKey().identity(), + col3: int('col1').primaryKey().identity(), + col4: int('col1').primaryKey().identity(), + col5: int('col1').primaryKey().identity(), + col6: int('col1').primaryKey().identity(), + col8: int('col1').primaryKey().identity(), + col9: int('col1').primaryKey().identity(), + col10: int('col1').primaryKey().identity(), + col11: int('col1').primaryKey().identity(), + col12: int('col1').primaryKey().identity(), + col13: int('col1').primaryKey().identity(), + col14: int('col1').primaryKey().identity(), + col15: int('col1').primaryKey().identity(), + col16: int('col1').primaryKey().identity(), + col18: int('col1').primaryKey().identity(), + col19: int('col1').primaryKey().identity(), + col20: int('col1').primaryKey().identity(), + col21: int('col1').primaryKey().identity(), + col22: int('col1').primaryKey().identity(), + col23: int('col1').primaryKey().identity(), + col24: int('col1').primaryKey().identity(), + col25: int('col1').primaryKey().identity(), + col26: int('col1').primaryKey().identity(), + col28: int('col1').primaryKey().identity(), + col29: int('col1').primaryKey().identity(), + col30: int('col1').primaryKey().identity(), + col31: int('col1').primaryKey().identity(), + col32: int('col1').primaryKey().identity(), + col33: int('col1').primaryKey().identity(), + col34: int('col1').primaryKey().identity(), + col35: int('col1').primaryKey().identity(), + col36: int('col1').primaryKey().identity(), + col38: int('col1').primaryKey().identity(), + col39: int('col1').primaryKey().identity(), + col40: int('col1').primaryKey().identity(), + col41: int('col1').primaryKey().identity(), + col42: int('col1').primaryKey().identity(), + col43: int('col1').primaryKey().identity(), + col44: int('col1').primaryKey().identity(), + col45: int('col1').primaryKey().identity(), + col46: int('col1').primaryKey().identity(), + col48: int('col1').primaryKey().identity(), + col49: int('col1').primaryKey().identity(), + col50: int('col1').primaryKey().identity(), + col51: int('col1').primaryKey().identity(), + col52: int('col1').primaryKey().identity(), + col53: int('col1').primaryKey().identity(), + col54: int('col1').primaryKey().identity(), + col55: int('col1').primaryKey().identity(), + col56: int('col1').primaryKey().identity(), + col58: int('col1').primaryKey().identity(), + col59: int('col1').primaryKey().identity(), + col60: int('col1').primaryKey().identity(), + col61: int('col1').primaryKey().identity(), + col62: int('col1').primaryKey().identity(), + col63: int('col1').primaryKey().identity(), + col64: int('col1').primaryKey().identity(), + col65: int('col1').primaryKey().identity(), + col66: int('col1').primaryKey().identity(), + col68: int('col1').primaryKey().identity(), + col69: int('col1').primaryKey().identity(), + col70: int('col1').primaryKey().identity(), + col71: int('col1').primaryKey().identity(), + col72: int('col1').primaryKey().identity(), + col73: int('col1').primaryKey().identity(), + col74: int('col1').primaryKey().identity(), + col75: int('col1').primaryKey().identity(), + col76: int('col1').primaryKey().identity(), + col78: int('col1').primaryKey().identity(), + col79: int('col1').primaryKey().identity(), + col80: int('col1').primaryKey().identity(), + col81: int('col1').primaryKey().identity(), + col82: int('col1').primaryKey().identity(), + col83: int('col1').primaryKey().identity(), + col84: int('col1').primaryKey().identity(), + col85: int('col1').primaryKey().identity(), + col86: int('col1').primaryKey().identity(), + col88: int('col1').primaryKey().identity(), + col89: int('col1').primaryKey().identity(), + col90: int('col1').primaryKey().identity(), + col91: int('col1').primaryKey().identity(), + col92: int('col1').primaryKey().identity(), + col93: int('col1').primaryKey().identity(), + col94: int('col1').primaryKey().identity(), + col95: int('col1').primaryKey().identity(), + col96: int('col1').primaryKey().identity(), + col98: int('col1').primaryKey().identity(), + col99: int('col1').primaryKey().identity(), + col100: int('col1').primaryKey().identity(), + col101: int('col1').primaryKey().identity(), + col102: int('col1').primaryKey().identity(), + col103: int('col1').primaryKey().identity(), + col104: int('col1').primaryKey().identity(), + col105: int('col1').primaryKey().identity(), + col106: int('col1').primaryKey().identity(), + col108: int('col1').primaryKey().identity(), + col109: int('col1').primaryKey().identity(), + col110: int('col11').primaryKey().identity(), + col111: int('col11').primaryKey().identity(), + col112: int('col11').primaryKey().identity(), + col113: int('col11').primaryKey().identity(), + col114: int('col11').primaryKey().identity(), + col115: int('col11').primaryKey().identity(), + col116: int('col11').primaryKey().identity(), + col118: int('col11').primaryKey().identity(), + col119: int('col11').primaryKey().identity(), + col120: int('col11').primaryKey().identity(), + col121: int('col11').primaryKey().identity(), + col122: int('col11').primaryKey().identity(), + col123: int('col11').primaryKey().identity(), + col124: int('col11').primaryKey().identity(), + col125: int('col11').primaryKey().identity(), + col126: int('col11').primaryKey().identity(), + col128: int('col11').primaryKey().identity(), + col129: int('col11').primaryKey().identity(), + col130: int('col11').primaryKey().identity(), + col131: int('col11').primaryKey().identity(), + col132: int('col11').primaryKey().identity(), + col133: int('col11').primaryKey().identity(), + col134: int('col11').primaryKey().identity(), + col135: int('col11').primaryKey().identity(), + col136: int('col11').primaryKey().identity(), + col138: int('col11').primaryKey().identity(), + col139: int('col11').primaryKey().identity(), + col140: int('col11').primaryKey().identity(), + col141: int('col11').primaryKey().identity(), + col142: int('col11').primaryKey().identity(), + col143: int('col11').primaryKey().identity(), + col144: int('col11').primaryKey().identity(), + col145: int('col11').primaryKey().identity(), + col146: int('col11').primaryKey().identity(), + col148: int('col11').primaryKey().identity(), + col149: int('col11').primaryKey().identity(), + col150: int('col11').primaryKey().identity(), + col151: int('col11').primaryKey().identity(), + col152: int('col11').primaryKey().identity(), + col153: int('col11').primaryKey().identity(), + col154: int('col11').primaryKey().identity(), + col155: int('col11').primaryKey().identity(), + col156: int('col11').primaryKey().identity(), + col158: int('col11').primaryKey().identity(), + col159: int('col11').primaryKey().identity(), + col160: int('col11').primaryKey().identity(), + col161: int('col11').primaryKey().identity(), + col162: int('col11').primaryKey().identity(), + col163: int('col11').primaryKey().identity(), + col164: int('col11').primaryKey().identity(), + col165: int('col11').primaryKey().identity(), + col166: int('col11').primaryKey().identity(), + col168: int('col11').primaryKey().identity(), + col169: int('col11').primaryKey().identity(), + col170: int('col11').primaryKey().identity(), + col171: int('col11').primaryKey().identity(), + col172: int('col11').primaryKey().identity(), + col173: int('col11').primaryKey().identity(), + col174: int('col11').primaryKey().identity(), + col175: int('col11').primaryKey().identity(), + col176: int('col11').primaryKey().identity(), + col178: int('col11').primaryKey().identity(), + col179: int('col11').primaryKey().identity(), + col180: int('col11').primaryKey().identity(), + col181: int('col11').primaryKey().identity(), + col182: int('col11').primaryKey().identity(), + col183: int('col11').primaryKey().identity(), + col184: int('col11').primaryKey().identity(), + col185: int('col11').primaryKey().identity(), + col186: int('col11').primaryKey().identity(), + col188: int('col11').primaryKey().identity(), + col189: int('col11').primaryKey().identity(), + col190: int('col11').primaryKey().identity(), + col191: int('col11').primaryKey().identity(), + col192: int('col11').primaryKey().identity(), + col193: int('col11').primaryKey().identity(), + col194: int('col11').primaryKey().identity(), + col195: int('col11').primaryKey().identity(), + col196: int('col11').primaryKey().identity(), + col198: int('col11').primaryKey().identity(), + col199: int('col11').primaryKey().identity(), + col200: int('col2').primaryKey().identity(), + col201: int('col2').primaryKey().identity(), + col202: int('col2').primaryKey().identity(), + col203: int('col2').primaryKey().identity(), + col204: int('col2').primaryKey().identity(), + col205: int('col2').primaryKey().identity(), + col206: int('col2').primaryKey().identity(), + col208: int('col2').primaryKey().identity(), + col209: int('col2').primaryKey().identity(), + col210: int('col21').primaryKey().identity(), + col211: int('col21').primaryKey().identity(), + col212: int('col21').primaryKey().identity(), + col213: int('col21').primaryKey().identity(), + col214: int('col21').primaryKey().identity(), + col215: int('col21').primaryKey().identity(), + col216: int('col21').primaryKey().identity(), + col218: int('col21').primaryKey().identity(), + col219: int('col21').primaryKey().identity(), + col220: int('col21').primaryKey().identity(), + col221: int('col21').primaryKey().identity(), + col222: int('col21').primaryKey().identity(), + col223: int('col21').primaryKey().identity(), + col224: int('col21').primaryKey().identity(), + col225: int('col21').primaryKey().identity(), + col226: int('col21').primaryKey().identity(), + col228: int('col21').primaryKey().identity(), + col229: int('col21').primaryKey().identity(), + col230: int('col21').primaryKey().identity(), + col231: int('col21').primaryKey().identity(), + col232: int('col21').primaryKey().identity(), + col233: int('col21').primaryKey().identity(), + col234: int('col21').primaryKey().identity(), + col235: int('col21').primaryKey().identity(), + col236: int('col21').primaryKey().identity(), + col238: int('col21').primaryKey().identity(), + col239: int('col21').primaryKey().identity(), + col240: int('col21').primaryKey().identity(), + col241: int('col21').primaryKey().identity(), + col242: int('col21').primaryKey().identity(), + col243: int('col21').primaryKey().identity(), + col244: int('col21').primaryKey().identity(), + col245: int('col21').primaryKey().identity(), + col246: int('col21').primaryKey().identity(), + col248: int('col21').primaryKey().identity(), + col249: int('col21').primaryKey().identity(), + col250: int('col21').primaryKey().identity(), + col251: int('col21').primaryKey().identity(), + col252: int('col21').primaryKey().identity(), + col253: int('col21').primaryKey().identity(), + col254: int('col21').primaryKey().identity(), + col255: int('col21').primaryKey().identity(), + col256: int('col21').primaryKey().identity(), + col258: int('col21').primaryKey().identity(), + col259: int('col21').primaryKey().identity(), + col260: int('col21').primaryKey().identity(), + col261: int('col21').primaryKey().identity(), + col262: int('col21').primaryKey().identity(), + col263: int('col21').primaryKey().identity(), + col264: int('col21').primaryKey().identity(), + col265: int('col21').primaryKey().identity(), + col266: int('col21').primaryKey().identity(), + col268: int('col21').primaryKey().identity(), + col269: int('col21').primaryKey().identity(), + col270: int('col21').primaryKey().identity(), + col271: int('col21').primaryKey().identity(), + col272: int('col21').primaryKey().identity(), + col273: int('col21').primaryKey().identity(), + col274: int('col21').primaryKey().identity(), + col275: int('col21').primaryKey().identity(), + col276: int('col21').primaryKey().identity(), + col278: int('col21').primaryKey().identity(), + col279: int('col21').primaryKey().identity(), + col280: int('col21').primaryKey().identity(), + col281: int('col21').primaryKey().identity(), + col282: int('col21').primaryKey().identity(), + col283: int('col21').primaryKey().identity(), + col284: int('col21').primaryKey().identity(), + col285: int('col21').primaryKey().identity(), + col286: int('col21').primaryKey().identity(), + col288: int('col21').primaryKey().identity(), + col289: int('col21').primaryKey().identity(), + col290: int('col21').primaryKey().identity(), + col291: int('col21').primaryKey().identity(), + col292: int('col21').primaryKey().identity(), + col293: int('col21').primaryKey().identity(), + col294: int('col21').primaryKey().identity(), + col295: int('col21').primaryKey().identity(), + col296: int('col21').primaryKey().identity(), + col298: int('col21').primaryKey().identity(), + col299: int('col21').primaryKey().identity(), + col300: int('col3').primaryKey().identity(), + col301: int('col3').primaryKey().identity(), + col302: int('col3').primaryKey().identity(), + col303: int('col3').primaryKey().identity(), + col304: int('col3').primaryKey().identity(), + col305: int('col3').primaryKey().identity(), + col306: int('col3').primaryKey().identity(), + col308: int('col3').primaryKey().identity(), + col309: int('col3').primaryKey().identity(), + col310: int('col31').primaryKey().identity(), + col311: int('col31').primaryKey().identity(), + col312: int('col31').primaryKey().identity(), + col313: int('col31').primaryKey().identity(), + col314: int('col31').primaryKey().identity(), + col315: int('col31').primaryKey().identity(), + col316: int('col31').primaryKey().identity(), + col318: int('col31').primaryKey().identity(), + col319: int('col31').primaryKey().identity(), + col320: int('col31').primaryKey().identity(), + col321: int('col31').primaryKey().identity(), + col322: int('col31').primaryKey().identity(), + col323: int('col31').primaryKey().identity(), + col324: int('col31').primaryKey().identity(), + col325: int('col31').primaryKey().identity(), + col326: int('col31').primaryKey().identity(), + col328: int('col31').primaryKey().identity(), + col329: int('col31').primaryKey().identity(), + col330: int('col31').primaryKey().identity(), + col331: int('col31').primaryKey().identity(), + col332: int('col31').primaryKey().identity(), + col333: int('col31').primaryKey().identity(), + col334: int('col31').primaryKey().identity(), + col335: int('col31').primaryKey().identity(), + col336: int('col31').primaryKey().identity(), + col338: int('col31').primaryKey().identity(), + col339: int('col31').primaryKey().identity(), + col340: int('col31').primaryKey().identity(), + col341: int('col31').primaryKey().identity(), + col342: int('col31').primaryKey().identity(), + col343: int('col31').primaryKey().identity(), + col344: int('col31').primaryKey().identity(), + col345: int('col31').primaryKey().identity(), + col346: int('col31').primaryKey().identity(), + col348: int('col31').primaryKey().identity(), + col349: int('col31').primaryKey().identity(), + col350: int('col31').primaryKey().identity(), + col351: int('col31').primaryKey().identity(), + col352: int('col31').primaryKey().identity(), + col353: int('col31').primaryKey().identity(), + col354: int('col31').primaryKey().identity(), + col355: int('col31').primaryKey().identity(), + col356: int('col31').primaryKey().identity(), + col358: int('col31').primaryKey().identity(), + col359: int('col31').primaryKey().identity(), + col360: int('col31').primaryKey().identity(), + col361: int('col31').primaryKey().identity(), + col362: int('col31').primaryKey().identity(), + col363: int('col31').primaryKey().identity(), + col364: int('col31').primaryKey().identity(), + col365: int('col31').primaryKey().identity(), + col366: int('col31').primaryKey().identity(), + col368: int('col31').primaryKey().identity(), + col369: int('col31').primaryKey().identity(), + col370: int('col31').primaryKey().identity(), + col371: int('col31').primaryKey().identity(), + col372: int('col31').primaryKey().identity(), + col373: int('col31').primaryKey().identity(), + col374: int('col31').primaryKey().identity(), + col375: int('col31').primaryKey().identity(), + col376: int('col31').primaryKey().identity(), + col378: int('col31').primaryKey().identity(), + col379: int('col31').primaryKey().identity(), + col380: int('col31').primaryKey().identity(), + col381: int('col31').primaryKey().identity(), + col382: int('col31').primaryKey().identity(), + col383: int('col31').primaryKey().identity(), + col384: int('col31').primaryKey().identity(), + col385: int('col31').primaryKey().identity(), + col386: int('col31').primaryKey().identity(), + col388: int('col31').primaryKey().identity(), + col389: int('col31').primaryKey().identity(), + col390: int('col31').primaryKey().identity(), + col391: int('col31').primaryKey().identity(), + col392: int('col31').primaryKey().identity(), + col393: int('col31').primaryKey().identity(), + col394: int('col31').primaryKey().identity(), + col395: int('col31').primaryKey().identity(), + col396: int('col31').primaryKey().identity(), + col398: int('col31').primaryKey().identity(), + col399: int('col31').primaryKey().identity(), + col400: int('col4').primaryKey().identity(), + col401: int('col4').primaryKey().identity(), + col402: int('col4').primaryKey().identity(), + col403: int('col4').primaryKey().identity(), + col404: int('col4').primaryKey().identity(), + col405: int('col4').primaryKey().identity(), + col406: int('col4').primaryKey().identity(), + col408: int('col4').primaryKey().identity(), + col409: int('col4').primaryKey().identity(), + col410: int('col41').primaryKey().identity(), + col411: int('col41').primaryKey().identity(), + col412: int('col41').primaryKey().identity(), + col413: int('col41').primaryKey().identity(), + col414: int('col41').primaryKey().identity(), + col415: int('col41').primaryKey().identity(), + col416: int('col41').primaryKey().identity(), + col418: int('col41').primaryKey().identity(), + col419: int('col41').primaryKey().identity(), + col420: int('col41').primaryKey().identity(), + col421: int('col41').primaryKey().identity(), + col422: int('col41').primaryKey().identity(), + col423: int('col41').primaryKey().identity(), + col424: int('col41').primaryKey().identity(), + col425: int('col41').primaryKey().identity(), + col426: int('col41').primaryKey().identity(), + col428: int('col41').primaryKey().identity(), + col429: int('col41').primaryKey().identity(), + col430: int('col41').primaryKey().identity(), + col431: int('col41').primaryKey().identity(), + col432: int('col41').primaryKey().identity(), + col433: int('col41').primaryKey().identity(), + col434: int('col41').primaryKey().identity(), + col435: int('col41').primaryKey().identity(), + col436: int('col41').primaryKey().identity(), + col438: int('col41').primaryKey().identity(), + col439: int('col41').primaryKey().identity(), + col440: int('col41').primaryKey().identity(), + col441: int('col41').primaryKey().identity(), + col442: int('col41').primaryKey().identity(), + col443: int('col41').primaryKey().identity(), + col444: int('col41').primaryKey().identity(), + col445: int('col41').primaryKey().identity(), + col446: int('col41').primaryKey().identity(), + col448: int('col41').primaryKey().identity(), + col449: int('col41').primaryKey().identity(), + col450: int('col41').primaryKey().identity(), + col451: int('col41').primaryKey().identity(), + col452: int('col41').primaryKey().identity(), + col453: int('col41').primaryKey().identity(), + col454: int('col41').primaryKey().identity(), + col455: int('col41').primaryKey().identity(), + col456: int('col41').primaryKey().identity(), + col458: int('col41').primaryKey().identity(), + col459: int('col41').primaryKey().identity(), + col460: int('col41').primaryKey().identity(), + col461: int('col41').primaryKey().identity(), + col462: int('col41').primaryKey().identity(), + col463: int('col41').primaryKey().identity(), + col464: int('col41').primaryKey().identity(), + col465: int('col41').primaryKey().identity(), + col466: int('col41').primaryKey().identity(), + col468: int('col41').primaryKey().identity(), + col469: int('col41').primaryKey().identity(), + col470: int('col41').primaryKey().identity(), + col471: int('col41').primaryKey().identity(), + col472: int('col41').primaryKey().identity(), + col473: int('col41').primaryKey().identity(), + col474: int('col41').primaryKey().identity(), + col475: int('col41').primaryKey().identity(), + col476: int('col41').primaryKey().identity(), + col478: int('col41').primaryKey().identity(), + col479: int('col41').primaryKey().identity(), + col480: int('col41').primaryKey().identity(), + col481: int('col41').primaryKey().identity(), + col482: int('col41').primaryKey().identity(), + col483: int('col41').primaryKey().identity(), + col484: int('col41').primaryKey().identity(), + col485: int('col41').primaryKey().identity(), + col486: int('col41').primaryKey().identity(), + col488: int('col41').primaryKey().identity(), + col489: int('col41').primaryKey().identity(), + col490: int('col41').primaryKey().identity(), + col491: int('col41').primaryKey().identity(), + col492: int('col41').primaryKey().identity(), + col493: int('col41').primaryKey().identity(), + col494: int('col41').primaryKey().identity(), + col495: int('col41').primaryKey().identity(), + col496: int('col41').primaryKey().identity(), + col498: int('col41').primaryKey().identity(), + col499: int('col41').primaryKey().identity(), + col500: int('col5').primaryKey().identity(), + col501: int('col5').primaryKey().identity(), + col502: int('col5').primaryKey().identity(), + col503: int('col5').primaryKey().identity(), + col504: int('col5').primaryKey().identity(), + col505: int('col5').primaryKey().identity(), + col506: int('col5').primaryKey().identity(), + col508: int('col5').primaryKey().identity(), + col509: int('col5').primaryKey().identity(), + col510: int('col51').primaryKey().identity(), + col511: int('col51').primaryKey().identity(), + col512: int('col51').primaryKey().identity(), + col513: int('col51').primaryKey().identity(), + col514: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col515: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col516: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col518: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col519: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col520: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col521: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col522: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col523: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col524: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col525: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col526: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col528: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col529: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col530: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col531: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col532: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col533: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col534: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col535: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col536: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col538: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col539: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col540: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col541: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col542: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col543: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col544: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col545: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col546: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col548: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col549: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col550: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col551: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col552: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col553: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col554: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col555: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col556: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col558: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col559: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col560: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col561: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col562: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col563: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col564: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col565: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col566: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col568: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col569: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col570: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col571: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col572: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col573: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col574: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col575: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col576: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col578: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col579: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col580: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col581: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col582: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col583: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col584: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col585: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col586: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col588: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col589: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col590: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col591: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col592: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col593: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col594: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col595: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col596: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col598: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col599: bigint('col51', { mode: 'number' }).primaryKey().identity(), + col600: bigint('col6', { mode: 'number' }).primaryKey().identity(), + col601: int('col6').primaryKey().identity(), + col602: int('col6').primaryKey().identity(), + col603: int('col6').primaryKey().identity(), + col604: int('col6').primaryKey().identity(), + col605: int('col6').primaryKey().identity(), + col606: int('col6').primaryKey().identity(), + col608: int('col6').primaryKey().identity(), + col609: int('col6').primaryKey().identity(), + col610: int('col61').primaryKey().identity(), + col611: int('col61').primaryKey().identity(), + col612: int('col61').primaryKey().identity(), + col613: int('col61').primaryKey().identity(), + col614: int('col61').primaryKey().identity(), + col615: int('col61').primaryKey().identity(), + col616: int('col61').primaryKey().identity(), + col618: int('col61').primaryKey().identity(), + col619: int('col61').primaryKey().identity(), + col620: int('col61').primaryKey().identity(), + col621: int('col61').primaryKey().identity(), + col622: int('col61').primaryKey().identity(), + col623: int('col61').primaryKey().identity(), + col624: int('col61').primaryKey().identity(), + col625: int('col61').primaryKey().identity(), + col626: int('col61').primaryKey().identity(), + col628: int('col61').primaryKey().identity(), + col629: int('col61').primaryKey().identity(), + col630: int('col61').primaryKey().identity(), + col631: int('col61').primaryKey().identity(), + col632: int('col61').primaryKey().identity(), + col633: int('col61').primaryKey().identity(), + col634: int('col61').primaryKey().identity(), + col635: int('col61').primaryKey().identity(), + col636: int('col61').primaryKey().identity(), + col638: int('col61').primaryKey().identity(), + col639: int('col61').primaryKey().identity(), + col640: int('col61').primaryKey().identity(), + col641: int('col61').primaryKey().identity(), + col642: int('col61').primaryKey().identity(), + col643: int('col61').primaryKey().identity(), + col644: int('col61').primaryKey().identity(), + col645: int('col61').primaryKey().identity(), + col646: int('col61').primaryKey().identity(), + col648: int('col61').primaryKey().identity(), + col649: int('col61').primaryKey().identity(), + col650: int('col61').primaryKey().identity(), + col651: int('col61').primaryKey().identity(), + col652: int('col61').primaryKey().identity(), + col653: int('col61').primaryKey().identity(), + col654: int('col61').primaryKey().identity(), + col655: int('col61').primaryKey().identity(), + col656: int('col61').primaryKey().identity(), + col658: int('col61').primaryKey().identity(), + col659: int('col61').primaryKey().identity(), + col660: int('col61').primaryKey().identity(), + col661: int('col61').primaryKey().identity(), + col662: int('col61').primaryKey().identity(), + col663: int('col61').primaryKey().identity(), + col664: int('col61').primaryKey().identity(), + col665: int('col61').primaryKey().identity(), + col666: int('col61').primaryKey().identity(), + col668: int('col61').primaryKey().identity(), + col669: int('col61').primaryKey().identity(), + col670: int('col61').primaryKey().identity(), + col671: int('col61').primaryKey().identity(), + col672: int('col61').primaryKey().identity(), + col673: int('col61').primaryKey().identity(), + col674: int('col61').primaryKey().identity(), + col675: int('col61').primaryKey().identity(), + col676: int('col61').primaryKey().identity(), + col678: int('col61').primaryKey().identity(), + col679: int('col61').primaryKey().identity(), + col680: int('col61').primaryKey().identity(), + col681: int('col61').primaryKey().identity(), + col682: int('col61').primaryKey().identity(), + col683: int('col61').primaryKey().identity(), + col684: int('col61').primaryKey().identity(), + col685: int('col61').primaryKey().identity(), + col686: int('col61').primaryKey().identity(), + col688: int('col61').primaryKey().identity(), + col689: int('col61').primaryKey().identity(), + col690: int('col61').primaryKey().identity(), + col691: int('col61').primaryKey().identity(), + col692: int('col61').primaryKey().identity(), + col693: int('col61').primaryKey().identity(), + col694: int('col61').primaryKey().identity(), + col695: int('col61').primaryKey().identity(), + col696: int('col61').primaryKey().identity(), + col698: int('col61').primaryKey().identity(), + col699: int('col61').primaryKey().identity(), + col700: int('col7').primaryKey().identity(), + col701: int('col7').primaryKey().identity(), + col702: int('col7').primaryKey().identity(), + col703: int('col7').primaryKey().identity(), + col704: int('col7').primaryKey().identity(), + col705: int('col7').primaryKey().identity(), + col706: int('col7').primaryKey().identity(), + col708: int('col7').primaryKey().identity(), + col709: int('col7').primaryKey().identity(), + col710: int('col71').primaryKey().identity(), + col711: int('col71').primaryKey().identity(), + col712: int('col71').primaryKey().identity(), + col713: int('col71').primaryKey().identity(), + col714: int('col71').primaryKey().identity(), + col715: int('col71').primaryKey().identity(), + col716: int('col71').primaryKey().identity(), + col718: int('col71').primaryKey().identity(), + col719: int('col71').primaryKey().identity(), + col720: int('col71').primaryKey().identity(), + col721: int('col71').primaryKey().identity(), + col722: int('col71').primaryKey().identity(), + col723: int('col71').primaryKey().identity(), + col724: int('col71').primaryKey().identity(), + col725: int('col71').primaryKey().identity(), + col726: int('col71').primaryKey().identity(), + col728: int('col71').primaryKey().identity(), + col729: int('col71').primaryKey().identity(), + col730: int('col71').primaryKey().identity(), + col731: int('col71').primaryKey().identity(), + col732: int('col71').primaryKey().identity(), + col733: int('col71').primaryKey().identity(), + col734: int('col71').primaryKey().identity(), + col735: int('col71').primaryKey().identity(), + col736: int('col71').primaryKey().identity(), + col738: int('col71').primaryKey().identity(), + col739: int('col71').primaryKey().identity(), + col740: int('col71').primaryKey().identity(), + col741: int('col71').primaryKey().identity(), + col742: int('col71').primaryKey().identity(), + col743: int('col71').primaryKey().identity(), + col744: int('col71').primaryKey().identity(), + col745: int('col71').primaryKey().identity(), + col746: int('col71').primaryKey().identity(), + col748: int('col71').primaryKey().identity(), + col749: int('col71').primaryKey().identity(), + col750: int('col71').primaryKey().identity(), + col751: int('col71').primaryKey().identity(), + col752: int('col71').primaryKey().identity(), + col753: int('col71').primaryKey().identity(), + col754: int('col71').primaryKey().identity(), + col755: int('col71').primaryKey().identity(), + col756: int('col71').primaryKey().identity(), + col758: int('col71').primaryKey().identity(), + col759: int('col71').primaryKey().identity(), + col760: int('col71').primaryKey().identity(), + col761: int('col71').primaryKey().identity(), + col762: int('col71').primaryKey().identity(), + col763: int('col71').primaryKey().identity(), + col764: int('col71').primaryKey().identity(), + col765: int('col71').primaryKey().identity(), + col766: int('col71').primaryKey().identity(), + col768: int('col71').primaryKey().identity(), + col769: int('col71').primaryKey().identity(), + col770: int('col71').primaryKey().identity(), + col771: int('col71').primaryKey().identity(), + col772: int('col71').primaryKey().identity(), + col773: int('col71').primaryKey().identity(), + col774: int('col71').primaryKey().identity(), + col775: int('col71').primaryKey().identity(), + col776: int('col71').primaryKey().identity(), + col778: int('col71').primaryKey().identity(), + col779: int('col71').primaryKey().identity(), + col780: int('col71').primaryKey().identity(), + col781: int('col71').primaryKey().identity(), + col782: int('col71').primaryKey().identity(), + col783: int('col71').primaryKey().identity(), + col784: int('col71').primaryKey().identity(), + col785: int('col71').primaryKey().identity(), + col786: int('col71').primaryKey().identity(), + col788: int('col71').primaryKey().identity(), + col789: int('col71').primaryKey().identity(), + col790: int('col71').primaryKey().identity(), + col791: int('col71').primaryKey().identity(), + col792: int('col71').primaryKey().identity(), + col793: int('col71').primaryKey().identity(), + col794: int('col71').primaryKey().identity(), + col795: int('col71').primaryKey().identity(), + col796: int('col71').primaryKey().identity(), + col798: int('col71').primaryKey().identity(), + col799: int('col71').primaryKey().identity(), + col800: int('col8').primaryKey().identity(), + col801: int('col8').primaryKey().identity(), + col802: int('col8').primaryKey().identity(), + col803: int('col8').primaryKey().identity(), + col804: int('col8').primaryKey().identity(), + col805: int('col8').primaryKey().identity(), + col806: int('col8').primaryKey().identity(), + col808: int('col8').primaryKey().identity(), + col809: int('col8').primaryKey().identity(), + col810: int('col81').primaryKey().identity(), + col811: int('col81').primaryKey().identity(), + col812: int('col81').primaryKey().identity(), + col813: int('col81').primaryKey().identity(), + col814: int('col81').primaryKey().identity(), + col815: int('col81').primaryKey().identity(), + col816: int('col81').primaryKey().identity(), + col818: int('col81').primaryKey().identity(), + col819: int('col81').primaryKey().identity(), + col820: int('col81').primaryKey().identity(), + col821: int('col81').primaryKey().identity(), + col822: int('col81').primaryKey().identity(), + col823: int('col81').primaryKey().identity(), + col824: int('col81').primaryKey().identity(), + col825: int('col81').primaryKey().identity(), + col826: int('col81').primaryKey().identity(), + col828: int('col81').primaryKey().identity(), + col829: int('col81').primaryKey().identity(), + col830: int('col81').primaryKey().identity(), + col831: int('col81').primaryKey().identity(), + col832: int('col81').primaryKey().identity(), + col833: int('col81').primaryKey().identity(), + col834: int('col81').primaryKey().identity(), + col835: int('col81').primaryKey().identity(), + col836: int('col81').primaryKey().identity(), + col838: int('col81').primaryKey().identity(), + col839: int('col81').primaryKey().identity(), + col840: int('col81').primaryKey().identity(), + col841: int('col81').primaryKey().identity(), + col842: int('col81').primaryKey().identity(), + col843: int('col81').primaryKey().identity(), + col844: int('col81').primaryKey().identity(), + col845: int('col81').primaryKey().identity(), + col846: int('col81').primaryKey().identity(), + col848: int('col81').primaryKey().identity(), + col849: int('col81').primaryKey().identity(), + col850: int('col81').primaryKey().identity(), + col851: int('col81').primaryKey().identity(), + col852: int('col81').primaryKey().identity(), + col853: int('col81').primaryKey().identity(), + col854: int('col81').primaryKey().identity(), + col855: int('col81').primaryKey().identity(), + col856: int('col81').primaryKey().identity(), + col858: int('col81').primaryKey().identity(), + col859: int('col81').primaryKey().identity(), + col860: int('col81').primaryKey().identity(), + col861: int('col81').primaryKey().identity(), + col862: int('col81').primaryKey().identity(), + col863: int('col81').primaryKey().identity(), + col864: int('col81').primaryKey().identity(), + col865: int('col81').primaryKey().identity(), + col866: int('col81').primaryKey().identity(), + col868: int('col81').primaryKey().identity(), + col869: int('col81').primaryKey().identity(), + col870: int('col81').primaryKey().identity(), + col871: int('col81').primaryKey().identity(), + col872: int('col81').primaryKey().identity(), + col873: int('col81').primaryKey().identity(), + col874: int('col81').primaryKey().identity(), + col875: int('col81').primaryKey().identity(), + col876: int('col81').primaryKey().identity(), + col878: int('col81').primaryKey().identity(), + col879: int('col81').primaryKey().identity(), + col880: int('col81').primaryKey().identity(), + col881: int('col81').primaryKey().identity(), + col882: int('col81').primaryKey().identity(), + col883: int('col81').primaryKey().identity(), + col884: int('col81').primaryKey().identity(), + col885: int('col81').primaryKey().identity(), + col886: int('col81').primaryKey().identity(), + col888: int('col81').primaryKey().identity(), + col889: int('col81').primaryKey().identity(), + col890: int('col81').primaryKey().identity(), + col891: int('col81').primaryKey().identity(), + col892: int('col81').primaryKey().identity(), + col893: int('col81').primaryKey().identity(), + col894: int('col81').primaryKey().identity(), + col895: int('col81').primaryKey().identity(), + col896: int('col81').primaryKey().identity(), + col898: int('col81').primaryKey().identity(), + col899: int('col81').primaryKey().identity(), + col900: int('col9').primaryKey().identity(), + col901: int('col9').primaryKey().identity(), + col902: int('col9').primaryKey().identity(), + col903: int('col9').primaryKey().identity(), + col904: int('col9').primaryKey().identity(), + col905: int('col9').primaryKey().identity(), + col906: int('col9').primaryKey().identity(), + col908: int('col9').primaryKey().identity(), + col909: int('col9').primaryKey().identity(), + col910: int('col91').primaryKey().identity(), + col911: int('col91').primaryKey().identity(), + col912: int('col91').primaryKey().identity(), + col913: int('col91').primaryKey().identity(), + col914: int('col91').primaryKey().identity(), + col915: int('col91').primaryKey().identity(), + col916: int('col91').primaryKey().identity(), + col918: int('col91').primaryKey().identity(), + col919: int('col91').primaryKey().identity(), + col920: int('col91').primaryKey().identity(), + col921: int('col91').primaryKey().identity(), + col922: int('col91').primaryKey().identity(), + col923: int('col91').primaryKey().identity(), + col924: int('col91').primaryKey().identity(), + col925: int('col91').primaryKey().identity(), + col926: int('col91').primaryKey().identity(), + col928: int('col91').primaryKey().identity(), + col929: int('col91').primaryKey().identity(), + col930: int('col91').primaryKey().identity(), + col931: int('col91').primaryKey().identity(), + col932: int('col91').primaryKey().identity(), + col933: int('col91').primaryKey().identity(), + col934: int('col91').primaryKey().identity(), + col935: int('col91').primaryKey().identity(), + col936: int('col91').primaryKey().identity(), + col938: int('col91').primaryKey().identity(), + col939: int('col91').primaryKey().identity(), + col940: int('col91').primaryKey().identity(), + col941: int('col91').primaryKey().identity(), + col942: int('col91').primaryKey().identity(), + col943: int('col91').primaryKey().identity(), + col944: varchar('col91', { length: 200 }).primaryKey().default('0'), + col945: varchar('col91', { length: 200 }).primaryKey().default('0'), + col946: varchar('col91', { length: 200 }).primaryKey().default('0'), + col948: varchar('col91', { length: 200 }).primaryKey().default('0'), + col949: varchar('col91', { length: 200 }).primaryKey().default('0'), + col950: varchar('col91', { length: 200 }).primaryKey().default('0'), + col951: varchar('col91', { length: 200 }).primaryKey().default('0'), + col952: varchar('col91', { length: 200 }).primaryKey().default('0'), + col953: varchar('col91', { length: 200 }).primaryKey().default('0'), + col954: varchar('col91', { length: 200 }).primaryKey().default('0'), + col955: varchar('col91', { length: 200 }).primaryKey().default('0'), + col956: varchar('col91', { length: 200 }).primaryKey().default('0'), + col958: varchar('col91', { length: 200 }).primaryKey().default('0'), + col959: varchar('col91', { length: 200 }).primaryKey().default('0'), + col960: varchar('col91', { length: 200 }).primaryKey().default('0'), + col961: varchar('col91', { length: 200 }).primaryKey().default('0'), + col962: varchar('col91', { length: 200 }).primaryKey().default('0'), + col963: varchar('col91', { length: 200 }).primaryKey().default('0'), + col964: varchar('col91', { length: 200 }).primaryKey().default('0'), + col965: varchar('col91', { length: 200 }).primaryKey().default('0'), + col966: varchar('col91', { length: 200 }).primaryKey().default('0'), + col968: varchar('col91', { length: 200 }).primaryKey().default('0'), + col969: varchar('col91', { length: 200 }).primaryKey().default('0'), + col970: varchar('col91', { length: 200 }).primaryKey().default('0'), + col971: varchar('col91', { length: 200 }).primaryKey().default('0'), + col972: varchar('col91', { length: 200 }).primaryKey().default('0'), + col973: varchar('col91', { length: 200 }).primaryKey().default('0'), + col974: varchar('col91', { length: 200 }).primaryKey().default('0'), + col975: varchar('col91', { length: 200 }).primaryKey().default('0'), + col976: varchar('col91', { length: 200 }).primaryKey().default('0'), + col978: varchar('col91', { length: 200 }).primaryKey().default('0'), + col979: varchar('col91', { length: 200 }).primaryKey().default('0'), + col980: varchar('col91', { length: 200 }).primaryKey().default('0'), + col981: varchar('col91', { length: 200 }).primaryKey().default('0'), + col982: varchar('col91', { length: 200 }).primaryKey().default('0'), + col983: varchar('col91', { length: 200 }).primaryKey().default('0'), + col984: varchar('col91', { length: 200 }).primaryKey().default('0'), + col985: varchar('col91', { length: 200 }).primaryKey().default('0'), + col986: varchar('col91', { length: 200 }).primaryKey().default('0'), + col988: varchar('col91', { length: 200 }).primaryKey().default('0'), + col989: varchar('col91', { length: 200 }).primaryKey().default('0'), + col990: varchar('col91', { length: 200 }).primaryKey().default('0'), + col991: varchar('col91', { length: 200 }).primaryKey().default('0'), + col992: varchar('col91', { length: 200 }).primaryKey().default('0'), + col993: varchar('col91', { length: 200 }).primaryKey().default('0'), + col994: varchar('col91', { length: 200 }).primaryKey().default('0'), + col995: varchar('col91', { length: 200 }).primaryKey().default('0'), + col996: varchar('col91', { length: 200 }).primaryKey().default('0'), + col998: varchar('col91', { length: 200 }).primaryKey().default('0'), + col999: varchar('col91', { length: 200 }).primaryKey().default('0'), +}); diff --git a/drizzle-orm/type-tests/mssql/db-rel.ts b/drizzle-orm/type-tests/mssql/db-rel.ts new file mode 100644 index 0000000000..b5c92da690 --- /dev/null +++ b/drizzle-orm/type-tests/mssql/db-rel.ts @@ -0,0 +1,117 @@ +import mssql from 'mssql'; +import { type Equal, Expect } from 'type-tests/utils.ts'; +import { drizzle } from '~/node-mssql/index.ts'; +import { sql } from '~/sql/sql.ts'; +import * as schema from './tables-rel.ts'; + +const conn = new mssql.ConnectionPool(process.env['MSSQL_CONNECTION_STRING']!); +const db = drizzle({ client: conn, schema }); + +{ + const result = await db._query.users.findMany({ + where: (users, { sql }) => sql`char_length(${users.name} > 1)`, + limit: sql.placeholder('l'), + orderBy: (users, { asc, desc }) => [asc(users.name), desc(users.id)], + with: { + posts: { + where: (posts, { sql }) => sql`char_length(${posts.title} > 1)`, + limit: sql.placeholder('l'), + columns: { + id: false, + }, + with: { + author: true, + comments: { + where: (comments, { sql }) => sql`char_length(${comments.text} > 1)`, + limit: sql.placeholder('l'), + columns: { + text: true, + }, + with: { + author: { + columns: {}, + with: { + city: { + with: { + users: true, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }); + + Expect< + Equal<{ + id: number; + name: string; + cityId: number; + homeCityId: number | null; + createdAt: Date; + posts: { + title: string; + authorId: number | null; + comments: { + text: string; + author: { + city: { + id: number; + name: string; + users: { + id: number; + name: string; + cityId: number; + homeCityId: number | null; + createdAt: Date; + }[]; + }; + } | null; + }[]; + author: { + id: number; + name: string; + cityId: number; + homeCityId: number | null; + createdAt: Date; + } | null; + }[]; + }[], typeof result> + >; +} + +{ + const result = await db._query.users.findMany({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + authorId: true, + }, + extras: { + lower: sql`lower(${schema.posts.title})`.as('lower_name'), + }, + }, + }, + }); + + Expect< + Equal< + { + id: number; + name: string; + posts: { + authorId: number | null; + lower: string; + }[]; + }[], + typeof result + > + >; +} diff --git a/drizzle-orm/type-tests/mssql/db.ts b/drizzle-orm/type-tests/mssql/db.ts new file mode 100644 index 0000000000..36ca948794 --- /dev/null +++ b/drizzle-orm/type-tests/mssql/db.ts @@ -0,0 +1,11 @@ +import mssql from 'mssql'; +import { drizzle } from '~/node-mssql/index.ts'; + +const pool = await mssql.connect({} as mssql.config); + +export const db = drizzle({ client: pool }); + +{ + drizzle({ client: pool }); + drizzle({ client: pool, schema: {} }); +} diff --git a/drizzle-orm/type-tests/mssql/delete.ts b/drizzle-orm/type-tests/mssql/delete.ts new file mode 100644 index 0000000000..7ede4176bb --- /dev/null +++ b/drizzle-orm/type-tests/mssql/delete.ts @@ -0,0 +1,63 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import type { MsSqlDelete } from '~/mssql-core/index.ts'; +import { drizzle } from '~/node-mssql'; +import type { MsSqlQueryResult } from '~/node-mssql'; +import { eq } from '~/sql/expressions'; +import { sql } from '~/sql/sql.ts'; +import { users } from './tables.ts'; + +const db = drizzle.mock(); + +const deleteAll = await db.delete(users); +Expect>; + +const deleteAllStmt = db.delete(users).prepare(); +const deleteAllPrepared = await deleteAllStmt.execute(); +Expect>; + +const deleteWhere = await db.delete(users).where(eq(users.id, 1)); +Expect>; + +const deleteWhereStmt = db.delete(users).where(eq(users.id, 1)).prepare(); +const deleteWherePrepared = await deleteWhereStmt.execute(); +Expect>; + +const deleteOutputAll = await db.delete(users).output(); +Expect>; + +const deleteOutputAllStmt = db.delete(users).output().prepare(); +const deleteOutputAllPrepared = await deleteOutputAllStmt.execute(); +Expect>; + +const deleteOutputPartial = await db.delete(users).output({ cityHome: users.homeCity }); +Expect>; + +const deleteOutputPartialStmt = db.delete(users).output({ cityHome: users.homeCity }).prepare(); +const deleteOutputPartialPrepared = await deleteOutputPartialStmt.execute(); +Expect>; + +{ + function dynamic(qb: T) { + return qb.where(sql``); + } + + const qbBase = db.delete(users).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + db + .delete(users) + .where(sql``) + // @ts-expect-error method was already called + .where(sql``); + + db + .delete(users) + .$dynamic() + .where(sql``) + .where(sql``); +} diff --git a/drizzle-orm/type-tests/mssql/insert.ts b/drizzle-orm/type-tests/mssql/insert.ts new file mode 100644 index 0000000000..abc359d135 --- /dev/null +++ b/drizzle-orm/type-tests/mssql/insert.ts @@ -0,0 +1,122 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { int, mssqlTable, text } from '~/mssql-core/index.ts'; +import type { MsSqlQueryResult } from '~/node-mssql'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +const insert = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect>; + +const insertStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const insertPrepared = await insertStmt.execute(); +Expect>; + +const insertSql = await db.insert(users).values({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, +}); +Expect>; + +const insertSqlStmt = db.insert(users).values({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, +}).prepare(); +const insertSqlPrepared = await insertSqlStmt.execute(); +Expect>; + +const insertReturning = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect>; + +const insertReturningStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const insertReturningPrepared = await insertReturningStmt.execute(); +Expect>; + +const insertReturningPartial = await db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect>; + +const insertReturningPartialStmt = db.insert(users).values({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const insertReturningPartialPrepared = await insertReturningPartialStmt.execute(); +Expect>; + +const insertOutputSql = await db.insert(users).output().values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', +}); +Expect>; + +const insertOutputSqlStmt = db.insert(users).output().values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', +}).prepare(); +const insertReturningSqlPrepared = await insertOutputSqlStmt.execute(); +Expect>; + +const insertOutputPartialSql = await db.insert(users).output({ cityHome: users.homeCity }).values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', +}); +Expect>; + +const insertOutputPartialSqlStmt = db.insert(users).output({ cityHome: users.homeCity }).values({ + homeCity: 1, + class: 'A', + age1: sql`2 + 2`, + enumCol: 'a', +}).prepare(); +const insertOutputPartialSqlPrepared = await insertOutputPartialSqlStmt.execute(); +Expect>; + +{ + const users = mssqlTable('users', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + age: int('age'), + occupation: text('occupation'), + }); + + await db.insert(users).values({ name: 'John Wick', age: 58, occupation: 'housekeeper' }); + // @ts-expect-error id is an identity column MsSql doesn't allow to write to it + await db.insert(users).values({ name: 'John Wick', age: 58, occupation: 'housekeeper', id: 1 }); +} diff --git a/drizzle-orm/type-tests/mssql/select.ts b/drizzle-orm/type-tests/mssql/select.ts new file mode 100644 index 0000000000..97b67000a3 --- /dev/null +++ b/drizzle-orm/type-tests/mssql/select.ts @@ -0,0 +1,616 @@ +import { alias } from '~/mssql-core/alias.ts'; +import { + and, + between, + eq, + exists, + gt, + gte, + ilike, + inArray, + isNotNull, + isNull, + like, + lt, + lte, + ne, + not, + notBetween, + notExists, + notIlike, + notInArray, + notLike, + or, +} from '~/sql/expressions'; +import { sql } from '~/sql/sql.ts'; + +import type { IRecordSet } from 'mssql'; +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { type MsSqlSelect, type MsSqlSelectQueryBuilder, QueryBuilder } from '~/mssql-core/index.ts'; +import { db } from './db.ts'; +import { cities, classes, newYorkers, users } from './tables.ts'; + +const city = alias(cities, 'city'); +const city1 = alias(cities, 'city1'); + +const join = await db + .select({ + users, + cities, + city, + city1: { + id: city1.id, + }, + }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)) + .rightJoin(city, eq(city.id, users.id)) + .rightJoin(city1, eq(city1.id, users.id)); + +Expect< + Equal< + { + users: { + id: number; + text: string | null; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + } | null; + cities: { + id: number; + name: string; + population: number | null; + } | null; + city: { + id: number; + name: string; + population: number | null; + } | null; + city1: { + id: number; + }; + }[], + typeof join + > +>; + +const join2 = await db + .select({ + userId: users.id, + cityId: cities.id, + }) + .from(users) + .fullJoin(cities, eq(users.id, cities.id)); + +Expect< + Equal< + { + userId: number | null; + cityId: number | null; + }[], + typeof join2 + > +>; + +const join3 = await db + .select({ + userId: users.id, + cityId: cities.id, + classId: classes.id, + }) + .from(users) + .fullJoin(cities, eq(users.id, cities.id)) + .rightJoin(classes, eq(users.id, classes.id)); + +Expect< + Equal< + { + userId: number | null; + cityId: number | null; + classId: number; + }[], + typeof join3 + > +>; + +db + .select() + .from(users) + .where(exists(db.select().from(cities).where(eq(users.homeCity, cities.id)))); + +function mapFunkyFuncResult(valueFromDriver: unknown) { + return { + foo: (valueFromDriver as Record)['foo'], + }; +} + +const age = 1; + +const allOperators = await db + .select({ + col2: sql`5 - ${users.id} + 1`, // unknown + col3: sql`${users.id} + 1`, // number + col33: sql`${users.id} + 1`.mapWith(users.id), // number + col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number + col4: sql`one_or_another(${users.id}, ${users.class})`, // string | number + col5: sql`true`, // unknown + col6: sql`true`, // boolean + col7: sql`random()`, // number + col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } + col9: sql`greatest(${users.createdAt}, ${sql.param(new Date(), users.createdAt)})`, // unknown + col10: sql`date_or_false(${users.createdAt}, ${sql.param(new Date(), users.createdAt)})`, // Date | boolean + col11: sql`${users.age1} + ${age}`, // unknown + col12: sql`${users.age1} + ${sql.param(age, users.age1)}`, // unknown + col13: sql`lower(${users.class})`, // unknown + col14: sql`length(${users.class})`, // number + count: sql`count(*)::int`, // number + }) + .from(users) + .where(and( + eq(users.id, 1), + ne(users.id, 1), + or(eq(users.id, 1), ne(users.id, 1)), + not(eq(users.id, 1)), + gt(users.id, 1), + gte(users.id, 1), + lt(users.id, 1), + lte(users.id, 1), + inArray(users.id, [1, 2, 3]), + inArray(users.id, db.select({ id: users.id }).from(users)), + inArray(users.id, sql`select id from ${users}`), + notInArray(users.id, [1, 2, 3]), + notInArray(users.id, db.select({ id: users.id }).from(users)), + notInArray(users.id, sql`select id from ${users}`), + isNull(users.subClass), + isNotNull(users.id), + exists(db.select({ id: users.id }).from(users)), + exists(sql`select id from ${users}`), + notExists(db.select({ id: users.id }).from(users)), + notExists(sql`select id from ${users}`), + between(users.id, 1, 2), + notBetween(users.id, 1, 2), + like(users.id, '%1%'), + notLike(users.id, '%1%'), + ilike(users.id, '%1%'), + notIlike(users.id, '%1%'), + )); + +Expect< + Equal<{ + col2: unknown; + col3: number; + col33: number; + col34: { foo: any }; + col4: string | number; + col5: unknown; + col6: boolean; + col7: number; + col8: { + foo: any; + }; + col9: unknown; + col10: boolean | Date; + col11: unknown; + col12: unknown; + col13: unknown; + col14: number; + count: number; + }[], typeof allOperators> +>; + +const textSelect = await db + .select({ + t: users.text, + }) + .from(users); + +Expect>; + +const homeCity = alias(cities, 'homeCity'); +const c = alias(classes, 'c'); +const otherClass = alias(classes, 'otherClass'); +const anotherClass = alias(classes, 'anotherClass'); +const friend = alias(users, 'friend'); +const currentCity = alias(cities, 'currentCity'); +const subscriber = alias(users, 'subscriber'); +const closestCity = alias(cities, 'closestCity'); + +const megaJoin = await db + .select({ + user: { + id: users.id, + maxAge: sql`max(${users.age1})`, + }, + city: { + id: cities.id, + }, + homeCity, + c, + otherClass, + anotherClass, + friend, + currentCity, + subscriber, + closestCity, + }) + .from(users) + .innerJoin(cities, sql`${users.id} = ${cities.id}`) + .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) + .innerJoin(c, eq(c.id, users.class)) + .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) + .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) + .innerJoin(friend, sql`${users.id} = ${friend.id}`) + .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) + .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) + .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) + .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) + .orderBy(users.id) + .offset(1) + .fetch(1); + +Expect< + Equal< + { + user: { + id: number; + maxAge: unknown; + }; + city: { + id: number; + }; + homeCity: { + id: number; + name: string; + population: number | null; + }; + c: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + otherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + anotherClass: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + friend: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + currentCity: { + id: number; + name: string; + population: number | null; + }; + subscriber: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + closestCity: { + id: number; + name: string; + population: number | null; + }; + }[], + typeof megaJoin + > +>; + +const friends = alias(users, 'friends'); + +const join4 = await db + .select({ + user: { + id: users.id, + }, + city: { + id: cities.id, + }, + class: classes, + friend: friends, + }) + .from(users) + .innerJoin(cities, sql`${users.id} = ${cities.id}`) + .innerJoin(classes, sql`${cities.id} = ${classes.id}`) + .innerJoin(friends, sql`${friends.id} = ${users.id}`) + .where(sql`${users.age1} > 0`); + +Expect< + Equal<{ + user: { + id: number; + }; + city: { + id: number; + }; + class: { + id: number; + class: 'A' | 'C' | null; + subClass: 'B' | 'D'; + }; + friend: { + id: number; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + text: string | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }; + }[], typeof join4> +>; + +{ + const authenticated = false as boolean; + + const result = await db + .select({ + id: users.id, + ...(authenticated ? { city: users.homeCity } : {}), + }) + .from(users); + + Expect< + Equal< + { + id: number; + city?: number; + }[], + typeof result + > + >; +} + +await db.select().from(users); + +{ + const result = await db.select().from(newYorkers); + Expect< + Equal< + { + userId: number; + cityId: number | null; + }[], + typeof result + > + >; +} + +{ + const result = await db.select({ userId: newYorkers.userId }).from(newYorkers); + Expect< + Equal< + { + userId: number; + }[], + typeof result + > + >; +} + +{ + const query = db.select().from(users).prepare().iterator(); + for await (const row of query) { + Expect>(); + } +} + +{ + db + .select() + .from(users) + .where(eq(users.id, 1)); + + db + .select() + .from(users) + .where(eq(users.id, 1)) + // @ts-expect-error - can't use where twice + .where(eq(users.id, 1)); + + db + .select() + .from(users) + .where(eq(users.id, 1)) + .orderBy(users.id) + // @ts-expect-error - can't use where twice + .where(eq(users.id, 1)); +} + +{ + function withFriends(qb: T) { + const friends = alias(users, 'friends'); + const friends2 = alias(users, 'friends2'); + const friends3 = alias(users, 'friends3'); + const friends4 = alias(users, 'friends4'); + const friends5 = alias(users, 'friends5'); + return qb + .leftJoin(friends, sql`true`) + .leftJoin(friends2, sql`true`) + .leftJoin(friends3, sql`true`) + .leftJoin(friends4, sql`true`) + .leftJoin(friends5, sql`true`); + } + + const qb = db.select().from(users).$dynamic(); + const result = await withFriends(qb); + Expect< + Equal + >; +} + +{ + function withFriends(qb: T) { + const friends = alias(users, 'friends'); + const friends2 = alias(users, 'friends2'); + const friends3 = alias(users, 'friends3'); + const friends4 = alias(users, 'friends4'); + const friends5 = alias(users, 'friends5'); + return qb + .leftJoin(friends, sql`true`) + .leftJoin(friends2, sql`true`) + .leftJoin(friends3, sql`true`) + .leftJoin(friends4, sql`true`) + .leftJoin(friends5, sql`true`); + } + + const qb = db.select().from(users).$dynamic(); + const result = await withFriends(qb); + Expect< + Equal + >; +} + +{ + function dynamic(qb: T) { + return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).offset(1).fetch(10); + } + + const qb = db.select().from(users).$dynamic(); + const result = await dynamic(qb); + Expect>; +} + +{ + // TODO: add to docs + function dynamic(qb: T) { + return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).offset(1); + } + + const query = new QueryBuilder().select().from(users).$dynamic(); + dynamic(query); +} + +{ + // TODO: add to docs + function paginated(qb: T, page: number) { + return qb.orderBy(sql`1`).offset((page - 1) * 10).fetch(10); + } + + const qb = db.select().from(users).$dynamic(); + const result = await paginated(qb, 1); + + Expect>; +} + +{ + db + .select() + .from(users) + .where(sql``) + .orderBy(users.id) + // @ts-expect-error method was already called + .where(sql``); + + db + .select() + .from(users) + .having(sql``) + .orderBy(users.id) + // @ts-expect-error method was already called + .having(sql``); + + db + .select() + .from(users) + .groupBy(sql``) + // @ts-expect-error method was already called + .groupBy(sql``); + + db + .select() + .from(users) + .orderBy(sql``) + // @ts-expect-error method was already called + .orderBy(sql``); + + db + .select() + .from(users) + .where(sql``) + // @ts-expect-error method doesn't actually exists + .limit(10); + + db + .select() + .from(users) + // @ts-expect-error method can not be called before order by + .offset(); + + db + .select() + .from(users) + .orderBy() + .offset(5) + .fetch(2); + + db + .select() + .top(2) + .from(users) + .orderBy() + // @ts-expect-error method can not be called if top method used + .offset(10); +} + +{ + const result = await db.execute<{ name: string | null }[]>(sql`select name from users`); + + Expect< + Equal; + recordsets: IRecordSet<{ name: string | null }>[]; + output: { [key: string]: any }; + rowsAffected: number[]; + }> + >; +} diff --git a/drizzle-orm/type-tests/mssql/set-operators.ts b/drizzle-orm/type-tests/mssql/set-operators.ts new file mode 100644 index 0000000000..6aa0fafe22 --- /dev/null +++ b/drizzle-orm/type-tests/mssql/set-operators.ts @@ -0,0 +1,274 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import { except, intersect, type MsSqlSetOperator, union, unionAll } from '~/mssql-core/index.ts'; +import { eq } from '~/sql/expressions'; +import { desc, sql } from '~/sql/index.ts'; +import { db } from './db.ts'; +import { cities, classes, newYorkers, users } from './tables.ts'; + +const unionTest = await db + .select({ id: users.id }) + .from(users) + .union( + db + .select({ id: users.id }) + .from(users), + ); + +Expect>; + +const unionAllTest = await db + .select({ id: users.id, age: users.age1 }) + .from(users) + .unionAll( + db.select({ id: users.id, age: users.age1 }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)), + ); + +Expect>; + +const intersectTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .intersect(({ intersect }) => + intersect( + db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users), + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ) + ); + +Expect>; + +const intersectAllTest = await db + .select({ id: users.id, homeCity: users.class }) + .from(users) + .intersect( + db + .select({ id: users.id, homeCity: users.class }) + .from(users) + .leftJoin(cities, eq(users.id, cities.id)), + ); + +Expect>; + +const exceptTest = await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + .except( + db + .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) + .from(users), + ); + +Expect>; + +const exceptAllTest = await db + .select({ id: users.id, homeCity: users.class }) + .from(users) + .except( + db + .select({ id: users.id, homeCity: sql<'A' | 'C'>`${users.class}` }) + .from(users), + ); + +Expect>; + +const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); + +Expect>; + +const unionAll2Test = await unionAll( + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select().from(cities), +); + +Expect>; + +const intersect2Test = await intersect( + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), + db.select({ + id: cities.id, + name: cities.name, + population: cities.population, + }).from(cities), +); + +Expect>; + +const intersectAll2Test = await intersect( + union( + db.select({ + id: cities.id, + }).from(cities), + db.select({ + id: cities.id, + }) + .from(cities).where(sql``), + ), + db.select({ + id: cities.id, + }) + .from(cities), +).orderBy(desc(cities.id)).offset(0).fetch(23); + +Expect>; + +const except2Test = await except( + db.select({ + userId: newYorkers.userId, + }) + .from(newYorkers), + db.select({ + userId: newYorkers.userId, + }).from(newYorkers), +); + +Expect>; + +const exceptAll2Test = await except( + db.select({ + userId: newYorkers.userId, + cityId: newYorkers.cityId, + }) + .from(newYorkers).where(sql``), + db.select({ + userId: newYorkers.userId, + cityId: newYorkers.cityId, + }).from(newYorkers).leftJoin(users, sql``), +); + +Expect>; + +const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).offset(0).fetch(23); + +Expect< + Equal<{ + id: number; + text: string | null; + homeCity: number; + currentCity: number | null; + serialNullable: number; + serialNotNull: number; + class: 'A' | 'C'; + subClass: 'B' | 'D' | null; + age1: number; + createdAt: Date; + enumCol: 'a' | 'b' | 'c'; + }[], typeof unionfull> +>; + +union(db.select().from(users), db.select().from(users)) + .orderBy(sql``) + // @ts-expect-error - method was already called + .orderBy(sql``); + +union(db.select().from(users), db.select().from(users)) + .orderBy(sql``) + .offset(0) + // @ts-expect-error - method was already called + .offset(0); + +{ + function dynamic(qb: T) { + return qb.orderBy(sql``).offset(1).offset(2); + } + + const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); + const result = await dynamic(qb); + Expect>; +} + +await db + .select({ id: users.id, homeCity: users.homeCity }) + .from(users) + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); + +// All queries in combining statements should return the same number of columns +// and the corresponding columns should have compatible data type +// @ts-expect-error +db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); + +union( + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), + db.select({ id: cities.id, name: cities.name }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select().from(cities), +); + +union( + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), + db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).orderBy(users.id).$dynamic(), + db.select({ id: cities.id, name: cities.name }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: newYorkers.userId }).from(newYorkers), + db.select({ id: cities.id }).from(cities), +); + +union( + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities), + db.select({ id: cities.id }).from(cities).where(sql``), + db.select({ id: sql`${cities.id}` }).from(cities), + db.select({ id: cities.id }).from(cities), + // All queries in combining statements should return the same number of columns + // and the corresponding columns should have compatible data type + // @ts-expect-error + db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), +); diff --git a/drizzle-orm/type-tests/mssql/subquery.ts b/drizzle-orm/type-tests/mssql/subquery.ts new file mode 100644 index 0000000000..a5000f4048 --- /dev/null +++ b/drizzle-orm/type-tests/mssql/subquery.ts @@ -0,0 +1,97 @@ +import { Expect } from 'type-tests/utils.ts'; +import { alias, int, mssqlTable, text } from '~/mssql-core/index.ts'; +import { and, eq } from '~/sql/expressions'; +import { sql } from '~/sql/sql.ts'; +import type { DrizzleTypeError, Equal } from '~/utils.ts'; +import { db } from './db.ts'; + +const names = mssqlTable('names', { + id: int('id').identity().primaryKey(), + name: text('name'), + authorId: int('author_id'), +}); + +const n1 = db + .select({ + id: names.id, + name: names.name, + authorId: names.authorId, + count1: sql`count(1)::int`.as('count1'), + }) + .from(names) + .groupBy(names.id, names.name, names.authorId) + .as('n1'); + +const n2 = db + .select({ + id: names.id, + authorId: names.authorId, + totalCount: sql`count(1)::int`.as('totalCount'), + }) + .from(names) + .groupBy(names.id, names.authorId) + .as('n2'); + +const result = await db + .select({ + name: n1.name, + authorId: n1.authorId, + count1: n1.count1, + totalCount: n2.totalCount, + }) + .from(n1) + .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))); + +Expect< + Equal< + { + name: string | null; + authorId: number | null; + count1: number; + totalCount: number; + }[], + typeof result + > +>; + +const names2 = alias(names, 'names2'); + +const sq1 = db + .select({ + id: names.id, + name: names.name, + id2: names2.id, + }) + .from(names) + .leftJoin(names2, eq(names.name, names2.name)) + .as('sq1'); + +const res = await db.select().from(sq1); + +Expect< + Equal< + { + id: number; + name: string | null; + id2: number | null; + }[], + typeof res + > +>; + +{ + const sq = db.select({ count: sql`count(1)::int` }).from(names).as('sq'); + Expect ? true : false>; +} + +const sqUnion = db.select().from(names).union(db.select().from(names2)).as('sqUnion'); + +const resUnion = await db.select().from(sqUnion); + +Expect< + Equal<{ + id: number; + name: string | null; + authorId: number | null; + }[], typeof resUnion> +>; diff --git a/drizzle-orm/type-tests/mssql/tables-rel.ts b/drizzle-orm/type-tests/mssql/tables-rel.ts new file mode 100644 index 0000000000..9428466a4f --- /dev/null +++ b/drizzle-orm/type-tests/mssql/tables-rel.ts @@ -0,0 +1,79 @@ +import { relations } from '~/_relations.ts'; +import { datetime, foreignKey, int, mssqlTable, text } from '~/mssql-core/index.ts'; + +export const users = mssqlTable('users', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').references(() => cities.id).notNull(), + homeCityId: int('home_city_id').references(() => cities.id), + createdAt: datetime('created_at').notNull(), +}); +export const usersConfig = relations(users, ({ one, many }) => ({ + city: one(cities, { relationName: 'UsersInCity', fields: [users.cityId], references: [cities.id] }), + homeCity: one(cities, { fields: [users.homeCityId], references: [cities.id] }), + posts: many(posts), + comments: many(comments), +})); + +export const cities = mssqlTable('cities', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), +}); +export const citiesConfig = relations(cities, ({ many }) => ({ + users: many(users, { relationName: 'UsersInCity' }), +})); + +export const posts = mssqlTable('posts', { + id: int('id').identity().primaryKey(), + title: text('title').notNull(), + authorId: int('author_id').references(() => users.id), +}); +export const postsConfig = relations(posts, ({ one, many }) => ({ + author: one(users, { fields: [posts.authorId], references: [users.id] }), + comments: many(comments), +})); + +export const comments = mssqlTable('comments', { + id: int('id').identity().primaryKey(), + postId: int('post_id').references(() => posts.id).notNull(), + authorId: int('author_id').references(() => users.id), + text: text('text').notNull(), +}); +export const commentsConfig = relations(comments, ({ one }) => ({ + post: one(posts, { fields: [comments.postId], references: [posts.id] }), + author: one(users, { fields: [comments.authorId], references: [users.id] }), +})); + +export const books = mssqlTable('books', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), +}); +export const booksConfig = relations(books, ({ many }) => ({ + authors: many(bookAuthors), +})); + +export const bookAuthors = mssqlTable('book_authors', { + bookId: int('book_id').references(() => books.id).notNull(), + authorId: int('author_id').references(() => users.id).notNull(), + role: text('role').notNull(), +}); +export const bookAuthorsConfig = relations(bookAuthors, ({ one }) => ({ + book: one(books, { fields: [bookAuthors.bookId], references: [books.id] }), + author: one(users, { fields: [bookAuthors.authorId], references: [users.id] }), +})); + +export const node = mssqlTable('node', { + id: int('id').identity().primaryKey(), + parentId: int('parent_id'), + leftId: int('left_id'), + rightId: int('right_id'), +}, (node) => [ + foreignKey({ name: 'name8', columns: [node.parentId], foreignColumns: [node.id] }), + foreignKey({ name: 'name9', columns: [node.leftId], foreignColumns: [node.id] }), + foreignKey({ name: 'name10', columns: [node.rightId], foreignColumns: [node.id] }), +]); +export const nodeRelations = relations(node, ({ one }) => ({ + parent: one(node, { fields: [node.parentId], references: [node.id] }), + left: one(node, { fields: [node.leftId], references: [node.id] }), + right: one(node, { fields: [node.rightId], references: [node.id] }), +})); diff --git a/drizzle-orm/type-tests/mssql/tables.ts b/drizzle-orm/type-tests/mssql/tables.ts new file mode 100644 index 0000000000..78d80c7e9a --- /dev/null +++ b/drizzle-orm/type-tests/mssql/tables.ts @@ -0,0 +1,514 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import type { BuildColumn, GeneratedColumnConfig, InferSelectModel, Simplify } from '~/index.ts'; +import { + bigint, + char, + check, + customType, + date, + datetime, + decimal, + foreignKey, + index, + int, + type MsSqlColumn, + mssqlTable, + nchar, + nvarchar, + primaryKey, + text, + uniqueIndex, + varchar, +} from '~/mssql-core/index.ts'; +import { mssqlSchema } from '~/mssql-core/schema.ts'; +import { mssqlView, type MsSqlViewWithSelection } from '~/mssql-core/view.ts'; +import { eq, gt } from '~/sql/expressions'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; + +export const users = mssqlTable( + 'users_table', + { + id: int('id').identity().primaryKey(), + homeCity: int('home_city') + .notNull() + .references(() => cities.id), + currentCity: int('current_city').references(() => cities.id), + serialNullable: int('serial1').identity(), + serialNotNull: int('serial2').identity(), + class: text('class', { enum: ['A', 'C'] }).notNull(), + subClass: text('sub_class', { enum: ['B', 'D'] }), + text: text('text'), + age1: int('age1').notNull(), + createdAt: datetime('created_at', { mode: 'date' }).default(sql`current_timestamp`).notNull(), + enumCol: text('enum_col', { enum: ['a', 'b', 'c'] }).notNull(), + }, + (users) => [ + uniqueIndex('usersAge1Idx').on(users.class), + index('usersAge2Idx').on(users.class), + uniqueIndex('uniqueClass') + .on(users.class, users.subClass), + check('legalAge', sql`${users.age1} > 18`), + foreignKey({ name: 'fk_1', columns: [users.subClass], foreignColumns: [classes.subClass] }), + foreignKey({ + name: 'fk_2', + columns: [users.class, users.subClass], + foreignColumns: [classes.class, classes.subClass], + }), + primaryKey({ columns: [users.age1, users.class], name: 'custom_name' }), + ], +); + +export const cities = mssqlTable('cities_table', { + id: int('id').identity().primaryKey(), + name: text('name_db').notNull(), + population: int('population').default(0), +}, (cities) => [ + index('citiesNameIdx').on(cities.id), +]); + +Expect< + Equal<{ + id: number; + name: string; + population: number | null; + }, InferSelectModel> +>; + +export const customSchema = mssqlSchema('custom_schema'); + +export const citiesCustom = customSchema.table('cities_table', { + id: int('id').identity().primaryKey(), + name: text('name_db').notNull(), + population: int('population').default(0), +}, (cities) => [ + index('citiesNameIdx').on(cities.id), +]); + +Expect>; + +export const classes = mssqlTable('classes_table', { + id: int('id').identity().primaryKey(), + class: text('class', { enum: ['A', 'C'] }), + subClass: text('sub_class', { enum: ['B', 'D'] }).notNull(), +}); + +/* export const classes2 = mssqlTable('classes_table', { + id: serial().primaryKey(), + class: text({ enum: ['A', 'C'] }).$dbName('class_db'), + subClass: text({ enum: ['B', 'D'] }).notNull(), +}); */ + +export const newYorkers = mssqlView('new_yorkers') + .with({ checkOption: true, encryption: false, schemaBinding: true, viewMetadata: false }) + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + +Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + MsSqlViewWithSelection<'new_yorkers', false, { + userId: MsSqlColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 'MsSqlInt'; + data: number; + driverParam: number; + notNull: true; + hasDefault: true; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + tableName: 'new_yorkers'; + enumValues: undefined; + identity: undefined; + baseColumn: never; + generated: GeneratedColumnConfig; + }>; + cityId: MsSqlColumn<{ + name: 'id'; + dataType: 'number'; + columnType: 'MsSqlInt'; + data: number; + driverParam: number; + notNull: false; + hasDefault: true; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + identity: undefined; + generated: GeneratedColumnConfig; + }>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > +>; + +{ + const newYorkers = customSchema.view('new_yorkers') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + MsSqlViewWithSelection<'new_yorkers', false, { + userId: MsSqlColumn<{ + name: 'id'; + tableName: 'new_yorkers'; + dataType: 'number'; + columnType: 'MsSqlInt'; + data: number; + driverParam: number; + notNull: true; + hasDefault: true; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + enumValues: undefined; + baseColumn: never; + identity: undefined; + generated: GeneratedColumnConfig; + }, object>; + cityId: MsSqlColumn<{ + name: 'id'; + tableName: 'new_yorkers'; + dataType: 'number'; + columnType: 'MsSqlInt'; + data: number; + driverParam: number; + notNull: false; + hasDefault: true; + isPrimaryKey: true; + isAutoincrement: false; + hasRuntimeDefault: false; + enumValues: undefined; + baseColumn: never; + identity: undefined; + generated: GeneratedColumnConfig; + }, object>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > + >; +} + +{ + const newYorkers = mssqlView('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }) + .as( + sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ + eq(cities.id, users.homeCity) + } where ${gt(users.age1, 18)}`, + ); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + MsSqlViewWithSelection<'new_yorkers', false, { + userId: MsSqlColumn<{ + name: 'user_id'; + tableName: 'new_yorkers'; + dataType: 'number'; + columnType: 'MsSqlInt'; + data: number; + driverParam: number; + notNull: true; + hasDefault: false; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + enumValues: undefined; + baseColumn: never; + identity: undefined; + generated: undefined; + }, {}>; + cityId: MsSqlColumn<{ + name: 'city_id'; + tableName: 'new_yorkers'; + dataType: 'number'; + columnType: 'MsSqlInt'; + data: number; + driverParam: number; + notNull: false; + hasDefault: false; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + enumValues: undefined; + baseColumn: never; + identity: undefined; + generated: undefined; + }, {}>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > + >; +} + +{ + const newYorkers = customSchema.view('new_yorkers', { + userId: int('user_id').notNull(), + cityId: int('city_id'), + }).existing(); + + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + MsSqlViewWithSelection<'new_yorkers', true, { + userId: MsSqlColumn<{ + name: 'user_id'; + dataType: 'number'; + columnType: 'MsSqlInt'; + data: number; + driverParam: number; + hasDefault: false; + notNull: true; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + }, {}>; + cityId: MsSqlColumn<{ + name: 'city_id'; + notNull: false; + hasDefault: false; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + dataType: 'number'; + columnType: 'MsSqlInt'; + data: number; + driverParam: number; + tableName: 'new_yorkers'; + enumValues: undefined; + baseColumn: never; + generated: undefined; + identity: undefined; + }, {}>; + }>, + // @ts-ignore - TODO: Remake type checks for new columns + typeof newYorkers + > + >; +} + +{ + const customText = customType<{ data: string }>({ + dataType() { + return 'text'; + }, + }); + + const t = customText('name').notNull(); + Expect< + // @ts-ignore - TODO: Remake type checks for new columns + Equal< + { + brand: 'Column'; + name: 'name'; + tableName: 'table'; + dataType: 'custom'; + columnType: 'MsSqlCustomColumn'; + data: string; + driverParam: unknown; + notNull: true; + hasDefault: false; + isPrimaryKey: false; + isAutoincrement: false; + hasRuntimeDefault: false; + enumValues: undefined; + baseColumn: never; + dialect: 'mssql'; + identity: undefined; + generated: undefined; + }, + // @ts-ignore - TODO: Remake type checks for new columns + Simplify['_']> + > + >; +} + +{ + mssqlTable('test', { + bigint: bigint('bigint', { mode: 'bigint' }), + number: bigint('number', { mode: 'number' }), + date: date('date').default(new Date()), + date2: date('date2', { mode: 'date' }).default(new Date()), + date3: date('date3', { mode: 'string' }).default('2020-01-01'), + date4: date('date4', { mode: undefined }).default(new Date()), + datetime: datetime('datetime').default(new Date()), + datetime2: datetime('datetime2', { mode: 'date' }).default(new Date()), + datetime3: datetime('datetime3', { mode: 'string' }).default('2020-01-01'), + datetime4: datetime('datetime4', { mode: undefined }).default(new Date()), + }); +} + +{ + mssqlTable('test', { + col1: decimal('col1').default('1'), + }); +} + +{ + const test = mssqlTable('test', { + test1: text('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test2: varchar('test', { enum: ['a', 'b', 'c'] }).notNull(), + test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).notNull(), + test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).notNull(), + test5: text('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test6: text('test', { enum: ['a', 'b', 'c'] }).notNull(), + test7: nvarchar('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test8: nvarchar('test', { enum: ['a', 'b', 'c'] }).notNull(), + test9: char('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test10: char('test', { enum: ['a', 'b', 'c'] }).notNull(), + test11: nchar('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test12: nchar('test', { enum: ['a', 'b', 'c'] }).notNull(), + test13: char('test', { enum: ['a', 'b', 'c'] as const }).notNull(), + test14: char('test', { enum: ['a', 'b', 'c'] }).notNull(), + test15: text('test').notNull(), + }); + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; + Expect>; +} + +{ + const getUsersTable = (schemaName: TSchema) => { + return mssqlSchema(schemaName).table('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + }; + + const users1 = getUsersTable('id1'); + Expect>; + + const users2 = getUsersTable('id2'); + Expect>; +} + +{ + const internalStaff = mssqlTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = mssqlTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = mssqlTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin( + customUser, + eq(internalStaff.userId, customUser.id), + ).as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + Expect< + Equal<{ + internal_staff: { + internal_staff: { + userId: number; + }; + custom_user: { + id: number | null; + }; + } | null; + ticket: { + staffId: number; + }; + }[], typeof mainQuery> + >; +} + +{ + const newYorkers = mssqlView('new_yorkers') + .as((qb) => { + const sq = qb + .$with('sq') + .as( + qb.select({ userId: users.id, cityId: cities.id }) + .from(users) + .leftJoin(cities, eq(cities.id, users.homeCity)) + .where(sql`${users.age1} > 18`), + ); + return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); + }); + + await db.select().from(newYorkers).leftJoin(newYorkers, eq(newYorkers.userId, newYorkers.userId)); +} + +{ + const test = mssqlTable('test', { + id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), + }); + + Expect< + Equal<{ + id?: string; + }, typeof test.$inferInsert> + >; +} + +{ + mssqlTable('test', { + id: int('id').$default(() => 1), + id2: int('id').$defaultFn(() => 1), + // @ts-expect-error - should be number + id3: int('id').$default(() => '1'), + // @ts-expect-error - should be number + id4: int('id').$defaultFn(() => '1'), + }); +} diff --git a/drizzle-orm/type-tests/mssql/update.ts b/drizzle-orm/type-tests/mssql/update.ts new file mode 100644 index 0000000000..ff7847020e --- /dev/null +++ b/drizzle-orm/type-tests/mssql/update.ts @@ -0,0 +1,145 @@ +import { type Equal, Expect } from 'type-tests/utils.ts'; +import type { MsSqlUpdate } from '~/mssql-core/index.ts'; +import type { MsSqlQueryResult } from '~/node-mssql/session.ts'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; +import { users } from './tables.ts'; + +const update = await db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}); +Expect>; + +const updateStmt = db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).prepare(); +const updatePrepared = await updateStmt.execute(); +Expect>; + +const updateSql = await db.update(users).set({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, +}); +Expect>; + +const updateSqlStmt = db.update(users).set({ + homeCity: sql`123`, + class: 'A', + age1: 1, + enumCol: sql`foobar`, +}).prepare(); +const updateSqlPrepared = await updateSqlStmt.execute(); +Expect>; + +const updateOutput = await db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).output(); +Expect>; + +const updateOutputWithTrue = await db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).output({ deleted: true, inserted: true }); +Expect< + Equal<{ + inserted: typeof users.$inferSelect; + deleted: typeof users.$inferSelect; + }[], typeof updateOutputWithTrue> +>; + +const updateOutputWithTrue2 = await db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).output({ deleted: true }); +Expect< + Equal<{ + deleted: typeof users.$inferSelect; + }[], typeof updateOutputWithTrue2> +>; + +const updateOutputWithTrue3 = await db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).output({ inserted: true }); +Expect< + Equal<{ + inserted: typeof users.$inferSelect; + }[], typeof updateOutputWithTrue3> +>; + +const updateOutputStmt = db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).output().prepare(); +const updateOutputPrepared = await updateOutputStmt.execute(); +Expect>; + +const updateOutputPartial = await db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).output({ inserted: { cityHome: users.homeCity } }); +Expect>; + +const updateOutputPartialStmt = db.update(users).set({ + homeCity: 1, + class: 'A', + age1: 1, + enumCol: 'a', +}).output({ deleted: { cityHome: users.homeCity } }).prepare(); +const updateOutputPartialPrepared = await updateOutputPartialStmt.execute(); +Expect>; + +{ + function dynamic(qb: T) { + return qb.where(sql``); + } + + const qbBase = db.update(users).set({}).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + function dynamic(qb: T) { + return qb.output().where(sql``); + } + + const qbBase = db.update(users).set({}).$dynamic(); + const qb = dynamic(qbBase); + const result = await qb; + Expect>; +} + +{ + db + .update(users) + .set({}) + .where(sql``) + // @ts-expect-error method was already called + .where(sql``); + + // @ts-expect-error Can't update and identity column + db.update(users).set({ id: 2 }); +} diff --git a/drizzle-orm/type-tests/mssql/with.ts b/drizzle-orm/type-tests/mssql/with.ts new file mode 100644 index 0000000000..049787dccf --- /dev/null +++ b/drizzle-orm/type-tests/mssql/with.ts @@ -0,0 +1,65 @@ +import type { Equal } from 'type-tests/utils.ts'; +import { Expect } from 'type-tests/utils.ts'; +import { int, mssqlTable, text } from '~/mssql-core/index.ts'; +import { gt, inArray } from '~/sql/expressions'; +import { sql } from '~/sql/sql.ts'; +import { db } from './db.ts'; + +const orders = mssqlTable('orders', { + id: int('id').identity().primaryKey(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), +}); + +{ + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: orders.region, + totalSales: orders.amount, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})`, + productSales: sql`sum(${orders.amount})`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))); + + Expect< + Equal<{ + region: string; + product: string; + productUnits: number; + productSales: number; + }[], typeof result> + >; +} diff --git a/drizzle-orm/type-tests/mysql/db-rel.ts b/drizzle-orm/type-tests/mysql/db-rel.ts index 66ad2ace24..7b2c9672d1 100644 --- a/drizzle-orm/type-tests/mysql/db-rel.ts +++ b/drizzle-orm/type-tests/mysql/db-rel.ts @@ -7,7 +7,7 @@ import * as schema from './tables-rel.ts'; const { Pool } = pg; const pdb = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'] }); -const db = drizzle(pdb, { schema }); +const db = drizzle({ client: pdb, schema }); { const result = await db._query.users.findMany({ diff --git a/drizzle-orm/type-tests/mysql/db.ts b/drizzle-orm/type-tests/mysql/db.ts index ac32f645ed..c4002bfeaf 100644 --- a/drizzle-orm/type-tests/mysql/db.ts +++ b/drizzle-orm/type-tests/mysql/db.ts @@ -3,14 +3,14 @@ import { drizzle } from '~/mysql2/index.ts'; const pool = createPool({}); -export const db = drizzle(pool); +export const db = drizzle({ client: pool }); { - drizzle(pool); + drizzle({ client: pool }); // @ts-expect-error - missing mode - drizzle(pool, { schema: {} }); - drizzle(pool, { schema: {}, mode: 'default' }); - drizzle(pool, { schema: {}, mode: 'planetscale' }); - drizzle(pool, { mode: 'default' }); - drizzle(pool, { mode: 'planetscale' }); + drizzle({ client: pool, schema: {} }); + drizzle({ client: pool, schema: {}, mode: 'default' }); + drizzle({ client: pool, schema: {}, mode: 'planetscale' }); + drizzle({ client: pool, mode: 'default' }); + drizzle({ client: pool, mode: 'planetscale' }); } diff --git a/drizzle-orm/type-tests/mysql/tables-rel.ts b/drizzle-orm/type-tests/mysql/tables-rel.ts index 3e00a63714..ac26616bb8 100644 --- a/drizzle-orm/type-tests/mysql/tables-rel.ts +++ b/drizzle-orm/type-tests/mysql/tables-rel.ts @@ -67,11 +67,11 @@ export const node = mysqlTable('node', { parentId: int('parent_id'), leftId: int('left_id'), rightId: int('right_id'), -}, (node) => ({ - fk1: foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), - fk2: foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), - fk3: foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), -})); +}, (node) => [ + foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), +]); export const nodeRelations = relations(node, ({ one }) => ({ parent: one(node, { fields: [node.parentId], references: [node.id] }), left: one(node, { fields: [node.leftId], references: [node.id] }), diff --git a/drizzle-orm/type-tests/mysql/tables.ts b/drizzle-orm/type-tests/mysql/tables.ts index bbd651fc25..6d9eb246a3 100644 --- a/drizzle-orm/type-tests/mysql/tables.ts +++ b/drizzle-orm/type-tests/mysql/tables.ts @@ -63,31 +63,31 @@ export const users = mysqlTable( createdAt: timestamp('created_at', { mode: 'date' }).notNull().defaultNow(), enumCol: mysqlEnum('enum_col', ['a', 'b', 'c']).notNull(), }, - (users) => ({ - usersAge1Idx: uniqueIndex('usersAge1Idx').on(users.class), - usersAge2Idx: index('usersAge2Idx').on(users.class), - uniqueClass: uniqueIndex('uniqueClass') + (users) => [ + uniqueIndex('usersAge1Idx').on(users.class), + index('usersAge2Idx').on(users.class), + uniqueIndex('uniqueClass') .on(users.class, users.subClass) .lock('default') .algorithm('copy') .using(`btree`), - legalAge: check('legalAge', sql`${users.age1} > 18`), - usersClassFK: foreignKey({ columns: [users.subClass], foreignColumns: [classes.subClass] }), - usersClassComplexFK: foreignKey({ + check('legalAge', sql`${users.age1} > 18`), + foreignKey({ columns: [users.subClass], foreignColumns: [classes.subClass] }), + foreignKey({ columns: [users.class, users.subClass], foreignColumns: [classes.class, classes.subClass], }), - pk: primaryKey(users.age1, users.class), - }), + primaryKey({ columns: [users.age1, users.class] }), + ], ); export const cities = mysqlTable('cities_table', { id: serial('id').primaryKey(), name: text('name_db').notNull(), population: int('population').default(0), -}, (cities) => ({ - citiesNameIdx: index('citiesNameIdx').on(cities.id), -})); +}, (cities) => [ + index('citiesNameIdx').on(cities.id), +]); Expect< // @ts-ignore - TODO: Remake type checks for new columns @@ -178,9 +178,9 @@ export const citiesCustom = customSchema.table('cities_table', { id: serial('id').primaryKey(), name: text('name_db').notNull(), population: int('population').default(0), -}, (cities) => ({ - citiesNameIdx: index('citiesNameIdx').on(cities.id), -})); +}, (cities) => [ + index('citiesNameIdx').on(cities.id), +]); Expect>; @@ -764,12 +764,10 @@ Expect< createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().onUpdateNow(), }, - (table) => { - return { - emailLogId: primaryKey({ columns: [table.id], name: 'email_log_id' }), - emailLogMessageIdUnique: unique('email_log_message_id_unique').on(table.messageId), - }; - }, + (table) => [ + primaryKey({ columns: [table.id] }), + unique('email_log_message_id_unique').on(table.messageId), + ], ); Expect< diff --git a/drizzle-orm/type-tests/pg/db-rel.ts b/drizzle-orm/type-tests/pg/db-rel.ts index eb19416bb2..856dd29403 100644 --- a/drizzle-orm/type-tests/pg/db-rel.ts +++ b/drizzle-orm/type-tests/pg/db-rel.ts @@ -7,7 +7,7 @@ import * as schema from './tables-rel.ts'; const { Pool } = pg; const pdb = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'] }); -const db = drizzle(pdb, { schema }); +const db = drizzle({ client: pdb, schema }); { const result = await db._query.users.findMany({ diff --git a/drizzle-orm/type-tests/pg/db.ts b/drizzle-orm/type-tests/pg/db.ts index 8cdd301166..f1603f11d2 100644 --- a/drizzle-orm/type-tests/pg/db.ts +++ b/drizzle-orm/type-tests/pg/db.ts @@ -3,4 +3,4 @@ import { drizzle } from '~/node-postgres/index.ts'; const { Client } = pg; -export const db = drizzle(new Client()); +export const db = drizzle({ client: new Client() }); diff --git a/drizzle-orm/type-tests/pg/tables-rel.ts b/drizzle-orm/type-tests/pg/tables-rel.ts index 9a9a1921bc..ef742ae4b4 100644 --- a/drizzle-orm/type-tests/pg/tables-rel.ts +++ b/drizzle-orm/type-tests/pg/tables-rel.ts @@ -67,11 +67,11 @@ export const node = pgTable('node', { parentId: integer('parent_id'), leftId: integer('left_id'), rightId: integer('right_id'), -}, (node) => ({ - fk1: foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), - fk2: foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), - fk3: foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), -})); +}, (node) => [ + foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), +]); export const nodeRelations = relations(node, ({ one }) => ({ parent: one(node, { fields: [node.parentId], references: [node.id] }), left: one(node, { fields: [node.leftId], references: [node.id] }), diff --git a/drizzle-orm/type-tests/pg/tables.ts b/drizzle-orm/type-tests/pg/tables.ts index 6483712c9c..e2859ea761 100644 --- a/drizzle-orm/type-tests/pg/tables.ts +++ b/drizzle-orm/type-tests/pg/tables.ts @@ -131,9 +131,7 @@ export const cities = pgTable('cities_table', { id: serial('id').primaryKey(), name: text('name').notNull(), population: integer('population').default(0), -}, (cities) => ({ - citiesNameIdx: index().on(cities.id), -})); +}, (cities) => [index().on(cities.id)]); export const smallSerialTest = pgTable('cities_table', { id: smallserial('id').primaryKey(), diff --git a/drizzle-orm/type-tests/singlestore/db.ts b/drizzle-orm/type-tests/singlestore/db.ts index b314e504d7..5eed69d193 100644 --- a/drizzle-orm/type-tests/singlestore/db.ts +++ b/drizzle-orm/type-tests/singlestore/db.ts @@ -3,11 +3,11 @@ import { drizzle } from '~/singlestore/index.ts'; const pool = createPool({}); -export const db = drizzle(pool); +export const db = drizzle({ client: pool }); { - drizzle(pool); - drizzle(pool, { schema: {} }); - drizzle(pool, { schema: {} }); - drizzle(pool, {}); + drizzle({ client: pool }); + drizzle({ client: pool, schema: {} }); + drizzle({ client: pool, schema: {} }); + drizzle({ client: pool }); } diff --git a/drizzle-orm/type-tests/singlestore/tables.ts b/drizzle-orm/type-tests/singlestore/tables.ts index de8c2b339d..9b58e57a0a 100644 --- a/drizzle-orm/type-tests/singlestore/tables.ts +++ b/drizzle-orm/type-tests/singlestore/tables.ts @@ -60,25 +60,23 @@ export const users = singlestoreTable( createdAt: timestamp('created_at', { mode: 'date' }).notNull().defaultNow(), enumCol: singlestoreEnum('enum_col', ['a', 'b', 'c']).notNull(), }, - (users) => ({ - usersAge1Idx: uniqueIndex('usersAge1Idx').on(users.class), - usersAge2Idx: index('usersAge2Idx').on(users.class), - uniqueClass: uniqueIndex('uniqueClass') + (users) => [ + uniqueIndex('usersAge1Idx').on(users.class), + index('usersAge2Idx').on(users.class), + uniqueIndex('uniqueClass') .on(users.class, users.subClass) .lock('default') .algorithm('copy') .using(`btree`), - pk: primaryKey(users.age1, users.class), - }), + primaryKey({ columns: [users.age1, users.class] }), + ], ); export const cities = singlestoreTable('cities_table', { id: serial('id').primaryKey(), name: text('name_db').notNull(), population: int('population').default(0), -}, (cities) => ({ - citiesNameIdx: index('citiesNameIdx').on(cities.id), -})); +}, (cities) => [index('citiesNameIdx').on(cities.id)]); Expect< // @ts-ignore - TODO: Remake type checks for new columns @@ -169,9 +167,7 @@ export const citiesCustom = customSchema.table('cities_table', { id: serial('id').primaryKey(), name: text('name_db').notNull(), population: int('population').default(0), -}, (cities) => ({ - citiesNameIdx: index('citiesNameIdx').on(cities.id), -})); +}, (cities) => [index('citiesNameIdx').on(cities.id)]); Expect>; @@ -741,12 +737,10 @@ Expect< createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().onUpdateNow(), }, - (table) => { - return { - emailLogId: primaryKey({ columns: [table.id], name: 'email_log_id' }), - emailLogMessageIdUnique: unique('email_log_message_id_unique').on(table.messageId), - }; - }, + (table) => [ + primaryKey({ columns: [table.id], name: 'email_log_id' }), + unique('email_log_message_id_unique').on(table.messageId), + ], ); Expect< diff --git a/drizzle-orm/type-tests/sqlite/db.ts b/drizzle-orm/type-tests/sqlite/db.ts index 1950c7435d..f039592dcf 100644 --- a/drizzle-orm/type-tests/sqlite/db.ts +++ b/drizzle-orm/type-tests/sqlite/db.ts @@ -10,7 +10,7 @@ const bunClient = new BunDatabase(':memory:'); declare const d1: D1Database; declare const durableSql: DurableObjectStorage; -export const db = drizzleBetterSqlite3(client); -export const bunDb = drizzleBun(bunClient); +export const db = drizzleBetterSqlite3({ client }); +export const bunDb = drizzleBun({ client: bunClient }); export const d1Db = drizzleD1(d1); export const durableSqliteDb = durableSqlite(durableSql); diff --git a/drizzle-orm/type-tests/sqlite/tables.ts b/drizzle-orm/type-tests/sqlite/tables.ts index 531e1e520a..265365bfbd 100644 --- a/drizzle-orm/type-tests/sqlite/tables.ts +++ b/drizzle-orm/type-tests/sqlite/tables.ts @@ -39,27 +39,27 @@ export const users = sqliteTable( createdAt: integer('created_at', { mode: 'timestamp' }).notNull().defaultNow(), enumCol: text('enum_col', { enum: ['a', 'b', 'c'] }).notNull(), }, - (users) => ({ - usersAge1Idx: uniqueIndex('usersAge1Idx').on(users.class), - usersAge2Idx: index('usersAge2Idx').on(users.class), - uniqueClass: uniqueIndex('uniqueClass') + (users) => [ + uniqueIndex('usersAge1Idx').on(users.class), + index('usersAge2Idx').on(users.class), + uniqueIndex('uniqueClass') .on(users.class, users.subClass) .where( sql`${users.class} is not null`, ), - uniqueClassEvenBetterThanPrisma: uniqueIndex('uniqueClass') + uniqueIndex('uniqueClass') .on(users.class, users.subClass) .where( sql`${users.class} is not null`, ), - legalAge: check('legalAge', sql`${users.age1} > 18`), - usersClassFK: foreignKey(() => ({ columns: [users.subClass], foreignColumns: [classes.subClass] })), - usersClassComplexFK: foreignKey(() => ({ + check('legalAge', sql`${users.age1} > 18`), + foreignKey({ columns: [users.subClass], foreignColumns: [classes.subClass] }), + foreignKey({ columns: [users.class, users.subClass], foreignColumns: [classes.class, classes.subClass], - })), - pk: primaryKey(users.age1, users.class), - }), + }), + primaryKey({ columns: [users.age1, users.class] }), + ], ); export type User = typeof users.$inferSelect; diff --git a/drizzle-orm/vitest.config.ts b/drizzle-orm/vitest.config.ts index 4a9a7e6599..945b92f298 100644 --- a/drizzle-orm/vitest.config.ts +++ b/drizzle-orm/vitest.config.ts @@ -1,5 +1,6 @@ import { viteCommonjs } from '@originjs/vite-plugin-commonjs'; import tsconfigPaths from 'vite-tsconfig-paths'; +// oxlint-disable-next-line extensions import { defineConfig } from 'vitest/config'; export default defineConfig({ diff --git a/drizzle-seed/.gitignore b/drizzle-seed/.gitignore new file mode 100644 index 0000000000..2eea525d88 --- /dev/null +++ b/drizzle-seed/.gitignore @@ -0,0 +1 @@ +.env \ No newline at end of file diff --git a/drizzle-seed/package.json b/drizzle-seed/package.json index 1861521954..727434f98b 100644 --- a/drizzle-seed/package.json +++ b/drizzle-seed/package.json @@ -1,13 +1,15 @@ { "name": "drizzle-seed", - "version": "0.3.1", + "version": "1.0.0-beta.2", "main": "index.js", "type": "module", "scripts": { - "build": "tsx scripts/build.ts", + "build": "tsc -p ./tsconfig.json && tsx scripts/build.ts", + "build:artifact": "pnpm run build", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", + "pack:artifact": "pnpm run pack", "test": "vitest --config ./vitest.config.ts", - "test:types": "cd type-tests && tsc", + "test:types": "cd type-tests && tsc -p ./../tsconfig.json", "generate-for-tests:pg": "drizzle-kit generate --config=./src/tests/pg/drizzle.config.ts", "generate-for-tests:mysql": "drizzle-kit generate --config=./src/tests/mysql/drizzle.config.ts", "generate-for-tests:sqlite": "drizzle-kit generate --config=./src/tests/sqlite/drizzle.config.ts", @@ -65,11 +67,11 @@ } }, "peerDependencies": { - "drizzle-orm": ">=1.0.0-beta.1" + "drizzle-orm": ">=1.0.0-beta.2" }, "peerDependenciesMeta": { "drizzle-orm": { - "optional": true + "optional": false } }, "devDependencies": { @@ -77,11 +79,14 @@ "@electric-sql/pglite": "^0.2.12", "@rollup/plugin-terser": "^0.4.4", "@rollup/plugin-typescript": "^11.1.6", + "@types/async-retry": "^1.4.8", "@types/better-sqlite3": "^7.6.11", "@types/dockerode": "^3.3.31", + "@types/mssql": "^9.1.4", "@types/node": "^22.5.4", "@types/pg": "^8.11.6", "@types/uuid": "^10.0.0", + "async-retry": "^1.3.3", "better-sqlite3": "^11.1.2", "cpy": "^11.1.0", "dockerode": "^4.0.6", @@ -89,6 +94,7 @@ "drizzle-kit": "workspace:./drizzle-kit/dist", "drizzle-orm": "workspace:./drizzle-orm/dist", "get-port": "^7.1.0", + "mssql": "^11.0.1", "mysql2": "^3.14.1", "pg": "^8.12.0", "resolve-tspaths": "^0.8.19", @@ -96,7 +102,6 @@ "tslib": "^2.7.0", "tsx": "^4.19.0", "uuid": "^10.0.0", - "vitest": "^3.1.3", "zx": "^8.1.5" }, "dependencies": { diff --git a/drizzle-seed/src/services/SeedService.ts b/drizzle-seed/src/SeedService.ts similarity index 57% rename from drizzle-seed/src/services/SeedService.ts rename to drizzle-seed/src/SeedService.ts index d6d6c46572..706878d5c1 100644 --- a/drizzle-seed/src/services/SeedService.ts +++ b/drizzle-seed/src/SeedService.ts @@ -1,23 +1,44 @@ /* eslint-disable drizzle-internal/require-entity-kind */ -import { entityKind, eq, is } from 'drizzle-orm'; +import { entityKind, eq, is, sql } from 'drizzle-orm'; import type { MySqlTable, MySqlTableWithColumns } from 'drizzle-orm/mysql-core'; import { MySqlDatabase } from 'drizzle-orm/mysql-core'; import type { PgTable, PgTableWithColumns } from 'drizzle-orm/pg-core'; import { PgDatabase } from 'drizzle-orm/pg-core'; import type { SQLiteTable, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core'; import { BaseSQLiteDatabase } from 'drizzle-orm/sqlite-core'; +import { generatorsMap } from './generators/GeneratorFuncs.ts'; import type { + AbstractGenerator, + GenerateArray, + GenerateCompositeUniqueKey, + GenerateHashFromString, + GenerateWeightedCount, +} from './generators/Generators.ts'; +import type { + DbType, + GeneratedValueType, GeneratePossibleGeneratorsColumnType, GeneratePossibleGeneratorsTableType, RefinementsType, TableGeneratorsType, -} from '../types/seedService.ts'; -import type { Column, Prettify, Relation, Table } from '../types/tables.ts'; -import { generatorsMap } from './GeneratorFuncs.ts'; -import type { AbstractGenerator, GenerateArray, GenerateInterval, GenerateWeightedCount } from './Generators.ts'; - -import { latestVersion } from './apiVersion.ts'; -import { equalSets, generateHashFromString } from './utils.ts'; + TableType, +} from './types/seedService.ts'; +import type { Prettify, Relation, Table } from './types/tables.ts'; + +import type { CockroachTable, CockroachTableWithColumns } from 'drizzle-orm/cockroach-core'; +import { CockroachDatabase } from 'drizzle-orm/cockroach-core'; +import type { MsSqlTable, MsSqlTableWithColumns } from 'drizzle-orm/mssql-core'; +import { getTableConfig, MsSqlDatabase } from 'drizzle-orm/mssql-core'; +import type { SingleStoreTable, SingleStoreTableWithColumns } from 'drizzle-orm/singlestore-core'; +import { SingleStoreDatabase } from 'drizzle-orm/singlestore-core'; +import { selectGeneratorForCockroachColumn } from './cockroach-core/selectGensForColumn.ts'; +import { latestVersion } from './generators/apiVersion.ts'; +import { selectGeneratorForMssqlColumn } from './mssql-core/selectGensForColumn.ts'; +import { selectGeneratorForMysqlColumn } from './mysql-core/selectGensForColumn.ts'; +import { selectGeneratorForPostgresColumn } from './pg-core/selectGensForColumn.ts'; +import { selectGeneratorForSingleStoreColumn } from './singlestore-core/selectGensForColumn.ts'; +import { selectGeneratorForSqlite } from './sqlite-core/selectGensForColumn.ts'; +import { equalSets } from './utils.ts'; export class SeedService { static readonly entityKind: string = 'SeedService'; @@ -29,10 +50,12 @@ export class SeedService { private mysqlMaxParametersNumber = 100000; // SQLITE_MAX_VARIABLE_NUMBER, which by default equals to 999 for SQLite versions prior to 3.32.0 (2020-05-22) or 32766 for SQLite versions after 3.32.0. private sqliteMaxParametersNumber = 32766; + private mssqlMaxParametersNumber = 2100; private version?: number; + private hashFromStringGenerator: GenerateHashFromString | undefined; generatePossibleGenerators = ( - connectionType: 'postgresql' | 'mysql' | 'sqlite', + connectionType: 'postgresql' | 'mysql' | 'sqlite' | 'mssql' | 'cockroach' | 'singlestore', tables: Table[], relations: (Relation & { isCyclic: boolean })[], refinements?: RefinementsType, @@ -45,6 +68,9 @@ export class SeedService { if (Number.isNaN(this.version) || this.version < 1 || this.version > latestVersion) { throw new Error(`Version should be in range [1, ${latestVersion}].`); } + this.hashFromStringGenerator = this.selectVersionOfGenerator( + new generatorsMap.GenerateHashFromString[0](), + ) as GenerateHashFromString; // sorting table in order which they will be filled up (tables with foreign keys case) const { tablesInOutRelations } = this.getInfoFromRelations(relations); @@ -76,6 +102,7 @@ export class SeedService { })); for (const [i, table] of tables.entries()) { + const compositeUniqueKeyGenMap: { [key: string]: GenerateCompositeUniqueKey } = {}; // get foreignKey columns relations const foreignKeyColumns: { [columnName: string]: { table: string; column: string }; @@ -139,7 +166,7 @@ export class SeedService { }[]; weightedCountSeed = customSeed - + generateHashFromString(`${table.name}.${fkTableName}`); + + this.hashFromStringGenerator.generate({ input: `${table.name}.${fkTableName}` }); newTableWithCount = this.getWeightedWithCount( weightedRepeatedValuesCount, @@ -173,6 +200,7 @@ export class SeedService { notNull: col.notNull, primary: col.primary, generatedIdentityType: col.generatedIdentityType, + identity: col.identity, generator: undefined, isCyclic: false, wasDefinedBefore: false, @@ -186,6 +214,22 @@ export class SeedService { && refinements[table.name]!.columns[col.name] !== undefined ) { const genObj = refinements[table.name]!.columns[col.name]!; + if (genObj === false) { + if (col.notNull === true && col.hasDefault === false) { + throw new Error( + `You cannot set the '${col.name}' column in the '${table.name}' table to false in your refinements.` + + `\nDoing so will result in a null value being inserted into the '${col.name}' column,` + + `\nwhich will cause an error because the column has a not null constraint and no default value.`, + ); + } + + // Generating undefined as a value for a column and then inserting it via drizzle-orm + // will result in the value not being inserted into that column. + columnPossibleGenerator.generator = new generatorsMap.GenerateDefault[0]({ defaultValue: undefined }); + columnPossibleGenerator.wasRefined = true; + + continue; + } if (col.columnType.match(/\[\w*]/g) !== null) { if ( @@ -252,20 +296,17 @@ export class SeedService { } } // TODO: rewrite pickGeneratorFor... using new col properties: isUnique and notNull else if (connectionType === 'postgresql') { - columnPossibleGenerator.generator = this.selectGeneratorForPostgresColumn( - table, - col, - ); + columnPossibleGenerator.generator = selectGeneratorForPostgresColumn(table, col); } else if (connectionType === 'mysql') { - columnPossibleGenerator.generator = this.selectGeneratorForMysqlColumn( - table, - col, - ); + columnPossibleGenerator.generator = selectGeneratorForMysqlColumn(table, col); } else if (connectionType === 'sqlite') { - columnPossibleGenerator.generator = this.selectGeneratorForSqlite( - table, - col, - ); + columnPossibleGenerator.generator = selectGeneratorForSqlite(table, col); + } else if (connectionType === 'mssql') { + columnPossibleGenerator.generator = selectGeneratorForMssqlColumn(table, col); + } else if (connectionType === 'cockroach') { + columnPossibleGenerator.generator = selectGeneratorForCockroachColumn(table, col); + } else if (connectionType === 'singlestore') { + columnPossibleGenerator.generator = selectGeneratorForSingleStoreColumn(table, col); } if (columnPossibleGenerator.generator === undefined) { @@ -274,24 +315,88 @@ export class SeedService { ); } + columnPossibleGenerator.generator.typeParams = col.typeParams ?? columnPossibleGenerator.generator.typeParams; const arrayGen = columnPossibleGenerator.generator.replaceIfArray(); if (arrayGen !== undefined) { columnPossibleGenerator.generator = arrayGen; } columnPossibleGenerator.generator.isUnique = col.isUnique; + + // composite unique keys handling + let compositeKeyColumnNames = table.uniqueConstraints.filter((colNames) => colNames.includes(col.name)); + if (compositeKeyColumnNames.some((colNames) => colNames.length === 1)) { + // composite unique key contains only one column, therefore it equals to just unique column + columnPossibleGenerator.generator.isUnique = true; + } + + // removing column from composite unique keys if current column is unique + if (columnPossibleGenerator.generator.isUnique && compositeKeyColumnNames.length > 0) { + const newUniqueConstraints: string[][] = []; + for (const colNames of table.uniqueConstraints) { + if (colNames.includes(col.name)) { + const newColNames = colNames.filter((colName) => colName !== col.name); + if (newColNames.length === 0) continue; + newUniqueConstraints.push(newColNames); + } else { + newUniqueConstraints.push(colNames); + } + } + + table.uniqueConstraints = newUniqueConstraints; + } + + compositeKeyColumnNames = table.uniqueConstraints.filter((colNames) => colNames.includes(col.name)); + if (compositeKeyColumnNames.length > 1) { + throw new Error('Currently, multiple composite unique keys that share the same column are not supported.'); + } + + // to handle composite unique key generation, I will need a unique generator for each column in the composite key + if (compositeKeyColumnNames.length === 1) { + if (columnPossibleGenerator.generator.params.isUnique === false) { + throw new Error( + `To handle the composite unique key on columns: ${compositeKeyColumnNames[0]}, ` + + `column: ${col.name} should either be assigned a generator with isUnique set to true, or have isUnique omitted.`, + ); + } + columnPossibleGenerator.generator.params.isUnique = true; + } + const uniqueGen = columnPossibleGenerator.generator.replaceIfUnique(); if (uniqueGen !== undefined) { columnPossibleGenerator.generator = uniqueGen; } + if ( + compositeKeyColumnNames.length === 1 && !columnPossibleGenerator.generator.isGeneratorUnique + && !(columnPossibleGenerator.generator.getEntityKind() === 'GenerateValuesFromArray') + ) { + throw new Error( + `To handle the composite unique key on columns: ${compositeKeyColumnNames[0]}, ` + + `column: ${col.name} should be assigned a generator with its own unique version.`, + ); + } + // selecting version of generator columnPossibleGenerator.generator = this.selectVersionOfGenerator(columnPossibleGenerator.generator); // TODO: for now only GenerateValuesFromArray support notNull property columnPossibleGenerator.generator.notNull = col.notNull; columnPossibleGenerator.generator.dataType = col.dataType; - columnPossibleGenerator.generator.stringLength = col.typeParams.length; + + // assigning composite key generator + if (compositeKeyColumnNames.length === 1) { + const key = compositeKeyColumnNames[0]!.join('_'); + if (compositeUniqueKeyGenMap[key] === undefined) { + let compositeUniqueKeyGen = new generatorsMap.GenerateCompositeUniqueKey[0](); + compositeUniqueKeyGen.uniqueKey = key; + compositeUniqueKeyGen = this.selectVersionOfGenerator(compositeUniqueKeyGen) as GenerateCompositeUniqueKey; + compositeUniqueKeyGenMap[key] = compositeUniqueKeyGen; + } + + compositeUniqueKeyGenMap[key].addGenerator(col.name, columnPossibleGenerator.generator); + columnPossibleGenerator.generator = compositeUniqueKeyGenMap[key]; + } tablePossibleGenerators.columnsPossibleGenerators.push( columnPossibleGenerator, @@ -308,7 +413,7 @@ export class SeedService { const oldBaseColumnGen = (generator as GenerateArray).params.baseColumnGen; const newBaseColumnGen = this.selectVersionOfGenerator(oldBaseColumnGen); - // newGenerator.baseColumnDataType = oldGenerator.baseColumnDataType; + newBaseColumnGen.typeParams = oldBaseColumnGen.typeParams; (generator as GenerateArray).params.baseColumnGen = newBaseColumnGen; } @@ -331,7 +436,9 @@ export class SeedService { // TODO: for now only GenerateValuesFromArray support notNull property newGenerator.notNull = generator.notNull; newGenerator.dataType = generator.dataType; - newGenerator.stringLength = generator.stringLength; + // newGenerator.stringLength = generator.stringLength; + newGenerator.typeParams = generator.typeParams ?? newGenerator.typeParams; + newGenerator.uniqueKey = generator.uniqueKey; return newGenerator; }; @@ -499,612 +606,6 @@ export class SeedService { return weightedWithCount; }; - // TODO: revise serial part generators - selectGeneratorForPostgresColumn = ( - table: Table, - col: Column, - ) => { - const pickGenerator = (table: Table, col: Column) => { - // ARRAY - if (col.columnType.match(/\[\w*]/g) !== null && col.baseColumn !== undefined) { - const baseColumnGen = this.selectGeneratorForPostgresColumn( - table, - col.baseColumn!, - ) as AbstractGenerator; - if (baseColumnGen === undefined) { - throw new Error(`column with type ${col.baseColumn!.columnType} is not supported for now.`); - } - - // const getBaseColumnDataType = (baseColumn: Column) => { - // if (baseColumn.baseColumn !== undefined) { - // return getBaseColumnDataType(baseColumn.baseColumn); - // } - - // return baseColumn.dataType; - // }; - // const baseColumnDataType = getBaseColumnDataType(col.baseColumn); - - const generator = new generatorsMap.GenerateArray[0]({ baseColumnGen, size: col.size }); - // generator.baseColumnDataType = baseColumnDataType; - - return generator; - } - - // ARRAY for studio - if (col.columnType.match(/\[\w*]/g) !== null) { - // remove dimensions from type - const baseColumnType = col.columnType.replace(/\[\w*]/g, ''); - const baseColumn: Column = { - ...col, - }; - baseColumn.columnType = baseColumnType; - - const baseColumnGen = this.selectGeneratorForPostgresColumn(table, baseColumn) as AbstractGenerator; - if (baseColumnGen === undefined) { - throw new Error(`column with type ${col.baseColumn!.columnType} is not supported for now.`); - } - - let generator = new generatorsMap.GenerateArray[0]({ baseColumnGen }); - - for (let i = 0; i < col.typeParams.dimensions! - 1; i++) { - generator = new generatorsMap.GenerateArray[0]({ baseColumnGen: generator }); - } - - return generator; - } - - // INT ------------------------------------------------------------------------------------------------------------ - if ( - (col.columnType.includes('serial') - || col.columnType === 'integer' - || col.columnType === 'smallint' - || col.columnType.includes('bigint')) - && table.primaryKeys.includes(col.name) - ) { - const generator = new generatorsMap.GenerateIntPrimaryKey[0](); - - return generator; - } - - let minValue: number | bigint | undefined; - let maxValue: number | bigint | undefined; - if (col.columnType.includes('serial')) { - minValue = 1; - if (col.columnType === 'smallserial') { - // 2^16 / 2 - 1, 2 bytes - maxValue = 32767; - } else if (col.columnType === 'serial') { - // 2^32 / 2 - 1, 4 bytes - maxValue = 2147483647; - } else if (col.columnType === 'bigserial') { - // 2^64 / 2 - 1, 8 bytes - minValue = BigInt(1); - maxValue = BigInt('9223372036854775807'); - } - } else if (col.columnType.includes('int')) { - if (col.columnType === 'smallint') { - // 2^16 / 2 - 1, 2 bytes - minValue = -32768; - maxValue = 32767; - } else if (col.columnType === 'integer') { - // 2^32 / 2 - 1, 4 bytes - minValue = -2147483648; - maxValue = 2147483647; - } else if (col.columnType.includes('bigint')) { - if (col.dataType === 'bigint') { - // 2^64 / 2 - 1, 8 bytes - minValue = BigInt('-9223372036854775808'); - maxValue = BigInt('9223372036854775807'); - } else { - // if (col.dataType === 'number') - // if you’re expecting values above 2^31 but below 2^53 - minValue = -9007199254740991; - maxValue = 9007199254740991; - } - } - } - - if ( - col.columnType.includes('int') - && !col.columnType.includes('interval') - && !col.columnType.includes('point') - ) { - const generator = new generatorsMap.GenerateInt[0]({ - minValue, - maxValue, - }); - - return generator; - } - - if (col.columnType.includes('serial')) { - const generator = new generatorsMap.GenerateIntPrimaryKey[0](); - - generator.maxValue = maxValue; - - return generator; - } - - // NUMBER(real, double, decimal, numeric) - if ( - col.columnType.startsWith('real') - || col.columnType.startsWith('double precision') - || col.columnType.startsWith('decimal') - || col.columnType.startsWith('numeric') - ) { - if (col.typeParams.precision !== undefined) { - const precision = col.typeParams.precision; - const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; - - const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); - const generator = new generatorsMap.GenerateNumber[0]({ - minValue: -maxAbsoluteValue, - maxValue: maxAbsoluteValue, - precision: Math.pow(10, scale), - }); - return generator; - } - const generator = new generatorsMap.GenerateNumber[0](); - - return generator; - } - - // STRING - if ( - (col.columnType === 'text' - || col.columnType.startsWith('varchar') - || col.columnType.startsWith('char')) - && table.primaryKeys.includes(col.name) - ) { - const generator = new generatorsMap.GenerateUniqueString[0](); - - return generator; - } - - if ( - (col.columnType === 'text' - || col.columnType.startsWith('varchar') - || col.columnType.startsWith('char')) - && col.name.toLowerCase().includes('name') - ) { - const generator = new generatorsMap.GenerateFirstName[0](); - - return generator; - } - - if ( - (col.columnType === 'text' - || col.columnType.startsWith('varchar') - || col.columnType.startsWith('char')) - && col.name.toLowerCase().includes('email') - ) { - const generator = new generatorsMap.GenerateEmail[0](); - - return generator; - } - - if ( - col.columnType === 'text' - || col.columnType.startsWith('varchar') - || col.columnType.startsWith('char') - ) { - const generator = new generatorsMap.GenerateString[0](); - - return generator; - } - - // UUID - if (col.columnType === 'uuid') { - const generator = new generatorsMap.GenerateUUID[0](); - - return generator; - } - - // BOOLEAN - if (col.columnType === 'boolean') { - const generator = new generatorsMap.GenerateBoolean[0](); - - return generator; - } - - // DATE, TIME, TIMESTAMP - if (col.columnType.includes('date')) { - const generator = new generatorsMap.GenerateDate[0](); - - return generator; - } - - if (col.columnType === 'time') { - const generator = new generatorsMap.GenerateTime[0](); - - return generator; - } - - if (col.columnType.includes('timestamp')) { - const generator = new generatorsMap.GenerateTimestamp[0](); - - return generator; - } - - // JSON, JSONB - if (col.columnType === 'json' || col.columnType === 'jsonb') { - const generator = new generatorsMap.GenerateJson[0](); - - return generator; - } - - // if (col.columnType === "jsonb") { - // const generator = new GenerateJsonb({}); - // return generator; - // } - - // ENUM - if (col.enumValues !== undefined) { - const generator = new generatorsMap.GenerateEnum[0]({ - enumValues: col.enumValues, - }); - - return generator; - } - - // INTERVAL - if (col.columnType.startsWith('interval')) { - if (col.columnType === 'interval') { - const generator = new generatorsMap.GenerateInterval[0](); - - return generator; - } - - const fields = col.columnType.replace('interval ', '') as GenerateInterval['params']['fields']; - const generator = new generatorsMap.GenerateInterval[0]({ fields }); - - return generator; - } - - // POINT, LINE - if (col.columnType.includes('point')) { - const generator = new generatorsMap.GeneratePoint[0](); - - return generator; - } - - if (col.columnType.includes('line')) { - const generator = new generatorsMap.GenerateLine[0](); - - return generator; - } - - if (col.hasDefault && col.default !== undefined) { - const generator = new generatorsMap.GenerateDefault[0]({ - defaultValue: col.default, - }); - return generator; - } - - return; - }; - - const generator = pickGenerator(table, col); - if (generator !== undefined) { - generator.isUnique = col.isUnique; - generator.dataType = col.dataType; - generator.stringLength = col.typeParams.length; - } - - return generator; - }; - - selectGeneratorForMysqlColumn = ( - table: Table, - col: Column, - ) => { - const pickGenerator = (table: Table, col: Column) => { - // INT ------------------------------------------------------------------------------------------------------------ - if ( - (col.columnType.includes('serial') || col.columnType.includes('int')) - && table.primaryKeys.includes(col.name) - ) { - const generator = new generatorsMap.GenerateIntPrimaryKey[0](); - return generator; - } - - let minValue: number | bigint | undefined; - let maxValue: number | bigint | undefined; - if (col.columnType === 'serial') { - // 2^64 % 2 - 1, 8 bytes - minValue = BigInt(0); - maxValue = BigInt('9223372036854775807'); - } else if (col.columnType.includes('int')) { - if (col.columnType === 'tinyint') { - // 2^8 / 2 - 1, 1 bytes - minValue = -128; - maxValue = 127; - } else if (col.columnType === 'smallint') { - // 2^16 / 2 - 1, 2 bytes - minValue = -32768; - maxValue = 32767; - } else if (col.columnType === 'mediumint') { - // 2^16 / 2 - 1, 2 bytes - minValue = -8388608; - maxValue = 8388607; - } else if (col.columnType === 'int') { - // 2^32 / 2 - 1, 4 bytes - minValue = -2147483648; - maxValue = 2147483647; - } else if (col.columnType === 'bigint') { - // 2^64 / 2 - 1, 8 bytes - minValue = BigInt('-9223372036854775808'); - maxValue = BigInt('9223372036854775807'); - } - } - - if (col.columnType.includes('int')) { - const generator = new generatorsMap.GenerateInt[0]({ - minValue, - maxValue, - }); - return generator; - } - - if (col.columnType.includes('serial')) { - const generator = new generatorsMap.GenerateIntPrimaryKey[0](); - generator.maxValue = maxValue; - return generator; - } - - // NUMBER(real, double, decimal, float) - if ( - col.columnType.startsWith('real') - || col.columnType.startsWith('double') - || col.columnType.startsWith('decimal') - || col.columnType.startsWith('float') - || col.columnType.startsWith('numeric') - ) { - if (col.typeParams.precision !== undefined) { - const precision = col.typeParams.precision; - const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; - - const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); - const generator = new generatorsMap.GenerateNumber[0]({ - minValue: -maxAbsoluteValue, - maxValue: maxAbsoluteValue, - precision: Math.pow(10, scale), - }); - return generator; - } - - const generator = new generatorsMap.GenerateNumber[0](); - return generator; - } - - // STRING - if ( - (col.columnType === 'text' - || col.columnType === 'blob' - || col.columnType.startsWith('char') - || col.columnType.startsWith('varchar') - || col.columnType.startsWith('binary') - || col.columnType.startsWith('varbinary')) - && table.primaryKeys.includes(col.name) - ) { - const generator = new generatorsMap.GenerateUniqueString[0](); - return generator; - } - - if ( - (col.columnType === 'text' - || col.columnType === 'blob' - || col.columnType.startsWith('char') - || col.columnType.startsWith('varchar') - || col.columnType.startsWith('binary') - || col.columnType.startsWith('varbinary')) - && col.name.toLowerCase().includes('name') - ) { - const generator = new generatorsMap.GenerateFirstName[0](); - return generator; - } - - if ( - (col.columnType === 'text' - || col.columnType === 'blob' - || col.columnType.startsWith('char') - || col.columnType.startsWith('varchar') - || col.columnType.startsWith('binary') - || col.columnType.startsWith('varbinary')) - && col.name.toLowerCase().includes('email') - ) { - const generator = new generatorsMap.GenerateEmail[0](); - return generator; - } - - if ( - col.columnType === 'text' - || col.columnType === 'blob' - || col.columnType.startsWith('char') - || col.columnType.startsWith('varchar') - || col.columnType.startsWith('binary') - || col.columnType.startsWith('varbinary') - ) { - const generator = new generatorsMap.GenerateString[0](); - return generator; - } - - // BOOLEAN - if (col.columnType === 'boolean') { - const generator = new generatorsMap.GenerateBoolean[0](); - return generator; - } - - // DATE, TIME, TIMESTAMP, DATETIME, YEAR - if (col.columnType.includes('datetime')) { - const generator = new generatorsMap.GenerateDatetime[0](); - return generator; - } - - if (col.columnType.includes('date')) { - const generator = new generatorsMap.GenerateDate[0](); - return generator; - } - - if (col.columnType === 'time') { - const generator = new generatorsMap.GenerateTime[0](); - return generator; - } - - if (col.columnType.includes('timestamp')) { - const generator = new generatorsMap.GenerateTimestamp[0](); - return generator; - } - - if (col.columnType === 'year') { - const generator = new generatorsMap.GenerateYear[0](); - return generator; - } - - // JSON - if (col.columnType === 'json') { - const generator = new generatorsMap.GenerateJson[0](); - return generator; - } - - // ENUM - if (col.enumValues !== undefined) { - const generator = new generatorsMap.GenerateEnum[0]({ - enumValues: col.enumValues, - }); - return generator; - } - - if (col.hasDefault && col.default !== undefined) { - const generator = new generatorsMap.GenerateDefault[0]({ - defaultValue: col.default, - }); - return generator; - } - - return; - }; - - const generator = pickGenerator(table, col); - - return generator; - }; - - selectGeneratorForSqlite = ( - table: Table, - col: Column, - ) => { - const pickGenerator = (table: Table, col: Column) => { - // int section --------------------------------------------------------------------------------------- - if ( - (col.columnType === 'integer' || col.columnType === 'numeric') - && table.primaryKeys.includes(col.name) - ) { - const generator = new generatorsMap.GenerateIntPrimaryKey[0](); - return generator; - } - - if (col.columnType === 'integer' && col.dataType === 'boolean') { - const generator = new generatorsMap.GenerateBoolean[0](); - return generator; - } - - if ((col.columnType === 'integer' && col.dataType === 'object')) { - const generator = new generatorsMap.GenerateTimestamp[0](); - return generator; - } - - if ( - col.columnType === 'integer' - || (col.dataType === 'bigint' && col.columnType === 'blob') - ) { - const generator = new generatorsMap.GenerateInt[0](); - return generator; - } - - // number section ------------------------------------------------------------------------------------ - if (col.columnType.startsWith('real') || col.columnType.startsWith('numeric')) { - if (col.typeParams.precision !== undefined) { - const precision = col.typeParams.precision; - const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; - - const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); - const generator = new generatorsMap.GenerateNumber[0]({ - minValue: -maxAbsoluteValue, - maxValue: maxAbsoluteValue, - precision: Math.pow(10, scale), - }); - return generator; - } - - const generator = new generatorsMap.GenerateNumber[0](); - return generator; - } - - // string section ------------------------------------------------------------------------------------ - if ( - (col.columnType.startsWith('text') - || col.columnType.startsWith('numeric') - || col.columnType.startsWith('blob')) - && table.primaryKeys.includes(col.name) - ) { - const generator = new generatorsMap.GenerateUniqueString[0](); - return generator; - } - - if ( - (col.columnType.startsWith('text') - || col.columnType.startsWith('numeric') - || col.columnType.startsWith('blob')) - && col.name.toLowerCase().includes('name') - ) { - const generator = new generatorsMap.GenerateFirstName[0](); - return generator; - } - - if ( - (col.columnType.startsWith('text') - || col.columnType.startsWith('numeric') - || col.columnType.startsWith('blob')) - && col.name.toLowerCase().includes('email') - ) { - const generator = new generatorsMap.GenerateEmail[0](); - return generator; - } - - if ( - col.columnType.startsWith('text') - || col.columnType.startsWith('numeric') - || col.columnType.startsWith('blob') - || col.columnType.startsWith('blobbuffer') - ) { - const generator = new generatorsMap.GenerateString[0](); - return generator; - } - - if ( - (col.columnType.startsWith('text') && col.dataType === 'json') - || (col.columnType.startsWith('blob') && col.dataType === 'json') - ) { - const generator = new generatorsMap.GenerateJson[0](); - return generator; - } - - if (col.hasDefault && col.default !== undefined) { - const generator = new generatorsMap.GenerateDefault[0]({ - defaultValue: col.default, - }); - return generator; - } - - return; - }; - - const generator = pickGenerator(table, col); - - return generator; - }; - filterCyclicTables = (tablesGenerators: ReturnType) => { const filteredTablesGenerators = tablesGenerators.filter((tableGen) => tableGen.columnsPossibleGenerators.some((columnGen) => @@ -1127,13 +628,14 @@ export class SeedService { } tablesUniqueNotNullColumn[tableGen.tableName] = { uniqueNotNullColName }; - filteredTablesGenerators[idx]!.columnsPossibleGenerators = tableGen.columnsPossibleGenerators.filter((colGen) => - (colGen.isCyclic === true && colGen.wasDefinedBefore === true) || colGen.columnName === uniqueNotNullColName - ).map((colGen) => { - const newColGen = { ...colGen }; - newColGen.wasDefinedBefore = false; - return newColGen; - }); + filteredTablesGenerators[idx]!.columnsPossibleGenerators = tableGen.columnsPossibleGenerators.filter(( + colGen, + ) => (colGen.isCyclic === true && colGen.wasDefinedBefore === true) || colGen.columnName === uniqueNotNullColName) + .map((colGen) => { + const newColGen = { ...colGen }; + newColGen.wasDefinedBefore = false; + return newColGen; + }); } return { filteredTablesGenerators, tablesUniqueNotNullColumn }; @@ -1142,11 +644,8 @@ export class SeedService { generateTablesValues = async ( relations: (Relation & { isCyclic: boolean })[], tablesGenerators: ReturnType, - db?: - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, - schema?: { [key: string]: PgTable | MySqlTable | SQLiteTable }, + db?: DbType, + schema?: { [key: string]: TableType }, options?: { count?: number; seed?: number; @@ -1157,7 +656,7 @@ export class SeedService { tablesValues?: { tableName: string; rows: { - [columnName: string]: string | number | boolean | undefined; + [columnName: string]: GeneratedValueType; }[]; }[]; tablesUniqueNotNullColumn?: { [tableName: string]: { uniqueNotNullColName: string } }; @@ -1169,7 +668,7 @@ export class SeedService { let tableGenerators: Prettify; let tableValues: { - [columnName: string]: string | number | boolean | undefined; + [columnName: string]: GeneratedValueType; }[]; let tablesValues: { @@ -1203,10 +702,13 @@ export class SeedService { const columnRelations = filteredRelations.filter((rel) => rel.columns.includes(col.columnName)); pRNGSeed = (columnRelations.length !== 0 && columnRelations[0]!.columns.length >= 2) - ? (customSeed + generateHashFromString( - `${columnRelations[0]!.table}.${columnRelations[0]!.columns.join('_')}`, - )) - : (customSeed + generateHashFromString(`${table.tableName}.${col.columnName}`)); + ? (customSeed + + this.hashFromStringGenerator!.generate({ + input: `${columnRelations[0]!.table}.${columnRelations[0]!.columns.join('_')}`, + })) + : col.generator?.uniqueKey === undefined + ? (customSeed + this.hashFromStringGenerator!.generate({ input: `${table.tableName}.${col.columnName}` })) + : (customSeed + this.hashFromStringGenerator!.generate({ input: col.generator.uniqueKey })); tableGenerators[col.columnName] = { pRNGSeed, @@ -1227,7 +729,7 @@ export class SeedService { } for (let colIdx = 0; colIdx < rel.columns.length; colIdx++) { - let refColumnValues: (string | number | boolean)[]; + let refColumnValues: GeneratedValueType[]; let hasSelfRelation: boolean = false; let repeatedValuesCount: | number @@ -1241,9 +743,9 @@ export class SeedService { && tableGenerators[rel.columns[colIdx]!]?.wasRefined === false ) { const refColName = rel.refColumns[colIdx] as string; - pRNGSeed = generateHashFromString( - `${table.tableName}.${refColName}`, - ); + pRNGSeed = this.hashFromStringGenerator!.generate({ + input: `${table.tableName}.${refColName}`, + }); const refColumnGenerator: typeof tableGenerators = {}; refColumnGenerator[refColName] = { @@ -1256,11 +758,11 @@ export class SeedService { count: tableCount, preserveData: true, insertDataInDb: false, - }))!.map((rows) => rows[refColName]) as (string | number | boolean)[]; + }))!.map((rows) => rows[refColName]); hasSelfRelation = true; genObj = new generatorsMap.GenerateSelfRelationsValuesFromArray[0]({ - values: refColumnValues, + values: refColumnValues as (string | number | bigint)[], }); genObj = this.selectVersionOfGenerator(genObj); // genObj = new GenerateSelfRelationsValuesFromArray({ @@ -1284,7 +786,9 @@ export class SeedService { } // TODO: revise maybe need to select version of generator here too - genObj = new generatorsMap.GenerateValuesFromArray[0]({ values: refColumnValues }); + genObj = new generatorsMap.GenerateValuesFromArray[0]({ + values: refColumnValues as (string | number | bigint)[], + }); genObj.notNull = tableGenerators[rel.columns[colIdx]!]!.notNull; genObj.weightedCountSeed = weightedCountSeed; genObj.maxRepeatedValuesCount = repeatedValuesCount; @@ -1364,11 +868,8 @@ export class SeedService { batchSize = 10000, }: { tableGenerators: Prettify; - db?: - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase; - schema?: { [key: string]: PgTable | MySqlTable | SQLiteTable }; + db?: DbType; + schema?: { [key: string]: TableType }; tableName?: string; count?: number; preserveData?: boolean; @@ -1389,14 +890,17 @@ export class SeedService { const columnsGenerators: { [columnName: string]: AbstractGenerator; } = {}; - let generatedValues: { [columnName: string]: number | string | boolean | undefined }[] = []; + let generatedValues: { [columnName: string]: GeneratedValueType }[] = []; let columnsNumber = 0; let override = false; for (const columnName of Object.keys(tableGenerators)) { columnsNumber += 1; columnGenerator = tableGenerators[columnName]!; + // postgres identity columns override = tableGenerators[columnName]?.generatedIdentityType === 'always' ? true : override; + // mssql identity columns + override = tableGenerators[columnName]?.identity === true ? true : override; columnsGenerators[columnName] = columnGenerator.generator!; columnsGenerators[columnName]!.init({ @@ -1415,16 +919,18 @@ export class SeedService { // } } let maxParametersNumber: number; - if (is(db, PgDatabase)) { + if (is(db, PgDatabase)) { // @ts-ignore maxParametersNumber = db.constructor[entityKind] === 'PgliteDatabase' ? this.postgresPgLiteMaxParametersNumber : this.postgresMaxParametersNumber; - } else if (is(db, MySqlDatabase)) { + } else if (is(db, MySqlDatabase)) { maxParametersNumber = this.mysqlMaxParametersNumber; - } else { - // is(db, BaseSQLiteDatabase) + } else if (is(db, BaseSQLiteDatabase)) { maxParametersNumber = this.sqliteMaxParametersNumber; + } else { + // is(db, MsSqlDatabase) + maxParametersNumber = this.mssqlMaxParametersNumber; } const maxBatchSize = Math.floor(maxParametersNumber / columnsNumber); batchSize = batchSize > maxBatchSize ? maxBatchSize : batchSize; @@ -1436,7 +942,7 @@ export class SeedService { throw new Error('db or schema or tableName is undefined.'); } - let row: { [columnName: string]: string | number | boolean }, + let row: { [columnName: string]: string | Buffer | bigint | number | boolean }, generatedValue, i: number; @@ -1445,11 +951,7 @@ export class SeedService { generatedValues.push(row); for (const columnName of Object.keys(columnsGenerators)) { - // generatedValue = columnsGenerators[columnName].next().value as - // | string - // | number - // | boolean; - generatedValue = columnsGenerators[columnName]!.generate({ i }) as + generatedValue = columnsGenerators[columnName]!.generate({ i, columnName }) as | string | number | boolean; @@ -1464,12 +966,9 @@ export class SeedService { if (insertDataInDb === true) { await this.insertInDb({ generatedValues, - db: db as - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, + db: db as DbType, schema: schema as { - [key: string]: PgTable | MySqlTable | SQLiteTable; + [key: string]: TableType; }, tableName: tableName as string, override, @@ -1477,12 +976,9 @@ export class SeedService { } else if (updateDataInDb === true) { await this.updateDb({ generatedValues, - db: db as - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, + db: db as DbType, schema: schema as { - [key: string]: PgTable | MySqlTable | SQLiteTable; + [key: string]: TableType; }, tableName: tableName as string, uniqueNotNullColName: uniqueNotNullColName as string, @@ -1499,12 +995,9 @@ export class SeedService { batchSize * batchCount, batchSize * (batchCount + 1), ), - db: db as - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, + db: db as DbType, schema: schema as { - [key: string]: PgTable | MySqlTable | SQLiteTable; + [key: string]: TableType; }, tableName: tableName as string, override, @@ -1515,12 +1008,9 @@ export class SeedService { batchSize * batchCount, batchSize * (batchCount + 1), ), - db: db as - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, + db: db as DbType, schema: schema as { - [key: string]: PgTable | MySqlTable | SQLiteTable; + [key: string]: TableType; }, tableName: tableName as string, uniqueNotNullColName: uniqueNotNullColName as string, @@ -1541,32 +1031,56 @@ export class SeedService { override, }: { generatedValues: { - [columnName: string]: number | string | boolean | undefined; + [columnName: string]: GeneratedValueType; }[]; - db: - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase; + db: DbType; schema: { - [key: string]: PgTable | MySqlTable | SQLiteTable; + [key: string]: TableType; }; tableName: string; override: boolean; }) => { - if (is(db, PgDatabase)) { + if (is(db, PgDatabase)) { const query = db.insert((schema as { [key: string]: PgTable })[tableName]!); if (override === true) { return await query.overridingSystemValue().values(generatedValues); } await query.values(generatedValues); - } else if (is(db, MySqlDatabase)) { + } else if (is(db, MySqlDatabase)) { await db .insert((schema as { [key: string]: MySqlTable })[tableName]!) .values(generatedValues); - } else if (is(db, BaseSQLiteDatabase)) { + } else if (is(db, BaseSQLiteDatabase)) { await db .insert((schema as { [key: string]: SQLiteTable })[tableName]!) .values(generatedValues); + } else if (is(db, MsSqlDatabase)) { + let schemaDbName: string | undefined; + let tableDbName: string | undefined; + if (override === true) { + const tableConfig = getTableConfig(schema[tableName]! as MsSqlTable); + schemaDbName = tableConfig.schema ?? 'dbo'; + tableDbName = tableConfig.name; + await db.execute(sql.raw(`SET IDENTITY_INSERT [${schemaDbName}].[${tableDbName}] ON;`)); + } + + await db + .insert((schema as { [key: string]: MsSqlTable })[tableName]!) + .values(generatedValues); + + if (override === true) { + await db.execute(sql.raw(`SET IDENTITY_INSERT [${schemaDbName}].[${tableDbName}] OFF;`)); + } + } else if (is(db, CockroachDatabase)) { + const query = db + .insert((schema as { [key: string]: CockroachTable })[tableName]!) + .values(generatedValues); + await query; + } else if (is(db, SingleStoreDatabase)) { + const query = db + .insert((schema as { [key: string]: SingleStoreTable })[tableName]!) + .values(generatedValues); + await query; } }; @@ -1578,33 +1092,49 @@ export class SeedService { uniqueNotNullColName, }: { generatedValues: { - [columnName: string]: number | string | boolean | undefined; + [columnName: string]: GeneratedValueType; }[]; - db: - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase; + db: DbType; schema: { - [key: string]: PgTable | MySqlTable | SQLiteTable; + [key: string]: TableType; }; tableName: string; uniqueNotNullColName: string; }) => { - if (is(db, PgDatabase)) { + let values = generatedValues[0]!; + const uniqueNotNullColValue = values[uniqueNotNullColName]; + values = Object.fromEntries(Object.entries(values).filter(([colName]) => colName !== uniqueNotNullColName)); + + if (is(db, PgDatabase)) { const table = (schema as { [key: string]: PgTableWithColumns })[tableName]!; const uniqueNotNullCol = table[uniqueNotNullColName]; - await db.update(table).set(generatedValues[0]!).where( - eq(uniqueNotNullCol, generatedValues[0]![uniqueNotNullColName]), + await db.update(table).set(values).where( + eq(uniqueNotNullCol, uniqueNotNullColValue), ); - } else if (is(db, MySqlDatabase)) { + } else if (is(db, MySqlDatabase)) { const table = (schema as { [key: string]: MySqlTableWithColumns })[tableName]!; - await db.update(table).set(generatedValues[0]!).where( - eq(table[uniqueNotNullColName], generatedValues[0]![uniqueNotNullColName]), + await db.update(table).set(values).where( + eq(table[uniqueNotNullColName], uniqueNotNullColValue), ); - } else if (is(db, BaseSQLiteDatabase)) { + } else if (is(db, BaseSQLiteDatabase)) { const table = (schema as { [key: string]: SQLiteTableWithColumns })[tableName]!; - await db.update(table).set(generatedValues[0]!).where( - eq(table[uniqueNotNullColName], generatedValues[0]![uniqueNotNullColName]), + await db.update(table).set(values).where( + eq(table[uniqueNotNullColName], uniqueNotNullColValue), + ); + } else if (is(db, MsSqlDatabase)) { + const table = (schema as { [key: string]: MsSqlTableWithColumns })[tableName]!; + await db.update(table).set(values).where( + eq(table[uniqueNotNullColName], uniqueNotNullColValue), + ); + } else if (is(db, CockroachDatabase)) { + const table = (schema as { [key: string]: CockroachTableWithColumns })[tableName]!; + await db.update(table).set(values).where( + eq(table[uniqueNotNullColName], uniqueNotNullColValue), + ); + } else if (is(db, SingleStoreDatabase)) { + const table = (schema as { [key: string]: SingleStoreTableWithColumns })[tableName]!; + await db.update(table).set(values).where( + eq(table[uniqueNotNullColName], uniqueNotNullColValue), ); } }; diff --git a/drizzle-seed/src/cockroach-core/index.ts b/drizzle-seed/src/cockroach-core/index.ts new file mode 100644 index 0000000000..fae0ca2151 --- /dev/null +++ b/drizzle-seed/src/cockroach-core/index.ts @@ -0,0 +1,179 @@ +import { is, sql } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { CockroachArray, CockroachDatabase, CockroachSchema } from 'drizzle-orm/cockroach-core'; +import { CockroachTable, getTableConfig } from 'drizzle-orm/cockroach-core'; +import { getSchemaInfo } from '../common.ts'; +import { SeedService } from '../SeedService.ts'; +import type { RefinementsType } from '../types/seedService.ts'; +import type { Column, TableConfigT } from '../types/tables.ts'; + +// Cockroach----------------------------------------------------------------------------------------------------------- +export const resetCockroach = async ( + db: CockroachDatabase, + cockroachTables: { [key: string]: CockroachTable }, +) => { + const tablesToTruncate = Object.entries(cockroachTables).map(([_, table]) => { + const config = getTableConfig(table); + config.schema = config.schema === undefined ? 'public' : config.schema; + + return `"${config.schema}"."${config.name}"`; + }); + + await db.execute(sql.raw(`truncate ${tablesToTruncate.join(',')} cascade;`)); +}; + +export const filterCockroachSchema = (schema: { + [key: string]: + | CockroachTable + | CockroachSchema + | Relations + | any; +}) => { + const cockroachSchema = Object.fromEntries( + Object.entries(schema).filter((keyValue): keyValue is [string, CockroachTable | Relations] => + is(keyValue[1], CockroachTable) || is(keyValue[1], Relations) + ), + ); + + const cockroachTables = Object.fromEntries( + Object.entries(schema).filter((keyValue): keyValue is [string, CockroachTable] => is(keyValue[1], CockroachTable)), + ); + + return { cockroachSchema, cockroachTables }; +}; + +export const seedCockroach = async ( + db: CockroachDatabase, + schema: { + [key: string]: + | CockroachTable + | CockroachSchema + | Relations + | any; + }, + options: { count?: number; seed?: number; version?: number } = {}, + refinements?: RefinementsType, +) => { + const seedService = new SeedService(); + + const { cockroachSchema, cockroachTables } = filterCockroachSchema(schema); + const { tables, relations } = getSchemaInfo(cockroachSchema, cockroachTables, mapCockroachColumns); + + const generatedTablesGenerators = seedService.generatePossibleGenerators( + 'cockroach', + tables, + relations, + refinements, + options, + ); + + const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); + + const tablesValues = await seedService.generateTablesValues( + relations, + generatedTablesGenerators, + db, + cockroachTables, + { ...options, preserveCyclicTablesData }, + ); + + const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( + generatedTablesGenerators, + ); + const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; + + await seedService.generateTablesValues( + relations, + filteredTablesGenerators, + db, + cockroachTables, + { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, + ); +}; + +export const mapCockroachColumns = ( + tableConfig: TableConfigT, + dbToTsColumnNamesMap: { [key: string]: string }, +): Column[] => { + const getAllBaseColumns = ( + baseColumn: CockroachArray['baseColumn'] & { baseColumn?: CockroachArray['baseColumn'] }, + ): Column['baseColumn'] => { + const baseColumnResult: Column['baseColumn'] = { + name: baseColumn.name, + columnType: baseColumn.getSQLType(), + typeParams: getTypeParams(baseColumn.getSQLType()), + dataType: baseColumn.dataType.split(' ')[0]!, + size: (baseColumn as CockroachArray).length, + hasDefault: baseColumn.hasDefault, + enumValues: baseColumn.enumValues, + default: baseColumn.default, + isUnique: baseColumn.isUnique, + notNull: baseColumn.notNull, + primary: baseColumn.primary, + baseColumn: baseColumn.baseColumn === undefined ? undefined : getAllBaseColumns(baseColumn.baseColumn), + }; + + return baseColumnResult; + }; + + const getTypeParams = (sqlType: string) => { + // get type params + const typeParams: Column['typeParams'] = {}; + + // handle dimensions + if (sqlType.includes('[')) { + const match = sqlType.match(/\[\w*]/g); + if (match) { + typeParams['dimensions'] = match.length; + } + } + + if ( + sqlType.startsWith('numeric') + || sqlType.startsWith('decimal') + || sqlType.startsWith('double precision') + || sqlType.startsWith('real') + ) { + const match = sqlType.match(/\((\d+), *(\d+)\)/); + if (match) { + typeParams['precision'] = Number(match[1]); + typeParams['scale'] = Number(match[2]); + } + } else if ( + sqlType.startsWith('varchar') + || sqlType.startsWith('char') + || sqlType.startsWith('bit') + || sqlType.startsWith('vector') + || sqlType.startsWith('time') + || sqlType.startsWith('timestamp') + || sqlType.startsWith('interval') + ) { + const match = sqlType.match(/\((\d+)\)/); + if (match) { + typeParams['length'] = Number(match[1]); + } + } + + return typeParams; + }; + + const mappedColumns = tableConfig.columns.map((column) => ({ + name: dbToTsColumnNamesMap[column.name] as string, + columnType: column.getSQLType(), + typeParams: getTypeParams(column.getSQLType()), + dataType: column.dataType.split(' ')[0]!, + size: (column as CockroachArray).length, + hasDefault: column.hasDefault, + default: column.default, + enumValues: column.enumValues, + isUnique: column.isUnique, + notNull: column.notNull, + primary: column.primary, + generatedIdentityType: column.generatedIdentity?.type, + baseColumn: ((column as CockroachArray).baseColumn === undefined) + ? undefined + : getAllBaseColumns((column as CockroachArray).baseColumn), + })); + + return mappedColumns; +}; diff --git a/drizzle-seed/src/cockroach-core/selectGensForColumn.ts b/drizzle-seed/src/cockroach-core/selectGensForColumn.ts new file mode 100644 index 0000000000..71c7a6544d --- /dev/null +++ b/drizzle-seed/src/cockroach-core/selectGensForColumn.ts @@ -0,0 +1,294 @@ +import { generatorsMap } from '../generators/GeneratorFuncs.ts'; +import type { AbstractGenerator, GenerateInterval } from '../generators/Generators.ts'; +import type { Column, Table } from '../types/tables.ts'; + +// TODO: revise serial part generators +export const selectGeneratorForCockroachColumn = ( + table: Table, + col: Column, +) => { + const pickGenerator = (table: Table, col: Column) => { + // ARRAY + if (col.columnType.match(/\[\w*]/g) !== null && col.baseColumn !== undefined) { + const baseColumnGen = selectGeneratorForCockroachColumn( + table, + col.baseColumn!, + ) as AbstractGenerator; + if (baseColumnGen === undefined) { + throw new Error(`column with type ${col.baseColumn!.columnType} is not supported for now.`); + } + + // const getBaseColumnDataType = (baseColumn: Column) => { + // if (baseColumn.baseColumn !== undefined) { + // return getBaseColumnDataType(baseColumn.baseColumn); + // } + + // return baseColumn.dataType; + // }; + // const baseColumnDataType = getBaseColumnDataType(col.baseColumn); + + const generator = new generatorsMap.GenerateArray[0]({ baseColumnGen, size: col.size }); + // generator.baseColumnDataType = baseColumnDataType; + + return generator; + } + + // ARRAY for studio + if (col.columnType.match(/\[\w*]/g) !== null) { + // remove dimensions from type + const baseColumnType = col.columnType.replace(/\[\w*]/g, ''); + const baseColumn: Column = { + ...col, + }; + baseColumn.columnType = baseColumnType; + + const baseColumnGen = selectGeneratorForCockroachColumn(table, baseColumn) as AbstractGenerator; + if (baseColumnGen === undefined) { + throw new Error(`column with type ${col.baseColumn!.columnType} is not supported for now.`); + } + + let generator = new generatorsMap.GenerateArray[0]({ baseColumnGen }); + + for (let i = 0; i < col.typeParams.dimensions! - 1; i++) { + generator = new generatorsMap.GenerateArray[0]({ baseColumnGen: generator }); + } + + return generator; + } + + // INT ------------------------------------------------------------------------------------------------------------ + if ( + ( + col.columnType === 'int2' + || col.columnType === 'int4' + || col.columnType.includes('int8') + ) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + + return generator; + } + + let minValue: number | bigint | undefined; + let maxValue: number | bigint | undefined; + if (col.columnType.startsWith('int')) { + if (col.columnType === 'int2') { + // 2^16 / 2 - 1, 2 bytes + minValue = -32768; + maxValue = 32767; + } else if (col.columnType === 'int4') { + // 2^32 / 2 - 1, 4 bytes + minValue = -2147483648; + maxValue = 2147483647; + } else if (col.columnType.includes('int8')) { + if (col.dataType === 'bigint') { + // 2^64 / 2 - 1, 8 bytes + minValue = BigInt('-9223372036854775808'); + maxValue = BigInt('9223372036854775807'); + } else { + // if (col.dataType === 'number') + // if you’re expecting values above 2^31 but below 2^53 + minValue = -9007199254740991; + maxValue = 9007199254740991; + } + } + } + + if ( + col.columnType.startsWith('int') + && !col.columnType.includes('interval') + ) { + const generator = new generatorsMap.GenerateInt[0]({ + minValue, + maxValue, + }); + + return generator; + } + + // NUMBER(real, double, decimal, numeric) + if ( + col.columnType.startsWith('real') + || col.columnType.startsWith('float') + || col.columnType.startsWith('decimal') + || col.columnType.startsWith('numeric') + ) { + if (col.typeParams.precision !== undefined) { + const precision = col.typeParams.precision; + const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; + + const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); + const generator = new generatorsMap.GenerateNumber[0]({ + minValue: -maxAbsoluteValue, + maxValue: maxAbsoluteValue, + precision: Math.pow(10, scale), + }); + return generator; + } + const generator = new generatorsMap.GenerateNumber[0](); + + return generator; + } + + // STRING + if ( + (col.columnType === 'string' + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('char')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateUniqueString[0](); + + return generator; + } + + if ( + (col.columnType === 'string' + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('char')) + && col.name.toLowerCase().includes('name') + ) { + const generator = new generatorsMap.GenerateFirstName[0](); + + return generator; + } + + if ( + (col.columnType === 'string' + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('char')) + && col.name.toLowerCase().includes('email') + ) { + const generator = new generatorsMap.GenerateEmail[0](); + + return generator; + } + + if ( + col.columnType === 'string' + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('char') + ) { + const generator = new generatorsMap.GenerateString[0](); + + return generator; + } + + // BIT + if (col.columnType.startsWith('bit')) { + const generator = new generatorsMap.GenerateBitString[0](); + + return generator; + } + + // INET + if (col.columnType === 'inet') { + const generator = new generatorsMap.GenerateInet[0](); + + return generator; + } + + // geometry(point) + if (col.columnType.startsWith('geometry')) { + const generator = new generatorsMap.GenerateGeometry[0](); + + return generator; + } + + // vector + if (col.columnType.startsWith('vector')) { + const generator = new generatorsMap.GenerateVector[0](); + + return generator; + } + + // UUID + if (col.columnType === 'uuid') { + const generator = new generatorsMap.GenerateUUID[0](); + + return generator; + } + + // BOOL + if (col.columnType === 'bool') { + const generator = new generatorsMap.GenerateBoolean[0](); + + return generator; + } + + // DATE, TIME, TIMESTAMP + if (col.columnType.includes('date')) { + const generator = new generatorsMap.GenerateDate[0](); + + return generator; + } + + if (col.columnType === 'time') { + const generator = new generatorsMap.GenerateTime[0](); + + return generator; + } + + if (col.columnType.includes('timestamp')) { + const generator = new generatorsMap.GenerateTimestamp[0](); + + return generator; + } + + // JSON, JSONB + if (col.columnType === 'json' || col.columnType === 'jsonb') { + const generator = new generatorsMap.GenerateJson[0](); + + return generator; + } + + // if (col.columnType === "jsonb") { + // const generator = new GenerateJsonb({}); + // return generator; + // } + + // ENUM + if (col.enumValues !== undefined) { + const generator = new generatorsMap.GenerateEnum[0]({ + enumValues: col.enumValues, + }); + + return generator; + } + + // INTERVAL + if (col.columnType.startsWith('interval')) { + if (col.columnType === 'interval') { + const generator = new generatorsMap.GenerateInterval[0](); + + return generator; + } + + const fields = col.columnType.replace('interval ', '') as GenerateInterval['params']['fields']; + const generator = new generatorsMap.GenerateInterval[0]({ fields }); + + return generator; + } + + if (col.hasDefault && col.default !== undefined) { + const generator = new generatorsMap.GenerateDefault[0]({ + defaultValue: col.default, + }); + return generator; + } + + return; + }; + + const generator = pickGenerator(table, col); + // set params for base column + if (generator !== undefined) { + generator.isUnique = col.isUnique; + generator.dataType = col.dataType; + // generator.stringLength = col.typeParams.length; + generator.typeParams = col.typeParams; + } + + return generator; +}; diff --git a/drizzle-seed/src/common.ts b/drizzle-seed/src/common.ts new file mode 100644 index 0000000000..2ea4d94d21 --- /dev/null +++ b/drizzle-seed/src/common.ts @@ -0,0 +1,225 @@ +import { Column as DrizzleColumn, getColumnTable, getTableName, is } from 'drizzle-orm'; +import { + createTableRelationsHelpers, + extractTablesRelationalConfig, + One, + type Relations, +} from 'drizzle-orm/_relations'; +import { CockroachTable, getTableConfig as getCockroachTableConfig } from 'drizzle-orm/cockroach-core'; +import { getTableConfig as getMsSqlTableConfig, MsSqlTable } from 'drizzle-orm/mssql-core'; +import { getTableConfig as getMySqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core'; +import { getTableConfig as getPgTableConfig, PgTable } from 'drizzle-orm/pg-core'; +import { getTableConfig as getSingleStoreTableConfig } from 'drizzle-orm/singlestore-core'; +import { getTableConfig as getSQLiteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core'; +import type { Column, DrizzleTable, RelationWithReferences, Table, TableConfigT } from './types/tables.ts'; +import { isRelationCyclic } from './utils.ts'; + +const getTableConfig = ( + table: DrizzleTable, +): TableConfigT => { + if (is(table, PgTable)) return getPgTableConfig(table); + else if (is(table, MySqlTable)) return getMySqlTableConfig(table); + else if (is(table, SQLiteTable)) return getSQLiteTableConfig(table); + else if (is(table, CockroachTable)) return getCockroachTableConfig(table); + else if (is(table, MsSqlTable)) return getMsSqlTableConfig(table); + else return getSingleStoreTableConfig(table); // if (is(table, SingleStoreTable)) +}; + +const transformFromDrizzleRelation = ( + schema: Record, + getDbToTsColumnNamesMap: (table: DrizzleTable) => { + [dbColName: string]: string; + }, + tableRelations: { + [tableName: string]: RelationWithReferences[]; + }, +) => { + const schemaConfig = extractTablesRelationalConfig(schema, createTableRelationsHelpers); + const relations: RelationWithReferences[] = []; + for (const table of Object.values(schemaConfig.tables)) { + if (table.relations === undefined) continue; + + for (const drizzleRel of Object.values(table.relations)) { + if (!is(drizzleRel, One)) continue; + + const tableConfig = getTableConfig(drizzleRel.sourceTable as DrizzleTable); + const tableDbSchema = tableConfig.schema ?? 'public'; + const tableDbName = tableConfig.name; + const tableTsName = schemaConfig.tableNamesMap[`${tableDbSchema}.${tableDbName}`] ?? tableDbName; + + const dbToTsColumnNamesMap = getDbToTsColumnNamesMap(drizzleRel.sourceTable); + const columns = drizzleRel.config?.fields.map((field) => dbToTsColumnNamesMap[field.name] as string) + ?? []; + + const refTableConfig = getTableConfig(drizzleRel.referencedTable as DrizzleTable); + const refTableDbSchema = refTableConfig.schema ?? 'public'; + const refTableDbName = refTableConfig.name; + const refTableTsName = schemaConfig.tableNamesMap[`${refTableDbSchema}.${refTableDbName}`] + ?? refTableDbName; + + const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap(drizzleRel.referencedTable); + const refColumns = drizzleRel.config?.references.map((ref) => dbToTsColumnNamesMapForRefTable[ref.name] as string) + ?? []; + + if (tableRelations[refTableTsName] === undefined) { + tableRelations[refTableTsName] = []; + } + + const relation: RelationWithReferences = { + table: tableTsName, + columns, + refTable: refTableTsName, + refColumns, + refTableRels: tableRelations[refTableTsName], + type: 'one', + }; + + // do not add duplicate relation + if ( + tableRelations[tableTsName]?.some((rel) => + rel.table === relation.table + && rel.refTable === relation.refTable + ) + ) { + console.warn( + `You are providing a one-to-many relation between the '${relation.refTable}' and '${relation.table}' tables,\n` + + `while the '${relation.table}' table object already has foreign key constraint in the schema referencing '${relation.refTable}' table.\n` + + `In this case, the foreign key constraint will be used.\n`, + ); + continue; + } + + relations.push(relation); + tableRelations[tableTsName]!.push(relation); + } + } + return relations; +}; + +export const getSchemaInfo = ( + drizzleTablesAndRelations: { [key: string]: DrizzleTable | Relations }, + drizzleTables: { [key: string]: DrizzleTable }, + mapColumns: ( + tableConfig: TableConfigT, + dbToTsColumnNamesMap: { [key: string]: string }, + ) => Column[], +) => { + let tableConfig: ReturnType; + let dbToTsColumnNamesMap: { [key: string]: string }; + const dbToTsTableNamesMap: { [key: string]: string } = Object.fromEntries( + Object.entries(drizzleTables).map(([key, value]) => [getTableName(value), key]), + ); + + const tables: Table[] = []; + const relations: RelationWithReferences[] = []; + const dbToTsColumnNamesMapGlobal: { + [tableName: string]: { [dbColumnName: string]: string }; + } = {}; + const tableRelations: { [tableName: string]: RelationWithReferences[] } = {}; + + const getDbToTsColumnNamesMap = (table: DrizzleTable) => { + let dbToTsColumnNamesMap: { [dbColName: string]: string } = {}; + + const tableName = getTableName(table); + if (Object.hasOwn(dbToTsColumnNamesMapGlobal, tableName)) { + dbToTsColumnNamesMap = dbToTsColumnNamesMapGlobal[tableName]!; + return dbToTsColumnNamesMap; + } + + const tableConfig = getTableConfig(table); + for (const [tsCol, col] of Object.entries(getColumnTable(tableConfig.columns[0]!))) { + if (is(col, DrizzleColumn)) dbToTsColumnNamesMap[col.name] = tsCol; + } + dbToTsColumnNamesMapGlobal[tableName] = dbToTsColumnNamesMap; + + return dbToTsColumnNamesMap; + }; + + for (const table of Object.values(drizzleTables)) { + tableConfig = getTableConfig(table); + + dbToTsColumnNamesMap = getDbToTsColumnNamesMap(table); + + // might be empty list + const newRelations = tableConfig.foreignKeys === undefined ? [] : tableConfig.foreignKeys.map((fk) => { + const table = dbToTsTableNamesMap[tableConfig.name] as string; + const refTable = dbToTsTableNamesMap[getTableName(fk.reference().foreignTable)] as string; + + const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap( + fk.reference().foreignTable, + ); + + if (tableRelations[refTable] === undefined) { + tableRelations[refTable] = []; + } + return { + table, + columns: fk + .reference() + .columns.map((col) => dbToTsColumnNamesMap[col.name] as string), + refTable, + refColumns: fk + .reference() + .foreignColumns.map( + (fCol) => dbToTsColumnNamesMapForRefTable[fCol.name] as string, + ), + refTableRels: tableRelations[refTable], + }; + }); + + relations.push( + ...newRelations, + ); + + if (tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] === undefined) { + tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] = []; + } + tableRelations[dbToTsTableNamesMap[tableConfig.name] as string]!.push(...newRelations); + + const stringsSet: string[] = []; + const uniqueConstraints: string[][] = []; + for (const uniCon of tableConfig.uniqueConstraints) { + const uniConColumns = uniCon.columns.map((col) => dbToTsColumnNamesMap[col.name] as string); + const uniConColumnsStr = JSON.stringify(uniConColumns); + + if (!stringsSet.includes(uniConColumnsStr)) { + stringsSet.push(uniConColumnsStr); + uniqueConstraints.push(uniConColumns); + } + } + + const mappedTable: Table = { + name: dbToTsTableNamesMap[tableConfig.name] as string, + uniqueConstraints, + primaryKeys: tableConfig.columns + .filter((column) => column.primary) + .map((column) => dbToTsColumnNamesMap[column.name] as string), + columns: mapColumns(tableConfig, dbToTsColumnNamesMap), + }; + tables.push(mappedTable); + } + + const transformedDrizzleRelations = transformFromDrizzleRelation( + drizzleTablesAndRelations, + getDbToTsColumnNamesMap, + tableRelations, + ); + relations.push( + ...transformedDrizzleRelations, + ); + + const isCyclicRelations = relations.map( + (relI) => { + // if (relations.some((relj) => relI.table === relj.refTable && relI.refTable === relj.table)) { + const tableRel = tableRelations[relI.table]!.find((relJ) => relJ.refTable === relI.refTable)!; + if (isRelationCyclic(relI)) { + tableRel['isCyclic'] = true; + return { ...relI, isCyclic: true }; + } + tableRel['isCyclic'] = false; + return { ...relI, isCyclic: false }; + }, + ); + + return { tables, relations: isCyclicRelations, tableRelations }; +}; diff --git a/drizzle-seed/src/services/GeneratorFuncs.ts b/drizzle-seed/src/generators/GeneratorFuncs.ts similarity index 83% rename from drizzle-seed/src/services/GeneratorFuncs.ts rename to drizzle-seed/src/generators/GeneratorFuncs.ts index 10d0d10f75..ea65dc5a0e 100644 --- a/drizzle-seed/src/services/GeneratorFuncs.ts +++ b/drizzle-seed/src/generators/GeneratorFuncs.ts @@ -1,9 +1,11 @@ import type { AbstractGenerator } from './Generators.ts'; import { GenerateArray, + GenerateBitString, GenerateBoolean, GenerateCity, GenerateCompanyName, + GenerateCompositeUniqueKey, GenerateCountry, GenerateDate, GenerateDatetime, @@ -12,6 +14,9 @@ import { GenerateEnum, GenerateFirstName, GenerateFullName, + GenerateGeometry, + GenerateHashFromString, + GenerateInet, GenerateInt, GenerateInterval, GenerateIntPrimaryKey, @@ -30,11 +35,14 @@ import { GenerateString, GenerateTime, GenerateTimestamp, + GenerateUniqueBitString, GenerateUniqueCity, GenerateUniqueCompanyName, GenerateUniqueCountry, GenerateUniqueFirstName, GenerateUniqueFullName, + GenerateUniqueGeometry, + GenerateUniqueInet, GenerateUniqueInt, GenerateUniqueInterval, GenerateUniqueLastName, @@ -44,14 +52,17 @@ import { GenerateUniquePostcode, GenerateUniqueStreetAddress, GenerateUniqueString, + GenerateUniqueVector, GenerateUUID, GenerateValuesFromArray, + GenerateVector, GenerateWeightedCount, GenerateYear, HollowGenerator, WeightedRandomGenerator, } from './Generators.ts'; import { GenerateStringV2, GenerateUniqueIntervalV2, GenerateUniqueStringV2 } from './versioning/v2.ts'; +import { GenerateHashFromStringV3 } from './versioning/v3.ts'; function createGenerator, T>( generatorConstructor: new(params?: T) => GeneratorType, @@ -754,6 +765,134 @@ export const generatorsFuncs = { * ``` */ weightedRandom: createGenerator(WeightedRandomGenerator), + + /** + * generates bit strings based on specified parameters + * + * @param isUnique - property that controls if generated values gonna be unique or not. + * @param arraySize - number of elements in each one-dimensional array (If specified, arrays will be generated). + * @param dimensions - desired length of each bit string (e.g., `dimensions = 3` produces values like `'010'`). + * + * Defaults to the value of the database column bit-length. + * + * @example + * ```ts + * await seed(db, { bitStringTable: schema.bitStringTable }).refine((funcs) => ({ + * bitStringTable: { + * count, + * columns: { + * bit: funcs.bitString({ + * dimensions: 12, + * isUnique: true + * }), + * }, + * }, + * })); + * ``` + */ + bitString: createGenerator(GenerateBitString), + + /** + * generates ip addresses based on specified parameters + * + * @param isUnique - property that controls if generated values gonna be unique or not. + * @param arraySize - number of elements in each one-dimensional array (If specified, arrays will be generated). + * @param ipAddress - type of IP address to generate — either "ipv4" or "ipv6". + * + * Defaults to `'ipv4'`. + * @param includeCidr - determines whether generated IPs include a CIDR suffix. + * + * Defaults to `true`. + * + * @example + * ```ts + * await seed(db, { inetTable: schema.inetTable }).refine((funcs) => ({ + * inetTable: { + * count, + * columns: { + * inet: funcs.inet({ + * ipAddress: 'ipv4', + * includeCidr: true, + * isUnique: true + * }), + * }, + * }, + * })); + * ``` + */ + inet: createGenerator(GenerateInet), + + /** + * generates PostGIS geometry objects based on the given parameters. + * + * @param isUnique - property that controls if generated values gonna be unique or not. + * @param arraySize - number of elements in each one-dimensional array (If specified, arrays will be generated). + * @param type - geometry type to generate; currently only `'point'` is supported. + * + * Defaults to `'point'`. + * @param srid - Spatial Reference System Identifier: determines what type of point will be generated - either `4326` or `3857`. + * + * Defaults to `4326`. + * @param decimalPlaces - number of decimal places for points when `srid` is `4326` (e.g., `decimalPlaces = 3` produces values like `'point(30.723 46.482)'`). + * + * Defaults to `6`. + * + * @example + * ```ts + * await seed(db, { geometryTable: schema.geometryTable }).refine((funcs) => ({ + * geometryTable: { + * count, + * columns: { + * geometryPointTuple: funcs.geometry({ + * type: 'point', + * srid: 4326, + * decimalPlaces: 5, + * isUnique: true + * }) + * }, + * }, + * })); + * ``` + */ + geometry: createGenerator(GenerateGeometry), + + /** + * generates vectors based on the provided parameters. + * + * @param isUnique - property that controls if generated values gonna be unique or not. + * @param arraySize - number of elements in each one-dimensional array (If specified, arrays will be generated). + * @param decimalPlaces - number of decimal places for each vector element (e.g., `decimalPlaces = 3` produces values like `1.123`). + * + * Defaults to `2`. + * @param dimensions - number of elements in each generated vector (e.g., `dimensions = 3` produces values like `[1,2,3]`). + * + * Defaults to the value of the database column’s dimensions. + * @param minValue - minimum allowed value for each vector element. + * + * Defaults to `-1000`. + * @param maxValue - maximum allowed value for each vector element. + * + * Defaults to `1000`. + * + * @example + * ```ts + * await seed(db, { vectorTable: schema.vectorTable }).refine((funcs) => ({ + * vectorTable: { + * count, + * columns: { + * vector: funcs.vector({ + * decimalPlaces: 5, + * dimensions: 12, + * minValue: -100, + * maxValue: 100, + * isUnique: true + * }), + * }, + * }, + * })); + * ``` + */ + vector: createGenerator(GenerateVector), }; // so far, version changes don’t affect generator parameters. @@ -761,7 +900,15 @@ export const generatorsFuncsV2 = { ...generatorsFuncs, }; +export const generatorsFuncsV3 = { + ...generatorsFuncs, +}; + export const generatorsMap = { + GenerateHashFromString: [ + GenerateHashFromString, + GenerateHashFromStringV3, + ], HollowGenerator: [ HollowGenerator, ], @@ -915,4 +1062,31 @@ export const generatorsMap = { GenerateWeightedCount: [ GenerateWeightedCount, ], + GenerateBitString: [ + GenerateBitString, + ], + GenerateUniqueBitString: [ + GenerateUniqueBitString, + ], + GenerateInet: [ + GenerateInet, + ], + GenerateUniqueInet: [ + GenerateUniqueInet, + ], + GenerateGeometry: [ + GenerateGeometry, + ], + GenerateUniqueGeometry: [ + GenerateUniqueGeometry, + ], + GenerateVector: [ + GenerateVector, + ], + GenerateUniqueVector: [ + GenerateUniqueVector, + ], + GenerateCompositeUniqueKey: [ + GenerateCompositeUniqueKey, + ], } as const; diff --git a/drizzle-seed/src/services/Generators.ts b/drizzle-seed/src/generators/Generators.ts similarity index 65% rename from drizzle-seed/src/services/Generators.ts rename to drizzle-seed/src/generators/Generators.ts index d05c577db0..096cc7db02 100644 --- a/drizzle-seed/src/services/Generators.ts +++ b/drizzle-seed/src/generators/Generators.ts @@ -12,17 +12,29 @@ import loremIpsumSentences, { maxStringLength as maxLoremIpsumLength } from '../ import phonesInfo from '../datasets/phonesInfo.ts'; import states, { maxStringLength as maxStateLength } from '../datasets/states.ts'; import streetSuffix, { maxStringLength as maxStreetSuffixLength } from '../datasets/streetSuffix.ts'; -import { fastCartesianProduct, fillTemplate, getWeightedIndices, isObject } from './utils.ts'; +import type { GeneratedValueType } from '../types/seedService.ts'; +import type { Column } from '../types/tables.ts'; +import { + abs, + fastCartesianProduct, + fastCartesianProductForBigint, + fillTemplate, + getWeightedIndices, + isObject, + OrderedNumberRange, +} from './utils.ts'; export abstract class AbstractGenerator { static readonly entityKind: string = 'AbstractGenerator'; static readonly version: number = 1; + public isGeneratorUnique = false; public isUnique = false; public notNull = false; // param for generators which have a unique version of themselves public uniqueVersionOfGen?: new(params: T) => AbstractGenerator; + public maxUniqueCount: number = -1; public dataType?: string; public timeSpent?: number; @@ -32,12 +44,15 @@ export abstract class AbstractGenerator { public baseColumnDataType?: string; // param for text-like generators - public stringLength?: number; + // public stringLength?: number; // params for GenerateValuesFromArray public weightedCountSeed?: number | undefined; public maxRepeatedValuesCount?: number | { weight: number; count: number | number[] }[] | undefined; + public typeParams: Column['typeParams'] = {}; + public uniqueKey?: string; + public params: T; constructor(params?: T) { @@ -63,17 +78,24 @@ export abstract class AbstractGenerator { } } - abstract generate(params: { i: number }): number | string | boolean | unknown | undefined | void; + abstract generate( + params: { i?: number; columnName?: string; input?: string }, + ): number | string | boolean | unknown | undefined | void; getEntityKind(): string { const constructor = this.constructor as typeof AbstractGenerator; return constructor.entityKind; } + getMaxUniqueCount() { + // override if you need to initialize this.maxUniqueCount after constructor + return this.maxUniqueCount; + } + replaceIfUnique() { this.updateParams(); if ( - this.uniqueVersionOfGen !== undefined + (this.uniqueVersionOfGen !== undefined) && this.isUnique === true ) { const uniqueGen = new this.uniqueVersionOfGen({ @@ -82,6 +104,7 @@ export abstract class AbstractGenerator { uniqueGen.isUnique = this.isUnique; uniqueGen.dataType = this.dataType; + uniqueGen.typeParams = this.typeParams; return uniqueGen; } @@ -93,13 +116,18 @@ export abstract class AbstractGenerator { if (!(this.getEntityKind() === 'GenerateArray') && this.arraySize !== undefined) { const uniqueGen = this.replaceIfUnique(); const baseColumnGen = uniqueGen === undefined ? this : uniqueGen; + baseColumnGen.dataType = this.baseColumnDataType; + const { dimensions, ...rest } = baseColumnGen.typeParams; + baseColumnGen.typeParams = rest; + const arrayGen = new GenerateArray( { baseColumnGen, size: this.arraySize, }, ); + arrayGen.typeParams = { dimensions }; return arrayGen; } @@ -109,6 +137,22 @@ export abstract class AbstractGenerator { } // Generators Classes ----------------------------------------------------------------------------------------------------------------------- +export class GenerateHashFromString extends AbstractGenerator<{}> { + static override readonly entityKind: string = 'GenerateHashFromString'; + override init() {} + generate({ input }: { input: string }): number { + let hash = 0; + // p and m are prime numbers + const p = 53; + const m = 28871271685163; + + for (let i = 0; i < input.length; i++) { + hash += ((input.codePointAt(i) || 0) * Math.pow(p, i)) % m; + } + + return hash; + } +} export class GenerateArray extends AbstractGenerator<{ baseColumnGen: AbstractGenerator; size?: number }> { static override readonly entityKind: string = 'GenerateArray'; public override arraySize = 10; @@ -186,28 +230,55 @@ export class GenerateDefault extends AbstractGenerator<{ } } -export class GenerateValuesFromArray extends AbstractGenerator< - { - values: - | (number | string | boolean | undefined)[] - | { weight: number; values: (number | string | boolean | undefined)[] }[]; - isUnique?: boolean; - arraySize?: number; - } -> { +// TODO split GenerateValuesFromArray into GenerateValuesFromArray and GenerateUniqueValuesFromArray; +// TODO make all unique generators extend from new UniqueGenerator class +export type GenerateValuesFromArrayT = { + values: + | GeneratedValueType[] + | { weight: number; values: GeneratedValueType[] }[]; + isUnique?: boolean; + arraySize?: number; +}; +export class GenerateValuesFromArray extends AbstractGenerator { static override readonly entityKind: string = 'GenerateValuesFromArray'; private state: { rng: prand.RandomGenerator; values: - | (number | string | boolean | undefined)[] - | { weight: number; values: (number | string | boolean | undefined)[] }[]; + | GeneratedValueType[] + | { weight: number; values: GeneratedValueType[] }[]; genIndicesObj: GenerateUniqueInt | undefined; genIndicesObjList: GenerateUniqueInt[] | undefined; valuesWeightedIndices: number[] | undefined; genMaxRepeatedValuesCount: GenerateDefault | GenerateWeightedCount | undefined; } | undefined; public override timeSpent: number = 0; + public override maxUniqueCount: number; + private allValuesCount: number = 0; // TODO rewrite generator + + constructor(params?: GenerateValuesFromArrayT) { + super(params); + + this.allValuesCount = this.params.values.length; + if (isObject(this.params.values[0])) { + this.allValuesCount = (this.params.values as { values: any[] }[]).reduce( + (acc, currVal) => acc + currVal.values.length, + 0, + ); + } + this.maxUniqueCount = this.allValuesCount; + } + + override getMaxUniqueCount(): number { + this.allValuesCount = this.params.values.length; + if (isObject(this.params.values[0])) { + this.allValuesCount = (this.params.values as { values: any[] }[]).reduce( + (acc, currVal) => acc + currVal.values.length, + 0, + ); + } + return this.allValuesCount; + } checks({ count }: { count: number }) { const { values } = this.params; @@ -237,11 +308,6 @@ export class GenerateValuesFromArray extends AbstractGenerator< throw new Error('maxRepeatedValuesCount should be greater than zero.'); } - let allValuesCount = values.length; - if (isObject(values[0])) { - allValuesCount = (values as { values: any[] }[]).reduce((acc, currVal) => acc + currVal.values.length, 0); - } - if ( notNull === true && maxRepeatedValuesCount !== undefined @@ -249,7 +315,8 @@ export class GenerateValuesFromArray extends AbstractGenerator< (!isObject(values[0]) && typeof maxRepeatedValuesCount === 'number' && maxRepeatedValuesCount * values.length < count) || (isObject(values[0]) && typeof maxRepeatedValuesCount === 'number' - && maxRepeatedValuesCount * allValuesCount < count) + // eslint-disable-next-line unicorn/consistent-destructuring + && maxRepeatedValuesCount * this.allValuesCount < count) ) ) { throw new Error("Can't fill notNull column with null values."); @@ -272,7 +339,8 @@ export class GenerateValuesFromArray extends AbstractGenerator< if ( isUnique === true && notNull === true && ( (!isObject(values[0]) && values.length < count) - || (isObject(values[0]) && allValuesCount < count) + // eslint-disable-next-line unicorn/consistent-destructuring + || (isObject(values[0]) && this.allValuesCount < count) ) ) { // console.log(maxRepeatedValuesCount, values.length, allValuesCount, count) @@ -403,13 +471,13 @@ export class GenerateValuesFromArray extends AbstractGenerator< } } -export class GenerateSelfRelationsValuesFromArray extends AbstractGenerator<{ values: (number | string | boolean)[] }> { +export class GenerateSelfRelationsValuesFromArray extends AbstractGenerator<{ values: (number | string | bigint)[] }> { static override readonly entityKind: string = 'GenerateSelfRelationsValuesFromArray'; private state: { rng: prand.RandomGenerator; firstValuesCount: number; - firstValues: (string | number | boolean)[]; + firstValues: (string | number | bigint)[]; } | undefined; override init({ count, seed }: { count: number; seed: number }) { @@ -418,7 +486,7 @@ export class GenerateSelfRelationsValuesFromArray extends AbstractGenerator<{ va // generate 15-40 % values with the same value as reference column let percent = 30; [percent, rng] = prand.uniformIntDistribution(20, 40, rng); - const firstValuesCount = Math.floor((percent / 100) * count), firstValues: (string | number | boolean)[] = []; + const firstValuesCount = Math.floor((percent / 100) * count), firstValues: (string | number | bigint)[] = []; this.state = { rng, firstValuesCount, firstValues }; } @@ -445,6 +513,8 @@ export class GenerateIntPrimaryKey extends AbstractGenerator<{}> { static override readonly entityKind: string = 'GenerateIntPrimaryKey'; public maxValue?: number | bigint; + public override maxUniqueCount: number = Number.POSITIVE_INFINITY; + public override isGeneratorUnique = true; override init({ count }: { count: number; seed: number }) { if (this.maxValue !== undefined && count > this.maxValue) { @@ -480,6 +550,7 @@ export class GenerateNumber extends AbstractGenerator< } | undefined; override uniqueVersionOfGen = GenerateUniqueNumber; + // TODO rewrite precision to decimalPlaces override init({ count, seed }: { seed: number; count: number }) { super.init({ count, seed }); @@ -517,14 +588,13 @@ export class GenerateNumber extends AbstractGenerator< } } -export class GenerateUniqueNumber extends AbstractGenerator< - { - minValue?: number; - maxValue?: number; - precision?: number; - isUnique?: boolean; - } -> { +export type GenerateUniqueNumberT = { + minValue?: number; + maxValue?: number; + precision?: number; + isUnique?: boolean; +}; +export class GenerateUniqueNumber extends AbstractGenerator { static override readonly entityKind: string = 'GenerateUniqueNumber'; private state: { @@ -533,9 +603,37 @@ export class GenerateUniqueNumber extends AbstractGenerator< maxValue: number; precision: number; } | undefined; - public override isUnique = true; + public precision: number; + + public override isGeneratorUnique = true; + public override maxUniqueCount: number; + + constructor(params?: GenerateUniqueNumberT) { + super(params); + let { minValue, maxValue } = this.params; + const { precision } = this.params; + + this.precision = precision ?? 100; + + if (maxValue === undefined) { + this.maxUniqueCount = Number.POSITIVE_INFINITY; + return; + } else { + maxValue *= this.precision; + } + + if (minValue === undefined) { + minValue = -maxValue; + } else { + minValue *= this.precision; + } + + this.maxUniqueCount = maxValue - minValue + 1; + } + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; - override init({ count, seed }: { count: number; seed: number }) { let { minValue, maxValue, precision } = this.params; if (precision === undefined) { @@ -543,7 +641,8 @@ export class GenerateUniqueNumber extends AbstractGenerator< } if (maxValue === undefined) { - maxValue = count * precision; + this.maxUniqueCount = Number.POSITIVE_INFINITY; + return this.maxUniqueCount; } else { maxValue *= precision; } @@ -554,10 +653,30 @@ export class GenerateUniqueNumber extends AbstractGenerator< minValue *= precision; } + this.maxUniqueCount = maxValue - minValue + 1; + + return this.maxUniqueCount; + } + + override init({ count, seed }: { count: number; seed: number }) { + let { minValue, maxValue } = this.params; + + if (maxValue === undefined) { + maxValue = count * this.precision; + } else { + maxValue *= this.precision; + } + + if (minValue === undefined) { + minValue = -maxValue; + } else { + minValue *= this.precision; + } + const genUniqueIntObj = new GenerateUniqueInt({ minValue, maxValue }); genUniqueIntObj.init({ count, seed }); - this.state = { genUniqueIntObj, minValue, maxValue, precision }; + this.state = { genUniqueIntObj, minValue, maxValue, precision: this.precision }; } generate() { @@ -640,11 +759,12 @@ export class GenerateInt extends AbstractGenerator<{ } } -export class GenerateUniqueInt extends AbstractGenerator<{ +export type GenerateUniqueIntT = { minValue?: number | bigint; maxValue?: number | bigint; isUnique?: boolean; -}> { +}; +export class GenerateUniqueInt extends AbstractGenerator { static override readonly entityKind: string = 'GenerateUniqueInt'; public genMaxRepeatedValuesCount: GenerateDefault | GenerateWeightedCount | undefined; @@ -656,8 +776,55 @@ export class GenerateUniqueInt extends AbstractGenerator<{ intervals: (number | bigint)[][]; integersCount: Map; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; public override timeSpent = 0; + public override maxUniqueCount: number; + + constructor(params?: GenerateUniqueIntT) { + super(params); + + let minValue = this.params.minValue, maxValue = this.params.maxValue; + + if (maxValue === undefined) { + this.maxUniqueCount = Number.POSITIVE_INFINITY; + return; + } + + if (minValue === undefined) { + minValue = -maxValue; + } + + if (typeof minValue === 'number' && typeof maxValue === 'number') { + minValue = minValue >= 0 ? Math.ceil(minValue) : Math.floor(minValue); + maxValue = maxValue >= 0 ? Math.floor(maxValue) : Math.ceil(maxValue); + this.maxUniqueCount = Number(maxValue! - minValue!) + 1; + } else if (typeof minValue === 'bigint' && typeof maxValue === 'bigint') { + this.maxUniqueCount = Number((maxValue as bigint) - (minValue as bigint)) + 1; + } else this.maxUniqueCount = Number(Number(maxValue) - Number(minValue)) + 1; // error should be triggered in init method + } + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; + + let minValue = this.params.minValue as T, maxValue = this.params.maxValue as T; + + if (maxValue === undefined) { + this.maxUniqueCount = Number.POSITIVE_INFINITY; + return this.maxUniqueCount; + } + + if (minValue === undefined) { + minValue = -maxValue as T; + } + + if (typeof minValue === 'number' && typeof maxValue === 'number') { + minValue = minValue >= 0 ? Math.ceil(minValue) as T : Math.floor(minValue) as T; + maxValue = maxValue >= 0 ? Math.floor(maxValue) as T : Math.ceil(maxValue) as T; + } + + this.maxUniqueCount = Number(maxValue - minValue) + 1; + return this.maxUniqueCount; + } override init({ count, seed }: { count: number; seed: number }) { const rng = prand.xoroshiro128plus(seed); @@ -1313,7 +1480,7 @@ export class GenerateInterval extends AbstractGenerator<{ } // has a newer version -export class GenerateUniqueInterval extends AbstractGenerator<{ +export type GenerateUniqueIntervalT = { fields?: | 'year' | 'month' @@ -1329,7 +1496,8 @@ export class GenerateUniqueInterval extends AbstractGenerator<{ | 'hour to second' | 'minute to second'; isUnique?: boolean; -}> { +}; +export class GenerateUniqueInterval extends AbstractGenerator { static override readonly 'entityKind': string = 'GenerateUniqueInterval'; private state: { @@ -1337,7 +1505,7 @@ export class GenerateUniqueInterval extends AbstractGenerator<{ fieldsToGenerate: string[]; intervalSet: Set; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; private config: { [key: string]: { from: number; to: number } } = { year: { from: 0, @@ -1364,6 +1532,53 @@ export class GenerateUniqueInterval extends AbstractGenerator<{ to: 60, }, }; + public override maxUniqueCount: number; + + constructor(params?: GenerateUniqueIntervalT) { + super(params); + + const allFields = ['year', 'month', 'day', 'hour', 'minute', 'second']; + let fieldsToGenerate: string[] = allFields; + + if (this.params.fields !== undefined && this.params.fields?.includes(' to ')) { + const tokens = this.params.fields.split(' to '); + const endIdx = allFields.indexOf(tokens[1]!); + fieldsToGenerate = allFields.slice(0, endIdx + 1); + } else if (this.params.fields !== undefined) { + const endIdx = allFields.indexOf(this.params.fields); + fieldsToGenerate = allFields.slice(0, endIdx + 1); + } + + this.maxUniqueCount = 1; + for (const field of fieldsToGenerate) { + const from = this.config[field]!.from, to = this.config[field]!.to; + this.maxUniqueCount *= from - to + 1; + } + } + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; + + const allFields = ['year', 'month', 'day', 'hour', 'minute', 'second']; + let fieldsToGenerate: string[] = allFields; + + if (this.params.fields !== undefined && this.params.fields?.includes(' to ')) { + const tokens = this.params.fields.split(' to '); + const endIdx = allFields.indexOf(tokens[1]!); + fieldsToGenerate = allFields.slice(0, endIdx + 1); + } else if (this.params.fields !== undefined) { + const endIdx = allFields.indexOf(this.params.fields); + fieldsToGenerate = allFields.slice(0, endIdx + 1); + } + + this.maxUniqueCount = 1; + for (const field of fieldsToGenerate) { + const from = this.config[field]!.from, to = this.config[field]!.to; + this.maxUniqueCount *= from - to + 1; + } + + return this.maxUniqueCount; + } override init({ count, seed }: { count: number; seed: number }) { const allFields = ['year', 'month', 'day', 'hour', 'minute', 'second']; @@ -1462,6 +1677,8 @@ export class GenerateString extends AbstractGenerator<{ ); currStr += stringChars[idx]; } + + if (this.dataType === 'object') return Buffer.from(currStr); return currStr; } } @@ -1471,7 +1688,12 @@ export class GenerateUniqueString extends AbstractGenerator<{ isUnique?: boolean static override readonly entityKind: string = 'GenerateUniqueString'; private state: { rng: prand.RandomGenerator } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = Number.POSITIVE_INFINITY; + + override getMaxUniqueCount(): number { + return Number.POSITIVE_INFINITY; + } override init({ seed }: { seed: number }) { const rng = prand.xoroshiro128plus(seed); @@ -1506,7 +1728,10 @@ export class GenerateUniqueString extends AbstractGenerator<{ isUnique?: boolean currStr += stringChars[idx]; } - return currStr.slice(0, 4) + uniqueStr + currStr.slice(4); + currStr = currStr.slice(0, 4) + uniqueStr + currStr.slice(4); + + if (this.dataType === 'object') return Buffer.from(currStr); + return currStr; } } @@ -1515,10 +1740,15 @@ export class GenerateUUID extends AbstractGenerator<{ }> { static override readonly entityKind: string = 'GenerateUUID'; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = Number.POSITIVE_INFINITY; private state: { rng: prand.RandomGenerator } | undefined; + override getMaxUniqueCount(): number { + return Number.POSITIVE_INFINITY; + } + override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); @@ -1573,9 +1803,9 @@ export class GenerateFirstName extends AbstractGenerator<{ const rng = prand.xoroshiro128plus(seed); - if (this.stringLength !== undefined && this.stringLength < maxFirstNameLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxFirstNameLength) { throw new Error( - `You can't use first name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxFirstNameLength}.`, + `You can't use first name generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxFirstNameLength}.`, ); } @@ -1604,16 +1834,24 @@ export class GenerateUniqueFirstName extends AbstractGenerator<{ private state: { genIndicesObj: GenerateUniqueInt; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = firstNames.length; + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; + + this.maxUniqueCount = firstNames.length; + return firstNames.length; + } override init({ count, seed }: { count: number; seed: number }) { - if (count > firstNames.length) { + if (count > this.getMaxUniqueCount()) { throw new Error('count exceeds max number of unique first names.'); } - if (this.stringLength !== undefined && this.stringLength < maxFirstNameLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxFirstNameLength) { throw new Error( - `You can't use first name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxFirstNameLength}.`, + `You can't use first name generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxFirstNameLength}.`, ); } @@ -1652,9 +1890,9 @@ export class GenerateLastName extends AbstractGenerator<{ const rng = prand.xoroshiro128plus(seed); - if (this.stringLength !== undefined && this.stringLength < maxLastNameLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxLastNameLength) { throw new Error( - `You can't use last name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxLastNameLength}.`, + `You can't use last name generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxLastNameLength}.`, ); } @@ -1678,16 +1916,24 @@ export class GenerateUniqueLastName extends AbstractGenerator<{ isUnique?: boole private state: { genIndicesObj: GenerateUniqueInt; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = lastNames.length; + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; + + this.maxUniqueCount = lastNames.length; + return lastNames.length; + } override init({ count, seed }: { count: number; seed: number }) { - if (count > lastNames.length) { + if (count > this.getMaxUniqueCount()) { throw new Error('count exceeds max number of unique last names.'); } - if (this.stringLength !== undefined && this.stringLength < maxLastNameLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxLastNameLength) { throw new Error( - `You can't use last name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxLastNameLength}.`, + `You can't use last name generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxLastNameLength}.`, ); } @@ -1725,9 +1971,11 @@ export class GenerateFullName extends AbstractGenerator<{ const rng = prand.xoroshiro128plus(seed); - if (this.stringLength !== undefined && this.stringLength < (maxFirstNameLength + maxLastNameLength + 1)) { + if ( + this.typeParams?.length !== undefined && this.typeParams?.length < (maxFirstNameLength + maxLastNameLength + 1) + ) { throw new Error( - `You can't use full name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${ + `You can't use full name generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${ maxFirstNameLength + maxLastNameLength + 1 }.`, ); @@ -1764,22 +2012,31 @@ export class GenerateUniqueFullName extends AbstractGenerator<{ fullnameSet: Set; rng: prand.RandomGenerator; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; public override timeSpent = 0; + public override maxUniqueCount: number = firstNames.length * lastNames.length; + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; + + this.maxUniqueCount = firstNames.length * lastNames.length; + return this.maxUniqueCount; + } override init({ count, seed }: { count: number; seed: number }) { const t0 = new Date(); - const maxUniqueFullNamesNumber = firstNames.length * lastNames.length; - if (count > maxUniqueFullNamesNumber) { + if (count > this.getMaxUniqueCount()) { throw new RangeError( - `count exceeds max number of unique full names(${maxUniqueFullNamesNumber}).`, + `count exceeds max number of unique full names(${this.getMaxUniqueCount()}).`, ); } - if (this.stringLength !== undefined && this.stringLength < (maxFirstNameLength + maxLastNameLength + 1)) { + if ( + this.typeParams?.length !== undefined && this.typeParams?.length < (maxFirstNameLength + maxLastNameLength + 1) + ) { throw new Error( - `You can't use full name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${ + `You can't use full name generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${ maxFirstNameLength + maxLastNameLength + 1 }.`, ); @@ -1830,16 +2087,20 @@ export class GenerateEmail extends AbstractGenerator<{ arraysToGenerateFrom: string[][]; } | undefined; public override timeSpent: number = 0; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = adjectives.length * firstNames.length * emailDomains.length; + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; + + this.maxUniqueCount = adjectives.length * firstNames.length * emailDomains.length; + return this.maxUniqueCount; + } override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); - const domainsArray = emailDomains; - const adjectivesArray = adjectives; - const namesArray = firstNames; - - const maxUniqueEmailsNumber = adjectivesArray.length * namesArray.length * domainsArray.length; + const maxUniqueEmailsNumber = adjectives.length * firstNames.length * emailDomains.length; if (count > maxUniqueEmailsNumber) { throw new RangeError( `count exceeds max number of unique emails(${maxUniqueEmailsNumber}).`, @@ -1847,13 +2108,13 @@ export class GenerateEmail extends AbstractGenerator<{ } const maxEmailLength = maxAdjectiveLength + maxFirstNameLength + maxEmailDomainLength + 2; - if (this.stringLength !== undefined && this.stringLength < maxEmailLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxEmailLength) { throw new Error( - `You can't use email generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxEmailLength}.`, + `You can't use email generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxEmailLength}.`, ); } - const arraysToGenerateFrom = [adjectivesArray, namesArray, domainsArray]; + const arraysToGenerateFrom = [adjectives, firstNames, emailDomains]; const genIndicesObj = new GenerateUniqueInt({ minValue: 0, maxValue: maxUniqueEmailsNumber - 1, @@ -1884,12 +2145,13 @@ export class GenerateEmail extends AbstractGenerator<{ } } -export class GeneratePhoneNumber extends AbstractGenerator<{ +export type GeneratePhoneNumberT = { template?: string; prefixes?: string[]; generatedDigitsNumbers?: number | number[]; arraySize?: number; -}> { +}; +export class GeneratePhoneNumber extends AbstractGenerator { static override readonly entityKind: string = 'GeneratePhoneNumber'; private state: { @@ -1900,28 +2162,100 @@ export class GeneratePhoneNumber extends AbstractGenerator<{ generatorsMap: Map; phoneNumbersSet: Set; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number; + + constructor(params?: GeneratePhoneNumberT) { + super(params); + + const { template } = this.params; + if (template === undefined) { + const { generatedDigitsNumbers } = this.prepareWithoutTemplate(); + this.maxUniqueCount = generatedDigitsNumbers.reduce( + (a, b) => a + Math.pow(10, b), + 0, + ); + } else { + const { placeholdersCount } = this.prepareWithTemplate(); + + this.maxUniqueCount = Math.pow(10, placeholdersCount); + } + } + + prepareWithTemplate(): { placeholdersCount: number } { + const { template } = this.params; + + const iterArray = [...template!.matchAll(/#/g)]; + const placeholdersCount = iterArray.length; + return { placeholdersCount }; + } + + prepareWithoutTemplate(): { generatedDigitsNumbers: number[]; prefixes: string[] } { + let { generatedDigitsNumbers, prefixes } = this.params; + if (prefixes === undefined || prefixes.length === 0) { + prefixes = phonesInfo.map((phoneInfo) => phoneInfo.split(',').slice(0, -1).join(' ')); + generatedDigitsNumbers = phonesInfo.map((phoneInfo) => { + // tokens = ["380","99","9"] = + // = ["country prefix", "operator prefix", "number length including operator prefix and excluding country prefix"] + const tokens = phoneInfo.split(','); + const operatorPrefixLength = tokens[1]!.replaceAll(' ', '').length; + + return Number(tokens[2]) - operatorPrefixLength; + }); + } else { + if (typeof generatedDigitsNumbers === 'number') { + generatedDigitsNumbers = Array.from({ length: prefixes.length }).fill( + generatedDigitsNumbers, + ); + } else if ( + generatedDigitsNumbers === undefined + || generatedDigitsNumbers.length === 0 + ) { + generatedDigitsNumbers = Array.from({ length: prefixes.length }).fill(7); + } + } + + return { prefixes, generatedDigitsNumbers }; + } + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; + + const { template } = this.params; + if (template === undefined) { + const { generatedDigitsNumbers } = this.prepareWithoutTemplate(); + this.maxUniqueCount = generatedDigitsNumbers.reduce( + (a, b) => a + Math.pow(10, b), + 0, + ); + + return this.maxUniqueCount; + } else { + const { placeholdersCount } = this.prepareWithTemplate(); + + this.maxUniqueCount = Math.pow(10, placeholdersCount); + return this.maxUniqueCount; + } + } override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); - let { generatedDigitsNumbers } = this.params; - const { prefixes, template } = this.params; + const { template } = this.params; const rng = prand.xoroshiro128plus(seed); if (template !== undefined) { - if (this.stringLength !== undefined && this.stringLength < template.length) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < template.length) { throw new Error( - `Length of phone number template is shorter than db column length restriction: ${this.stringLength}. + `Length of phone number template is shorter than db column length restriction: ${this.typeParams?.length}. Set the maximum string length to at least ${template.length}.`, ); } - const iterArray = [...template.matchAll(/#/g)]; - const placeholdersCount = iterArray.length; + const { placeholdersCount } = this.prepareWithTemplate(); - const maxUniquePhoneNumbersCount = Math.pow(10, placeholdersCount); + const maxUniquePhoneNumbersCount = this.getMaxUniqueCount(); if (maxUniquePhoneNumbersCount < count) { throw new RangeError( `count exceeds max number of unique phone numbers(${maxUniquePhoneNumbersCount}).`, @@ -1948,37 +2282,15 @@ export class GeneratePhoneNumber extends AbstractGenerator<{ return; } - let prefixesArray: string[]; - if (prefixes === undefined || prefixes.length === 0) { - prefixesArray = phonesInfo.map((phoneInfo) => phoneInfo.split(',').slice(0, -1).join(' ')); - generatedDigitsNumbers = phonesInfo.map((phoneInfo) => { - // tokens = ["380","99","9"] = - // = ["country prefix", "operator prefix", "number length including operator prefix and excluding country prefix"] - const tokens = phoneInfo.split(','); - const operatorPrefixLength = tokens[1]!.replaceAll(' ', '').length; - - return Number(tokens[2]) - operatorPrefixLength; - }); - } else { - prefixesArray = prefixes; - if (typeof generatedDigitsNumbers === 'number') { - generatedDigitsNumbers = Array.from({ length: prefixes.length }).fill( - generatedDigitsNumbers, - ); - } else if ( - generatedDigitsNumbers === undefined - || generatedDigitsNumbers.length === 0 - ) { - generatedDigitsNumbers = Array.from({ length: prefixes.length }).fill(7); - } - } + const { generatedDigitsNumbers, prefixes } = this.prepareWithoutTemplate(); + const prefixesArray = [...prefixes]; const maxPrefixLength = Math.max(...prefixesArray.map((prefix) => prefix.length)); const maxGeneratedDigits = Math.max(...generatedDigitsNumbers); - if (this.stringLength !== undefined && this.stringLength < (maxPrefixLength + maxGeneratedDigits)) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < (maxPrefixLength + maxGeneratedDigits)) { throw new Error( - `You can't use phone number generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${ + `You can't use phone number generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${ maxPrefixLength + maxGeneratedDigits }.`, ); @@ -1988,10 +2300,7 @@ export class GeneratePhoneNumber extends AbstractGenerator<{ throw new Error('prefixes are not unique.'); } - const maxUniquePhoneNumbersCount = generatedDigitsNumbers.reduce( - (a, b) => a + Math.pow(10, b), - 0, - ); + const maxUniquePhoneNumbersCount = this.getMaxUniqueCount(); if (maxUniquePhoneNumbersCount < count) { throw new RangeError( `count exceeds max number of unique phone numbers(${maxUniquePhoneNumbersCount}).`, @@ -2096,9 +2405,9 @@ export class GenerateCountry extends AbstractGenerator<{ const rng = prand.xoroshiro128plus(seed); - if (this.stringLength !== undefined && this.stringLength < maxCountryLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxCountryLength) { throw new Error( - `You can't use country generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCountryLength}.`, + `You can't use country generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxCountryLength}.`, ); } @@ -2125,16 +2434,24 @@ export class GenerateUniqueCountry extends AbstractGenerator<{ isUnique?: boolea private state: { genIndicesObj: GenerateUniqueInt; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = countries.length; + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount !== undefined) return this.maxUniqueCount; + + this.maxUniqueCount = countries.length; + return this.maxUniqueCount; + } override init({ count, seed }: { count: number; seed: number }) { - if (count > countries.length) { + if (count > this.getMaxUniqueCount()) { throw new Error('count exceeds max number of unique countries.'); } - if (this.stringLength !== undefined && this.stringLength < maxCountryLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxCountryLength) { throw new Error( - `You can't use country generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCountryLength}.`, + `You can't use country generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxCountryLength}.`, ); } @@ -2170,9 +2487,9 @@ export class GenerateJobTitle extends AbstractGenerator<{ const rng = prand.xoroshiro128plus(seed); - if (this.stringLength !== undefined && this.stringLength < maxJobTitleLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxJobTitleLength) { throw new Error( - `You can't use job title generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxJobTitleLength}.`, + `You can't use job title generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxJobTitleLength}.`, ); } @@ -2210,9 +2527,9 @@ export class GenerateStreetAddress extends AbstractGenerator<{ const possStreetNames = [firstNames, lastNames]; const maxStreetAddressLength = 4 + Math.max(maxFirstNameLength, maxLastNameLength) + 1 + maxStreetSuffixLength; - if (this.stringLength !== undefined && this.stringLength < maxStreetAddressLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxStreetAddressLength) { throw new Error( - `You can't use street address generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxStreetAddressLength}.`, + `You can't use street address generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxStreetAddressLength}.`, ); } @@ -2253,12 +2570,19 @@ export class GenerateUniqueStreetAddress extends AbstractGenerator<{ isUnique?: arraysToChooseFrom: string[][]; }[]; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public streetNumbersCount = 999; + public override maxUniqueCount = this.streetNumbersCount * (firstNames.length + lastNames.length) + * streetSuffix.length; + + override getMaxUniqueCount(): number { + return this.maxUniqueCount; + } override init({ count, seed }: { count: number; seed: number }) { - const streetNumberStrs = Array.from({ length: 999 }, (_, i) => String(i + 1)); - const maxUniqueStreetnamesNumber = streetNumberStrs.length * firstNames.length * streetSuffix.length - + streetNumberStrs.length * firstNames.length * streetSuffix.length; + const streetNumberStrs = Array.from({ length: this.streetNumbersCount }, (_, i) => String(i + 1)); + const maxUniqueStreetnamesNumber = streetNumberStrs.length * (firstNames.length + lastNames.length) + * streetSuffix.length; if (count > maxUniqueStreetnamesNumber) { throw new RangeError( @@ -2267,9 +2591,9 @@ export class GenerateUniqueStreetAddress extends AbstractGenerator<{ isUnique?: } const maxStreetAddressLength = 4 + Math.max(maxFirstNameLength, maxLastNameLength) + 1 + maxStreetSuffixLength; - if (this.stringLength !== undefined && this.stringLength < maxStreetAddressLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxStreetAddressLength) { throw new Error( - `You can't use street address generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxStreetAddressLength}.`, + `You can't use street address generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxStreetAddressLength}.`, ); } @@ -2291,7 +2615,7 @@ export class GenerateUniqueStreetAddress extends AbstractGenerator<{ isUnique?: minValue: 0, maxValue: streetNumberStrs.length * lastNames.length * streetSuffix.length - 1, }), - maxUniqueStreetNamesNumber: streetNumberStrs.length * firstNames.length * streetSuffix.length, + maxUniqueStreetNamesNumber: streetNumberStrs.length * lastNames.length * streetSuffix.length, count: 0, arraysToChooseFrom: [streetNumberStrs, lastNames, streetSuffix], }, @@ -2350,9 +2674,9 @@ export class GenerateCity extends AbstractGenerator<{ const rng = prand.xoroshiro128plus(seed); - if (this.stringLength !== undefined && this.stringLength < maxCityNameLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxCityNameLength) { throw new Error( - `You can't use city generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCityNameLength}.`, + `You can't use city generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxCityNameLength}.`, ); } @@ -2377,16 +2701,17 @@ export class GenerateUniqueCity extends AbstractGenerator<{ isUnique?: boolean } private state: { genIndicesObj: GenerateUniqueInt; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = cityNames.length; override init({ count, seed }: { count: number; seed: number }) { - if (count > cityNames.length) { + if (count > this.maxUniqueCount) { throw new Error('count exceeds max number of unique cities.'); } - if (this.stringLength !== undefined && this.stringLength < maxCityNameLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxCityNameLength) { throw new Error( - `You can't use city generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCityNameLength}.`, + `You can't use city generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxCityNameLength}.`, ); } @@ -2427,9 +2752,9 @@ export class GeneratePostcode extends AbstractGenerator<{ const templates = ['#####', '#####-####']; const maxPostcodeLength = Math.max(...templates.map((template) => template.length)); - if (this.stringLength !== undefined && this.stringLength < maxPostcodeLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxPostcodeLength) { throw new Error( - `You can't use postcode generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxPostcodeLength}.`, + `You can't use postcode generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxPostcodeLength}.`, ); } @@ -2478,13 +2803,13 @@ export class GenerateUniquePostcode extends AbstractGenerator<{ isUnique?: boole maxUniquePostcodeNumber: number; }[]; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = Math.pow(10, 5) + Math.pow(10, 9); override init({ count, seed }: { count: number; seed: number }) { - const maxUniquePostcodeNumber = Math.pow(10, 5) + Math.pow(10, 9); - if (count > maxUniquePostcodeNumber) { + if (count > this.maxUniqueCount) { throw new RangeError( - `count exceeds max number of unique postcodes(${maxUniquePostcodeNumber}).`, + `count exceeds max number of unique postcodes(${this.maxUniqueCount}).`, ); } @@ -2507,9 +2832,9 @@ export class GenerateUniquePostcode extends AbstractGenerator<{ isUnique?: boole ]; const maxPostcodeLength = Math.max(...templates.map((template) => template.template.length)); - if (this.stringLength !== undefined && this.stringLength < maxPostcodeLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxPostcodeLength) { throw new Error( - `You can't use postcode generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxPostcodeLength}.`, + `You can't use postcode generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxPostcodeLength}.`, ); } @@ -2564,9 +2889,9 @@ export class GenerateState extends AbstractGenerator<{ const rng = prand.xoroshiro128plus(seed); - if (this.stringLength !== undefined && this.stringLength < maxStateLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxStateLength) { throw new Error( - `You can't use state generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxStateLength}.`, + `You can't use state generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxStateLength}.`, ); } @@ -2613,9 +2938,9 @@ export class GenerateCompanyName extends AbstractGenerator<{ maxLastNameLength + maxCompanyNameSuffixLength + 1, 3 * maxLastNameLength + 7, ); - if (this.stringLength !== undefined && this.stringLength < maxCompanyNameLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxCompanyNameLength) { throw new Error( - `You can't use company name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCompanyNameLength}.`, + `You can't use company name generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxCompanyNameLength}.`, ); } @@ -2671,14 +2996,14 @@ export class GenerateUniqueCompanyName extends AbstractGenerator<{ isUnique?: bo arraysToChooseFrom: string[][]; }[]; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number = lastNames.length * companyNameSuffixes.length + Math.pow(lastNames.length, 2) + + Math.pow(lastNames.length, 2) + Math.pow(lastNames.length, 3); override init({ count, seed }: { count: number; seed: number }) { - const maxUniqueCompanyNameNumber = lastNames.length * companyNameSuffixes.length + Math.pow(lastNames.length, 2) - + Math.pow(lastNames.length, 2) + Math.pow(lastNames.length, 3); - if (count > maxUniqueCompanyNameNumber) { + if (count > this.maxUniqueCount) { throw new RangeError( - `count exceeds max number of unique company names(${maxUniqueCompanyNameNumber}).`, + `count exceeds max number of unique company names(${this.maxUniqueCount}).`, ); } @@ -2687,9 +3012,9 @@ export class GenerateUniqueCompanyName extends AbstractGenerator<{ isUnique?: bo maxLastNameLength + maxCompanyNameSuffixLength + 1, 3 * maxLastNameLength + 7, ); - if (this.stringLength !== undefined && this.stringLength < maxCompanyNameLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxCompanyNameLength) { throw new Error( - `You can't use company name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCompanyNameLength}.`, + `You can't use company name generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxCompanyNameLength}.`, ); } @@ -2783,9 +3108,9 @@ export class GenerateLoremIpsum extends AbstractGenerator<{ const maxLoremIpsumSentencesLength = maxLoremIpsumLength * this.params.sentencesCount + this.params.sentencesCount - 1; - if (this.stringLength !== undefined && this.stringLength < maxLoremIpsumSentencesLength) { + if (this.typeParams?.length !== undefined && this.typeParams?.length < maxLoremIpsumSentencesLength) { throw new Error( - `You can't use lorem ipsum generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxLoremIpsumSentencesLength}.`, + `You can't use lorem ipsum generator with a db column length restriction of ${this.typeParams?.length}. Set the maximum string length to at least ${maxLoremIpsumSentencesLength}.`, ); } @@ -2925,37 +3250,50 @@ export class GeneratePoint extends AbstractGenerator<{ } } -export class GenerateUniquePoint extends AbstractGenerator<{ +export type GenerateUniquePointT = { minXValue?: number; maxXValue?: number; minYValue?: number; maxYValue?: number; isUnique?: boolean; -}> { +}; +export class GenerateUniquePoint extends AbstractGenerator { static override readonly entityKind: string = 'GenerateUniquePoint'; private state: { xCoordinateGen: GenerateUniqueNumber; yCoordinateGen: GenerateUniqueNumber; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public xCoordinateGen: GenerateUniqueNumber; + public yCoordinateGen: GenerateUniqueNumber; + public override maxUniqueCount: number; - override init({ count, seed }: { count: number; seed: number }) { - const xCoordinateGen = new GenerateUniqueNumber({ + constructor(params?: GenerateUniquePointT) { + super(params); + + this.xCoordinateGen = new GenerateUniqueNumber({ minValue: this.params.minXValue, maxValue: this.params.maxXValue, precision: 10, }); - xCoordinateGen.init({ count, seed }); - const yCoordinateGen = new GenerateUniqueNumber({ + this.yCoordinateGen = new GenerateUniqueNumber({ minValue: this.params.minYValue, maxValue: this.params.maxYValue, precision: 10, }); - yCoordinateGen.init({ count, seed }); - this.state = { xCoordinateGen, yCoordinateGen }; + this.maxUniqueCount = Math.min(this.xCoordinateGen.maxUniqueCount, this.yCoordinateGen.maxUniqueCount); + } + + override init({ count, seed }: { count: number; seed: number }) { + // TODO: rewrite the unique generator to use fastCartesianProduct for generating unique points. + + this.xCoordinateGen.init({ count, seed }); + this.yCoordinateGen.init({ count, seed }); + + this.state = { xCoordinateGen: this.xCoordinateGen, yCoordinateGen: this.yCoordinateGen }; } generate() { @@ -3049,7 +3387,7 @@ export class GenerateLine extends AbstractGenerator<{ } } -export class GenerateUniqueLine extends AbstractGenerator<{ +export type GenerateUniqueLineT = { minAValue?: number; maxAValue?: number; minBValue?: number; @@ -3057,7 +3395,8 @@ export class GenerateUniqueLine extends AbstractGenerator<{ minCValue?: number; maxCValue?: number; isUnique?: boolean; -}> { +}; +export class GenerateUniqueLine extends AbstractGenerator { static override readonly entityKind: string = 'GenerateUniqueLine'; private state: { @@ -3065,31 +3404,50 @@ export class GenerateUniqueLine extends AbstractGenerator<{ bCoefficientGen: GenerateUniqueNumber; cCoefficientGen: GenerateUniqueNumber; } | undefined; - public override isUnique = true; - - override init({ count, seed }: { count: number; seed: number }) { - const aCoefficientGen = new GenerateUniqueNumber({ + public override isGeneratorUnique = true; + public override maxUniqueCount: number; + public aCoefficientGen: GenerateUniqueNumber; + public bCoefficientGen: GenerateUniqueNumber; + public cCoefficientGen: GenerateUniqueNumber; + + constructor(params?: GenerateUniqueLineT) { + super(params); + this.aCoefficientGen = new GenerateUniqueNumber({ minValue: this.params.minAValue, maxValue: this.params.maxAValue, precision: 10, }); - aCoefficientGen.init({ count, seed }); - const bCoefficientGen = new GenerateUniqueNumber({ + this.bCoefficientGen = new GenerateUniqueNumber({ minValue: this.params.minBValue, maxValue: this.params.maxBValue, precision: 10, }); - bCoefficientGen.init({ count, seed }); - const cCoefficientGen = new GenerateUniqueNumber({ + this.cCoefficientGen = new GenerateUniqueNumber({ minValue: this.params.minCValue, maxValue: this.params.maxCValue, precision: 10, }); - cCoefficientGen.init({ count, seed }); - this.state = { aCoefficientGen, bCoefficientGen, cCoefficientGen }; + this.maxUniqueCount = Math.min( + this.aCoefficientGen.maxUniqueCount, + this.bCoefficientGen.maxUniqueCount, + this.cCoefficientGen.maxUniqueCount, + ); + } + + override init({ count, seed }: { count: number; seed: number }) { + // TODO: rewrite the unique generator to use fastCartesianProduct for generating unique triplets(liens). + this.aCoefficientGen.init({ count, seed }); + this.bCoefficientGen.init({ count, seed }); + this.cCoefficientGen.init({ count, seed }); + + this.state = { + aCoefficientGen: this.aCoefficientGen, + bCoefficientGen: this.bCoefficientGen, + cCoefficientGen: this.cCoefficientGen, + }; } generate() { @@ -3117,3 +3475,748 @@ export class GenerateUniqueLine extends AbstractGenerator<{ } } } + +export class GenerateBitString extends AbstractGenerator<{ + dimensions?: number; + isUnique?: boolean; + arraySize?: number; +}> { + static override readonly entityKind: string = 'GenerateBitString'; + dimensions: number = 11; + + private state: { + intGen: GenerateInt; + } | undefined; + + override uniqueVersionOfGen = GenerateUniqueBitString; + + override init({ count, seed }: { count: number; seed: number }) { + super.init({ count, seed }); + + this.dimensions = this.params.dimensions ?? this.typeParams?.length ?? this.dimensions; + let intGen: GenerateInt; + + if (this.dimensions > 53) { + const maxValue = (BigInt(2) ** BigInt(this.dimensions)) - BigInt(1); + intGen = new GenerateInt({ minValue: BigInt(0), maxValue }); + } else { + // dimensions <= 53 + const maxValue = Math.pow(2, this.dimensions) - 1; + intGen = new GenerateInt({ minValue: 0, maxValue }); + } + + intGen.init({ count, seed }); + + this.state = { intGen }; + } + generate() { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + + const bitString = this.state.intGen.generate().toString(2); + return bitString.padStart(this.dimensions!, '0'); + } +} + +export class GenerateUniqueBitString + extends AbstractGenerator<{ dimensions?: number; isUnique?: boolean; arraySize?: number }> +{ + static override readonly entityKind: string = 'GenerateUniqueBitString'; + dimensions: number = 11; + + private state: { + intGen: GenerateUniqueInt; + } | undefined; + + public override isGeneratorUnique = true; + + override getMaxUniqueCount() { + if (this.maxUniqueCount >= 0) return this.maxUniqueCount; + + this.dimensions = this.params.dimensions ?? this.typeParams?.length ?? this.dimensions; + this.maxUniqueCount = Math.pow(2, this.dimensions); + // TODO revise: will work incorrect with this.dimensions > 53, due to node js number limitations + return this.maxUniqueCount; + } + + override init({ count, seed }: { count: number; seed: number }) { + this.dimensions = this.params.dimensions ?? this.typeParams?.length ?? this.dimensions; + let intGen: GenerateUniqueInt; + + if (this.dimensions > 53) { + const maxValue = (BigInt(2) ** BigInt(this.dimensions)) - BigInt(1); + intGen = new GenerateUniqueInt({ minValue: BigInt(0), maxValue }); + } else { + // dimensions <= 53 + const maxValue = Math.pow(2, this.dimensions) - 1; + intGen = new GenerateUniqueInt({ minValue: 0, maxValue }); + } + + intGen.init({ count, seed }); + + this.state = { intGen }; + } + generate() { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + + const bitString = this.state.intGen.generate()!.toString(2); + return bitString.padStart(this.dimensions!, '0'); + } +} + +export class GenerateInet extends AbstractGenerator< + { ipAddress?: 'ipv4' | 'ipv6'; includeCidr?: boolean; isUnique?: boolean; arraySize?: number } +> { + static override readonly entityKind: string = 'GenerateInet'; + ipAddress: 'ipv4' | 'ipv6' = 'ipv4'; + includeCidr: boolean = true; + + private state: { + rng: prand.RandomGenerator; + } | undefined; + + override uniqueVersionOfGen = GenerateUniqueInet; + + override init({ count, seed }: { count: number; seed: number }) { + super.init({ count, seed }); + this.ipAddress = this.params.ipAddress ?? this.ipAddress; + this.includeCidr = this.params.includeCidr ?? this.includeCidr; + + const rng = prand.xoroshiro128plus(seed); + + this.state = { rng }; + } + generate() { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + + let value: number; + const values: string[] = []; + let inetVal = ''; + if (this.ipAddress === 'ipv4') { + for (let octet = 0; octet < 4; octet++) { + [value, this.state.rng] = prand.uniformIntDistribution( + 0, + 255, + this.state.rng, + ); + values.push(value.toString()); + } + + inetVal += values.join('.'); + + if (this.includeCidr) { + [value, this.state.rng] = prand.uniformIntDistribution( + 0, + 32, + this.state.rng, + ); + inetVal += `/${value}`; + } + return inetVal; + } else { + // this.ipAddress === 'ipv6' + for (let hextet = 0; hextet < 8; hextet++) { + [value, this.state.rng] = prand.uniformIntDistribution( + 0, + 65535, + this.state.rng, + ); + values.push(value.toString(16)); + } + + inetVal += values.join(':'); + + if (this.includeCidr) { + [value, this.state.rng] = prand.uniformIntDistribution( + 0, + 128, + this.state.rng, + ); + inetVal += `/${value}`; + } + return inetVal; + } + } +} + +// TODO: add defaults to js doc +export type GenerateUniqueInetT = { + ipAddress?: 'ipv4' | 'ipv6'; + includeCidr?: boolean; + isUnique?: boolean; + arraySize?: number; +}; +export class GenerateUniqueInet extends AbstractGenerator { + static override readonly entityKind: string = 'GenerateUniqueInet'; + ipAddress: 'ipv4' | 'ipv6' = 'ipv4'; + includeCidr: boolean = true; + delimiter: '.' | ':' = '.'; + + private state: { + indexGen: GenerateUniqueInt; + octetSet: string[]; + ipv4PrefixSet: string[]; + hextetSet: string[]; + ipv6PrefixSet: string[]; + } | undefined; + + public override isGeneratorUnique = true; + public override maxUniqueCount: number; + + constructor(params?: GenerateUniqueInetT) { + super(params); + + this.ipAddress = this.params.ipAddress ?? this.ipAddress; + this.includeCidr = this.params.includeCidr ?? this.includeCidr; + + if (this.ipAddress === 'ipv4') { + this.maxUniqueCount = 256 ** 4; + if (this.includeCidr) { + this.maxUniqueCount *= 33; + } + } else { + // this.ipAddress === 'ipv6' + // TODO revise: this.maxUniqueCount can exceed Number.MAX_SAFE_INTEGER + this.maxUniqueCount = 65535 ** 8; + if (this.includeCidr) { + this.maxUniqueCount *= 129; + } + } + } + + override init({ count, seed }: { count: number; seed: number }) { + this.delimiter = this.ipAddress === 'ipv4' ? '.' : ':'; + + // maxValue - number of combinations for cartesian product: {0…255} × {0…255} × {0…255} × {0…255} × {0…32} + // where pattern for ipv4 ip is {0–255}.{0–255}.{0–255}.{0–255}[/{0–32}?] + // or number of combinations for cartesian product: {0…65535} × {0…65535} × {0…65535} × {0…65535} × {0…65535} × {0…65535} × {0…65535} × {0…65535} × {0…128} + // where pattern for ipv6 ip is {0-65535}:{0-65535}:{0-65535}:{0-65535}:{0-65535}:{0-65535}:{0-65535}:{0-65535}[/0-128?] + let minValue: number | bigint, maxValue: number | bigint; + + if (this.ipAddress === 'ipv4') { + minValue = 0; + maxValue = 256 ** 4; + if (this.includeCidr) { + maxValue = maxValue * 33; + } + } else { + // this.ipAddress === 'ipv6' + minValue = BigInt(0); + maxValue = BigInt(65535) ** BigInt(8); + if (this.includeCidr) { + maxValue = maxValue * BigInt(129); + } + } + + const indexGen = new GenerateUniqueInt({ minValue, maxValue }); + indexGen.init({ count, seed }); + + const octetSet = Array.from({ length: 256 }, (_, i) => i.toString()); + const ipv4PrefixSet = Array.from({ length: 33 }, (_, i) => i.toString()); + const hextetSet = Array.from({ length: 65536 }, (_, i) => i.toString(16)); + const ipv6PrefixSet = Array.from({ length: 129 }, (_, i) => i.toString()); + + this.state = { indexGen, octetSet, ipv4PrefixSet, hextetSet, ipv6PrefixSet }; + } + generate() { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + let inetVal = ''; + let tokens: string[] = []; + + if (this.ipAddress === 'ipv4') { + const sets = Array.from({ length: 4 }).fill(this.state.octetSet) as string[][]; + if (this.includeCidr) sets.push(this.state.ipv4PrefixSet); + + const index = this.state.indexGen.generate() as number; + tokens = fastCartesianProduct(sets, index) as string[]; + } else { + // this.ipAddress === 'ipv6' + const sets = Array.from({ length: 8 }).fill(this.state.hextetSet) as string[][]; + if (this.includeCidr) sets.push(this.state.ipv6PrefixSet); + + const idx = this.state.indexGen.generate() as bigint; + tokens = fastCartesianProductForBigint(sets, idx) as string[]; + } + + inetVal = this.includeCidr + ? tokens.slice(0, -1).join(this.delimiter) + `/${tokens.at(-1)}` + : tokens.join(this.delimiter); + + return inetVal; + } +} + +export class GenerateGeometry extends AbstractGenerator< + { + type?: 'point'; + srid?: 4326 | 3857; + decimalPlaces?: 1 | 2 | 3 | 4 | 5 | 6 | 7; + isUnique?: boolean; + arraySize?: number; + } +> { + static override readonly entityKind: string = 'GenerateGeometry'; + type = 'point' as const; + srid: 4326 | 3857 = 4326; + decimalPlaces: 1 | 2 | 3 | 4 | 5 | 6 | 7 = 6; + + private state: { + rng: prand.RandomGenerator; + minXValue: number; + maxXValue: number; + minYValue: number; + maxYValue: number; + denominator: number; + } | undefined; + + override uniqueVersionOfGen = GenerateUniqueGeometry; + + override init({ count, seed }: { count: number; seed: number }) { + super.init({ count, seed }); + + this.type = this.params.type ?? this.type; + this.srid = this.params.srid ?? this.srid; + this.decimalPlaces = this.params.decimalPlaces ?? this.decimalPlaces; + + let minXValue: number, maxXValue: number, minYValue: number, maxYValue: number, denominator: number; + if (this.type === 'point') { + if (this.srid === 4326) { + // Degrees (latitude / longitude) + denominator = 10 ** this.decimalPlaces; + minXValue = -180 * denominator; + maxXValue = 180 * denominator; + minYValue = -90 * denominator; + maxYValue = 90 * denominator; + } else { + // this.srid === 3857 + // Meters (projected X / Y) + denominator = 1; + minXValue = -20026376; + maxXValue = 20026376; + minYValue = -20048966; + maxYValue = 20048966; + } + } else { + throw new Error('geometry generator currently supports only the point type.'); + } + + const rng = prand.xoroshiro128plus(seed); + + this.state = { rng, minXValue, maxXValue, minYValue, maxYValue, denominator }; + } + generate() { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + + let x: number, y: number; + [x, this.state.rng] = prand.uniformIntDistribution( + this.state.minXValue, + this.state.maxXValue, + this.state.rng, + ); + x = x / this.state.denominator; + + [y, this.state.rng] = prand.uniformIntDistribution( + this.state.minYValue, + this.state.maxYValue, + this.state.rng, + ); + y = y / this.state.denominator; + + if (this.dataType === 'array') { + return [x, y]; + } + + // this.dataType === 'object' + return { x, y }; + } +} + +export type GenerateUniqueGeometryT = { + type?: 'point'; + srid?: 4326 | 3857; + decimalPlaces?: 1 | 2 | 3 | 4 | 5 | 6 | 7; + isUnique?: boolean; + arraySize?: number; +}; +export class GenerateUniqueGeometry extends AbstractGenerator { + static override readonly entityKind: string = 'GenerateUniqueGeometry'; + type = 'point' as const; + srid: 4326 | 3857 = 4326; + decimalPlaces: 1 | 2 | 3 | 4 | 5 | 6 | 7 = 6; + + private state: { + denominator: number; + indexGen: GenerateUniqueInt; + xySets: OrderedNumberRange[]; + } | undefined; + + public override isGeneratorUnique = true; + public override maxUniqueCount: number; + + constructor(params?: GenerateUniqueGeometryT) { + super(params); + + this.type = this.params.type ?? this.type; + this.srid = this.params.srid ?? this.srid; + this.decimalPlaces = this.params.decimalPlaces ?? this.decimalPlaces; + + let minXValue: number, maxXValue: number, minYValue: number, maxYValue: number, denominator: number; + if (this.type === 'point') { + if (this.srid === 4326) { + // Degrees (latitude / longitude) + denominator = 10 ** this.decimalPlaces; + minXValue = -180 * denominator; + maxXValue = 180 * denominator; + minYValue = -90 * denominator; + maxYValue = 90 * denominator; + } else { + // this.srid === 3857 + // Meters (projected X / Y) + denominator = 1; + minXValue = -20026376; + maxXValue = 20026376; + minYValue = -20048966; + maxYValue = 20048966; + } + } else { + // error should be triggered in init method + this.maxUniqueCount = -1; + return; + } + + // TODO revise: can lose accuracy due to exceeding Number.MAX_SAFE_INTEGER + this.maxUniqueCount = Number(BigInt(maxXValue - minXValue + 1) * BigInt(maxYValue - minYValue + 1)); + } + + override init({ count, seed }: { count: number; seed: number }) { + let minXValue: number, maxXValue: number, minYValue: number, maxYValue: number, denominator: number; + if (this.type === 'point') { + if (this.srid === 4326) { + // Degrees (latitude / longitude) + denominator = 10 ** this.decimalPlaces; + minXValue = -180 * denominator; + maxXValue = 180 * denominator; + minYValue = -90 * denominator; + maxYValue = 90 * denominator; + } else { + // this.srid === 3857 + // Meters (projected X / Y) + denominator = 1; + minXValue = -20026376; + maxXValue = 20026376; + minYValue = -20048966; + maxYValue = 20048966; + } + } else { + throw new Error('geometry generator currently supports only the point type.'); + } + + const xRange = new OrderedNumberRange(minXValue, maxXValue, 1); + const yRange = new OrderedNumberRange(minYValue, maxYValue, 1); + const xySets = [xRange, yRange]; + + const maxCombIdx = BigInt(maxXValue - minXValue + 1) * BigInt(maxYValue - minYValue + 1) - BigInt(1); + const indexGen = maxCombIdx <= 2 ** 53 + ? new GenerateUniqueInt({ minValue: 0, maxValue: Number(maxCombIdx) }) + : new GenerateUniqueInt({ minValue: BigInt(0), maxValue: maxCombIdx }); + indexGen.init({ count, seed }); + + this.state = { denominator, indexGen, xySets }; + } + generate() { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + + const idx = this.state.indexGen.generate(); + let x: number, y: number; + if (typeof idx === 'number') { + [x, y] = fastCartesianProduct(this.state.xySets, idx) as [number, number]; + } else { + // typeof idx === 'bigint' + [x, y] = fastCartesianProductForBigint(this.state.xySets, idx as bigint) as [number, number]; + } + + if (this.dataType === 'array') { + return [x, y]; + } + + // this.dataType === 'object' + return { x, y }; + } +} + +export class GenerateVector extends AbstractGenerator< + { + dimensions?: number; + minValue?: number; + maxValue?: number; + decimalPlaces?: number; + isUnique?: boolean; + arraySize?: number; + } +> { + static override readonly entityKind: string = 'GenerateVector'; + // property below should be overridden in init + dimensions: number = 3; + minValue: number = -1000; + maxValue: number = 1000; + decimalPlaces: number = 2; + + private state: { + vectorGen: GenerateArray; + } | undefined; + + override uniqueVersionOfGen = GenerateUniqueVector; + + override init({ count, seed }: { count: number; seed: number }) { + super.init({ count, seed }); + + this.dimensions = this.params.dimensions ?? this.typeParams.length ?? this.dimensions; + this.decimalPlaces = this.params.decimalPlaces ?? this.decimalPlaces; + this.minValue = this.params.minValue ?? this.minValue; + this.maxValue = this.params.maxValue ?? this.maxValue; + if (this.minValue > this.maxValue) { + throw new Error( + `minValue ( ${this.minValue} ) cannot be greater than maxValue ( ${this.maxValue} ).\n` + + `Did you forget to pass both minValue and maxValue to the generator's properties?`, + ); + } + + if (this.decimalPlaces < 0) { + throw new Error(`decimalPlaces value must be greater than or equal to zero.`); + } + + if ( + abs(BigInt(this.minValue) * BigInt(10 ** this.decimalPlaces)) > Number.MAX_SAFE_INTEGER + || abs(BigInt(this.maxValue) * BigInt(10 ** this.decimalPlaces)) > Number.MAX_SAFE_INTEGER + ) { + console.warn( + `vector generator: minValue or maxValue multiplied by 10^decimalPlaces exceeds Number.MAX_SAFE_INTEGER (2^53 -1).\n` + + `This overflow may result in less accurate values being generated.`, + ); + } + + // `numberGen` is initialized in the `init` method of `GenerateArray` + const numberGen = new GenerateNumber({ + minValue: this.minValue, + maxValue: this.maxValue as number, + precision: 10 ** this.decimalPlaces, + }); + const vectorGen = new GenerateArray({ baseColumnGen: numberGen, size: this.dimensions }); + vectorGen.init({ count, seed }); + + this.state = { vectorGen }; + } + generate() { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + + const vectorVal = this.state.vectorGen.generate(); + return vectorVal; + } +} + +export type GenerateUniqueVectorT = { + dimensions?: number; + minValue?: number; + maxValue?: number; + decimalPlaces?: number; + isUnique?: boolean; + arraySize?: number; +}; +export class GenerateUniqueVector extends AbstractGenerator { + static override readonly entityKind: string = 'GenerateUniqueVector'; + // property below should be overridden in init + dimensions: number = 3; + minValue: number = -1000; + maxValue: number = 1000; + decimalPlaces: number = 2; + + private state: { + denominator: number; + indexGen: GenerateUniqueInt; + vectorSets: OrderedNumberRange[]; + transformVector: (vector: number[], denominator: number) => void; + } | undefined; + + public override isGeneratorUnique = true; + + constructor(params?: GenerateUniqueVectorT) { + super(params); + + this.decimalPlaces = this.params.decimalPlaces ?? this.decimalPlaces; + this.minValue = this.params.minValue ?? this.minValue; + this.maxValue = this.params.maxValue ?? this.maxValue; + + if (this.minValue > this.maxValue) { + throw new Error( + `minValue ( ${this.minValue} ) cannot be greater than maxValue ( ${this.maxValue} ).\n` + + `Did you forget to pass both minValue and maxValue to the generator's properties?`, + ); + } + + if (this.decimalPlaces < 0) { + throw new Error(`decimalPlaces value must be greater than or equal to zero.`); + } + } + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount >= 0) return this.maxUniqueCount; + + this.dimensions = this.params.dimensions ?? this.typeParams.length ?? this.dimensions; + const denominator = 10 ** this.decimalPlaces; + + this.maxUniqueCount = (this.maxValue * denominator - this.minValue * denominator + 1) ** this.dimensions; + return this.maxUniqueCount; + } + + override init({ count, seed }: { count: number; seed: number }) { + this.dimensions = this.params.dimensions ?? this.typeParams.length ?? this.dimensions; + const denominator = 10 ** this.decimalPlaces; + + if ( + abs(BigInt(this.minValue) * BigInt(denominator)) > Number.MAX_SAFE_INTEGER + || abs(BigInt(this.maxValue) * BigInt(denominator)) > Number.MAX_SAFE_INTEGER + ) { + console.warn( + `vector generator: minValue or maxValue multiplied by 10^decimalPlaces exceeds Number.MAX_SAFE_INTEGER (2^53 -1).\n` + + `This overflow may result in less accurate values being generated.`, + ); + } + + const dimensionRange = new OrderedNumberRange(this.minValue * denominator, this.maxValue * denominator, 1); + const vectorSets = Array.from({ length: this.dimensions }).fill(dimensionRange) as OrderedNumberRange[]; + + const maxCombIdx = vectorSets.reduce((acc, curr) => acc * BigInt(curr.length), BigInt(1)) - BigInt(1); + const indexGen = maxCombIdx <= Number.MAX_SAFE_INTEGER + ? new GenerateUniqueInt({ minValue: 0, maxValue: Number(maxCombIdx) }) + : new GenerateUniqueInt({ minValue: BigInt(0), maxValue: maxCombIdx }); + indexGen.init({ count, seed }); + + const transformVector = denominator === 1 + ? (_vector: (number)[], _denominator: number) => {} + : (vector: number[], denominator: number) => { + for (let i = 0; i < vector.length; i++) { + vector[i] = vector[i]! / denominator; + } + return; + }; + + this.state = { indexGen, vectorSets, denominator, transformVector }; + } + generate() { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + + const idx = this.state.indexGen.generate(); + const vector = typeof idx === 'number' + ? fastCartesianProduct(this.state.vectorSets, idx) + // typeof idx === 'bigint' + : fastCartesianProductForBigint(this.state.vectorSets, idx as bigint); + + this.state.transformVector(vector as number[], this.state.denominator); + + return vector; + } +} + +export class GenerateCompositeUniqueKey extends AbstractGenerator { + static override readonly entityKind: string = 'GenerateCompositeUniqueKey'; + columnGenerators: { + columnName: string; + generator: AbstractGenerator; + maxUniqueCount?: number; + count?: number; + }[] = []; + + private isInitialized: boolean = false; + private state: { + sets: any[][]; + currI: number; + currValue: { [columnName: string]: any }; + } | undefined; + + addGenerator(columnName: string, generator: AbstractGenerator) { + this.columnGenerators.push({ columnName, generator }); + } + + override init({ count, seed }: { count: number; seed: number }) { + if (this.isInitialized) return; + + if (this.columnGenerators.length === 0) { + throw new Error(`composite unique key generator has no generators to work with.`); + } + let countPerGen = Math.ceil(count ** (1 / this.columnGenerators.length)); + // const gensMaxUniqueCount: { columnName: string; count: number; maxUniqueCount: number }[] = []; + for (const colGen of this.columnGenerators) { + colGen.maxUniqueCount = colGen.generator.getMaxUniqueCount(); + } + + this.columnGenerators.sort((a, b) => a.maxUniqueCount! - b.maxUniqueCount!); + let currCount = count; + let canGenerate: boolean = false; + for (const [idx, colGen] of this.columnGenerators.entries()) { + if (colGen.maxUniqueCount! < countPerGen) { + colGen.count = colGen.maxUniqueCount; + currCount /= colGen.count!; + countPerGen = Math.ceil(currCount ** (1 / (this.columnGenerators.length - idx - 1))); + canGenerate = false; + } else { + colGen.count = countPerGen; + canGenerate = true; + } + } + + if (!canGenerate) { + const colGensCountInfo = this.columnGenerators.map((colGen) => + `generator:${colGen.generator.getEntityKind()};count:${colGen.count}` + ).join('\n'); + throw new Error( + `There are no enough unique values in each generator to generate ${count} values; \n${colGensCountInfo}`, + ); + } + + const sets: any[][] = []; + for (const colGen of this.columnGenerators) { + colGen.generator.init({ count: colGen.count!, seed }); + const setI = []; + for (let i = 0; i < countPerGen; i++) { + setI.push(colGen.generator.generate({ i })); + } + sets.push(setI); + } + + this.state = { sets, currI: -1, currValue: {} }; + this.isInitialized = true; + } + + override generate({ i, columnName }: { i: number; columnName: string }) { + if (this.state === undefined) { + throw new Error('state is not defined.'); + } + + if (i > this.state.currI) { + const rowI = fastCartesianProduct(this.state.sets, i); + const newCurrValue: typeof this.state.currValue = {}; + for (const [idx, colGen] of this.columnGenerators.entries()) { + newCurrValue[colGen.columnName] = rowI[idx]; + } + this.state.currValue = newCurrValue; + this.state.currI = i; + } + + return this.state.currValue[columnName]; + } +} diff --git a/drizzle-seed/src/generators/apiVersion.ts b/drizzle-seed/src/generators/apiVersion.ts new file mode 100644 index 0000000000..2f7f35dd1a --- /dev/null +++ b/drizzle-seed/src/generators/apiVersion.ts @@ -0,0 +1 @@ +export const latestVersion = 3; diff --git a/drizzle-seed/src/generators/utils.ts b/drizzle-seed/src/generators/utils.ts new file mode 100644 index 0000000000..628962a125 --- /dev/null +++ b/drizzle-seed/src/generators/utils.ts @@ -0,0 +1,212 @@ +/* eslint-disable drizzle-internal/require-entity-kind */ + +export const fastCartesianProduct = < + SetsT extends ((number | string | boolean | object)[] | OrderedNumberRange | OrderedBigintRange)[], +>( + sets: SetsT, + index: number, +) => { + const resultList: SetsT[number][number][] = []; + let currSet: (typeof sets)[number]; + let element: (typeof sets)[number][number]; + + for (let i = sets.length - 1; i >= 0; i--) { + currSet = sets[i]!; + element = currSet[index % Number(currSet.length)]!; + resultList.unshift(element); + index = Math.floor(index / Number(currSet.length)); + } + + return resultList; +}; + +export const fastCartesianProductForBigint = < + SetsT extends ((number | string | boolean | object)[] | OrderedNumberRange | OrderedBigintRange)[], +>( + sets: SetsT, + index: bigint, +) => { + const resultList: SetsT[number][number][] = []; + let currSet: (typeof sets)[number]; + let element: (typeof sets)[number][number]; + + for (let i = sets.length - 1; i >= 0; i--) { + currSet = sets[i]!; + const remainder = index % BigInt(currSet.length); + + // TODO check how it works + // remainder = remainder <= Number.MAX_SAFE_INTEGER ? Number(remainder) : remainder; + element = currSet[remainder as any]!; + resultList.unshift(element); + index = index / BigInt(currSet.length); + } + + return resultList; +}; + +export class OrderedNumberRange { + // Tell TS “obj[n]” will be a T: + [index: number]: T; + public readonly length: number; + + constructor( + private readonly min: T, + private readonly max: T, + private readonly step: T, + ) { + this.length = Math.floor((this.max - this.min) / this.step) + 1; + + const handler: ProxyHandler> = { + get( + target: OrderedNumberRange, + prop: PropertyKey, + receiver: any, + ): T | unknown { + if (typeof prop === 'string' && /^\d+$/.test(prop)) { + const idx = Number(prop); + if (idx >= target.length) return undefined; + return (target.min + idx * target.step) as T; + } + // fallback to normal lookup (and TS knows this has the right signature) + return Reflect.get(target, prop, receiver); + }, + }; + + return new Proxy(this, handler); + } +} + +export class OrderedBigintRange { + // Tell TS “obj[n]” will be a T: + [index: number]: T; + public readonly length: bigint; + + constructor( + private readonly min: T, + private readonly max: T, + private readonly step: T, + ) { + this.length = BigInt((this.max - this.min) / this.step) + BigInt(1); + + const handler: ProxyHandler> = { + get( + target: OrderedBigintRange, + prop: PropertyKey, + receiver: any, + ): T | string | unknown { + if (typeof prop === 'string' && /^\d+$/.test(prop)) { + const idx = BigInt(prop); + if (idx >= target.length) return undefined; + return (target.min + idx * target.step).toString(); + } + // fallback to normal lookup (and TS knows this has the right signature) + return Reflect.get(target, prop, receiver); + }, + }; + + return new Proxy(this, handler); + } +} + +export const abs = (n: number | bigint) => (n < 0n) ? -n : n; + +const sumArray = (weights: number[]) => { + const scale = 1e10; + const scaledSum = weights.reduce((acc, currVal) => acc + Math.round(currVal * scale), 0); + return scaledSum / scale; +}; + +/** + * @param weights positive number in range [0, 1], that represents probabilities to choose index of array. Example: weights = [0.2, 0.8] + * @param [accuracy=100] approximate number of elements in returning array + * @returns Example: with weights = [0.2, 0.8] and accuracy = 10 returning array of indices gonna equal this: [0, 0, 1, 1, 1, 1, 1, 1, 1, 1] + */ +export const getWeightedIndices = (weights: number[], accuracy = 100) => { + const weightsSum = sumArray(weights); + if (weightsSum !== 1) { + throw new Error( + `The weights for the Weighted Random feature must add up to exactly 1. Please review your weights to ensure they total 1 before proceeding`, + ); + } + + // const accuracy = 100; + const weightedIndices: number[] = []; + for (const [index, weight] of weights.entries()) { + const ticketsNumb = Math.floor(weight * accuracy); + weightedIndices.push(...Array.from({ length: ticketsNumb }).fill(index)); + } + + return weightedIndices; +}; + +/** + * @param param0.template example: "#####" or "#####-####" + * @param param0.values example: ["3", "2", "h"] + * @param param0.defaultValue example: "0" + * @returns + */ +export const fillTemplate = ({ template, placeholdersCount, values, defaultValue = ' ' }: { + template: string; + placeholdersCount?: number; + values: string[]; + defaultValue?: string; +}) => { + if (placeholdersCount === undefined) { + const iterArray = [...template.matchAll(/#/g)]; + placeholdersCount = iterArray.length; + } + + const diff = placeholdersCount - values.length; + if (diff > 0) { + values.unshift(...Array.from({ length: diff }).fill(defaultValue)); + } + + let resultStr = '', valueIdx = 0; + for (const si of template) { + if (si === '#') { + resultStr += values[valueIdx]; + valueIdx += 1; + continue; + } + resultStr += si; + } + + return resultStr; +}; + +// is variable is object-like. +// Example: +// isObject({f: 4}) === true; +// isObject([1,2,3]) === false; +// isObject(new Set()) === false; +export const isObject = (value: any) => { + if (value !== null && value !== undefined && value.constructor === Object) return true; + return false; +}; + +// const main = () => { +// console.time('range'); +// const range = new OrderedBigintRange(BigInt(-10), BigInt(10), BigInt(1)); + +// console.log(range.length); +// for (let i = 0; i < Number(range.length) + 1; i++) { +// console.log(range[i]); +// } +// console.timeEnd('range'); + +// const list = Array.from({ length: 2e6 + 1 }, (_, idx) => idx); + +// console.time('list'); +// console.log(list.length); +// for (let i = 0; i < 2e6 + 1; i++) { +// list[i]; +// } +// console.timeEnd('list'); + +// // const n = 5; +// // for (let i = 0; i < n; i++) { +// // console.log(fastCartesianProduct([[1, 2], [1, 2]], i)); +// // } +// }; + +// main(); diff --git a/drizzle-seed/src/services/versioning/v2.ts b/drizzle-seed/src/generators/versioning/v2.ts similarity index 74% rename from drizzle-seed/src/services/versioning/v2.ts rename to drizzle-seed/src/generators/versioning/v2.ts index f4dbf32f4b..4347b8b599 100644 --- a/drizzle-seed/src/services/versioning/v2.ts +++ b/drizzle-seed/src/generators/versioning/v2.ts @@ -2,7 +2,7 @@ import prand from 'pure-rand'; import { AbstractGenerator } from '../Generators.ts'; -export class GenerateUniqueIntervalV2 extends AbstractGenerator<{ +export type GenerateUniqueIntervalV2T = { fields?: | 'year' | 'month' @@ -18,7 +18,8 @@ export class GenerateUniqueIntervalV2 extends AbstractGenerator<{ | 'hour to second' | 'minute to second'; isUnique?: boolean; -}> { +}; +export class GenerateUniqueIntervalV2 extends AbstractGenerator { static override readonly 'entityKind': string = 'GenerateUniqueInterval'; static override readonly version: number = 2; @@ -27,7 +28,9 @@ export class GenerateUniqueIntervalV2 extends AbstractGenerator<{ fieldsToGenerate: string[]; intervalSet: Set; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public override maxUniqueCount: number; + private config: { [key: string]: { from: number; to: number } } = { year: { from: 0, @@ -55,32 +58,38 @@ export class GenerateUniqueIntervalV2 extends AbstractGenerator<{ }, }; - override init({ count, seed }: { count: number; seed: number }) { + public fieldsToGenerate: string[]; + + constructor(params?: GenerateUniqueIntervalV2T) { + super(params); + const allFields = ['year', 'month', 'day', 'hour', 'minute', 'second']; - let fieldsToGenerate: string[] = allFields; + this.fieldsToGenerate = allFields; if (this.params.fields !== undefined && this.params.fields?.includes(' to ')) { const tokens = this.params.fields.split(' to '); const endIdx = allFields.indexOf(tokens[1]!); - fieldsToGenerate = allFields.slice(0, endIdx + 1); + this.fieldsToGenerate = allFields.slice(0, endIdx + 1); } else if (this.params.fields !== undefined) { const endIdx = allFields.indexOf(this.params.fields); - fieldsToGenerate = allFields.slice(0, endIdx + 1); + this.fieldsToGenerate = allFields.slice(0, endIdx + 1); } - let maxUniqueIntervalsNumber = 1; - for (const field of fieldsToGenerate) { + this.maxUniqueCount = 1; + for (const field of this.fieldsToGenerate) { const from = this.config[field]!.from, to = this.config[field]!.to; - maxUniqueIntervalsNumber *= from - to + 1; + this.maxUniqueCount *= from - to + 1; } + } - if (count > maxUniqueIntervalsNumber) { - throw new RangeError(`count exceeds max number of unique intervals(${maxUniqueIntervalsNumber})`); + override init({ count, seed }: { count: number; seed: number }) { + if (count > this.maxUniqueCount) { + throw new RangeError(`count exceeds max number of unique intervals(${this.maxUniqueCount})`); } const rng = prand.xoroshiro128plus(seed); const intervalSet = new Set(); - this.state = { rng, fieldsToGenerate, intervalSet }; + this.state = { rng, fieldsToGenerate: this.fieldsToGenerate, intervalSet }; } generate() { @@ -109,6 +118,7 @@ export class GenerateUniqueIntervalV2 extends AbstractGenerator<{ } } +// TODO need to rework this generator export class GenerateStringV2 extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; @@ -128,8 +138,8 @@ export class GenerateStringV2 extends AbstractGenerator<{ let minStringLength = 7; let maxStringLength = 20; - if (this.stringLength !== undefined) { - maxStringLength = this.stringLength; + if (this.typeParams?.length !== undefined) { + maxStringLength = this.typeParams?.length; if (maxStringLength === 1) minStringLength = maxStringLength; if (maxStringLength < minStringLength) minStringLength = 1; } @@ -164,6 +174,8 @@ export class GenerateStringV2 extends AbstractGenerator<{ ); currStr += stringChars[idx]; } + + if (this.dataType === 'object') return Buffer.from(currStr); return currStr; } } @@ -177,26 +189,34 @@ export class GenerateUniqueStringV2 extends AbstractGenerator<{ isUnique?: boole minStringLength: number; maxStringLength: number; } | undefined; - public override isUnique = true; + public override isGeneratorUnique = true; + public maxStringLength: number = 20; + public minStringLength: number = 7; + + override getMaxUniqueCount(): number { + if (this.maxUniqueCount >= 0) return this.maxUniqueCount; + + this.maxStringLength = this.typeParams?.length ?? this.maxStringLength; + this.maxUniqueCount = Number.parseInt('f'.repeat(this.maxStringLength), 16); + return this.maxUniqueCount; + } override init({ seed, count }: { seed: number; count: number }) { const rng = prand.xoroshiro128plus(seed); - let minStringLength = 7; - let maxStringLength = 20; // TODO: revise later - if (this.stringLength !== undefined) { - maxStringLength = this.stringLength; - if (maxStringLength === 1 || maxStringLength < minStringLength) minStringLength = maxStringLength; + this.maxStringLength = this.typeParams?.length ?? this.maxStringLength; + if (this.maxStringLength === 1 || this.maxStringLength < this.minStringLength) { + this.minStringLength = this.maxStringLength; } - if (maxStringLength < count.toString(16).length) { + if (count > this.getMaxUniqueCount()) { throw new Error( - `You can't generate ${count} unique strings, with a maximum string length of ${maxStringLength}.`, + `You can't generate ${count} unique strings, with a maximum string length of ${this.maxStringLength}.`, ); } - this.state = { rng, minStringLength, maxStringLength }; + this.state = { rng, minStringLength: this.minStringLength, maxStringLength: this.maxStringLength }; } generate({ i }: { i: number }) { @@ -227,6 +247,7 @@ export class GenerateUniqueStringV2 extends AbstractGenerator<{ isUnique?: boole currStr += stringChars[idx]; } + if (this.dataType === 'object') return Buffer.from(uniqueStr + currStr); return uniqueStr + currStr; } } diff --git a/drizzle-seed/src/generators/versioning/v3.ts b/drizzle-seed/src/generators/versioning/v3.ts new file mode 100644 index 0000000000..4b3ec45219 --- /dev/null +++ b/drizzle-seed/src/generators/versioning/v3.ts @@ -0,0 +1,24 @@ +import { AbstractGenerator } from '../Generators.ts'; + +/* eslint-disable drizzle-internal/require-entity-kind */ +export class GenerateHashFromStringV3 extends AbstractGenerator<{}> { + static override readonly entityKind: string = 'GenerateHashFromString'; + static override readonly version: number = 3; + + override init() {} + generate({ input }: { i: number; input: string }) { + let hash = 0n; + // p and m are prime numbers + const p = 53n; + const m = 28871271685163n; // < 2^53 + + let power = 1n; // will track p^i, where i is character index + + for (const ch of input) { + hash = (hash + (BigInt(ch.codePointAt(0) || 0) * power)) % m; + power = (power * p) % m; + } + + return Number(hash); + } +} diff --git a/drizzle-seed/src/index.ts b/drizzle-seed/src/index.ts index 348a6be2cb..669c5a9a1d 100644 --- a/drizzle-seed/src/index.ts +++ b/drizzle-seed/src/index.ts @@ -1,139 +1,98 @@ /* eslint-disable drizzle-internal/require-entity-kind */ -import { Column as DrizzleOrmColumn, getColumnTable, getTableName, is, sql } from 'drizzle-orm'; -import { createTableRelationsHelpers, extractTablesRelationalConfig, One, Relations } from 'drizzle-orm/_relations'; +import { is } from 'drizzle-orm'; +import type { Relations } from 'drizzle-orm/_relations'; -import type { MySqlColumn, MySqlSchema } from 'drizzle-orm/mysql-core'; -import { getTableConfig as getMysqlTableConfig, MySqlDatabase, MySqlTable } from 'drizzle-orm/mysql-core'; +import type { MySqlColumn, MySqlSchema, MySqlTable } from 'drizzle-orm/mysql-core'; +import { MySqlDatabase } from 'drizzle-orm/mysql-core'; -import { PgArray, PgColumn, PgSchema } from 'drizzle-orm/pg-core'; -import { getTableConfig as getPgTableConfig, PgDatabase, PgTable } from 'drizzle-orm/pg-core'; +import type { PgColumn, PgSchema, PgTable } from 'drizzle-orm/pg-core'; +import { PgDatabase } from 'drizzle-orm/pg-core'; -import type { SQLiteColumn } from 'drizzle-orm/sqlite-core'; -import { BaseSQLiteDatabase, getTableConfig as getSqliteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core'; +import type { SQLiteColumn, SQLiteTable } from 'drizzle-orm/sqlite-core'; +import { BaseSQLiteDatabase } from 'drizzle-orm/sqlite-core'; -import { generatorsFuncs, generatorsFuncsV2 } from './services/GeneratorFuncs.ts'; -import type { AbstractGenerator } from './services/Generators.ts'; -import { SeedService } from './services/SeedService.ts'; -import type { DrizzleStudioObjectType, DrizzleStudioRelationType } from './types/drizzleStudio.ts'; -import type { RefinementsType } from './types/seedService.ts'; -import type { Column, Relation, RelationWithReferences, Table } from './types/tables.ts'; +import type { MsSqlColumn, MsSqlSchema, MsSqlTable } from 'drizzle-orm/mssql-core'; +import { MsSqlDatabase } from 'drizzle-orm/mssql-core'; -type InferCallbackType< - DB extends - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, - SCHEMA extends { - [key: string]: PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations; - }, -> = DB extends PgDatabase ? SCHEMA extends { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations; - } ? { - // iterates through schema fields. example -> schema: {"tableName": PgTable} - [ - table in keyof SCHEMA as SCHEMA[table] extends PgTable ? table - : never - ]?: { - count?: number; - columns?: { - // iterates through table fields. example -> table: {"columnName": PgColumn} - [ - column in keyof SCHEMA[table] as SCHEMA[table][column] extends PgColumn ? column - : never - ]?: AbstractGenerator; - }; - with?: { - [ - refTable in keyof SCHEMA as SCHEMA[refTable] extends PgTable ? refTable - : never - ]?: - | number - | { weight: number; count: number | number[] }[]; - }; - }; - } - : {} - : DB extends MySqlDatabase ? SCHEMA extends { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations; - } ? { - // iterates through schema fields. example -> schema: {"tableName": MySqlTable} +import type { CockroachColumn, CockroachSchema, CockroachTable } from 'drizzle-orm/cockroach-core'; +import { CockroachDatabase } from 'drizzle-orm/cockroach-core'; + +import type { SingleStoreColumn, SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import { SingleStoreDatabase } from 'drizzle-orm/singlestore-core'; + +import { filterCockroachSchema, resetCockroach, seedCockroach } from './cockroach-core/index.ts'; +import { generatorsFuncs, generatorsFuncsV2, type generatorsFuncsV3 } from './generators/GeneratorFuncs.ts'; +import type { AbstractGenerator } from './generators/Generators.ts'; +import { filterMsSqlTables, resetMsSql, seedMsSql } from './mssql-core/index.ts'; +import { filterMysqlTables, resetMySql, seedMySql } from './mysql-core/index.ts'; +import { filterPgSchema, resetPostgres, seedPostgres } from './pg-core/index.ts'; +import { SeedService } from './SeedService.ts'; +import { filterSingleStoreTables, resetSingleStore, seedSingleStore } from './singlestore-core/index.ts'; +import { filterSqliteTables, resetSqlite, seedSqlite } from './sqlite-core/index.ts'; +import type { DrizzleStudioObjectType, DrizzleStudioRelationType } from './types/drizzleStudio.ts'; +import type { DbType, RefinementsType } from './types/seedService.ts'; +import type { Relation, Table } from './types/tables.ts'; + +type SchemaValuesType = + | PgTable + | PgSchema + | MySqlTable + | MySqlSchema + | SQLiteTable + | MsSqlTable + | MsSqlSchema + | CockroachTable + | CockroachSchema + | SingleStoreTable + | SingleStoreSchema + | Relations + | any; + +export type RefineTypes = SCHEMA extends { + [key: string]: SchemaValuesType; +} ? { + // iterates through schema fields. example -> schema: {"tableName": PgTable} + [ + fieldName in keyof SCHEMA as SCHEMA[fieldName] extends TableT ? fieldName + : never + ]?: { + count?: number; + columns?: { + // iterates through table fields. example -> table: {"columnName": PgColumn} [ - table in keyof SCHEMA as SCHEMA[table] extends MySqlTable ? table + column in keyof SCHEMA[fieldName] as SCHEMA[fieldName][column] extends ColumnT ? column : never - ]?: { - count?: number; - columns?: { - // iterates through table fields. example -> table: {"columnName": MySqlColumn} - [ - column in keyof SCHEMA[table] as SCHEMA[table][column] extends MySqlColumn ? column - : never - ]?: AbstractGenerator; - }; - with?: { - [ - refTable in keyof SCHEMA as SCHEMA[refTable] extends MySqlTable ? refTable - : never - ]?: - | number - | { weight: number; count: number | number[] }[]; - }; - }; - } - : {} - : DB extends BaseSQLiteDatabase ? SCHEMA extends { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations; - } ? { - // iterates through schema fields. example -> schema: {"tableName": SQLiteTable} + ]?: AbstractGenerator | false; + }; + with?: { [ - table in keyof SCHEMA as SCHEMA[table] extends SQLiteTable ? table + refTable in keyof SCHEMA as SCHEMA[refTable] extends TableT ? refTable : never - ]?: { - count?: number; - columns?: { - // iterates through table fields. example -> table: {"columnName": SQLiteColumn} - [ - column in keyof SCHEMA[table] as SCHEMA[table][column] extends SQLiteColumn ? column - : never - ]?: AbstractGenerator; - }; - with?: { - [ - refTable in keyof SCHEMA as SCHEMA[refTable] extends SQLiteTable ? refTable - : never - ]?: - | number - | { weight: number; count: number | number[] }[]; - }; - }; - } - : {} + ]?: + | number + | { weight: number; count: number | number[] }[]; + }; + }; + } + : {}; + +export type InferCallbackType< + DB extends DbType, + SCHEMA extends { + [key: string]: SchemaValuesType; + }, +> = DB extends PgDatabase ? RefineTypes + : DB extends MySqlDatabase ? RefineTypes + : DB extends BaseSQLiteDatabase ? RefineTypes + : DB extends MsSqlDatabase ? RefineTypes + : DB extends CockroachDatabase ? RefineTypes + : DB extends SingleStoreDatabase ? RefineTypes : {}; class SeedPromise< - DB extends - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, + DB extends DbType, SCHEMA extends { - [key: string]: PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations; + [key: string]: SchemaValuesType; }, VERSION extends string | undefined, > implements Promise { @@ -198,9 +157,11 @@ class SeedPromise< } } -type FunctionsVersioning = VERSION extends `1` ? typeof generatorsFuncs +export type FunctionsVersioning = VERSION extends `1` + ? typeof generatorsFuncs : VERSION extends `2` ? typeof generatorsFuncsV2 - : typeof generatorsFuncsV2; + : VERSION extends `3` ? typeof generatorsFuncsV3 + : typeof generatorsFuncsV3; export function getGeneratorsFunctions() { return generatorsFuncs; @@ -208,7 +169,7 @@ export function getGeneratorsFunctions() { export async function seedForDrizzleStudio( { sqlDialect, drizzleStudioObject, drizzleStudioRelations, schemasRefinements, options }: { - sqlDialect: 'postgresql' | 'mysql' | 'sqlite'; + sqlDialect: 'postgresql' | 'mysql' | 'sqlite' | 'mssql' | 'cockroach' | 'singlestore'; drizzleStudioObject: DrizzleStudioObjectType; drizzleStudioRelations: DrizzleStudioRelationType[]; schemasRefinements?: { [schemaName: string]: RefinementsType }; @@ -249,6 +210,7 @@ export async function seedForDrizzleStudio( name: tableName, columns, primaryKeys: drizzleStudioColumns.filter((col) => col.primaryKey === true).map((col) => col.name), + uniqueConstraints: [], // TODO change later }, ); } @@ -283,7 +245,12 @@ export async function seedForDrizzleStudio( undefined, undefined, { ...options, preserveData: true, insertDataInDb: false }, - ); + ) as { + tableName: string; + rows: { + [columnName: string]: string | number | boolean | undefined; + }[]; + }[]; generatedSchemas[schemaName] = { tables: generatedTables }; } @@ -337,39 +304,19 @@ export async function seedForDrizzleStudio( * ``` */ export function seed< - DB extends - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, + DB extends DbType, SCHEMA extends { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations - | any; + [key: string]: SchemaValuesType; }, - VERSION extends '2' | '1' | undefined, + VERSION extends '3' | '2' | '1' | undefined, >(db: DB, schema: SCHEMA, options?: { count?: number; seed?: number; version?: VERSION }) { return new SeedPromise(db, schema, options); } const seedFunc = async ( - db: - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, + db: DbType, schema: { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations - | any; + [key: string]: SchemaValuesType; }, options: { count?: number; seed?: number; version?: string } = {}, refinements?: RefinementsType, @@ -379,15 +326,21 @@ const seedFunc = async ( version = Number(options?.version); } - if (is(db, PgDatabase)) { + if (is(db, PgDatabase)) { await seedPostgres(db, schema, { ...options, version }, refinements); - } else if (is(db, MySqlDatabase)) { + } else if (is(db, MySqlDatabase)) { await seedMySql(db, schema, { ...options, version }, refinements); - } else if (is(db, BaseSQLiteDatabase)) { + } else if (is(db, BaseSQLiteDatabase)) { await seedSqlite(db, schema, { ...options, version }, refinements); + } else if (is(db, MsSqlDatabase)) { + await seedMsSql(db, schema, { ...options, version }, refinements); + } else if (is(db, CockroachDatabase)) { + await seedCockroach(db, schema, { ...options, version }, refinements); + } else if (is(db, SingleStoreDatabase)) { + await seedSingleStore(db, schema, { ...options, version }, refinements); } else { throw new Error( - 'The drizzle-seed package currently supports only PostgreSQL, MySQL, and SQLite databases. Please ensure your database is one of these supported types', + 'The drizzle-seed package currently supports only PostgreSQL, MySQL, SQLite, Ms Sql, CockroachDB and SingleStore databases. Please ensure your database is one of these supported types', ); } @@ -435,1062 +388,56 @@ const seedFunc = async ( * ``` */ export async function reset< - DB extends - | PgDatabase - | MySqlDatabase - | BaseSQLiteDatabase, + DB extends DbType, SCHEMA extends { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | any; + [key: string]: SchemaValuesType; }, >(db: DB, schema: SCHEMA) { - if (is(db, PgDatabase)) { + if (is(db, PgDatabase)) { const { pgTables } = filterPgSchema(schema); if (Object.entries(pgTables).length > 0) { await resetPostgres(db, pgTables); } - } else if (is(db, MySqlDatabase)) { + } else if (is(db, MySqlDatabase)) { const { mysqlTables } = filterMysqlTables(schema); if (Object.entries(mysqlTables).length > 0) { await resetMySql(db, mysqlTables); } - } else if (is(db, BaseSQLiteDatabase)) { + } else if (is(db, BaseSQLiteDatabase)) { const { sqliteTables } = filterSqliteTables(schema); if (Object.entries(sqliteTables).length > 0) { await resetSqlite(db, sqliteTables); } - } else { - throw new Error( - 'The drizzle-seed package currently supports only PostgreSQL, MySQL, and SQLite databases. Please ensure your database is one of these supported types', - ); - } -} - -// Postgres----------------------------------------------------------------------------------------------------------- -const resetPostgres = async ( - db: PgDatabase, - pgTables: { [key: string]: PgTable }, -) => { - const tablesToTruncate = Object.entries(pgTables).map(([_, table]) => { - const config = getPgTableConfig(table); - config.schema = config.schema === undefined ? 'public' : config.schema; - - return `"${config.schema}"."${config.name}"`; - }); - - await db.execute(sql.raw(`truncate ${tablesToTruncate.join(',')} cascade;`)); -}; - -const filterPgSchema = (schema: { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations - | any; -}) => { - const pgSchema = Object.fromEntries( - Object.entries(schema).filter((keyValue): keyValue is [string, PgTable | Relations] => - is(keyValue[1], PgTable) || is(keyValue[1], Relations) - ), - ); - - const pgTables = Object.fromEntries( - Object.entries(schema).filter((keyValue): keyValue is [string, PgTable] => is(keyValue[1], PgTable)), - ); - - return { pgSchema, pgTables }; -}; - -const seedPostgres = async ( - db: PgDatabase, - schema: { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations - | any; - }, - options: { count?: number; seed?: number; version?: number } = {}, - refinements?: RefinementsType, -) => { - const seedService = new SeedService(); - - const { pgSchema, pgTables } = filterPgSchema(schema); - - const { tables, relations } = getPostgresInfo(pgSchema, pgTables); - const generatedTablesGenerators = seedService.generatePossibleGenerators( - 'postgresql', - tables, - relations, - refinements, - options, - ); - - const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); - - const tablesValues = await seedService.generateTablesValues( - relations, - generatedTablesGenerators, - db, - pgTables, - { ...options, preserveCyclicTablesData }, - ); - - const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( - generatedTablesGenerators, - ); - const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; - - await seedService.generateTablesValues( - relations, - filteredTablesGenerators, - db, - pgTables, - { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, - ); -}; - -const getPostgresInfo = ( - pgSchema: { [key: string]: PgTable | Relations }, - pgTables: { [key: string]: PgTable }, -) => { - let tableConfig: ReturnType; - let dbToTsColumnNamesMap: { [key: string]: string }; - const dbToTsTableNamesMap: { [key: string]: string } = Object.fromEntries( - Object.entries(pgTables).map(([key, value]) => [getTableName(value), key]), - ); - - const tables: Table[] = []; - const relations: RelationWithReferences[] = []; - const dbToTsColumnNamesMapGlobal: { - [tableName: string]: { [dbColumnName: string]: string }; - } = {}; - const tableRelations: { [tableName: string]: RelationWithReferences[] } = {}; - - const getDbToTsColumnNamesMap = (table: PgTable) => { - let dbToTsColumnNamesMap: { [dbColName: string]: string } = {}; - - const tableName = getTableName(table); - if (Object.hasOwn(dbToTsColumnNamesMapGlobal, tableName)) { - dbToTsColumnNamesMap = dbToTsColumnNamesMapGlobal[tableName]!; - return dbToTsColumnNamesMap; - } - - const tableConfig = getPgTableConfig(table); - for (const [tsCol, col] of Object.entries(getColumnTable(tableConfig.columns[0]!))) { - // dbToTsColumnNamesMap[col.name] = tsCol; - if (is(col, DrizzleOrmColumn)) dbToTsColumnNamesMap[col.name] = tsCol; - } - dbToTsColumnNamesMapGlobal[tableName] = dbToTsColumnNamesMap; - - return dbToTsColumnNamesMap; - }; - - const transformFromDrizzleRelation = ( - schema: Record, - getDbToTsColumnNamesMap: (table: PgTable) => { - [dbColName: string]: string; - }, - tableRelations: { - [tableName: string]: RelationWithReferences[]; - }, - ) => { - const schemaConfig = extractTablesRelationalConfig(schema, createTableRelationsHelpers); - const relations: RelationWithReferences[] = []; - for (const table of Object.values(schemaConfig.tables)) { - if (table.relations === undefined) continue; - - for (const drizzleRel of Object.values(table.relations)) { - if (!is(drizzleRel, One)) continue; - - const tableConfig = getPgTableConfig(drizzleRel.sourceTable as PgTable); - const tableDbSchema = tableConfig.schema ?? 'public'; - const tableDbName = tableConfig.name; - const tableTsName = schemaConfig.tableNamesMap[`${tableDbSchema}.${tableDbName}`] ?? tableDbName; - - const dbToTsColumnNamesMap = getDbToTsColumnNamesMap(drizzleRel.sourceTable as PgTable); - const columns = drizzleRel.config?.fields.map((field) => dbToTsColumnNamesMap[field.name] as string) - ?? []; - - const refTableConfig = getPgTableConfig(drizzleRel.referencedTable as PgTable); - const refTableDbSchema = refTableConfig.schema ?? 'public'; - const refTableDbName = refTableConfig.name; - const refTableTsName = schemaConfig.tableNamesMap[`${refTableDbSchema}.${refTableDbName}`] - ?? refTableDbName; - - const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap(drizzleRel.referencedTable as PgTable); - const refColumns = drizzleRel.config?.references.map((ref) => - dbToTsColumnNamesMapForRefTable[ref.name] as string - ) - ?? []; - - if (tableRelations[refTableTsName] === undefined) { - tableRelations[refTableTsName] = []; - } - - const relation: RelationWithReferences = { - table: tableTsName, - columns, - refTable: refTableTsName, - refColumns, - refTableRels: tableRelations[refTableTsName], - type: 'one', - }; - - // do not add duplicate relation - if ( - tableRelations[tableTsName]?.some((rel) => - rel.table === relation.table - && rel.refTable === relation.refTable - ) - ) { - console.warn( - `You are providing a one-to-many relation between the '${relation.refTable}' and '${relation.table}' tables,\n` - + `while the '${relation.table}' table object already has foreign key constraint in the schema referencing '${relation.refTable}' table.\n` - + `In this case, the foreign key constraint will be used.\n`, - ); - continue; - } - - relations.push(relation); - tableRelations[tableTsName]!.push(relation); - } - } - return relations; - }; - - for (const table of Object.values(pgTables)) { - tableConfig = getPgTableConfig(table); - - dbToTsColumnNamesMap = getDbToTsColumnNamesMap(table); - - // might be empty list - const newRelations = tableConfig.foreignKeys.map((fk) => { - const table = dbToTsTableNamesMap[tableConfig.name] as string; - const refTable = dbToTsTableNamesMap[getTableName(fk.reference().foreignTable)] as string; - - const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap( - fk.reference().foreignTable, - ); - - if (tableRelations[refTable] === undefined) { - tableRelations[refTable] = []; - } - return { - table, - columns: fk - .reference() - .columns.map((col) => dbToTsColumnNamesMap[col.name] as string), - refTable, - refColumns: fk - .reference() - .foreignColumns.map( - (fCol) => dbToTsColumnNamesMapForRefTable[fCol.name] as string, - ), - refTableRels: tableRelations[refTable], - }; - }); - - relations.push( - ...newRelations, - ); + } else if (is(db, MsSqlDatabase)) { + const { mssqlTables } = filterMsSqlTables(schema); - if (tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] === undefined) { - tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] = []; + if (Object.entries(mssqlTables).length > 0) { + await resetMsSql(db, mssqlTables); } - tableRelations[dbToTsTableNamesMap[tableConfig.name] as string]!.push(...newRelations); - - const getAllBaseColumns = ( - baseColumn: PgArray['baseColumn'] & { baseColumn?: PgArray['baseColumn'] }, - ): Column['baseColumn'] => { - const baseColumnResult: Column['baseColumn'] = { - name: baseColumn.name, - columnType: baseColumn.getSQLType(), - typeParams: getTypeParams(baseColumn.getSQLType()), - dataType: baseColumn.dataType.split(' ')[0]!, - size: (baseColumn as PgArray).length, - hasDefault: baseColumn.hasDefault, - enumValues: baseColumn.enumValues, - default: baseColumn.default, - isUnique: baseColumn.isUnique, - notNull: baseColumn.notNull, - primary: baseColumn.primary, - baseColumn: baseColumn.baseColumn === undefined ? undefined : getAllBaseColumns(baseColumn.baseColumn), - }; - - return baseColumnResult; - }; - - const getTypeParams = (sqlType: string) => { - // get type params - const typeParams: Column['typeParams'] = {}; - - // handle dimensions - if (sqlType.includes('[')) { - const match = sqlType.match(/\[\w*]/g); - if (match) { - typeParams['dimensions'] = match.length; - } - } - - if ( - sqlType.startsWith('numeric') - || sqlType.startsWith('decimal') - || sqlType.startsWith('double precision') - || sqlType.startsWith('real') - ) { - const match = sqlType.match(/\((\d+), *(\d+)\)/); - if (match) { - typeParams['precision'] = Number(match[1]); - typeParams['scale'] = Number(match[2]); - } - } else if ( - sqlType.startsWith('varchar') - || sqlType.startsWith('bpchar') - || sqlType.startsWith('char') - || sqlType.startsWith('bit') - || sqlType.startsWith('time') - || sqlType.startsWith('timestamp') - || sqlType.startsWith('interval') - ) { - const match = sqlType.match(/\((\d+)\)/); - if (match) { - typeParams['length'] = Number(match[1]); - } - } - - return typeParams; - }; - - // console.log(tableConfig.columns); - tables.push({ - name: dbToTsTableNamesMap[tableConfig.name] as string, - columns: tableConfig.columns.map((column) => ({ - name: dbToTsColumnNamesMap[column.name] as string, - columnType: column.getSQLType(), - typeParams: getTypeParams(column.getSQLType()), - dataType: column.dataType.split(' ')[0]!, - size: (column as PgArray).length, - hasDefault: column.hasDefault, - default: column.default, - enumValues: column.enumValues, - isUnique: column.isUnique, - notNull: column.notNull, - primary: column.primary, - generatedIdentityType: column.generatedIdentity?.type, - baseColumn: ((column as PgArray).baseColumn === undefined) - ? undefined - : getAllBaseColumns((column as PgArray).baseColumn), - })), - primaryKeys: tableConfig.columns - .filter((column) => column.primary) - .map((column) => dbToTsColumnNamesMap[column.name] as string), - }); - } - - const transformedDrizzleRelations = transformFromDrizzleRelation(pgSchema, getDbToTsColumnNamesMap, tableRelations); - relations.push( - ...transformedDrizzleRelations, - ); + } else if (is(db, CockroachDatabase)) { + const { cockroachTables } = filterCockroachSchema(schema); - const isCyclicRelations = relations.map( - (relI) => { - // if (relations.some((relj) => relI.table === relj.refTable && relI.refTable === relj.table)) { - const tableRel = tableRelations[relI.table]!.find((relJ) => relJ.refTable === relI.refTable)!; - if (isRelationCyclic(relI)) { - tableRel['isCyclic'] = true; - return { ...relI, isCyclic: true }; - } - tableRel['isCyclic'] = false; - return { ...relI, isCyclic: false }; - }, - ); - - return { tables, relations: isCyclicRelations, tableRelations }; -}; - -const isRelationCyclic = ( - startRel: RelationWithReferences, -) => { - // self relation - if (startRel.table === startRel.refTable) return false; - - // DFS - const targetTable = startRel.table; - const queue = [startRel]; - let path: string[] = []; - while (queue.length !== 0) { - const currRel = queue.shift(); - - if (path.includes(currRel!.table)) { - const idx = path.indexOf(currRel!.table); - path = path.slice(0, idx); + if (Object.entries(cockroachTables).length > 0) { + await resetCockroach(db, cockroachTables); } - path.push(currRel!.table); - - for (const rel of currRel!.refTableRels) { - // self relation - if (rel.table === rel.refTable) continue; - - if (rel.refTable === targetTable) return true; + } else if (is(db, SingleStoreDatabase)) { + const { singleStoreTables } = filterSingleStoreTables(schema); - // found cycle, but not the one we are looking for - if (path.includes(rel.refTable)) continue; - queue.unshift(rel); + if (Object.entries(singleStoreTables).length > 0) { + await resetSingleStore(db, singleStoreTables); } - } - - return false; -}; - -// MySql----------------------------------------------------------------------------------------------------- -const resetMySql = async ( - db: MySqlDatabase, - schema: { [key: string]: MySqlTable }, -) => { - const tablesToTruncate = Object.entries(schema).map(([_tsTableName, table]) => { - const dbTableName = getTableName(table); - return dbTableName; - }); - - await db.execute(sql.raw('SET FOREIGN_KEY_CHECKS = 0;')); - - for (const tableName of tablesToTruncate) { - const sqlQuery = `truncate \`${tableName}\`;`; - await db.execute(sql.raw(sqlQuery)); - } - - await db.execute(sql.raw('SET FOREIGN_KEY_CHECKS = 1;')); -}; - -const filterMysqlTables = (schema: { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | any; -}) => { - const mysqlSchema = Object.fromEntries( - Object.entries(schema).filter( - (keyValue): keyValue is [string, MySqlTable | Relations] => - is(keyValue[1], MySqlTable) || is(keyValue[1], Relations), - ), - ); - - const mysqlTables = Object.fromEntries( - Object.entries(schema).filter( - (keyValue): keyValue is [string, MySqlTable] => is(keyValue[1], MySqlTable), - ), - ); - - return { mysqlSchema, mysqlTables }; -}; - -const seedMySql = async ( - db: MySqlDatabase, - schema: { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations - | any; - }, - options: { count?: number; seed?: number; version?: number } = {}, - refinements?: RefinementsType, -) => { - const { mysqlSchema, mysqlTables } = filterMysqlTables(schema); - const { tables, relations } = getMySqlInfo(mysqlSchema, mysqlTables); - - const seedService = new SeedService(); - - const generatedTablesGenerators = seedService.generatePossibleGenerators( - 'mysql', - tables, - relations, - refinements, - options, - ); - - const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); - - const tablesValues = await seedService.generateTablesValues( - relations, - generatedTablesGenerators, - db, - mysqlTables, - { ...options, preserveCyclicTablesData }, - ); - - const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( - generatedTablesGenerators, - ); - const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; - - await seedService.generateTablesValues( - relations, - filteredTablesGenerators, - db, - mysqlTables, - { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, - ); -}; - -const getMySqlInfo = ( - mysqlSchema: { [key: string]: MySqlTable | Relations }, - mysqlTables: { [key: string]: MySqlTable }, -) => { - let tableConfig: ReturnType; - let dbToTsColumnNamesMap: { [key: string]: string }; - - const dbToTsTableNamesMap: { [key: string]: string } = Object.fromEntries( - Object.entries(mysqlTables).map(([key, value]) => [getTableName(value), key]), - ); - - const tables: Table[] = []; - const relations: RelationWithReferences[] = []; - const dbToTsColumnNamesMapGlobal: { - [tableName: string]: { [dbColumnName: string]: string }; - } = {}; - const tableRelations: { [tableName: string]: RelationWithReferences[] } = {}; - - const getDbToTsColumnNamesMap = (table: MySqlTable) => { - let dbToTsColumnNamesMap: { [dbColName: string]: string } = {}; - - const tableName = getTableName(table); - if (Object.hasOwn(dbToTsColumnNamesMapGlobal, tableName)) { - dbToTsColumnNamesMap = dbToTsColumnNamesMapGlobal[tableName]!; - return dbToTsColumnNamesMap; - } - - const tableConfig = getMysqlTableConfig(table); - for (const [tsCol, col] of Object.entries(getColumnTable(tableConfig.columns[0]!))) { - if (is(col, DrizzleOrmColumn)) dbToTsColumnNamesMap[col.name] = tsCol; - } - dbToTsColumnNamesMapGlobal[tableName] = dbToTsColumnNamesMap; - - return dbToTsColumnNamesMap; - }; - - const transformFromDrizzleRelation = ( - schema: Record, - getDbToTsColumnNamesMap: (table: MySqlTable) => { - [dbColName: string]: string; - }, - tableRelations: { - [tableName: string]: RelationWithReferences[]; - }, - ) => { - const schemaConfig = extractTablesRelationalConfig(schema, createTableRelationsHelpers); - const relations: RelationWithReferences[] = []; - for (const table of Object.values(schemaConfig.tables)) { - if (table.relations === undefined) continue; - - for (const drizzleRel of Object.values(table.relations)) { - if (!is(drizzleRel, One)) continue; - - const tableConfig = getMysqlTableConfig(drizzleRel.sourceTable as MySqlTable); - const tableDbSchema = tableConfig.schema ?? 'public'; - const tableDbName = tableConfig.name; - const tableTsName = schemaConfig.tableNamesMap[`${tableDbSchema}.${tableDbName}`] ?? tableDbName; - - const dbToTsColumnNamesMap = getDbToTsColumnNamesMap(drizzleRel.sourceTable as MySqlTable); - const columns = drizzleRel.config?.fields.map((field) => dbToTsColumnNamesMap[field.name] as string) - ?? []; - - const refTableConfig = getMysqlTableConfig(drizzleRel.referencedTable as MySqlTable); - const refTableDbSchema = refTableConfig.schema ?? 'public'; - const refTableDbName = refTableConfig.name; - const refTableTsName = schemaConfig.tableNamesMap[`${refTableDbSchema}.${refTableDbName}`] - ?? refTableDbName; - - const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap(drizzleRel.referencedTable as MySqlTable); - const refColumns = drizzleRel.config?.references.map((ref) => - dbToTsColumnNamesMapForRefTable[ref.name] as string - ) - ?? []; - - if (tableRelations[refTableTsName] === undefined) { - tableRelations[refTableTsName] = []; - } - - const relation: RelationWithReferences = { - table: tableTsName, - columns, - refTable: refTableTsName, - refColumns, - refTableRels: tableRelations[refTableTsName], - type: 'one', - }; - - // do not add duplicate relation - if ( - tableRelations[tableTsName]?.some((rel) => - rel.table === relation.table - && rel.refTable === relation.refTable - ) - ) { - console.warn( - `You are providing a one-to-many relation between the '${relation.refTable}' and '${relation.table}' tables,\n` - + `while the '${relation.table}' table object already has foreign key constraint in the schema referencing '${relation.refTable}' table.\n` - + `In this case, the foreign key constraint will be used.\n`, - ); - continue; - } - - relations.push(relation); - tableRelations[tableTsName]!.push(relation); - } - } - return relations; - }; - - for (const table of Object.values(mysqlTables)) { - tableConfig = getMysqlTableConfig(table); - - dbToTsColumnNamesMap = getDbToTsColumnNamesMap(table); - - const newRelations = tableConfig.foreignKeys.map((fk) => { - const table = dbToTsTableNamesMap[tableConfig.name] as string; - const refTable = dbToTsTableNamesMap[getTableName(fk.reference().foreignTable)] as string; - const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap( - fk.reference().foreignTable, - ); - - if (tableRelations[refTable] === undefined) { - tableRelations[refTable] = []; - } - return { - table, - columns: fk - .reference() - .columns.map((col) => dbToTsColumnNamesMap[col.name] as string), - refTable, - refColumns: fk - .reference() - .foreignColumns.map( - (fCol) => dbToTsColumnNamesMapForRefTable[fCol.name] as string, - ), - refTableRels: tableRelations[refTable], - }; - }); - relations.push( - ...newRelations, - ); - - if (tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] === undefined) { - tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] = []; - } - tableRelations[dbToTsTableNamesMap[tableConfig.name] as string]!.push(...newRelations); - - const getTypeParams = (sqlType: string) => { - // get type params and set only type - const typeParams: Column['typeParams'] = {}; - - if ( - sqlType.startsWith('decimal') - || sqlType.startsWith('real') - || sqlType.startsWith('double') - || sqlType.startsWith('float') - ) { - const match = sqlType.match(/\((\d+), *(\d+)\)/); - if (match) { - typeParams['precision'] = Number(match[1]); - typeParams['scale'] = Number(match[2]); - } - } else if ( - sqlType.startsWith('char') - || sqlType.startsWith('varchar') - || sqlType.startsWith('binary') - || sqlType.startsWith('varbinary') - ) { - const match = sqlType.match(/\((\d+)\)/); - if (match) { - typeParams['length'] = Number(match[1]); - } - } - - return typeParams; - }; - - tables.push({ - name: dbToTsTableNamesMap[tableConfig.name] as string, - columns: tableConfig.columns.map((column) => ({ - name: dbToTsColumnNamesMap[column.name] as string, - columnType: column.getSQLType(), - typeParams: getTypeParams(column.getSQLType()), - dataType: column.dataType.split(' ')[0]!, - hasDefault: column.hasDefault, - default: column.default, - enumValues: column.enumValues, - isUnique: column.isUnique, - notNull: column.notNull, - primary: column.primary, - })), - primaryKeys: tableConfig.columns - .filter((column) => column.primary) - .map((column) => dbToTsColumnNamesMap[column.name] as string), - }); - } - - const transformedDrizzleRelations = transformFromDrizzleRelation( - mysqlSchema, - getDbToTsColumnNamesMap, - tableRelations, - ); - relations.push( - ...transformedDrizzleRelations, - ); - - const isCyclicRelations = relations.map( - (relI) => { - const tableRel = tableRelations[relI.table]!.find((relJ) => relJ.refTable === relI.refTable)!; - if (isRelationCyclic(relI)) { - tableRel['isCyclic'] = true; - return { ...relI, isCyclic: true }; - } - tableRel['isCyclic'] = false; - return { ...relI, isCyclic: false }; - }, - ); - - return { tables, relations: isCyclicRelations, tableRelations }; -}; - -// Sqlite------------------------------------------------------------------------------------------------------------------------ -const resetSqlite = async ( - db: BaseSQLiteDatabase, - schema: { [key: string]: SQLiteTable }, -) => { - const tablesToTruncate = Object.entries(schema).map(([_tsTableName, table]) => { - const dbTableName = getTableName(table); - return dbTableName; - }); - - await db.run(sql.raw('PRAGMA foreign_keys = OFF')); - - for (const tableName of tablesToTruncate) { - const sqlQuery = `delete from \`${tableName}\`;`; - await db.run(sql.raw(sqlQuery)); - } - - await db.run(sql.raw('PRAGMA foreign_keys = ON')); -}; - -const filterSqliteTables = (schema: { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | any; -}) => { - const sqliteSchema = Object.fromEntries( - Object.entries(schema).filter( - (keyValue): keyValue is [string, SQLiteTable | Relations] => - is(keyValue[1], SQLiteTable) || is(keyValue[1], Relations), - ), - ); - - const sqliteTables = Object.fromEntries( - Object.entries(schema).filter( - (keyValue): keyValue is [string, SQLiteTable] => is(keyValue[1], SQLiteTable), - ), - ); - - return { sqliteSchema, sqliteTables }; -}; - -const seedSqlite = async ( - db: BaseSQLiteDatabase, - schema: { - [key: string]: - | PgTable - | PgSchema - | MySqlTable - | MySqlSchema - | SQLiteTable - | Relations - | any; - }, - options: { count?: number; seed?: number; version?: number } = {}, - refinements?: RefinementsType, -) => { - const { sqliteSchema, sqliteTables } = filterSqliteTables(schema); - - const { tables, relations } = getSqliteInfo(sqliteSchema, sqliteTables); - - const seedService = new SeedService(); - - const generatedTablesGenerators = seedService.generatePossibleGenerators( - 'sqlite', - tables, - relations, - refinements, - options, - ); - - const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); - - const tablesValues = await seedService.generateTablesValues( - relations, - generatedTablesGenerators, - db, - sqliteTables, - { ...options, preserveCyclicTablesData }, - ); - - const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( - generatedTablesGenerators, - ); - const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; - - await seedService.generateTablesValues( - relations, - filteredTablesGenerators, - db, - sqliteTables, - { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, - ); -}; - -const getSqliteInfo = ( - sqliteSchema: { [key: string]: SQLiteTable | Relations }, - sqliteTables: { [key: string]: SQLiteTable }, -) => { - let tableConfig: ReturnType; - let dbToTsColumnNamesMap: { [key: string]: string }; - const dbToTsTableNamesMap: { [key: string]: string } = Object.fromEntries( - Object.entries(sqliteTables).map(([key, value]) => [getTableName(value), key]), - ); - - const tables: Table[] = []; - const relations: RelationWithReferences[] = []; - const dbToTsColumnNamesMapGlobal: { - [tableName: string]: { [dbColumnName: string]: string }; - } = {}; - const tableRelations: { [tableName: string]: RelationWithReferences[] } = {}; - - const getDbToTsColumnNamesMap = (table: SQLiteTable) => { - let dbToTsColumnNamesMap: { [dbColName: string]: string } = {}; - - const tableName = getTableName(table); - if (Object.hasOwn(dbToTsColumnNamesMapGlobal, tableName)) { - dbToTsColumnNamesMap = dbToTsColumnNamesMapGlobal[tableName]!; - return dbToTsColumnNamesMap; - } - - const tableConfig = getSqliteTableConfig(table); - for (const [tsCol, col] of Object.entries(getColumnTable(tableConfig.columns[0]!))) { - if (is(col, DrizzleOrmColumn)) dbToTsColumnNamesMap[col.name] = tsCol; - } - dbToTsColumnNamesMapGlobal[tableName] = dbToTsColumnNamesMap; - - return dbToTsColumnNamesMap; - }; - - const transformFromDrizzleRelation = ( - schema: Record, - getDbToTsColumnNamesMap: (table: SQLiteTable) => { - [dbColName: string]: string; - }, - tableRelations: { - [tableName: string]: RelationWithReferences[]; - }, - ) => { - const schemaConfig = extractTablesRelationalConfig(schema, createTableRelationsHelpers); - const relations: RelationWithReferences[] = []; - for (const table of Object.values(schemaConfig.tables)) { - if (table.relations === undefined) continue; - - for (const drizzleRel of Object.values(table.relations)) { - if (!is(drizzleRel, One)) continue; - - const tableConfig = getSqliteTableConfig(drizzleRel.sourceTable as SQLiteTable); - const tableDbName = tableConfig.name; - // TODO: tableNamesMap: have {public.customer: 'customer'} structure in sqlite - const tableTsName = schemaConfig.tableNamesMap[`public.${tableDbName}`] ?? tableDbName; - - const dbToTsColumnNamesMap = getDbToTsColumnNamesMap(drizzleRel.sourceTable as SQLiteTable); - const columns = drizzleRel.config?.fields.map((field) => dbToTsColumnNamesMap[field.name] as string) - ?? []; - - const refTableConfig = getSqliteTableConfig(drizzleRel.referencedTable as SQLiteTable); - const refTableDbName = refTableConfig.name; - const refTableTsName = schemaConfig.tableNamesMap[`public.${refTableDbName}`] - ?? refTableDbName; - - const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap(drizzleRel.referencedTable as SQLiteTable); - const refColumns = drizzleRel.config?.references.map((ref) => - dbToTsColumnNamesMapForRefTable[ref.name] as string - ) - ?? []; - - if (tableRelations[refTableTsName] === undefined) { - tableRelations[refTableTsName] = []; - } - - const relation: RelationWithReferences = { - table: tableTsName, - columns, - refTable: refTableTsName, - refColumns, - refTableRels: tableRelations[refTableTsName], - type: 'one', - }; - - // do not add duplicate relation - if ( - tableRelations[tableTsName]?.some((rel) => - rel.table === relation.table - && rel.refTable === relation.refTable - ) - ) { - console.warn( - `You are providing a one-to-many relation between the '${relation.refTable}' and '${relation.table}' tables,\n` - + `while the '${relation.table}' table object already has foreign key constraint in the schema referencing '${relation.refTable}' table.\n` - + `In this case, the foreign key constraint will be used.\n`, - ); - continue; - } - - relations.push(relation); - tableRelations[tableTsName]!.push(relation); - } - } - return relations; - }; - - for (const table of Object.values(sqliteTables)) { - tableConfig = getSqliteTableConfig(table); - - dbToTsColumnNamesMap = getDbToTsColumnNamesMap(table); - - const newRelations = tableConfig.foreignKeys.map((fk) => { - const table = dbToTsTableNamesMap[tableConfig.name] as string; - const refTable = dbToTsTableNamesMap[getTableName(fk.reference().foreignTable)] as string; - const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap( - fk.reference().foreignTable, - ); - - if (tableRelations[refTable] === undefined) { - tableRelations[refTable] = []; - } - return { - table, - columns: fk - .reference() - .columns.map((col) => dbToTsColumnNamesMap[col.name] as string), - refTable, - refColumns: fk - .reference() - .foreignColumns.map( - (fCol) => dbToTsColumnNamesMapForRefTable[fCol.name] as string, - ), - refTableRels: tableRelations[refTable], - }; - }); - - relations.push( - ...newRelations, + } else { + throw new Error( + 'The drizzle-seed package currently supports only PostgreSQL, MySQL, SQLite, Ms Sql, CockroachDB and SingleStore databases. Please ensure your database is one of these supported types', ); - - if (tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] === undefined) { - tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] = []; - } - tableRelations[dbToTsTableNamesMap[tableConfig.name] as string]!.push(...newRelations); - - const getTypeParams = (sqlType: string) => { - // get type params and set only type - const typeParams: Column['typeParams'] = {}; - - if ( - sqlType.startsWith('decimal') - ) { - const match = sqlType.match(/\((\d+), *(\d+)\)/); - if (match) { - typeParams['precision'] = Number(match[1]); - typeParams['scale'] = Number(match[2]); - } - } else if ( - sqlType.startsWith('char') - || sqlType.startsWith('varchar') - || sqlType.startsWith('text') - ) { - const match = sqlType.match(/\((\d+)\)/); - if (match) { - typeParams['length'] = Number(match[1]); - } - } - - return typeParams; - }; - - tables.push({ - name: dbToTsTableNamesMap[tableConfig.name] as string, - columns: tableConfig.columns.map((column) => ({ - name: dbToTsColumnNamesMap[column.name] as string, - columnType: column.getSQLType(), - typeParams: getTypeParams(column.getSQLType()), - dataType: column.dataType.split(' ')[0]!, - hasDefault: column.hasDefault, - default: column.default, - enumValues: column.enumValues, - isUnique: column.isUnique, - notNull: column.notNull, - primary: column.primary, - })), - primaryKeys: tableConfig.columns - .filter((column) => column.primary) - .map((column) => dbToTsColumnNamesMap[column.name] as string), - }); } - - const transformedDrizzleRelations = transformFromDrizzleRelation( - sqliteSchema, - getDbToTsColumnNamesMap, - tableRelations, - ); - relations.push( - ...transformedDrizzleRelations, - ); - - const isCyclicRelations = relations.map( - (relI) => { - const tableRel = tableRelations[relI.table]!.find((relJ) => relJ.refTable === relI.refTable)!; - if (isRelationCyclic(relI)) { - tableRel['isCyclic'] = true; - return { ...relI, isCyclic: true }; - } - tableRel['isCyclic'] = false; - return { ...relI, isCyclic: false }; - }, - ); - - return { tables, relations: isCyclicRelations, tableRelations }; -}; +} export { default as cities } from './datasets/cityNames.ts'; export { default as countries } from './datasets/countries.ts'; export { default as firstNames } from './datasets/firstNames.ts'; export { default as lastNames } from './datasets/lastNames.ts'; -export { SeedService } from './services/SeedService.ts'; +export { SeedService } from './SeedService.ts'; diff --git a/drizzle-seed/src/mssql-core/index.ts b/drizzle-seed/src/mssql-core/index.ts new file mode 100644 index 0000000000..cdb5573f87 --- /dev/null +++ b/drizzle-seed/src/mssql-core/index.ts @@ -0,0 +1,257 @@ +import { is, sql } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { MsSqlDatabase, MsSqlInt, MsSqlSchema } from 'drizzle-orm/mssql-core'; +import { getTableConfig, MsSqlTable } from 'drizzle-orm/mssql-core'; +import { getSchemaInfo } from '../common.ts'; +import { SeedService } from '../SeedService.ts'; +import type { RefinementsType } from '../types/seedService.ts'; +import type { Column, TableConfigT } from '../types/tables.ts'; + +type TableRelatedFkConstraintsT = { + [fkName: string]: { + fkName: string; + parentSchema: string; + parentTable: string; + referencedSchema: string; + referencedTable: string; + parentColumns: string[]; + referencedColumns: string[]; + onDeleteAction: string; + onUpdateAction: string; + relation: 'inbound' | 'outbound'; + }; +}; + +type AllFkConstraintsT = { + [tableIdentifier: string]: TableRelatedFkConstraintsT; +}; + +// MySql----------------------------------------------------------------------------------------------------- +export const resetMsSql = async ( + db: MsSqlDatabase, + schema: { [key: string]: MsSqlTable }, +) => { + const tablesToTruncate = Object.entries(schema).map(([_tsTableName, table]) => { + const tableConfig = getTableConfig(table); + return { dbName: tableConfig.name, dbSchema: tableConfig.schema ?? 'dbo' }; + }); + + const allFkConstraints: AllFkConstraintsT = {}; + + for (const table of tablesToTruncate) { + const gatherTableRelatedFkConstraints = ` + DECLARE @objectId INT + = OBJECT_ID( QUOTENAME('${table.dbSchema}') + '.' + QUOTENAME('${table.dbName}') ); + + SELECT + fk.name AS fkName, + OBJECT_SCHEMA_NAME(fk.parent_object_id) AS parentSchema, + OBJECT_NAME(fk.parent_object_id) AS parentTable, + OBJECT_SCHEMA_NAME(fk.referenced_object_id) AS referencedSchema, + OBJECT_NAME(fk.referenced_object_id) AS referencedTable, + -- fkc.constraint_column_id AS Column_Ordinal, + pc.name AS parentColumn, + rc.name AS referencedColumn, + fk.delete_referential_action_desc AS onDeleteAction, + fk.update_referential_action_desc AS onUpdateAction, + CASE + WHEN fk.parent_object_id = @objectId THEN 'outbound' -- your table → another table + ELSE 'inbound' -- another table → your table + END AS relation + FROM sys.foreign_keys AS fk + JOIN sys.foreign_key_columns fkc + ON fk.object_id = fkc.constraint_object_id + JOIN sys.columns pc + ON fkc.parent_object_id = pc.object_id + AND fkc.parent_column_id = pc.column_id + JOIN sys.columns rc + ON fkc.referenced_object_id = rc.object_id + AND fkc.referenced_column_id = rc.column_id + WHERE fk.parent_object_id = @objectId + OR fk.referenced_object_id = @objectId + ORDER BY relation, fkName; + `; + const rawRes = await db.execute(sql.raw(gatherTableRelatedFkConstraints)); + const res: { + fkName: string; + parentSchema: string; + parentTable: string; + referencedSchema: string; + referencedTable: string; + parentColumn: string; + referencedColumn: string; + onDeleteAction: string; + onUpdateAction: string; + relation: 'inbound' | 'outbound'; + }[] = rawRes.recordset; + + const tableRelatedFkConstraints: TableRelatedFkConstraintsT = {}; + for (const fkInfo of res) { + if (tableRelatedFkConstraints[fkInfo.fkName] === undefined) { + const { parentColumn: _, referencedColumn: __, ...filteredFkInfo } = fkInfo; + tableRelatedFkConstraints[fkInfo.fkName] = { + ...filteredFkInfo, + parentColumns: res.filter(({ fkName }) => fkName === fkInfo.fkName).map(({ parentColumn }) => parentColumn), + referencedColumns: res.filter(({ fkName }) => fkName === fkInfo.fkName).map(({ referencedColumn }) => + referencedColumn + ), + }; + } + } + + allFkConstraints[`${table.dbSchema}.${table.dbName}`] = tableRelatedFkConstraints; + + // drop all table related fk constraints + for (const fkInfo of Object.values(tableRelatedFkConstraints)) { + const dropFkConstraints = + `ALTER TABLE [${fkInfo.parentSchema}].[${fkInfo.parentTable}] DROP CONSTRAINT [${fkInfo.fkName}];`; + await db.execute(sql.raw(dropFkConstraints)); + } + + // truncating + const truncateTable = `truncate table [${table.dbSchema}].[${table.dbName}];`; + await db.execute(sql.raw(truncateTable)); + } + + // add all table related fk constraints + for (const table of tablesToTruncate) { + const tableRelatedFkConstraints = allFkConstraints[`${table.dbSchema}.${table.dbName}`]!; + + for (const fkInfo of Object.values(tableRelatedFkConstraints)) { + const addFkConstraints = ` + ALTER TABLE [${fkInfo.parentSchema}].[${fkInfo.parentTable}] + ADD CONSTRAINT [${fkInfo.fkName}] + FOREIGN KEY(${fkInfo.parentColumns.map((colName) => `[${colName}]`).join(',')}) + REFERENCES [${fkInfo.referencedSchema}].[${fkInfo.referencedTable}] (${ + fkInfo.referencedColumns.map((colName) => `[${colName}]`).join(',') + }) + ON DELETE ${fkInfo.onDeleteAction.split('_').join(' ')} + ON UPDATE ${fkInfo.onUpdateAction.split('_').join(' ')}; + `; + await db.execute(sql.raw(addFkConstraints)); + } + } +}; + +export const filterMsSqlTables = (schema: { + [key: string]: + | MsSqlTable + | MsSqlSchema + | Relations + | any; +}) => { + const mssqlSchema = Object.fromEntries( + Object.entries(schema).filter( + (keyValue): keyValue is [string, MsSqlTable | Relations] => + is(keyValue[1], MsSqlTable) || is(keyValue[1], Relations), + ), + ); + + const mssqlTables = Object.fromEntries( + Object.entries(schema).filter( + (keyValue): keyValue is [string, MsSqlTable] => is(keyValue[1], MsSqlTable), + ), + ); + + return { mssqlSchema, mssqlTables }; +}; + +export const seedMsSql = async ( + db: MsSqlDatabase, + schema: { + [key: string]: + | MsSqlTable + | MsSqlSchema + | Relations + | any; + }, + options: { count?: number; seed?: number; version?: number } = {}, + refinements?: RefinementsType, +) => { + const { mssqlSchema, mssqlTables } = filterMsSqlTables(schema); + const { tables, relations } = getSchemaInfo(mssqlSchema, mssqlTables, mapMsSqlColumns); + + const seedService = new SeedService(); + + const generatedTablesGenerators = seedService.generatePossibleGenerators( + 'mssql', + tables, + relations, + refinements, + options, + ); + + const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); + + const tablesValues = await seedService.generateTablesValues( + relations, + generatedTablesGenerators, + db, + mssqlTables, + { ...options, preserveCyclicTablesData }, + ); + + const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( + generatedTablesGenerators, + ); + const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; + + await seedService.generateTablesValues( + relations, + filteredTablesGenerators, + db, + mssqlTables, + { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, + ); +}; + +const mapMsSqlColumns = ( + tableConfig: TableConfigT, + dbToTsColumnNamesMap: { [key: string]: string }, +): Column[] => { + // TODO: rewrite + const getTypeParams = (sqlType: string) => { + // get type params and set only type + const typeParams: Column['typeParams'] = {}; + + if ( + sqlType.startsWith('decimal') + || sqlType.startsWith('real') + || sqlType.startsWith('float') + ) { + const match = sqlType.match(/\((\d+), *(\d+)\)/); + if (match) { + typeParams['precision'] = Number(match[1]); + typeParams['scale'] = Number(match[2]); + } + } else if ( + sqlType.startsWith('char') + || sqlType.startsWith('varchar') + || sqlType.startsWith('binary') + || sqlType.startsWith('varbinary') + ) { + const match = sqlType.match(/\((\d+)\)/); + if (match) { + typeParams['length'] = Number(match[1]); + } + } + + return typeParams; + }; + + const mappedColumns = tableConfig.columns.map((column) => ({ + name: dbToTsColumnNamesMap[column.name] as string, + columnType: column.getSQLType(), + typeParams: getTypeParams(column.getSQLType()), + dataType: column.dataType.split(' ')[0]!, + hasDefault: column.hasDefault, + default: column.default, + enumValues: column.enumValues, + isUnique: column.isUnique, + notNull: column.notNull, + primary: column.primary, + identity: (column as MsSqlInt).identity ? true : false, + })); + + return mappedColumns; +}; diff --git a/drizzle-seed/src/mssql-core/selectGensForColumn.ts b/drizzle-seed/src/mssql-core/selectGensForColumn.ts new file mode 100644 index 0000000000..02296393d0 --- /dev/null +++ b/drizzle-seed/src/mssql-core/selectGensForColumn.ts @@ -0,0 +1,159 @@ +import { generatorsMap } from '../generators/GeneratorFuncs.ts'; +import type { Column, Table } from '../types/tables.ts'; + +export const selectGeneratorForMssqlColumn = ( + table: Table, + col: Column, +) => { + const pickGenerator = (table: Table, col: Column) => { + // INT ------------------------------------------------------------------------------------------------------------ + if ( + col.columnType.includes('int') && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + return generator; + } + + let minValue: number | bigint | undefined; + let maxValue: number | bigint | undefined; + if (col.columnType.includes('int')) { + if (col.columnType === 'tinyint') { + // 2^8 / 2 - 1, 1 bytes + // more like unsigned tinyint + minValue = 0; + maxValue = 255; + } else if (col.columnType === 'smallint') { + // 2^16 / 2 - 1, 2 bytes + minValue = -32768; + maxValue = 32767; + } else if (col.columnType === 'int') { + // 2^32 / 2 - 1, 4 bytes + minValue = -2147483648; + maxValue = 2147483647; + } else if (col.columnType === 'bigint') { + // 2^64 / 2 - 1, 8 bytes + minValue = BigInt('-9223372036854775808'); + maxValue = BigInt('9223372036854775807'); + } + + const generator = new generatorsMap.GenerateInt[0]({ + minValue, + maxValue, + }); + return generator; + } + + // NUMBER(real, decimal, numeric, float) + if ( + col.columnType.startsWith('real') + || col.columnType.startsWith('decimal') + || col.columnType.startsWith('float') + || col.columnType.startsWith('numeric') + ) { + if (col.typeParams.precision !== undefined) { + const precision = col.typeParams.precision; + const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; + + const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); + const generator = new generatorsMap.GenerateNumber[0]({ + minValue: -maxAbsoluteValue, + maxValue: maxAbsoluteValue, + precision: Math.pow(10, scale), + }); + return generator; + } + + const generator = new generatorsMap.GenerateNumber[0](); + return generator; + } + + // STRING + if ( + (col.columnType === 'text' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateUniqueString[0](); + return generator; + } + + if ( + (col.columnType === 'text' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && col.name.toLowerCase().includes('name') + ) { + const generator = new generatorsMap.GenerateFirstName[0](); + return generator; + } + + if ( + (col.columnType === 'text' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && col.name.toLowerCase().includes('email') + ) { + const generator = new generatorsMap.GenerateEmail[0](); + return generator; + } + + if ( + col.columnType === 'text' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary') + ) { + const generator = new generatorsMap.GenerateString[0](); + return generator; + } + + // bit + if (col.columnType === 'bit') { + const generator = new generatorsMap.GenerateBoolean[0](); + return generator; + } + + // DATE, TIME, TIMESTAMP, DATETIME, YEAR + if (col.columnType.includes('datetime')) { + const generator = new generatorsMap.GenerateDatetime[0](); + return generator; + } + + if (col.columnType.includes('date')) { + const generator = new generatorsMap.GenerateDate[0](); + return generator; + } + + if (col.columnType === 'time') { + const generator = new generatorsMap.GenerateTime[0](); + return generator; + } + + // JSON + if (col.columnType === 'json') { + const generator = new generatorsMap.GenerateJson[0](); + return generator; + } + + if (col.hasDefault && col.default !== undefined) { + const generator = new generatorsMap.GenerateDefault[0]({ + defaultValue: col.default, + }); + return generator; + } + + return; + }; + + const generator = pickGenerator(table, col); + + return generator; +}; diff --git a/drizzle-seed/src/mysql-core/index.ts b/drizzle-seed/src/mysql-core/index.ts new file mode 100644 index 0000000000..d7bb4f66d1 --- /dev/null +++ b/drizzle-seed/src/mysql-core/index.ts @@ -0,0 +1,150 @@ +import { getTableName, is, sql } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { MySqlDatabase, MySqlSchema } from 'drizzle-orm/mysql-core'; +import { MySqlTable } from 'drizzle-orm/mysql-core'; +import { getSchemaInfo } from '../common.ts'; +import { SeedService } from '../SeedService.ts'; +import type { RefinementsType } from '../types/seedService.ts'; +import type { Column, TableConfigT } from '../types/tables.ts'; + +// MySql----------------------------------------------------------------------------------------------------- +export const resetMySql = async ( + db: MySqlDatabase, + schema: { [key: string]: MySqlTable }, +) => { + const tablesToTruncate = Object.entries(schema).map(([_tsTableName, table]) => { + const dbTableName = getTableName(table); + return dbTableName; + }); + + await db.execute(sql.raw('SET FOREIGN_KEY_CHECKS = 0;')); + + for (const tableName of tablesToTruncate) { + const sqlQuery = `truncate \`${tableName}\`;`; + await db.execute(sql.raw(sqlQuery)); + } + + await db.execute(sql.raw('SET FOREIGN_KEY_CHECKS = 1;')); +}; + +export const filterMysqlTables = (schema: { + [key: string]: + | MySqlTable + | MySqlSchema + | Relations + | any; +}) => { + const mysqlSchema = Object.fromEntries( + Object.entries(schema).filter( + (keyValue): keyValue is [string, MySqlTable | Relations] => + is(keyValue[1], MySqlTable) || is(keyValue[1], Relations), + ), + ); + + const mysqlTables = Object.fromEntries( + Object.entries(schema).filter( + (keyValue): keyValue is [string, MySqlTable] => is(keyValue[1], MySqlTable), + ), + ); + + return { mysqlSchema, mysqlTables }; +}; + +export const seedMySql = async ( + db: MySqlDatabase, + schema: { + [key: string]: + | MySqlTable + | MySqlSchema + | Relations + | any; + }, + options: { count?: number; seed?: number; version?: number } = {}, + refinements?: RefinementsType, +) => { + const { mysqlSchema, mysqlTables } = filterMysqlTables(schema); + const { tables, relations } = getSchemaInfo(mysqlSchema, mysqlTables, mapMySqlColumns); + + const seedService = new SeedService(); + + const generatedTablesGenerators = seedService.generatePossibleGenerators( + 'mysql', + tables, + relations, + refinements, + options, + ); + + const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); + + const tablesValues = await seedService.generateTablesValues( + relations, + generatedTablesGenerators, + db, + mysqlTables, + { ...options, preserveCyclicTablesData }, + ); + + const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( + generatedTablesGenerators, + ); + const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; + + await seedService.generateTablesValues( + relations, + filteredTablesGenerators, + db, + mysqlTables, + { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, + ); +}; + +export const mapMySqlColumns = ( + tableConfig: TableConfigT, + dbToTsColumnNamesMap: { [key: string]: string }, +): Column[] => { + const getTypeParams = (sqlType: string) => { + // get type params and set only type + const typeParams: Column['typeParams'] = {}; + + if ( + sqlType.startsWith('decimal') + || sqlType.startsWith('real') + || sqlType.startsWith('double') + || sqlType.startsWith('float') + ) { + const match = sqlType.match(/\((\d+), *(\d+)\)/); + if (match) { + typeParams['precision'] = Number(match[1]); + typeParams['scale'] = Number(match[2]); + } + } else if ( + sqlType.startsWith('char') + || sqlType.startsWith('varchar') + || sqlType.startsWith('binary') + || sqlType.startsWith('varbinary') + ) { + const match = sqlType.match(/\((\d+)\)/); + if (match) { + typeParams['length'] = Number(match[1]); + } + } + + return typeParams; + }; + + const mappedColumns = tableConfig.columns.map((column) => ({ + name: dbToTsColumnNamesMap[column.name] as string, + columnType: column.getSQLType(), + typeParams: getTypeParams(column.getSQLType()), + dataType: column.dataType.split(' ')[0]!, + hasDefault: column.hasDefault, + default: column.default, + enumValues: column.enumValues, + isUnique: column.isUnique, + notNull: column.notNull, + primary: column.primary, + })); + + return mappedColumns; +}; diff --git a/drizzle-seed/src/mysql-core/selectGensForColumn.ts b/drizzle-seed/src/mysql-core/selectGensForColumn.ts new file mode 100644 index 0000000000..cbc51c82a8 --- /dev/null +++ b/drizzle-seed/src/mysql-core/selectGensForColumn.ts @@ -0,0 +1,198 @@ +import { generatorsMap } from '../generators/GeneratorFuncs.ts'; +import type { Column, Table } from '../types/tables.ts'; + +export const selectGeneratorForMysqlColumn = ( + table: Table, + col: Column, +) => { + const pickGenerator = (table: Table, col: Column) => { + // INT ------------------------------------------------------------------------------------------------------------ + if ( + (col.columnType.includes('serial') || col.columnType.includes('int')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + return generator; + } + + let minValue: number | bigint | undefined; + let maxValue: number | bigint | undefined; + if (col.columnType === 'serial') { + // 2^64 % 2 - 1, 8 bytes + minValue = BigInt(0); + maxValue = BigInt('9223372036854775807'); + } else if (col.columnType.includes('int')) { + if (col.columnType === 'tinyint') { + // 2^8 / 2 - 1, 1 bytes + minValue = -128; + maxValue = 127; + } else if (col.columnType === 'smallint') { + // 2^16 / 2 - 1, 2 bytes + minValue = -32768; + maxValue = 32767; + } else if (col.columnType === 'mediumint') { + // 2^16 / 2 - 1, 2 bytes + minValue = -8388608; + maxValue = 8388607; + } else if (col.columnType === 'int') { + // 2^32 / 2 - 1, 4 bytes + minValue = -2147483648; + maxValue = 2147483647; + } else if (col.columnType === 'bigint') { + // 2^64 / 2 - 1, 8 bytes + minValue = BigInt('-9223372036854775808'); + maxValue = BigInt('9223372036854775807'); + } + } + + if (col.columnType.includes('int')) { + const generator = new generatorsMap.GenerateInt[0]({ + minValue, + maxValue, + }); + return generator; + } + + if (col.columnType.includes('serial')) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + generator.maxValue = maxValue; + return generator; + } + + // NUMBER(real, double, decimal, float) + if ( + col.columnType.startsWith('real') + || col.columnType.startsWith('double') + || col.columnType.startsWith('decimal') + || col.columnType.startsWith('float') + || col.columnType.startsWith('numeric') + ) { + if (col.typeParams.precision !== undefined) { + const precision = col.typeParams.precision; + const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; + + const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); + const generator = new generatorsMap.GenerateNumber[0]({ + minValue: -maxAbsoluteValue, + maxValue: maxAbsoluteValue, + precision: Math.pow(10, scale), + }); + return generator; + } + + const generator = new generatorsMap.GenerateNumber[0](); + return generator; + } + + // STRING + if ( + (col.columnType === 'text' + || col.columnType === 'blob' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateUniqueString[0](); + return generator; + } + + if ( + (col.columnType === 'text' + || col.columnType === 'blob' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && col.name.toLowerCase().includes('name') + ) { + const generator = new generatorsMap.GenerateFirstName[0](); + return generator; + } + + if ( + (col.columnType === 'text' + || col.columnType === 'blob' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && col.name.toLowerCase().includes('email') + ) { + const generator = new generatorsMap.GenerateEmail[0](); + return generator; + } + + if ( + col.columnType === 'text' + || col.columnType === 'blob' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary') + ) { + const generator = new generatorsMap.GenerateString[0](); + return generator; + } + + // BOOLEAN + if (col.columnType === 'boolean') { + const generator = new generatorsMap.GenerateBoolean[0](); + return generator; + } + + // DATE, TIME, TIMESTAMP, DATETIME, YEAR + if (col.columnType.includes('datetime')) { + const generator = new generatorsMap.GenerateDatetime[0](); + return generator; + } + + if (col.columnType.includes('date')) { + const generator = new generatorsMap.GenerateDate[0](); + return generator; + } + + if (col.columnType === 'time') { + const generator = new generatorsMap.GenerateTime[0](); + return generator; + } + + if (col.columnType.includes('timestamp')) { + const generator = new generatorsMap.GenerateTimestamp[0](); + return generator; + } + + if (col.columnType === 'year') { + const generator = new generatorsMap.GenerateYear[0](); + return generator; + } + + // JSON + if (col.columnType === 'json') { + const generator = new generatorsMap.GenerateJson[0](); + return generator; + } + + // ENUM + if (col.enumValues !== undefined) { + const generator = new generatorsMap.GenerateEnum[0]({ + enumValues: col.enumValues, + }); + return generator; + } + + if (col.hasDefault && col.default !== undefined) { + const generator = new generatorsMap.GenerateDefault[0]({ + defaultValue: col.default, + }); + return generator; + } + + return; + }; + + const generator = pickGenerator(table, col); + + return generator; +}; diff --git a/drizzle-seed/src/pg-core/index.ts b/drizzle-seed/src/pg-core/index.ts new file mode 100644 index 0000000000..f7a8fee79f --- /dev/null +++ b/drizzle-seed/src/pg-core/index.ts @@ -0,0 +1,181 @@ +import { is, sql } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { PgArray, PgDatabase, PgSchema } from 'drizzle-orm/pg-core'; +import { getTableConfig, PgTable } from 'drizzle-orm/pg-core'; +import { getSchemaInfo } from '../common.ts'; +import { SeedService } from '../SeedService.ts'; +import type { RefinementsType } from '../types/seedService.ts'; +import type { Column, TableConfigT } from '../types/tables.ts'; + +// Postgres----------------------------------------------------------------------------------------------------------- +export const resetPostgres = async ( + db: PgDatabase, + pgTables: { [key: string]: PgTable }, +) => { + const tablesToTruncate = Object.entries(pgTables).map(([_, table]) => { + const config = getTableConfig(table); + config.schema = config.schema === undefined ? 'public' : config.schema; + + return `"${config.schema}"."${config.name}"`; + }); + + await db.execute(sql.raw(`truncate ${tablesToTruncate.join(',')} cascade;`)); +}; + +export const filterPgSchema = (schema: { + [key: string]: + | PgTable + | PgSchema + | Relations + | any; +}) => { + const pgSchema = Object.fromEntries( + Object.entries(schema).filter((keyValue): keyValue is [string, PgTable | Relations] => + is(keyValue[1], PgTable) || is(keyValue[1], Relations) + ), + ); + + const pgTables = Object.fromEntries( + Object.entries(schema).filter((keyValue): keyValue is [string, PgTable] => is(keyValue[1], PgTable)), + ); + + return { pgSchema, pgTables }; +}; + +export const seedPostgres = async ( + db: PgDatabase, + schema: { + [key: string]: + | PgTable + | PgSchema + | Relations + | any; + }, + options: { count?: number; seed?: number; version?: number } = {}, + refinements?: RefinementsType, +) => { + const seedService = new SeedService(); + + const { pgSchema, pgTables } = filterPgSchema(schema); + + const { tables, relations } = getSchemaInfo(pgSchema, pgTables, mapPgColumns); + // const { tables, relations } = getPostgresInfo(pgSchema, pgTables); + const generatedTablesGenerators = seedService.generatePossibleGenerators( + 'postgresql', + tables, + relations, + refinements, + options, + ); + + const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); + + const tablesValues = await seedService.generateTablesValues( + relations, + generatedTablesGenerators, + db, + pgTables, + { ...options, preserveCyclicTablesData }, + ); + + const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( + generatedTablesGenerators, + ); + const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; + + await seedService.generateTablesValues( + relations, + filteredTablesGenerators, + db, + pgTables, + { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, + ); +}; + +export const mapPgColumns = ( + tableConfig: TableConfigT, + dbToTsColumnNamesMap: { [key: string]: string }, +): Column[] => { + const getAllBaseColumns = ( + baseColumn: PgArray['baseColumn'] & { baseColumn?: PgArray['baseColumn'] }, + ): Column['baseColumn'] => { + const baseColumnResult: Column['baseColumn'] = { + name: baseColumn.name, + columnType: baseColumn.getSQLType(), + typeParams: getTypeParams(baseColumn.getSQLType()), + dataType: baseColumn.dataType.split(' ')[0]!, + size: (baseColumn as PgArray).length, + hasDefault: baseColumn.hasDefault, + enumValues: baseColumn.enumValues, + default: baseColumn.default, + isUnique: baseColumn.isUnique, + notNull: baseColumn.notNull, + primary: baseColumn.primary, + baseColumn: baseColumn.baseColumn === undefined ? undefined : getAllBaseColumns(baseColumn.baseColumn), + }; + + return baseColumnResult; + }; + + const getTypeParams = (sqlType: string) => { + // get type params + const typeParams: Column['typeParams'] = {}; + + // handle dimensions + if (sqlType.includes('[')) { + const match = sqlType.match(/\[\w*]/g); + if (match) { + typeParams['dimensions'] = match.length; + } + } + + if ( + sqlType.startsWith('numeric') + || sqlType.startsWith('decimal') + || sqlType.startsWith('double precision') + || sqlType.startsWith('real') + ) { + const match = sqlType.match(/\((\d+), *(\d+)\)/); + if (match) { + typeParams['precision'] = Number(match[1]); + typeParams['scale'] = Number(match[2]); + } + } else if ( + sqlType.startsWith('varchar') + || sqlType.startsWith('bpchar') + || sqlType.startsWith('char') + || sqlType.startsWith('bit') + || sqlType.startsWith('vector') + || sqlType.startsWith('time') + || sqlType.startsWith('timestamp') + || sqlType.startsWith('interval') + ) { + const match = sqlType.match(/\((\d+)\)/); + if (match) { + typeParams['length'] = Number(match[1]); + } + } + + return typeParams; + }; + + const mappedColumns: Column[] = tableConfig.columns.map((column) => ({ + name: dbToTsColumnNamesMap[column.name] as string, + columnType: column.getSQLType(), + typeParams: getTypeParams(column.getSQLType()), + dataType: column.dataType.split(' ')[0]!, + size: (column as PgArray).length, + hasDefault: column.hasDefault, + default: column.default, + enumValues: column.enumValues, + isUnique: column.isUnique, + notNull: column.notNull, + primary: column.primary, + generatedIdentityType: column.generatedIdentity?.type, + baseColumn: ((column as PgArray).baseColumn === undefined) + ? undefined + : getAllBaseColumns((column as PgArray).baseColumn), + })); + + return mappedColumns; +}; diff --git a/drizzle-seed/src/pg-core/selectGensForColumn.ts b/drizzle-seed/src/pg-core/selectGensForColumn.ts new file mode 100644 index 0000000000..412d6bf072 --- /dev/null +++ b/drizzle-seed/src/pg-core/selectGensForColumn.ts @@ -0,0 +1,327 @@ +import { generatorsMap } from '../generators/GeneratorFuncs.ts'; +import type { AbstractGenerator, GenerateInterval } from '../generators/Generators.ts'; +import type { Column, Table } from '../types/tables.ts'; + +// TODO: revise serial part generators +export const selectGeneratorForPostgresColumn = ( + table: Table, + col: Column, +) => { + const pickGenerator = (table: Table, col: Column) => { + // ARRAY + if (col.columnType.match(/\[\w*]/g) !== null && col.baseColumn !== undefined) { + const baseColumnGen = selectGeneratorForPostgresColumn( + table, + col.baseColumn!, + ) as AbstractGenerator; + if (baseColumnGen === undefined) { + throw new Error(`column with type ${col.baseColumn!.columnType} is not supported for now.`); + } + + // const getBaseColumnDataType = (baseColumn: Column) => { + // if (baseColumn.baseColumn !== undefined) { + // return getBaseColumnDataType(baseColumn.baseColumn); + // } + + // return baseColumn.dataType; + // }; + // const baseColumnDataType = getBaseColumnDataType(col.baseColumn); + + const generator = new generatorsMap.GenerateArray[0]({ baseColumnGen, size: col.size }); + // generator.baseColumnDataType = baseColumnDataType; + + return generator; + } + + // ARRAY for studio + if (col.columnType.match(/\[\w*]/g) !== null) { + // remove dimensions from type + const baseColumnType = col.columnType.replace(/\[\w*]/g, ''); + const baseColumn: Column = { + ...col, + }; + baseColumn.columnType = baseColumnType; + + const baseColumnGen = selectGeneratorForPostgresColumn(table, baseColumn) as AbstractGenerator; + if (baseColumnGen === undefined) { + throw new Error(`column with type ${col.baseColumn!.columnType} is not supported for now.`); + } + + let generator = new generatorsMap.GenerateArray[0]({ baseColumnGen }); + + for (let i = 0; i < col.typeParams.dimensions! - 1; i++) { + generator = new generatorsMap.GenerateArray[0]({ baseColumnGen: generator }); + } + + return generator; + } + + // INT ------------------------------------------------------------------------------------------------------------ + if ( + (col.columnType.includes('serial') + || col.columnType === 'integer' + || col.columnType === 'smallint' + || col.columnType.includes('bigint')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + + return generator; + } + + let minValue: number | bigint | undefined; + let maxValue: number | bigint | undefined; + if (col.columnType.includes('serial')) { + minValue = 1; + if (col.columnType === 'smallserial') { + // 2^16 / 2 - 1, 2 bytes + maxValue = 32767; + } else if (col.columnType === 'serial') { + // 2^32 / 2 - 1, 4 bytes + maxValue = 2147483647; + } else if (col.columnType === 'bigserial') { + // 2^64 / 2 - 1, 8 bytes + minValue = BigInt(1); + maxValue = BigInt('9223372036854775807'); + } + } else if (col.columnType.includes('int')) { + if (col.columnType === 'smallint') { + // 2^16 / 2 - 1, 2 bytes + minValue = -32768; + maxValue = 32767; + } else if (col.columnType === 'integer') { + // 2^32 / 2 - 1, 4 bytes + minValue = -2147483648; + maxValue = 2147483647; + } else if (col.columnType.includes('bigint')) { + if (col.dataType === 'bigint') { + // 2^64 / 2 - 1, 8 bytes + minValue = BigInt('-9223372036854775808'); + maxValue = BigInt('9223372036854775807'); + } else { + // if (col.dataType === 'number') + // if you’re expecting values above 2^31 but below 2^53 + minValue = -9007199254740991; + maxValue = 9007199254740991; + } + } + } + + if ( + col.columnType.includes('int') + && !col.columnType.includes('interval') + && !col.columnType.includes('point') + ) { + const generator = new generatorsMap.GenerateInt[0]({ + minValue, + maxValue, + }); + + return generator; + } + + if (col.columnType.includes('serial')) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + + generator.maxValue = maxValue; + + return generator; + } + + // NUMBER(real, double, decimal, numeric) + if ( + col.columnType.startsWith('real') + || col.columnType.startsWith('double precision') + || col.columnType.startsWith('decimal') + || col.columnType.startsWith('numeric') + ) { + if (col.typeParams.precision !== undefined) { + const precision = col.typeParams.precision; + const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; + + const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); + const generator = new generatorsMap.GenerateNumber[0]({ + minValue: -maxAbsoluteValue, + maxValue: maxAbsoluteValue, + precision: Math.pow(10, scale), + }); + return generator; + } + const generator = new generatorsMap.GenerateNumber[0](); + + return generator; + } + + // STRING + if ( + (col.columnType === 'text' + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('char')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateUniqueString[0](); + + return generator; + } + + if ( + (col.columnType === 'text' + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('char')) + && col.name.toLowerCase().includes('name') + ) { + const generator = new generatorsMap.GenerateFirstName[0](); + + return generator; + } + + if ( + (col.columnType === 'text' + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('char')) + && col.name.toLowerCase().includes('email') + ) { + const generator = new generatorsMap.GenerateEmail[0](); + + return generator; + } + + if ( + col.columnType === 'text' + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('char') + ) { + const generator = new generatorsMap.GenerateString[0](); + + return generator; + } + + // BIT + if (col.columnType.startsWith('bit')) { + const generator = new generatorsMap.GenerateBitString[0](); + + return generator; + } + + // INET + if (col.columnType === 'inet') { + const generator = new generatorsMap.GenerateInet[0](); + + return generator; + } + + // geometry(point) + if (col.columnType.startsWith('geometry')) { + const generator = new generatorsMap.GenerateGeometry[0](); + + return generator; + } + + // vector + if (col.columnType.startsWith('vector')) { + const generator = new generatorsMap.GenerateVector[0](); + + return generator; + } + + // UUID + if (col.columnType === 'uuid') { + const generator = new generatorsMap.GenerateUUID[0](); + + return generator; + } + + // BOOLEAN + if (col.columnType === 'boolean') { + const generator = new generatorsMap.GenerateBoolean[0](); + + return generator; + } + + // DATE, TIME, TIMESTAMP + if (col.columnType.includes('date')) { + const generator = new generatorsMap.GenerateDate[0](); + + return generator; + } + + if (col.columnType === 'time') { + const generator = new generatorsMap.GenerateTime[0](); + + return generator; + } + + if (col.columnType.includes('timestamp')) { + const generator = new generatorsMap.GenerateTimestamp[0](); + + return generator; + } + + // JSON, JSONB + if (col.columnType === 'json' || col.columnType === 'jsonb') { + const generator = new generatorsMap.GenerateJson[0](); + + return generator; + } + + // if (col.columnType === "jsonb") { + // const generator = new GenerateJsonb({}); + // return generator; + // } + + // ENUM + if (col.enumValues !== undefined) { + const generator = new generatorsMap.GenerateEnum[0]({ + enumValues: col.enumValues, + }); + + return generator; + } + + // INTERVAL + if (col.columnType.startsWith('interval')) { + if (col.columnType === 'interval') { + const generator = new generatorsMap.GenerateInterval[0](); + + return generator; + } + + const fields = col.columnType.replace('interval ', '') as GenerateInterval['params']['fields']; + const generator = new generatorsMap.GenerateInterval[0]({ fields }); + + return generator; + } + + // POINT, LINE + if (col.columnType.includes('point')) { + const generator = new generatorsMap.GeneratePoint[0](); + + return generator; + } + + if (col.columnType.includes('line')) { + const generator = new generatorsMap.GenerateLine[0](); + + return generator; + } + + if (col.hasDefault && col.default !== undefined) { + const generator = new generatorsMap.GenerateDefault[0]({ + defaultValue: col.default, + }); + return generator; + } + + return; + }; + + const generator = pickGenerator(table, col); + if (generator !== undefined) { + generator.isUnique = col.isUnique; + generator.dataType = col.dataType; + generator.typeParams = col.typeParams; + // generator.stringLength = col.typeParams.length; + } + + return generator; +}; diff --git a/drizzle-seed/src/services/apiVersion.ts b/drizzle-seed/src/services/apiVersion.ts deleted file mode 100644 index 6cda0267e6..0000000000 --- a/drizzle-seed/src/services/apiVersion.ts +++ /dev/null @@ -1 +0,0 @@ -export const latestVersion = 2; diff --git a/drizzle-seed/src/services/utils.ts b/drizzle-seed/src/services/utils.ts deleted file mode 100644 index c972e7bd1e..0000000000 --- a/drizzle-seed/src/services/utils.ts +++ /dev/null @@ -1,105 +0,0 @@ -export const fastCartesianProduct = (sets: (number | string | boolean | object)[][], index: number) => { - const resultList = []; - let currSet: (typeof sets)[number]; - let element: (typeof sets)[number][number]; - - for (let i = sets.length - 1; i >= 0; i--) { - currSet = sets[i]!; - element = currSet[index % currSet.length]!; - resultList.unshift(element); - index = Math.floor(index / currSet.length); - } - - return resultList; -}; - -const sumArray = (weights: number[]) => { - const scale = 1e10; - const scaledSum = weights.reduce((acc, currVal) => acc + Math.round(currVal * scale), 0); - return scaledSum / scale; -}; - -/** - * @param weights positive number in range [0, 1], that represents probabilities to choose index of array. Example: weights = [0.2, 0.8] - * @param [accuracy=100] approximate number of elements in returning array - * @returns Example: with weights = [0.2, 0.8] and accuracy = 10 returning array of indices gonna equal this: [0, 0, 1, 1, 1, 1, 1, 1, 1, 1] - */ -export const getWeightedIndices = (weights: number[], accuracy = 100) => { - const weightsSum = sumArray(weights); - if (weightsSum !== 1) { - throw new Error( - `The weights for the Weighted Random feature must add up to exactly 1. Please review your weights to ensure they total 1 before proceeding`, - ); - } - - // const accuracy = 100; - const weightedIndices: number[] = []; - for (const [index, weight] of weights.entries()) { - const ticketsNumb = Math.floor(weight * accuracy); - weightedIndices.push(...Array.from({ length: ticketsNumb }).fill(index)); - } - - return weightedIndices; -}; - -export const generateHashFromString = (s: string) => { - let hash = 0; - // p and m are prime numbers - const p = 53; - const m = 28871271685163; - - for (let i = 0; i < s.length; i++) { - hash += ((s.codePointAt(i) || 0) * Math.pow(p, i)) % m; - } - - return hash; -}; - -/** - * @param param0.template example: "#####" or "#####-####" - * @param param0.values example: ["3", "2", "h"] - * @param param0.defaultValue example: "0" - * @returns - */ -export const fillTemplate = ({ template, placeholdersCount, values, defaultValue = ' ' }: { - template: string; - placeholdersCount?: number; - values: string[]; - defaultValue?: string; -}) => { - if (placeholdersCount === undefined) { - const iterArray = [...template.matchAll(/#/g)]; - placeholdersCount = iterArray.length; - } - - const diff = placeholdersCount - values.length; - if (diff > 0) { - values.unshift(...Array.from({ length: diff }).fill(defaultValue)); - } - - let resultStr = '', valueIdx = 0; - for (const si of template) { - if (si === '#') { - resultStr += values[valueIdx]; - valueIdx += 1; - continue; - } - resultStr += si; - } - - return resultStr; -}; - -// is variable is object-like. -// Example: -// isObject({f: 4}) === true; -// isObject([1,2,3]) === false; -// isObject(new Set()) === false; -export const isObject = (value: any) => { - if (value !== null && value !== undefined && value.constructor === Object) return true; - return false; -}; - -export const equalSets = (set1: Set, set2: Set) => { - return set1.size === set2.size && [...set1].every((si) => set2.has(si)); -}; diff --git a/drizzle-seed/src/singlestore-core/index.ts b/drizzle-seed/src/singlestore-core/index.ts new file mode 100644 index 0000000000..bc7d433ef0 --- /dev/null +++ b/drizzle-seed/src/singlestore-core/index.ts @@ -0,0 +1,157 @@ +import { getTableName, is, sql } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { SingleStoreDatabase, SingleStoreSchema } from 'drizzle-orm/singlestore-core'; +import { SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import { getSchemaInfo } from '../common.ts'; +import { SeedService } from '../SeedService.ts'; +import type { RefinementsType } from '../types/seedService.ts'; +import type { Column, TableConfigT } from '../types/tables.ts'; + +// SingleStore----------------------------------------------------------------------------------------------------- +export const resetSingleStore = async ( + db: SingleStoreDatabase, + schema: { [key: string]: SingleStoreTable }, +) => { + const tablesToTruncate = Object.entries(schema).map(([_tsTableName, table]) => { + const dbTableName = getTableName(table); + return dbTableName; + }); + + await db.execute(sql.raw('SET FOREIGN_KEY_CHECKS = 0;')); + + for (const tableName of tablesToTruncate) { + const sqlQuery = `truncate \`${tableName}\`;`; + await db.execute(sql.raw(sqlQuery)); + } + + await db.execute(sql.raw('SET FOREIGN_KEY_CHECKS = 1;')); +}; + +export const filterSingleStoreTables = (schema: { + [key: string]: + | SingleStoreTable + | SingleStoreSchema + | Relations + | any; +}) => { + const singleStoreSchema = Object.fromEntries( + Object.entries(schema).filter( + (keyValue): keyValue is [string, SingleStoreTable | Relations] => + is(keyValue[1], SingleStoreTable) || is(keyValue[1], Relations), + ), + ); + + const singleStoreTables = Object.fromEntries( + Object.entries(schema).filter( + (keyValue): keyValue is [string, SingleStoreTable] => is(keyValue[1], SingleStoreTable), + ), + ); + + return { singleStoreSchema, singleStoreTables }; +}; + +export const seedSingleStore = async ( + db: SingleStoreDatabase, + schema: { + [key: string]: + | SingleStoreTable + | SingleStoreSchema + | Relations + | any; + }, + options: { count?: number; seed?: number; version?: number } = {}, + refinements?: RefinementsType, +) => { + const { singleStoreSchema, singleStoreTables } = filterSingleStoreTables(schema); + const { tables, relations } = getSchemaInfo(singleStoreSchema, singleStoreTables, mapSingleStoreColumns); + + const seedService = new SeedService(); + + const generatedTablesGenerators = seedService.generatePossibleGenerators( + 'singlestore', + tables, + relations, + refinements, + options, + ); + + const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); + + const tablesValues = await seedService.generateTablesValues( + relations, + generatedTablesGenerators, + db, + singleStoreTables, + { ...options, preserveCyclicTablesData }, + ); + + const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( + generatedTablesGenerators, + ); + const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; + + await seedService.generateTablesValues( + relations, + filteredTablesGenerators, + db, + singleStoreTables, + { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, + ); +}; + +export const mapSingleStoreColumns = ( + tableConfig: TableConfigT, + dbToTsColumnNamesMap: { [key: string]: string }, +): Column[] => { + const getTypeParams = (sqlType: string) => { + // get type params and set only type + const typeParams: Column['typeParams'] = {}; + + if ( + sqlType.startsWith('decimal') + || sqlType.startsWith('real') + || sqlType.startsWith('double') + || sqlType.startsWith('float') + ) { + const match = sqlType.match(/\((\d+), *(\d+)\)/); + if (match) { + typeParams['precision'] = Number(match[1]); + typeParams['scale'] = Number(match[2]); + } + } else if ( + sqlType.startsWith('char') + || sqlType.startsWith('varchar') + || sqlType.startsWith('text') + || sqlType.startsWith('binary') + || sqlType.startsWith('varbinary') + ) { + const match = sqlType.match(/\((\d+)\)/); + if (match) { + typeParams['length'] = Number(match[1]); + } + } else if (sqlType.startsWith('vector')) { + const match = sqlType.match(/\((\d+),? ?((F|I)\d{1,2})?\)/); + if (match) { + typeParams['length'] = Number(match[1]); + typeParams['vectorValueType'] = match[2] as typeof typeParams['vectorValueType']; + } + } + + return typeParams; + }; + + const mappedColumns = tableConfig.columns.map((column) => ({ + name: dbToTsColumnNamesMap[column.name] as string, + columnType: column.getSQLType(), + typeParams: getTypeParams(column.getSQLType()), + dataType: column.dataType.split(' ')[0]!, + hasDefault: column.hasDefault, + default: column.default, + enumValues: column.enumValues, + isUnique: column.isUnique, + notNull: column.notNull, + primary: column.primary, + })); + + return mappedColumns; +}; diff --git a/drizzle-seed/src/singlestore-core/selectGensForColumn.ts b/drizzle-seed/src/singlestore-core/selectGensForColumn.ts new file mode 100644 index 0000000000..0455dc698f --- /dev/null +++ b/drizzle-seed/src/singlestore-core/selectGensForColumn.ts @@ -0,0 +1,247 @@ +import { generatorsMap } from '../generators/GeneratorFuncs.ts'; +import type { Column, Table } from '../types/tables.ts'; + +export const selectGeneratorForSingleStoreColumn = ( + table: Table, + col: Column, +) => { + const pickGenerator = (table: Table, col: Column) => { + // INT ------------------------------------------------------------------------------------------------------------ + if ( + (col.columnType.includes('serial') || col.columnType.includes('int')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + return generator; + } + + let minValue: number | bigint | undefined; + let maxValue: number | bigint | undefined; + if (col.columnType === 'serial') { + // 2^64 % 2 - 1, 8 bytes + minValue = BigInt(0); + maxValue = BigInt('9223372036854775807'); + } else if (col.columnType.includes('int')) { + if (col.columnType === 'tinyint') { + // 2^8 / 2 - 1, 1 bytes + minValue = -128; + maxValue = 127; + } else if (col.columnType === 'smallint') { + // 2^16 / 2 - 1, 2 bytes + minValue = -32768; + maxValue = 32767; + } else if (col.columnType === 'mediumint') { + // 2^16 / 2 - 1, 2 bytes + minValue = -8388608; + maxValue = 8388607; + } else if (col.columnType === 'int') { + // 2^32 / 2 - 1, 4 bytes + minValue = -2147483648; + maxValue = 2147483647; + } else if (col.columnType === 'bigint') { + // 2^64 / 2 - 1, 8 bytes + minValue = BigInt('-9223372036854775808'); + maxValue = BigInt('9223372036854775807'); + } + } + + if (col.columnType.includes('int')) { + const generator = new generatorsMap.GenerateInt[0]({ + minValue, + maxValue, + }); + return generator; + } + + if (col.columnType.includes('serial')) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + generator.maxValue = maxValue; + return generator; + } + + // NUMBER(real, double, decimal, float) + if ( + col.columnType.startsWith('real') + || col.columnType.startsWith('double') + || col.columnType.startsWith('decimal') + || col.columnType.startsWith('float') + || col.columnType.startsWith('numeric') + ) { + if (col.typeParams.precision !== undefined) { + const precision = col.typeParams.precision; + const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; + + const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); + const generator = new generatorsMap.GenerateNumber[0]({ + minValue: -maxAbsoluteValue, + maxValue: maxAbsoluteValue, + precision: Math.pow(10, scale), + }); + return generator; + } + + const generator = new generatorsMap.GenerateNumber[0](); + return generator; + } + + // STRING + if ( + (col.columnType === 'tinytext' + || col.columnType === 'mediumtext' + || col.columnType === 'text' + || col.columnType === 'longtext' + || col.columnType === 'blob' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateUniqueString[0](); + return generator; + } + + if ( + (col.columnType === 'tinytext' + || col.columnType === 'mediumtext' + || col.columnType === 'text' + || col.columnType === 'longtext' + || col.columnType === 'blob' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && col.name.toLowerCase().includes('name') + ) { + const generator = new generatorsMap.GenerateFirstName[0](); + return generator; + } + + if ( + (col.columnType === 'tinytext' + || col.columnType === 'mediumtext' + || col.columnType === 'text' + || col.columnType === 'longtext' + || col.columnType === 'blob' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary')) + && col.name.toLowerCase().includes('email') + ) { + const generator = new generatorsMap.GenerateEmail[0](); + return generator; + } + + if ( + col.columnType === 'tinytext' + || col.columnType === 'mediumtext' + || col.columnType === 'text' + || col.columnType === 'longtext' + || col.columnType === 'blob' + || col.columnType.startsWith('char') + || col.columnType.startsWith('varchar') + || col.columnType.startsWith('binary') + || col.columnType.startsWith('varbinary') + ) { + const generator = new generatorsMap.GenerateString[0](); + return generator; + } + + // BOOLEAN + if (col.columnType === 'boolean') { + const generator = new generatorsMap.GenerateBoolean[0](); + return generator; + } + + // DATE, TIME, TIMESTAMP, DATETIME, YEAR + if (col.columnType.includes('datetime')) { + const generator = new generatorsMap.GenerateDatetime[0](); + return generator; + } + + if (col.columnType.includes('date')) { + const generator = new generatorsMap.GenerateDate[0](); + return generator; + } + + if (col.columnType === 'time') { + const generator = new generatorsMap.GenerateTime[0](); + return generator; + } + + if (col.columnType.includes('timestamp')) { + const generator = new generatorsMap.GenerateTimestamp[0](); + return generator; + } + + if (col.columnType === 'year') { + const generator = new generatorsMap.GenerateYear[0](); + return generator; + } + + // JSON + if (col.columnType === 'json') { + const generator = new generatorsMap.GenerateJson[0](); + return generator; + } + + // ENUM + if (col.enumValues !== undefined) { + const generator = new generatorsMap.GenerateEnum[0]({ + enumValues: col.enumValues, + }); + return generator; + } + + // vector + if (col.columnType.startsWith('vector')) { + let minValue: number | undefined, + maxValue: number | undefined, + decimalPlaces: number | undefined; + if (col.typeParams.vectorValueType === 'I8') { + minValue = -128; + maxValue = 127; + decimalPlaces = 0; + } else if (col.typeParams.vectorValueType === 'I16') { + minValue = -32768; + maxValue = 32767; + decimalPlaces = 0; + } else if (col.typeParams.vectorValueType === 'I32') { + minValue = -2147483648; + maxValue = 2147483647; + decimalPlaces = 0; + } else if (col.typeParams.vectorValueType === 'I64') { + minValue = Number.MIN_SAFE_INTEGER; + maxValue = Number.MAX_SAFE_INTEGER; + // minValue = -BigInt('9223372036854775808'); + // maxValue = BigInt('9223372036854775807'); + decimalPlaces = 0; + } else if (col.typeParams.vectorValueType === 'F32') { + minValue = -2147483648; + maxValue = 2147483647; + decimalPlaces = 6; + } else if (col.typeParams.vectorValueType === 'F64') { + minValue = -524288; + maxValue = 524287; + decimalPlaces = 10; + } + + const generator = new generatorsMap.GenerateVector[0]({ minValue, maxValue, decimalPlaces }); + return generator; + } + + if (col.hasDefault && col.default !== undefined) { + const generator = new generatorsMap.GenerateDefault[0]({ + defaultValue: col.default, + }); + return generator; + } + + return; + }; + + const generator = pickGenerator(table, col); + + return generator; +}; diff --git a/drizzle-seed/src/sqlite-core/index.ts b/drizzle-seed/src/sqlite-core/index.ts new file mode 100644 index 0000000000..ce79ab3492 --- /dev/null +++ b/drizzle-seed/src/sqlite-core/index.ts @@ -0,0 +1,144 @@ +import { getTableName, is, sql } from 'drizzle-orm'; +import { Relations } from 'drizzle-orm/_relations'; +import type { BaseSQLiteDatabase } from 'drizzle-orm/sqlite-core'; +import { SQLiteTable } from 'drizzle-orm/sqlite-core'; +import { getSchemaInfo } from '../common.ts'; +import { SeedService } from '../SeedService.ts'; +import type { RefinementsType } from '../types/seedService.ts'; +import type { Column, TableConfigT } from '../types/tables.ts'; + +// Sqlite------------------------------------------------------------------------------------------------------------------------ +export const resetSqlite = async ( + db: BaseSQLiteDatabase, + schema: { [key: string]: SQLiteTable }, +) => { + const tablesToTruncate = Object.entries(schema).map(([_tsTableName, table]) => { + const dbTableName = getTableName(table); + return dbTableName; + }); + + await db.run(sql.raw('PRAGMA foreign_keys = OFF')); + + for (const tableName of tablesToTruncate) { + const sqlQuery = `delete from \`${tableName}\`;`; + await db.run(sql.raw(sqlQuery)); + } + + await db.run(sql.raw('PRAGMA foreign_keys = ON')); +}; + +export const filterSqliteTables = (schema: { + [key: string]: + | SQLiteTable + | Relations + | any; +}) => { + const sqliteSchema = Object.fromEntries( + Object.entries(schema).filter( + (keyValue): keyValue is [string, SQLiteTable | Relations] => + is(keyValue[1], SQLiteTable) || is(keyValue[1], Relations), + ), + ); + + const sqliteTables = Object.fromEntries( + Object.entries(schema).filter( + (keyValue): keyValue is [string, SQLiteTable] => is(keyValue[1], SQLiteTable), + ), + ); + + return { sqliteSchema, sqliteTables }; +}; + +export const seedSqlite = async ( + db: BaseSQLiteDatabase, + schema: { + [key: string]: + | SQLiteTable + | Relations + | any; + }, + options: { count?: number; seed?: number; version?: number } = {}, + refinements?: RefinementsType, +) => { + const { sqliteSchema, sqliteTables } = filterSqliteTables(schema); + const { tables, relations } = getSchemaInfo(sqliteSchema, sqliteTables, mapSqliteColumns); + + const seedService = new SeedService(); + + const generatedTablesGenerators = seedService.generatePossibleGenerators( + 'sqlite', + tables, + relations, + refinements, + options, + ); + + const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); + + const tablesValues = await seedService.generateTablesValues( + relations, + generatedTablesGenerators, + db, + sqliteTables, + { ...options, preserveCyclicTablesData }, + ); + + const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( + generatedTablesGenerators, + ); + const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; + + await seedService.generateTablesValues( + relations, + filteredTablesGenerators, + db, + sqliteTables, + { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, + ); +}; + +export const mapSqliteColumns = ( + tableConfig: TableConfigT, + dbToTsColumnNamesMap: { [key: string]: string }, +): Column[] => { + const getTypeParams = (sqlType: string) => { + // get type params and set only type + const typeParams: Column['typeParams'] = {}; + + if ( + sqlType.startsWith('decimal') + ) { + const match = sqlType.match(/\((\d+), *(\d+)\)/); + if (match) { + typeParams['precision'] = Number(match[1]); + typeParams['scale'] = Number(match[2]); + } + } else if ( + sqlType.startsWith('char') + || sqlType.startsWith('varchar') + || sqlType.startsWith('text') + ) { + const match = sqlType.match(/\((\d+)\)/); + if (match) { + typeParams['length'] = Number(match[1]); + } + } + + return typeParams; + }; + + const mappedColumns = tableConfig.columns.map((column) => ({ + name: dbToTsColumnNamesMap[column.name] as string, + columnType: column.getSQLType(), + typeParams: getTypeParams(column.getSQLType()), + dataType: column.dataType.split(' ')[0]!, + hasDefault: column.hasDefault, + default: column.default, + enumValues: column.enumValues, + isUnique: column.isUnique, + notNull: column.notNull, + primary: column.primary, + })); + + return mappedColumns; +}; diff --git a/drizzle-seed/src/sqlite-core/selectGensForColumn.ts b/drizzle-seed/src/sqlite-core/selectGensForColumn.ts new file mode 100644 index 0000000000..f68cbdf5a8 --- /dev/null +++ b/drizzle-seed/src/sqlite-core/selectGensForColumn.ts @@ -0,0 +1,117 @@ +import { generatorsMap } from '../generators/GeneratorFuncs.ts'; +import type { Column, Table } from '../types/tables.ts'; + +export const selectGeneratorForSqlite = ( + table: Table, + col: Column, +) => { + const pickGenerator = (table: Table, col: Column) => { + // int section --------------------------------------------------------------------------------------- + if ( + (col.columnType === 'integer' || col.columnType === 'numeric') + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateIntPrimaryKey[0](); + return generator; + } + + if (col.columnType === 'integer' && col.dataType === 'boolean') { + const generator = new generatorsMap.GenerateBoolean[0](); + return generator; + } + + if ((col.columnType === 'integer' && col.dataType === 'object')) { + const generator = new generatorsMap.GenerateTimestamp[0](); + return generator; + } + + if ( + col.columnType === 'integer' + || (col.dataType === 'bigint' && col.columnType === 'blob') + ) { + const generator = new generatorsMap.GenerateInt[0](); + return generator; + } + + // number section ------------------------------------------------------------------------------------ + if (col.columnType.startsWith('real') || col.columnType.startsWith('numeric')) { + if (col.typeParams.precision !== undefined) { + const precision = col.typeParams.precision; + const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; + + const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); + const generator = new generatorsMap.GenerateNumber[0]({ + minValue: -maxAbsoluteValue, + maxValue: maxAbsoluteValue, + precision: Math.pow(10, scale), + }); + return generator; + } + + const generator = new generatorsMap.GenerateNumber[0](); + return generator; + } + + // string section ------------------------------------------------------------------------------------ + if ( + (col.columnType.startsWith('text') + || col.columnType.startsWith('numeric') + || col.columnType.startsWith('blob')) + && table.primaryKeys.includes(col.name) + ) { + const generator = new generatorsMap.GenerateUniqueString[0](); + return generator; + } + + if ( + (col.columnType.startsWith('text') + || col.columnType.startsWith('numeric') + || col.columnType.startsWith('blob')) + && col.name.toLowerCase().includes('name') + ) { + const generator = new generatorsMap.GenerateFirstName[0](); + return generator; + } + + if ( + (col.columnType.startsWith('text') + || col.columnType.startsWith('numeric') + || col.columnType.startsWith('blob')) + && col.name.toLowerCase().includes('email') + ) { + const generator = new generatorsMap.GenerateEmail[0](); + return generator; + } + + if ( + col.columnType.startsWith('text') + || col.columnType.startsWith('numeric') + || col.columnType.startsWith('blob') + || col.columnType.startsWith('blobbuffer') + ) { + const generator = new generatorsMap.GenerateString[0](); + return generator; + } + + if ( + (col.columnType.startsWith('text') && col.dataType === 'json') + || (col.columnType.startsWith('blob') && col.dataType === 'json') + ) { + const generator = new generatorsMap.GenerateJson[0](); + return generator; + } + + if (col.hasDefault && col.default !== undefined) { + const generator = new generatorsMap.GenerateDefault[0]({ + defaultValue: col.default, + }); + return generator; + } + + return; + }; + + const generator = pickGenerator(table, col); + + return generator; +}; diff --git a/drizzle-seed/src/types/seedService.ts b/drizzle-seed/src/types/seedService.ts index 1ae06f44c6..808840c22a 100644 --- a/drizzle-seed/src/types/seedService.ts +++ b/drizzle-seed/src/types/seedService.ts @@ -1,6 +1,24 @@ -import type { AbstractGenerator } from '../services/Generators.ts'; +import type { CockroachDatabase, CockroachTable } from 'drizzle-orm/cockroach-core'; +import type { MsSqlDatabase, MsSqlTable } from 'drizzle-orm/mssql-core'; +import type { MySqlDatabase, MySqlTable } from 'drizzle-orm/mysql-core'; +import type { PgDatabase, PgTable } from 'drizzle-orm/pg-core'; +import type { SingleStoreDatabase, SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import type { BaseSQLiteDatabase, SQLiteTable } from 'drizzle-orm/sqlite-core'; +import type { AbstractGenerator } from '../generators/Generators.ts'; import type { Prettify } from './tables.ts'; +export type GeneratedValueType = number | bigint | string | Buffer | boolean | undefined; + +export type DbType = + | PgDatabase + | MySqlDatabase + | BaseSQLiteDatabase + | MsSqlDatabase + | CockroachDatabase + | SingleStoreDatabase; + +export type TableType = PgTable | MySqlTable | SQLiteTable | MsSqlTable | CockroachTable | SingleStoreTable; + export type TableGeneratorsType = { [columnName: string]: Prettify< { @@ -18,6 +36,7 @@ export type GeneratePossibleGeneratorsColumnType = { notNull: boolean; primary: boolean; generatedIdentityType?: 'always' | 'byDefault' | undefined; + identity?: boolean; wasRefined: boolean; wasDefinedBefore: boolean; isCyclic: boolean; @@ -43,7 +62,7 @@ export type GeneratePossibleGeneratorsTableType = Prettify<{ export type RefinementsType = Prettify<{ [tableName: string]: { count?: number; - columns: { [columnName: string]: AbstractGenerator<{}> }; + columns: { [columnName: string]: AbstractGenerator<{}> | false }; with?: { [tableName: string]: number | { weight: number; count: number | number[] }[] }; }; }>; diff --git a/drizzle-seed/src/types/tables.ts b/drizzle-seed/src/types/tables.ts index 2fadd23f00..0603263ced 100644 --- a/drizzle-seed/src/types/tables.ts +++ b/drizzle-seed/src/types/tables.ts @@ -1,5 +1,17 @@ /* eslint-disable @typescript-eslint/no-explicit-any */ +import type { AnyColumn } from 'drizzle-orm'; +import type { + CockroachTable, + ForeignKey as CockroachFK, + UniqueConstraint as CockroachUniCon, +} from 'drizzle-orm/cockroach-core'; +import type { ForeignKey as MsSqlFK, MsSqlTable, UniqueConstraint as MsSqlUniCon } from 'drizzle-orm/mssql-core'; +import type { ForeignKey as MySqlFK, MySqlTable, UniqueConstraint as MySqlUniCon } from 'drizzle-orm/mysql-core'; +import type { ForeignKey as PgFK, PgTable, UniqueConstraint as PgUniCon } from 'drizzle-orm/pg-core'; +import type { SingleStoreTable, UniqueConstraint as SingleStoreUniCon } from 'drizzle-orm/singlestore-core'; +import type { ForeignKey as SQLiteFK, SQLiteTable, UniqueConstraint as SQLiteUniCon } from 'drizzle-orm/sqlite-core'; + export type Column = { name: string; dataType: string; @@ -9,6 +21,7 @@ export type Column = { scale?: number; length?: number; dimensions?: number; + vectorValueType?: 'I8' | 'I16' | 'I32' | 'I64' | 'F32' | 'F64'; }; size?: number; default?: any; @@ -18,12 +31,14 @@ export type Column = { notNull: boolean; primary: boolean; generatedIdentityType?: 'always' | 'byDefault' | undefined; + identity?: boolean; baseColumn?: Omit; }; export type Table = { name: string; columns: Column[]; + uniqueConstraints: string[][]; primaryKeys: string[]; }; @@ -45,3 +60,21 @@ export type Prettify = [K in keyof T]: T[K]; } & {}; + +export type DrizzleTable = PgTable | MySqlTable | SQLiteTable | CockroachTable | MsSqlTable | SingleStoreTable; +export type DrizzleForeignKey = PgFK | MySqlFK | SQLiteFK | CockroachFK | MsSqlFK; +export type DrizzleUniqueConstraint = + | PgUniCon + | MySqlUniCon + | SQLiteUniCon + | CockroachUniCon + | MsSqlUniCon + | SingleStoreUniCon; + +export type TableConfigT = { + name: string; + schema?: string; + columns: AnyColumn[]; + uniqueConstraints: DrizzleUniqueConstraint[]; + foreignKeys?: DrizzleForeignKey[]; +}; diff --git a/drizzle-seed/src/utils.ts b/drizzle-seed/src/utils.ts new file mode 100644 index 0000000000..6be4381f68 --- /dev/null +++ b/drizzle-seed/src/utils.ts @@ -0,0 +1,39 @@ +import type { RelationWithReferences } from './types/tables'; + +export const isRelationCyclic = ( + startRel: RelationWithReferences, +) => { + // self relation + if (startRel.table === startRel.refTable) return false; + + // DFS + const targetTable = startRel.table; + const queue = [startRel]; + let path: string[] = []; + while (queue.length !== 0) { + const currRel = queue.shift(); + + if (path.includes(currRel!.table)) { + const idx = path.indexOf(currRel!.table); + path = path.slice(0, idx); + } + path.push(currRel!.table); + + for (const rel of currRel!.refTableRels) { + // self relation + if (rel.table === rel.refTable) continue; + + if (rel.refTable === targetTable) return true; + + // found cycle, but not the one we are looking for + if (path.includes(rel.refTable)) continue; + queue.unshift(rel); + } + } + + return false; +}; + +export const equalSets = (set1: Set, set2: Set) => { + return set1.size === set2.size && [...set1].every((si) => set2.has(si)); +}; diff --git a/drizzle-seed/tests/benchmarks/generatorsBenchmark.ts b/drizzle-seed/tests/benchmarks/generatorsBenchmark.ts index 23fca0c6c0..7147287993 100644 --- a/drizzle-seed/tests/benchmarks/generatorsBenchmark.ts +++ b/drizzle-seed/tests/benchmarks/generatorsBenchmark.ts @@ -40,7 +40,7 @@ import { GenerateValuesFromArray, GenerateYear, WeightedRandomGenerator, -} from '../../src/services/Generators.ts'; +} from '../../src/generators/Generators.ts'; const benchmark = ({ generatorName, generator, count = 100000, seed = 1 }: { generatorName: string; diff --git a/drizzle-seed/tests/cockroach/allDataTypesTest/cockroachSchema.ts b/drizzle-seed/tests/cockroach/allDataTypesTest/cockroachSchema.ts new file mode 100644 index 0000000000..921c7ae162 --- /dev/null +++ b/drizzle-seed/tests/cockroach/allDataTypesTest/cockroachSchema.ts @@ -0,0 +1,97 @@ +import { + bit, + bool, + char, + cockroachEnum, + cockroachTable, + date, + decimal, + float, + geometry, + inet, + int2, + int4, + int8, + interval, + jsonb, + numeric, + real, + string, + time, + timestamp, + uuid, + varchar, + vector, +} from 'drizzle-orm/cockroach-core'; + +export const moodEnum = cockroachEnum('mood_enum', ['sad', 'ok', 'happy']); + +export const allDataTypes = cockroachTable('all_data_types', { + int4: int4('int4'), + int2: int2('int2'), + int8: int8('int8', { mode: 'bigint' }), + int8Number: int8('int8_number', { mode: 'number' }), + numeric: numeric('numeric'), + decimal: decimal('decimal'), + real: real('real'), + doublePrecision: float('double_precision'), + boolean: bool('boolean'), + char: char('char', { length: 256 }), + varchar: varchar('varchar', { length: 256 }), + string: string('string'), + bit: bit('bit', { length: 11 }), + jsonb: jsonb('jsonb'), + time: time('time'), + timestampDate: timestamp('timestamp_date', { mode: 'date' }), + timestampString: timestamp('timestamp_string', { mode: 'string' }), + dateString: date('date_string', { mode: 'string' }), + date: date('date', { mode: 'date' }), + interval: interval('interval'), + moodEnum: moodEnum('mood_enum'), + uuid: uuid('uuid'), + inet: inet('inet'), + geometry: geometry('geometry', { type: 'point', mode: 'tuple', srid: 0 }), + vector: vector('vector', { dimensions: 3 }), +}); + +export const allArrayDataTypes = cockroachTable('all_array_data_types', { + int4Array: int4('int4_array').array(), + int2Array: int2('int2_array').array(), + int8Array: int8('int8_array', { mode: 'bigint' }).array(), + int8NumberArray: int8('int8_number_array', { mode: 'number' }).array(), + numericArray: numeric('numeric_array').array(), + decimalArray: decimal('decimal_array').array(), + realArray: real('real_array').array(), + doublePrecisionArray: float('double_precision_array').array(), + booleanArray: bool('boolean_array').array(), + charArray: char('char_array', { length: 256 }).array(), + varcharArray: varchar('varchar_array', { length: 256 }).array(), + stringArray: string('string_array').array(), + bitArray: bit('bit_array', { length: 11 }).array(), + timeArray: time('time_array').array(), + timestampDateArray: timestamp('timestamp_date_array', { mode: 'date' }).array(), + timestampStringArray: timestamp('timestamp_string_array', { mode: 'string' }).array(), + dateStringArray: date('date_string_array', { mode: 'string' }).array(), + dateArray: date('date_array', { mode: 'date' }).array(), + intervalArray: interval('interval_array').array(), + moodEnumArray: moodEnum('mood_enum_array').array(), + uuidArray: uuid('uuid_array').array(), + inetArray: inet('inet_array').array(), + geometryArray: geometry('geometry_array', { type: 'point', mode: 'tuple', srid: 0 }).array(1), +}); + +export const intervals = cockroachTable('intervals', { + intervalYear: interval({ fields: 'year' }), + intervalYearToMonth: interval({ fields: 'year to month' }), + intervalMonth: interval({ fields: 'month' }), + intervalDay: interval({ fields: 'day' }), + intervalDayToHour: interval({ fields: 'day to hour' }), + intervalDayToMinute: interval({ fields: 'day to minute' }), + intervalDayToSecond: interval({ fields: 'day to second' }), + intervalHour: interval({ fields: 'hour' }), + intervalHourToMinute: interval({ fields: 'hour to minute' }), + intervalHourToSecond: interval({ fields: 'hour to second' }), + intervalMinute: interval({ fields: 'minute' }), + intervalMinuteToSecond: interval({ fields: 'minute to second' }), + intervalSecond: interval({ fields: 'second' }), +}); diff --git a/drizzle-seed/tests/cockroach/allDataTypesTest/cockroach_all_data_types.test.ts b/drizzle-seed/tests/cockroach/allDataTypesTest/cockroach_all_data_types.test.ts new file mode 100644 index 0000000000..a69c197676 --- /dev/null +++ b/drizzle-seed/tests/cockroach/allDataTypesTest/cockroach_all_data_types.test.ts @@ -0,0 +1,144 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { cockroachTest as test } from '../instrumentation.ts'; +import * as schema from './cockroachSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TYPE "mood_enum" AS ENUM('sad', 'ok', 'happy'); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "all_data_types" ( + "int2" int2, + "int4" int4, + "int8" int8, + "int8_number" int8, + "numeric" numeric, + "decimal" numeric, + "real" real, + "double_precision" double precision, + "boolean" boolean, + "char" char(256), + "varchar" varchar(256), + "string" string, + "bit" bit(11), + "jsonb" jsonb, + "time" time, + "timestamp_date" timestamp, + "timestamp_string" timestamp, + "date_string" date, + "date" date, + "interval" interval, + "mood_enum" "mood_enum", + "uuid" uuid, + "inet" inet, + "geometry" geometry(point, 0), + "vector" vector(3) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "all_array_data_types" ( + "int2_array" int2[], + "int4_array" int4[], + "int8_array" int8[], + "int8_number_array" int8[], + "numeric_array" numeric[], + "decimal_array" numeric[], + "real_array" real[], + "double_precision_array" double precision[], + "boolean_array" boolean[], + "char_array" char(256)[], + "varchar_array" varchar(256)[], + "string_array" string[], + "bit_array" bit(11)[], + "time_array" time[], + "timestamp_date_array" timestamp[], + "timestamp_string_array" timestamp[], + "date_string_array" date[], + "date_array" date[], + "interval_array" interval[], + "mood_enum_array" "mood_enum"[], + "uuid_array" uuid[], + "inet_array" inet[], + "geometry_array" geometry(point, 0)[] + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "intervals" ( + "intervalYear" interval year, + "intervalYearToMonth" interval year to month, + "intervalMonth" interval month, + "intervalDay" interval day, + "intervalDayToHour" interval day to hour, + "intervalDayToMinute" interval day to minute, + "intervalDayToSecond" interval day to second, + "intervalHour" interval hour, + "intervalHourToMinute" interval hour to minute, + "intervalHourToSecond" interval hour to second, + "intervalMinute" interval minute, + "intervalMinuteToSecond" interval minute to second, + "intervalSecond" interval second + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('all data types test', async ({ db }) => { + await seed(db, { allDataTypes: schema.allDataTypes }, { count: 10000 }); + + const allDataTypes = await db.select().from(schema.allDataTypes); + // every value in each rows does not equal undefined. + const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + + expect(predicate).toBe(true); +}); + +test('all array data types test', async ({ db }) => { + await seed(db, { allArrayDataTypes: schema.allArrayDataTypes }, { count: 1 }); + + const allArrayDataTypes = await db.select().from(schema.allArrayDataTypes); + // every value in each rows does not equal undefined. + const predicate = allArrayDataTypes.every((row) => + Object.values(row).every((val) => val !== undefined && val !== null && (val.length === 10 || val.length === 1)) + ); + + expect(predicate).toBe(true); +}); + +test('intervals test', async ({ db }) => { + await seed(db, { intervals: schema.intervals }, { count: 1000 }); + + const intervals = await db.select().from(schema.intervals); + // every value in each rows does not equal undefined. + const predicate = intervals.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/cockroach/cockroach.test.ts b/drizzle-seed/tests/cockroach/cockroach.test.ts new file mode 100644 index 0000000000..70eb497f15 --- /dev/null +++ b/drizzle-seed/tests/cockroach/cockroach.test.ts @@ -0,0 +1,411 @@ +import { sql } from 'drizzle-orm'; +import { relations } from 'drizzle-orm/_relations'; +import { expect, vi } from 'vitest'; +import { reset, seed } from '../../src/index.ts'; +import * as schema from './cockroachSchema.ts'; +import { cockroachTest as test } from './instrumentation.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "customer" ( + "id" varchar(256) PRIMARY KEY NOT NULL, + "company_name" text NOT NULL, + "contact_name" text NOT NULL, + "contact_title" text NOT NULL, + "address" text NOT NULL, + "city" text NOT NULL, + "postal_code" text, + "region" text, + "country" text NOT NULL, + "phone" text NOT NULL, + "fax" text + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "order_detail" ( + "unit_price" numeric NOT NULL, + "quantity" integer NOT NULL, + "discount" numeric NOT NULL, + "order_id" integer NOT NULL, + "product_id" integer NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "employee" ( + "id" integer PRIMARY KEY NOT NULL, + "last_name" text NOT NULL, + "first_name" text, + "title" text NOT NULL, + "title_of_courtesy" text NOT NULL, + "birth_date" timestamp NOT NULL, + "hire_date" timestamp NOT NULL, + "address" text NOT NULL, + "city" text NOT NULL, + "postal_code" text NOT NULL, + "country" text NOT NULL, + "home_phone" text NOT NULL, + "extension" integer NOT NULL, + "notes" text NOT NULL, + "reports_to" integer, + "photo_path" text + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "order" ( + "id" integer PRIMARY KEY NOT NULL, + "order_date" timestamp NOT NULL, + "required_date" timestamp NOT NULL, + "shipped_date" timestamp, + "ship_via" integer NOT NULL, + "freight" numeric NOT NULL, + "ship_name" text NOT NULL, + "ship_city" text NOT NULL, + "ship_region" text, + "ship_postal_code" text, + "ship_country" text NOT NULL, + "customer_id" text NOT NULL, + "employee_id" integer NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "product" ( + "id" integer PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "quantity_per_unit" text NOT NULL, + "unit_price" numeric NOT NULL, + "units_in_stock" integer NOT NULL, + "units_on_order" integer NOT NULL, + "reorder_level" integer NOT NULL, + "discontinued" integer NOT NULL, + "supplier_id" integer NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "supplier" ( + "id" integer PRIMARY KEY NOT NULL, + "company_name" text NOT NULL, + "contact_name" text NOT NULL, + "contact_title" text NOT NULL, + "address" text NOT NULL, + "city" text NOT NULL, + "region" text, + "postal_code" text NOT NULL, + "country" text NOT NULL, + "phone" text NOT NULL + ); + `, + ); + + await db.execute( + sql` + ALTER TABLE "order_detail" ADD CONSTRAINT "order_detail_order_id_order_id_fk" FOREIGN KEY ("order_id") REFERENCES "order"("id") ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE "order_detail" ADD CONSTRAINT "order_detail_product_id_product_id_fk" FOREIGN KEY ("product_id") REFERENCES "product"("id") ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE "employee" ADD CONSTRAINT "employee_reports_to_employee_id_fk" FOREIGN KEY ("reports_to") REFERENCES "employee"("id") ON DELETE no action ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE "order" ADD CONSTRAINT "order_customer_id_customer_id_fk" FOREIGN KEY ("customer_id") REFERENCES "customer"("id") ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE "order" ADD CONSTRAINT "order_employee_id_employee_id_fk" FOREIGN KEY ("employee_id") REFERENCES "employee"("id") ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE "product" ADD CONSTRAINT "product_supplier_id_supplier_id_fk" FOREIGN KEY ("supplier_id") REFERENCES "supplier"("id") ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "identity_columns_table" ( + "id" integer generated always as identity, + "id1" integer generated by default as identity, + "name" text + ); + `, + ); + + await db.execute( + sql` + create table "users" + ( + id serial + primary key, + name text, + "invitedBy" integer + constraint "users_invitedBy_user_id_fk" + references "users" + ); + `, + ); + + await db.execute( + sql` + create table "posts" + ( + id serial + primary key, + name text, + content text, + "userId" integer + constraint "users_userId_user_id_fk" + references "users" + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('basic seed test', async ({ db }) => { + await seed(db, schema); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(10); + expect(details.length).toBe(10); + expect(employees.length).toBe(10); + expect(orders.length).toBe(10); + expect(products.length).toBe(10); + expect(suppliers.length).toBe(10); +}); + +test('seed with options.count:11 test', async ({ db }) => { + await seed(db, schema, { count: 11 }); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(11); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(11); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); +}); + +test('redefine(refine) customers count', async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 12, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(12); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(11); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); +}); + +test('redefine(refine) all tables count', async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 12, + }, + details: { + count: 13, + }, + employees: { + count: 14, + }, + orders: { + count: 15, + }, + products: { + count: 16, + }, + suppliers: { + count: 17, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(12); + expect(details.length).toBe(13); + expect(employees.length).toBe(14); + expect(orders.length).toBe(15); + expect(products.length).toBe(16); + expect(suppliers.length).toBe(17); +}); + +test("redefine(refine) orders count using 'with' in customers", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 13, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); +}); + +test("sequential using of 'with'", async ({ db }) => { + const currSchema = { + customers: schema.customers, + details: schema.details, + employees: schema.employees, + orders: schema.orders, + products: schema.products, + suppliers: schema.suppliers, + }; + await seed(db, currSchema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 12, + with: { + details: 3, + }, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(24); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); +}); + +test('seeding with identity columns', async ({ db }) => { + await seed(db, { identityColumnsTable: schema.identityColumnsTable }); + + const result = await db.select().from(schema.identityColumnsTable); + + expect(result.length).toBe(10); +}); + +test('seeding with self relation', async ({ db }) => { + await seed(db, { users: schema.users }); + + const result = await db.select().from(schema.users); + + expect(result.length).toBe(10); + const predicate = result.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('overlapping a foreign key constraint with a one-to-many relation', async ({ db }) => { + const postsRelation = relations(schema.posts, ({ one }) => ({ + user: one(schema.users, { fields: [schema.posts.userId], references: [schema.users.id] }), + })); + + const consoleMock = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + await reset(db, { users: schema.users, posts: schema.posts, postsRelation }); + await seed(db, { users: schema.users, posts: schema.posts, postsRelation }); + // expecting to get a warning + expect(consoleMock).toBeCalled(); + expect(consoleMock).toBeCalledWith(expect.stringMatching(/^You are providing a one-to-many relation.+/)); + + const users = await db.select().from(schema.users); + const posts = await db.select().from(schema.posts); + + expect(users.length).toBe(10); + let predicate = users.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(posts.length).toBe(10); + predicate = posts.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/cockroach/cockroachSchema.ts b/drizzle-seed/tests/cockroach/cockroachSchema.ts new file mode 100644 index 0000000000..056ac79252 --- /dev/null +++ b/drizzle-seed/tests/cockroach/cockroachSchema.ts @@ -0,0 +1,127 @@ +import type { AnyCockroachColumn } from 'drizzle-orm/cockroach-core'; +import { cockroachTable, int4, numeric, text, timestamp, varchar } from 'drizzle-orm/cockroach-core'; + +export const customers = cockroachTable('customer', { + id: varchar('id', { length: 256 }).primaryKey(), + companyName: text('company_name').notNull(), + contactName: text('contact_name').notNull(), + contactTitle: text('contact_title').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + postalCode: text('postal_code'), + region: text('region'), + country: text('country').notNull(), + phone: text('phone').notNull(), + fax: text('fax'), +}); + +export const employees = cockroachTable( + 'employee', + { + id: int4('id').primaryKey(), + lastName: text('last_name').notNull(), + firstName: text('first_name'), + title: text('title').notNull(), + titleOfCourtesy: text('title_of_courtesy').notNull(), + birthDate: timestamp('birth_date').notNull(), + hireDate: timestamp('hire_date').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + postalCode: text('postal_code').notNull(), + country: text('country').notNull(), + homePhone: text('home_phone').notNull(), + extension: int4('extension').notNull(), + notes: text('notes').notNull(), + reportsTo: int4('reports_to').references((): AnyCockroachColumn => employees.id), + photoPath: text('photo_path'), + }, +); + +export const orders = cockroachTable('order', { + id: int4('id').primaryKey(), + orderDate: timestamp('order_date').notNull(), + requiredDate: timestamp('required_date').notNull(), + shippedDate: timestamp('shipped_date'), + shipVia: int4('ship_via').notNull(), + freight: numeric('freight').notNull(), + shipName: text('ship_name').notNull(), + shipCity: text('ship_city').notNull(), + shipRegion: text('ship_region'), + shipPostalCode: text('ship_postal_code'), + shipCountry: text('ship_country').notNull(), + + customerId: text('customer_id') + .notNull() + .references(() => customers.id, { onDelete: 'cascade' }), + + employeeId: int4('employee_id') + .notNull() + .references(() => employees.id, { onDelete: 'cascade' }), +}); + +export const suppliers = cockroachTable('supplier', { + id: int4('id').primaryKey(), + companyName: text('company_name').notNull(), + contactName: text('contact_name').notNull(), + contactTitle: text('contact_title').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + region: text('region'), + postalCode: text('postal_code').notNull(), + country: text('country').notNull(), + phone: text('phone').notNull(), +}); + +export const products = cockroachTable('product', { + id: int4('id').primaryKey(), + name: text('name').notNull(), + quantityPerUnit: text('quantity_per_unit').notNull(), + unitPrice: numeric('unit_price').notNull(), + unitsInStock: int4('units_in_stock').notNull(), + unitsOnOrder: int4('units_on_order').notNull(), + reorderLevel: int4('reorder_level').notNull(), + discontinued: int4('discontinued').notNull(), + + supplierId: int4('supplier_id') + .notNull() + .references(() => suppliers.id, { onDelete: 'cascade' }), +}); + +export const details = cockroachTable('order_detail', { + unitPrice: numeric('unit_price').notNull(), + quantity: int4('quantity').notNull(), + discount: numeric('discount').notNull(), + + orderId: int4('order_id') + .notNull() + .references(() => orders.id, { onDelete: 'cascade' }), + + productId: int4('product_id') + .notNull() + .references(() => products.id, { onDelete: 'cascade' }), +}); + +export const identityColumnsTable = cockroachTable('identity_columns_table', { + id: int4().generatedAlwaysAsIdentity(), + id1: int4().generatedByDefaultAsIdentity(), + name: text(), +}); + +export const users = cockroachTable( + 'users', + { + id: int4().primaryKey(), + name: text(), + invitedBy: int4().references((): AnyCockroachColumn => users.id), + }, +); + +export const posts = cockroachTable( + 'posts', + { + id: int4().primaryKey(), + name: text(), + content: text(), + userId: int4().references(() => users.id), + }, +); diff --git a/drizzle-seed/tests/cockroach/compositeUniqueKey/cockroach.test.ts b/drizzle-seed/tests/cockroach/compositeUniqueKey/cockroach.test.ts new file mode 100644 index 0000000000..f6700f8b80 --- /dev/null +++ b/drizzle-seed/tests/cockroach/compositeUniqueKey/cockroach.test.ts @@ -0,0 +1,202 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { cockroachTest as test } from '../instrumentation.ts'; +import * as schema from './cockroachSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "composite_example" ( + "id" int4 not null, + "name" text not null, + CONSTRAINT "composite_example_id_name_unique" UNIQUE("id","name"), + CONSTRAINT "custom_name" UNIQUE("id","name") + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "unique_column_in_composite_of_two_0" ( + "id" int4 not null unique, + "name" text not null, + CONSTRAINT "custom_name0" UNIQUE("id","name") + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "unique_column_in_composite_of_two_1" ( + "id" int4 not null, + "name" text not null, + CONSTRAINT "custom_name1" UNIQUE("id","name"), + CONSTRAINT "custom_name1_id" UNIQUE("id") + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "unique_column_in_composite_of_three_0" ( + "id" int4 not null unique, + "name" text not null, + "slug" text not null, + CONSTRAINT "custom_name2" UNIQUE("id","name","slug") + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "unique_column_in_composite_of_three_1" ( + "id" int4 not null, + "name" text not null, + "slug" text not null, + CONSTRAINT "custom_name3" UNIQUE("id","name","slug"), + CONSTRAINT "custom_name3_id" UNIQUE("id") + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('basic seed test', async ({ db }) => { + const currSchema = { composite: schema.composite }; + await seed(db, currSchema, { count: 16 }); + + let composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + // ------------------------------------------------------------------ + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + // ------------------------------------------------------------------ + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + // ------------------------------------------------------------------ + await seed(db, currSchema, { count: 17 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(17); + await reset(db, currSchema); +}); + +test('unique column in composite of 2 columns', async ({ db }) => { + const currSchema0 = { uniqueColumnInCompositeOfTwo0: schema.uniqueColumnInCompositeOfTwo0 }; + await seed(db, currSchema0, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo0); + + expect(composite.length).toBe(4); + await reset(db, currSchema0); + + // ------------------------------------------------------------------ + const currSchema1 = { uniqueColumnInCompositeOfTwo1: schema.uniqueColumnInCompositeOfTwo1 }; + await seed(db, currSchema1, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo1); + + expect(composite.length).toBe(4); + await reset(db, currSchema1); +}); + +test('unique column in composite of 3 columns', async ({ db }) => { + const currSchema0 = { uniqueColumnInCompositeOfThree0: schema.uniqueColumnInCompositeOfThree0 }; + await seed(db, currSchema0, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfThree0); + + expect(composite.length).toBe(16); + await reset(db, currSchema0); + + // ------------------------------------------------------------------ + const currSchema1 = { uniqueColumnInCompositeOfThree1: schema.uniqueColumnInCompositeOfThree1 }; + await seed(db, currSchema1, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfThree1); + + expect(composite.length).toBe(16); + await reset(db, currSchema1); +}); diff --git a/drizzle-seed/tests/cockroach/compositeUniqueKey/cockroachSchema.ts b/drizzle-seed/tests/cockroach/compositeUniqueKey/cockroachSchema.ts new file mode 100644 index 0000000000..c949d5b6e1 --- /dev/null +++ b/drizzle-seed/tests/cockroach/compositeUniqueKey/cockroachSchema.ts @@ -0,0 +1,40 @@ +import { cockroachTable, int4, text, unique, varchar } from 'drizzle-orm/cockroach-core'; + +export const composite = cockroachTable('composite_example', { + id: int4('id').notNull(), + name: text('name').notNull(), +}, (t) => [ + unique('custom_name').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo0 = cockroachTable('unique_column_in_composite_of_two_0', { + id: int4('id').notNull().unique(), + name: varchar('name', { length: 8 }).notNull(), +}, (t) => [ + unique('custom_name0').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo1 = cockroachTable('unique_column_in_composite_of_two_1', { + id: int4('id').notNull(), + name: text('name').notNull(), +}, (t) => [ + unique('custom_name1').on(t.id, t.name), + unique('custom_name1_id').on(t.id), +]); + +export const uniqueColumnInCompositeOfThree0 = cockroachTable('unique_column_in_composite_of_three_0', { + id: int4('id').notNull().unique(), + name: text('name').notNull(), + slug: varchar('slug').notNull(), +}, (t) => [ + unique('custom_name2').on(t.id, t.name, t.slug), +]); + +export const uniqueColumnInCompositeOfThree1 = cockroachTable('unique_column_in_composite_of_three_1', { + id: int4('id').notNull(), + name: text('name').notNull(), + slug: varchar('slug').notNull(), +}, (t) => [ + unique('custom_name3').on(t.id, t.name, t.slug), + unique('custom_name3_id').on(t.id), +]); diff --git a/drizzle-seed/tests/cockroach/cyclicTables/cockroachSchema.ts b/drizzle-seed/tests/cockroach/cyclicTables/cockroachSchema.ts new file mode 100644 index 0000000000..d57a61cc58 --- /dev/null +++ b/drizzle-seed/tests/cockroach/cyclicTables/cockroachSchema.ts @@ -0,0 +1,88 @@ +import { relations } from 'drizzle-orm/_relations'; +import type { AnyCockroachColumn } from 'drizzle-orm/cockroach-core'; +import { cockroachTable, foreignKey, int4, string, varchar } from 'drizzle-orm/cockroach-core'; + +// MODEL +export const modelTable = cockroachTable( + 'model', + { + id: int4().primaryKey().generatedByDefaultAsIdentity(), + name: varchar().notNull(), + defaultImageId: int4(), + }, + (t) => [ + foreignKey({ + columns: [t.defaultImageId], + foreignColumns: [modelImageTable.id], + }), + ], +); + +export const modelRelations = relations(modelTable, ({ one, many }) => ({ + images: many(modelImageTable), + defaultImage: one(modelImageTable, { + fields: [modelTable.defaultImageId], + references: [modelImageTable.id], + }), +})); + +// MODEL IMAGE +export const modelImageTable = cockroachTable( + 'model_image', + { + id: int4().primaryKey(), + url: varchar().notNull(), + caption: varchar(), + modelId: int4() + .notNull() + .references((): AnyCockroachColumn => modelTable.id), + }, +); + +export const modelImageRelations = relations(modelImageTable, ({ one }) => ({ + model: one(modelTable, { + fields: [modelImageTable.modelId], + references: [modelTable.id], + }), +})); + +// 3 tables case +export const modelTable1 = cockroachTable( + 'model1', + { + id: int4().primaryKey(), + name: varchar().notNull(), + userId: int4() + .references(() => user.id), + defaultImageId: int4(), + }, + (t) => [ + foreignKey({ + columns: [t.defaultImageId], + foreignColumns: [modelImageTable1.id], + }), + ], +); + +export const modelImageTable1 = cockroachTable( + 'model_image1', + { + id: int4().primaryKey(), + url: varchar().notNull(), + caption: varchar(), + modelId: int4().notNull() + .references((): AnyCockroachColumn => modelTable1.id), + }, +); + +export const user = cockroachTable( + 'user', + { + id: int4().primaryKey(), + name: string(), + invitedBy: int4().references((): AnyCockroachColumn => user.id), + imageId: int4() + .notNull() + .references((): AnyCockroachColumn => modelImageTable1.id), + }, +); diff --git a/drizzle-seed/tests/cockroach/cyclicTables/cyclicTables.test.ts b/drizzle-seed/tests/cockroach/cyclicTables/cyclicTables.test.ts new file mode 100644 index 0000000000..d0619686c5 --- /dev/null +++ b/drizzle-seed/tests/cockroach/cyclicTables/cyclicTables.test.ts @@ -0,0 +1,157 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { cockroachTest as test } from '../instrumentation.ts'; +import * as schema from './cockroachSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + create table model_image + ( + id int4 generated by default as identity + primary key, + url varchar not null, + caption varchar, + "modelId" int4 not null + ); + `, + ); + + await db.execute( + sql` + create table model + ( + id int4 generated by default as identity + primary key, + name varchar not null, + "defaultImageId" int4 + constraint "model_defaultImageId_model_image_id_fk" + references model_image + ); + `, + ); + + await db.execute( + sql` + alter table model_image + add constraint "model_image_modelId_model_id_fk" + foreign key ("modelId") references model; + `, + ); + + // 3 tables case + await db.execute( + sql` + create table model_image1 + ( + id int4 generated by default as identity + primary key, + url varchar not null, + caption varchar, + "modelId" int4 not null + ); + `, + ); + + await db.execute( + sql` + create table "user" + ( + id int4 generated by default as identity + primary key, + name string, + "invitedBy" int4 + constraint "user_invitedBy_user_id_fk" + references "user", + "imageId" int4 not null + constraint "user_imageId_model_image1_id_fk" + references model_image1 + ); + `, + ); + + await db.execute( + sql` + create table model1 + ( + id int4 generated by default as identity + primary key, + name varchar not null, + "userId" int4 + constraint "model1_userId_user_id_fk" + references "user", + "defaultImageId" int4 + constraint "model1_defaultImageId_model_image1_id_fk" + references model_image1 + ); + `, + ); + + await db.execute( + sql` + alter table model_image1 + add constraint "model_image1_modelId_model1_id_fk" + foreign key ("modelId") references model1; + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('2 cyclic tables test', async ({ db }) => { + await seed(db, { + modelTable: schema.modelTable, + modelImageTable: schema.modelImageTable, + }); + + const modelTable = await db.select().from(schema.modelTable); + const modelImageTable = await db.select().from(schema.modelImageTable); + + expect(modelTable.length).toBe(10); + let predicate = modelTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(modelImageTable.length).toBe(10); + predicate = modelImageTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('3 cyclic tables test', async ({ db }) => { + await seed(db, { + modelTable1: schema.modelTable1, + modelImageTable1: schema.modelImageTable1, + user: schema.user, + }); + + const modelTable1 = await db.select().from(schema.modelTable1); + const modelImageTable1 = await db.select().from(schema.modelImageTable1); + const user = await db.select().from(schema.user); + + expect(modelTable1.length).toBe(10); + let predicate = modelTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(modelImageTable1.length).toBe(10); + predicate = modelImageTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(user.length).toBe(10); + predicate = user.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/cockroach/instrumentation.ts b/drizzle-seed/tests/cockroach/instrumentation.ts new file mode 100644 index 0000000000..c14d30fb90 --- /dev/null +++ b/drizzle-seed/tests/cockroach/instrumentation.ts @@ -0,0 +1,80 @@ +import { drizzle } from 'drizzle-orm/cockroach'; +import type { CockroachDatabase } from 'drizzle-orm/cockroach-core'; +import { Client } from 'pg'; +import { test as base } from 'vitest'; + +const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, +) => { + const { diff } = await import('../../../drizzle-kit/tests/cockroach/mocks' as string); + + const res = await diff({}, schema, []); + for (const s of res.sqlStatements) { + await query(s, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +const prepareTest = () => { + return base.extend< + { + client: { + client: Client; + query: (sql: string, params: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + db: CockroachDatabase; + push: (schema: any) => Promise; + } + >({ + client: [ + // oxlint-disable-next-line + async ({}, use) => { + const envurl = process.env['COCKROACH_CONNECTION_STRING']; + if (!envurl) throw new Error('No cockroach url provided'); + + const client = new Client(envurl); + await client.connect(); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res.rows as any[]; + }; + const batch = async (statements: string[]) => { + return client.query(statements.map((x) => x.endsWith(';') ? x : `${x};`).join('\n')).then(() => '' as any); + }; + + await batch(['drop database if exists drizzle;', 'create database drizzle;', 'use drizzle;']); + + await use({ client, query, batch }); + await client.end(); + }, + { scope: 'worker' }, + ], + db: [ + async ({ client }, use) => { + const db = drizzle({ client: client.client }); + await use(db as any); + }, + { scope: 'worker' }, + ], + push: [ + async ({ client }, use) => { + const { query } = client; + const push = ( + schema: any, + ) => _push(query, schema); + + await use(push); + }, + { scope: 'worker' }, + ], + }); +}; + +export const cockroachTest = prepareTest(); +export type Test = ReturnType; diff --git a/drizzle-seed/tests/cockroach/softRelationsTest/cockroachSchema.ts b/drizzle-seed/tests/cockroach/softRelationsTest/cockroachSchema.ts new file mode 100644 index 0000000000..c05fe41226 --- /dev/null +++ b/drizzle-seed/tests/cockroach/softRelationsTest/cockroachSchema.ts @@ -0,0 +1,128 @@ +import { relations } from 'drizzle-orm/_relations'; +import { cockroachTable, int4, numeric, string, timestamp, varchar } from 'drizzle-orm/cockroach-core'; + +export const customers = cockroachTable('customer', { + id: varchar('id', { length: 256 }).primaryKey(), + companyName: string('company_name').notNull(), + contactName: string('contact_name').notNull(), + contactTitle: string('contact_title').notNull(), + address: string('address').notNull(), + city: string('city').notNull(), + postalCode: string('postal_code'), + region: string('region'), + country: string('country').notNull(), + phone: string('phone').notNull(), + fax: string('fax'), +}); + +export const employees = cockroachTable( + 'employee', + { + id: int4('id').primaryKey(), + lastName: string('last_name').notNull(), + firstName: string('first_name'), + title: string('title').notNull(), + titleOfCourtesy: string('title_of_courtesy').notNull(), + birthDate: timestamp('birth_date').notNull(), + hireDate: timestamp('hire_date').notNull(), + address: string('address').notNull(), + city: string('city').notNull(), + postalCode: string('postal_code').notNull(), + country: string('country').notNull(), + homePhone: string('home_phone').notNull(), + extension: int4('extension').notNull(), + notes: string('notes').notNull(), + reportsTo: int4('reports_to'), + photoPath: string('photo_path'), + }, +); + +export const employeesRelations = relations(employees, ({ one }) => ({ + employee: one(employees, { + fields: [employees.reportsTo], + references: [employees.id], + }), +})); + +export const orders = cockroachTable('order', { + id: int4('id').primaryKey(), + orderDate: timestamp('order_date').notNull(), + requiredDate: timestamp('required_date').notNull(), + shippedDate: timestamp('shipped_date'), + shipVia: int4('ship_via').notNull(), + freight: numeric('freight').notNull(), + shipName: string('ship_name').notNull(), + shipCity: string('ship_city').notNull(), + shipRegion: string('ship_region'), + shipPostalCode: string('ship_postal_code'), + shipCountry: string('ship_country').notNull(), + + customerId: string('customer_id').notNull(), + + employeeId: int4('employee_id').notNull(), +}); + +export const ordersRelations = relations(orders, ({ one }) => ({ + customer: one(customers, { + fields: [orders.customerId], + references: [customers.id], + }), + employee: one(employees, { + fields: [orders.employeeId], + references: [employees.id], + }), +})); + +export const suppliers = cockroachTable('supplier', { + id: int4('id').primaryKey(), + companyName: string('company_name').notNull(), + contactName: string('contact_name').notNull(), + contactTitle: string('contact_title').notNull(), + address: string('address').notNull(), + city: string('city').notNull(), + region: string('region'), + postalCode: string('postal_code').notNull(), + country: string('country').notNull(), + phone: string('phone').notNull(), +}); + +export const products = cockroachTable('product', { + id: int4('id').primaryKey(), + name: string('name').notNull(), + quantityPerUnit: string('quantity_per_unit').notNull(), + unitPrice: numeric('unit_price').notNull(), + unitsInStock: int4('units_in_stock').notNull(), + unitsOnOrder: int4('units_on_order').notNull(), + reorderLevel: int4('reorder_level').notNull(), + discontinued: int4('discontinued').notNull(), + + supplierId: int4('supplier_id').notNull(), +}); + +export const productsRelations = relations(products, ({ one }) => ({ + supplier: one(suppliers, { + fields: [products.supplierId], + references: [suppliers.id], + }), +})); + +export const details = cockroachTable('order_detail', { + unitPrice: numeric('unit_price').notNull(), + quantity: int4('quantity').notNull(), + discount: numeric('discount').notNull(), + + orderId: int4('order_id').notNull(), + + productId: int4('product_id').notNull(), +}); + +export const detailsRelations = relations(details, ({ one }) => ({ + order: one(orders, { + fields: [details.orderId], + references: [orders.id], + }), + product: one(products, { + fields: [details.productId], + references: [products.id], + }), +})); diff --git a/drizzle-seed/tests/cockroach/softRelationsTest/softRelations.test.ts b/drizzle-seed/tests/cockroach/softRelationsTest/softRelations.test.ts new file mode 100644 index 0000000000..e2712e8d9d --- /dev/null +++ b/drizzle-seed/tests/cockroach/softRelationsTest/softRelations.test.ts @@ -0,0 +1,253 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { cockroachTest as test } from '../instrumentation.ts'; +import * as schema from './cockroachSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "customer" ( + "id" varchar(256) PRIMARY KEY NOT NULL, + "company_name" string NOT NULL, + "contact_name" string NOT NULL, + "contact_title" string NOT NULL, + "address" string NOT NULL, + "city" string NOT NULL, + "postal_code" string, + "region" string, + "country" string NOT NULL, + "phone" string NOT NULL, + "fax" string + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "order_detail" ( + "unit_price" numeric NOT NULL, + "quantity" int4 NOT NULL, + "discount" numeric NOT NULL, + "order_id" int4 NOT NULL, + "product_id" int4 NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "employee" ( + "id" int4 PRIMARY KEY NOT NULL, + "last_name" string NOT NULL, + "first_name" string, + "title" string NOT NULL, + "title_of_courtesy" string NOT NULL, + "birth_date" timestamp NOT NULL, + "hire_date" timestamp NOT NULL, + "address" string NOT NULL, + "city" string NOT NULL, + "postal_code" string NOT NULL, + "country" string NOT NULL, + "home_phone" string NOT NULL, + "extension" int4 NOT NULL, + "notes" string NOT NULL, + "reports_to" int4, + "photo_path" string + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "order" ( + "id" int4 PRIMARY KEY NOT NULL, + "order_date" timestamp NOT NULL, + "required_date" timestamp NOT NULL, + "shipped_date" timestamp, + "ship_via" int4 NOT NULL, + "freight" numeric NOT NULL, + "ship_name" string NOT NULL, + "ship_city" string NOT NULL, + "ship_region" string, + "ship_postal_code" string, + "ship_country" string NOT NULL, + "customer_id" string NOT NULL, + "employee_id" int4 NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "product" ( + "id" int4 PRIMARY KEY NOT NULL, + "name" string NOT NULL, + "quantity_per_unit" string NOT NULL, + "unit_price" numeric NOT NULL, + "units_in_stock" int4 NOT NULL, + "units_on_order" int4 NOT NULL, + "reorder_level" int4 NOT NULL, + "discontinued" int4 NOT NULL, + "supplier_id" int4 NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "supplier" ( + "id" int4 PRIMARY KEY NOT NULL, + "company_name" string NOT NULL, + "contact_name" string NOT NULL, + "contact_title" string NOT NULL, + "address" string NOT NULL, + "city" string NOT NULL, + "region" string, + "postal_code" string NOT NULL, + "country" string NOT NULL, + "phone" string NOT NULL + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +const checkSoftRelations = ( + customers: (typeof schema.customers.$inferSelect)[], + details: (typeof schema.details.$inferSelect)[], + employees: (typeof schema.employees.$inferSelect)[], + orders: (typeof schema.orders.$inferSelect)[], + products: (typeof schema.products.$inferSelect)[], + suppliers: (typeof schema.suppliers.$inferSelect)[], +) => { + // employees soft relations check + const employeeIds = new Set(employees.map((employee) => employee.id)); + const employeesPredicate = employees.every((employee) => + employee.reportsTo !== null && employeeIds.has(employee.reportsTo) + ); + expect(employeesPredicate).toBe(true); + + // orders soft relations check + const customerIds = new Set(customers.map((customer) => customer.id)); + const ordersPredicate1 = orders.every((order) => order.customerId !== null && customerIds.has(order.customerId)); + expect(ordersPredicate1).toBe(true); + + const ordersPredicate2 = orders.every((order) => order.employeeId !== null && employeeIds.has(order.employeeId)); + expect(ordersPredicate2).toBe(true); + + // product soft relations check + const supplierIds = new Set(suppliers.map((supplier) => supplier.id)); + const productsPredicate = products.every((product) => + product.supplierId !== null && supplierIds.has(product.supplierId) + ); + expect(productsPredicate).toBe(true); + + // details soft relations check + const orderIds = new Set(orders.map((order) => order.id)); + const detailsPredicate1 = details.every((detail) => detail.orderId !== null && orderIds.has(detail.orderId)); + expect(detailsPredicate1).toBe(true); + + const productIds = new Set(products.map((product) => product.id)); + const detailsPredicate2 = details.every((detail) => detail.productId !== null && productIds.has(detail.productId)); + expect(detailsPredicate2).toBe(true); +}; + +test('basic seed, soft relations test', async ({ db }) => { + await seed(db, schema); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(10); + expect(details.length).toBe(10); + expect(employees.length).toBe(10); + expect(orders.length).toBe(10); + expect(products.length).toBe(10); + expect(suppliers.length).toBe(10); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test("redefine(refine) orders count using 'with' in customers, soft relations test", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 13, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test("sequential using of 'with', soft relations test", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 12, + with: { + details: 3, + }, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(24); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); diff --git a/drizzle-seed/tests/cockroach/utils.ts b/drizzle-seed/tests/cockroach/utils.ts new file mode 100644 index 0000000000..0c1318e010 --- /dev/null +++ b/drizzle-seed/tests/cockroach/utils.ts @@ -0,0 +1,33 @@ +import Docker from 'dockerode'; +import getPort from 'get-port'; +import { v4 as uuidV4 } from 'uuid'; + +export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { + const docker = new Docker(); + const port = await getPort({ port: 26257 }); + const image = 'cockroachdb/cockroach:v25.2.0'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + const cockroachdbContainer = await docker.createContainer({ + Image: image, + Cmd: ['start-single-node', '--insecure'], + name: `drizzle-seed-tests-${uuidV4()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '26257/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await cockroachdbContainer.start(); + + return { + connectionString: `postgresql://root@127.0.0.1:${port}/defaultdb?sslmode=disable`, + container: cockroachdbContainer, + }; +} diff --git a/drizzle-seed/tests/mssql/allDataTypesTest/mssqlSchema.ts b/drizzle-seed/tests/mssql/allDataTypesTest/mssqlSchema.ts new file mode 100644 index 0000000000..9aaccfe656 --- /dev/null +++ b/drizzle-seed/tests/mssql/allDataTypesTest/mssqlSchema.ts @@ -0,0 +1,50 @@ +import { + bigint, + binary, + bit, + char, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + int, + mssqlTable, + numeric, + real, + smallint, + text, + time, + tinyint, + varbinary, + varchar, +} from 'drizzle-orm/mssql-core'; + +export const allDataTypes = mssqlTable('all_data_types', { + int: int('integer'), + tinyint: tinyint('tinyint'), + smallint: smallint('smallint'), + biginteger: bigint('bigint', { mode: 'bigint' }), + bigintNumber: bigint('bigint_number', { mode: 'number' }), + real: real('real'), + decimal: decimal('decimal'), + numeric: numeric('numeric'), + float: float('float'), + binary: binary('binary', { length: 5 }), + varbinary: varbinary('varbinary', { length: 5 }), + char: char('char', { length: 5 }), + varchar: varchar('varchar', { length: 5 }), + text: text('text'), + bit: bit('bit'), + dateString: date('date_string', { mode: 'string' }), + date: date('date', { mode: 'date' }), + datetime: datetime('datetime', { mode: 'date' }), + datetimeString: datetime('datetime_string', { mode: 'string' }), + datetime2: datetime2('datetime2', { mode: 'date' }), + datetime2String: datetime2('datetime2_string', { mode: 'string' }), + datetimeOffset: datetimeoffset('datetime_offset', { mode: 'date' }), + datetimeOffsetString: datetimeoffset('datetime_offset_string', { mode: 'string' }), + time: time('time'), + // json: json('json'), +}); diff --git a/drizzle-seed/tests/mssql/allDataTypesTest/mssql_all_data_types.test.ts b/drizzle-seed/tests/mssql/allDataTypesTest/mssql_all_data_types.test.ts new file mode 100644 index 0000000000..a414abbfa4 --- /dev/null +++ b/drizzle-seed/tests/mssql/allDataTypesTest/mssql_all_data_types.test.ts @@ -0,0 +1,65 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { mssqlTest as test } from '../instrumentation.ts'; +import * as schema from './mssqlSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE [all_data_types] ( + [integer] int, + [tinyint] tinyint, + [smallint] smallint, + [bigint] bigint, + [bigint_number] bigint, + [real] real, + [decimal] decimal, + [numeric] numeric, + [float] float, + [binary] binary(5), + [varbinary] varbinary(5), + [char] char(5), + [varchar] varchar(5), + [text] text, + [bit] bit, + [date_string] date, + [date] date, + [datetime] datetime, + [datetime_string] datetime, + [datetime2] datetime2, + [datetime2_string] datetime2, + [datetime_offset] datetimeoffset, + [datetime_offset_string] datetimeoffset, + [time] time + ); + `, + ); + + resolveFunc(''); + } + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('basic seed test', async ({ db }) => { + await seed(db, schema, { count: 10000 }); + + const allDataTypes = await db.select().from(schema.allDataTypes); + + // every value in each 10 rows does not equal undefined. + const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/mssql/compositeUniqueKey/mssql.test.ts b/drizzle-seed/tests/mssql/compositeUniqueKey/mssql.test.ts new file mode 100644 index 0000000000..26d21fa576 --- /dev/null +++ b/drizzle-seed/tests/mssql/compositeUniqueKey/mssql.test.ts @@ -0,0 +1,196 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { mssqlTest as test } from '../instrumentation.ts'; +import * as schema from './mssqlSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE [composite_example] ( + [id] int not null, + [name] varchar(256) not null, + CONSTRAINT [composite_example_id_name_unique] UNIQUE([id],[name]), + CONSTRAINT [custom_name] UNIQUE([id],[name]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [unique_column_in_composite_of_two_0] ( + [id] int not null unique, + [name] varchar(256) not null, + CONSTRAINT [custom_name0] UNIQUE([id],[name]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [unique_column_in_composite_of_two_1] ( + [id] int not null, + [name] varchar(256) not null, + CONSTRAINT [custom_name1] UNIQUE([id],[name]), + CONSTRAINT [custom_name1_id] UNIQUE([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [unique_column_in_composite_of_three_0] ( + [id] int not null unique, + [name] varchar(256) not null, + [slug] varchar(256) not null, + CONSTRAINT [custom_name2] UNIQUE([id],[name],[slug]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [unique_column_in_composite_of_three_1] ( + [id] int not null, + [name] varchar(256) not null, + [slug] varchar(256) not null, + CONSTRAINT [custom_name3] UNIQUE([id],[name],[slug]), + CONSTRAINT [custom_name3_id] UNIQUE([id]) + ); + `, + ); + + resolveFunc(''); + } + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('basic seed test', async ({ db }) => { + const currSchema = { composite: schema.composite }; + await seed(db, currSchema, { count: 16 }); + + let composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 17 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(17); + await reset(db, currSchema); +}); + +test('unique column in composite of 2 columns', async ({ db }) => { + const currSchema0 = { uniqueColumnInCompositeOfTwo0: schema.uniqueColumnInCompositeOfTwo0 }; + await seed(db, currSchema0, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo0); + + expect(composite.length).toBe(4); + await reset(db, currSchema0); + + const currSchema1 = { uniqueColumnInCompositeOfTwo1: schema.uniqueColumnInCompositeOfTwo1 }; + await seed(db, currSchema1, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo1); + + expect(composite.length).toBe(4); + await reset(db, currSchema1); +}); + +test('unique column in composite of 3 columns', async ({ db }) => { + const currSchema0 = { uniqueColumnInCompositeOfThree0: schema.uniqueColumnInCompositeOfThree0 }; + await seed(db, currSchema0, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfThree0); + + expect(composite.length).toBe(16); + await reset(db, currSchema0); + + const currSchema1 = { uniqueColumnInCompositeOfThree1: schema.uniqueColumnInCompositeOfThree1 }; + await seed(db, currSchema1, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfThree1); + + expect(composite.length).toBe(16); + await reset(db, currSchema1); +}); diff --git a/drizzle-seed/tests/mssql/compositeUniqueKey/mssqlSchema.ts b/drizzle-seed/tests/mssql/compositeUniqueKey/mssqlSchema.ts new file mode 100644 index 0000000000..62cb304f95 --- /dev/null +++ b/drizzle-seed/tests/mssql/compositeUniqueKey/mssqlSchema.ts @@ -0,0 +1,40 @@ +import { int, mssqlTable, unique, varchar } from 'drizzle-orm/mssql-core'; + +export const composite = mssqlTable('composite_example', { + id: int('id').notNull(), + name: varchar('name', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo0 = mssqlTable('unique_column_in_composite_of_two_0', { + id: int('id').notNull().unique(), + name: varchar('name', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name0').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo1 = mssqlTable('unique_column_in_composite_of_two_1', { + id: int('id').notNull(), + name: varchar('name', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name1').on(t.id, t.name), + unique('custom_name1_id').on(t.id), +]); + +export const uniqueColumnInCompositeOfThree0 = mssqlTable('unique_column_in_composite_of_three_0', { + id: int('id').notNull().unique(), + name: varchar('name', { length: 256 }).notNull(), + slug: varchar('slug', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name2').on(t.id, t.name, t.slug), +]); + +export const uniqueColumnInCompositeOfThree1 = mssqlTable('unique_column_in_composite_of_three_1', { + id: int('id').notNull(), + name: varchar('name', { length: 256 }).notNull(), + slug: varchar('slug', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name3').on(t.id, t.name, t.slug), + unique('custom_name3_id').on(t.id), +]); diff --git a/drizzle-seed/tests/mssql/cyclicTables/cyclicTables.test.ts b/drizzle-seed/tests/mssql/cyclicTables/cyclicTables.test.ts new file mode 100644 index 0000000000..1414e60b4c --- /dev/null +++ b/drizzle-seed/tests/mssql/cyclicTables/cyclicTables.test.ts @@ -0,0 +1,154 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { mssqlTest as test } from '../instrumentation.ts'; +import * as schema from './mssqlSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + await db.execute( + sql` + create table [model] + ( + [id] int identity not null + primary key, + [name] varchar(256) not null, + [defaultImageId] int null + ); + `, + ); + + await db.execute( + sql` + create table [model_image] + ( + [id] int identity not null + primary key, + [url] varchar(256) not null, + [caption] varchar(256) null, + [modelId] int not null, + constraint [model_image_modelId_model_id_fk] + foreign key ([modelId]) references [model] ([id]) + ); + `, + ); + + await db.execute( + sql` + alter table [model] + add constraint [model_defaultImageId_model_image_id_fk] + foreign key ([defaultImageId]) references [model_image] ([id]); + `, + ); + + // 3 tables case + await db.execute( + sql` + create table [model1] + ( + [id] int identity not null + primary key, + [name] varchar(256) not null, + [userId] int null, + [defaultImageId] int null + ); + `, + ); + + await db.execute( + sql` + create table [model_image1] + ( + [id] int identity not null + primary key, + [url] varchar(256) not null, + [caption] varchar(256) null, + [modelId] int not null, + constraint [model_image1_modelId_model1_id_fk] + foreign key ([modelId]) references [model1] ([id]) + ); + `, + ); + + await db.execute( + sql` + create table [user] + ( + [id] int identity not null + primary key, + [name] text null, + [invitedBy] int null, + [imageId] int not null, + constraint [user_imageId_model_image1_id_fk] + foreign key ([imageId]) references [model_image1] ([id]), + constraint [user_invitedBy_user_id_fk] + foreign key ([invitedBy]) references [user] ([id]) + ); + `, + ); + + await db.execute( + sql` + alter table [model1] + add constraint [model1_userId_user_id_fk] + foreign key ([userId]) references [user] ([id]); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('2 cyclic tables test', async ({ db }) => { + await seed(db, { + modelTable: schema.modelTable, + modelImageTable: schema.modelImageTable, + }); + + const modelTable = await db.select().from(schema.modelTable); + const modelImageTable = await db.select().from(schema.modelImageTable); + + expect(modelTable.length).toBe(10); + let predicate = modelTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(modelImageTable.length).toBe(10); + predicate = modelImageTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('3 cyclic tables test', async ({ db }) => { + await seed(db, { + modelTable1: schema.modelTable1, + modelImageTable1: schema.modelImageTable1, + user: schema.user, + }); + + const modelTable1 = await db.select().from(schema.modelTable1); + const modelImageTable1 = await db.select().from(schema.modelImageTable1); + const user = await db.select().from(schema.user); + + expect(modelTable1.length).toBe(10); + let predicate = modelTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(modelImageTable1.length).toBe(10); + predicate = modelImageTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(user.length).toBe(10); + predicate = user.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/mssql/cyclicTables/mssqlSchema.ts b/drizzle-seed/tests/mssql/cyclicTables/mssqlSchema.ts new file mode 100644 index 0000000000..b0c869b1f0 --- /dev/null +++ b/drizzle-seed/tests/mssql/cyclicTables/mssqlSchema.ts @@ -0,0 +1,76 @@ +import { relations } from 'drizzle-orm/_relations'; +import type { AnyMsSqlColumn } from 'drizzle-orm/mssql-core'; +import { int, mssqlTable, text, varchar } from 'drizzle-orm/mssql-core'; + +// MODEL +export const modelTable = mssqlTable( + 'model', + { + id: int().identity().primaryKey(), + name: varchar({ length: 256 }).notNull(), + defaultImageId: int().references(() => modelImageTable.id), + }, +); + +export const modelRelations = relations(modelTable, ({ one, many }) => ({ + images: many(modelImageTable), + defaultImage: one(modelImageTable, { + fields: [modelTable.defaultImageId], + references: [modelImageTable.id], + }), +})); + +// MODEL IMAGE +export const modelImageTable = mssqlTable( + 'model_image', + { + id: int().identity().primaryKey(), + url: varchar({ length: 256 }).notNull(), + caption: varchar({ length: 256 }), + modelId: int() + .notNull() + .references((): AnyMsSqlColumn => modelTable.id), + }, +); + +export const modelImageRelations = relations(modelImageTable, ({ one }) => ({ + model: one(modelTable, { + fields: [modelImageTable.modelId], + references: [modelTable.id], + }), +})); + +// 3 tables case +export const modelTable1 = mssqlTable( + 'model1', + { + id: int().identity().primaryKey(), + name: varchar({ length: 256 }).notNull(), + userId: int() + .references(() => user.id), + defaultImageId: int(), + }, +); + +export const modelImageTable1 = mssqlTable( + 'model_image1', + { + id: int().identity().primaryKey(), + url: varchar({ length: 256 }).notNull(), + caption: varchar({ length: 256 }), + modelId: int().notNull() + .references((): AnyMsSqlColumn => modelTable1.id), + }, +); + +export const user = mssqlTable( + 'user', + { + id: int().identity().primaryKey(), + name: text(), + invitedBy: int().references((): AnyMsSqlColumn => user.id), + imageId: int() + .notNull() + .references((): AnyMsSqlColumn => modelImageTable1.id), + }, +); diff --git a/drizzle-seed/tests/mssql/instrumentation.ts b/drizzle-seed/tests/mssql/instrumentation.ts new file mode 100644 index 0000000000..0e645a3a30 --- /dev/null +++ b/drizzle-seed/tests/mssql/instrumentation.ts @@ -0,0 +1,89 @@ +import type { MySqlDatabase } from 'drizzle-orm/mysql-core'; +import { drizzle } from 'drizzle-orm/node-mssql'; +import mssql from 'mssql'; +import { test as base } from 'vitest'; +import { parseMssqlUrl } from './utils'; + +const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, +) => { + const { diff } = await import('../../../drizzle-kit/tests/mssql/mocks' as string); + + const res = await diff({}, schema, []); + for (const s of res.sqlStatements) { + await query(s, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +const prepareTest = () => { + return base.extend< + { + client: { + client: mssql.ConnectionPool; + query: (sql: string, params: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + db: MySqlDatabase; + push: (schema: any) => Promise; + } + >({ + client: [ + // oxlint-disable-next-line + async ({}, use) => { + const envurl = process.env['MSSQL_CONNECTION_STRING']; + if (!envurl) throw new Error('No mssql url provided'); + + const options = parseMssqlUrl(envurl); + const client = await mssql.connect(options); + await client.connect(); + + const query = async (sql: string, params: any[] = []) => { + const request = client.request(); + for (const [index, param] of params.entries()) { + request.input(`par${index}`, param); + } + + const res = await request.query(sql); + return res.recordset as any[]; + }; + const batch = async (statements: string[]) => { + return client.query(statements.map((x) => x.endsWith(';') ? x : `${x};`).join('\n')).then(() => '' as any); + }; + + await client.query('drop database if exists drizzle;'); + await client.query('create database drizzle;'); + await client.query('use drizzle;'); + + await use({ client, query, batch }); + await client.close(); + }, + { scope: 'worker' }, + ], + db: [ + async ({ client }, use) => { + const db = drizzle({ client: client.client }); + await use(db as any); + }, + { scope: 'worker' }, + ], + push: [ + async ({ client }, use) => { + const { query } = client; + const push = ( + schema: any, + ) => _push(query, schema); + + await use(push); + }, + { scope: 'worker' }, + ], + }); +}; + +export const mssqlTest = prepareTest(); +export type Test = ReturnType; diff --git a/drizzle-seed/tests/mssql/mssql.test.ts b/drizzle-seed/tests/mssql/mssql.test.ts new file mode 100644 index 0000000000..6525cf9212 --- /dev/null +++ b/drizzle-seed/tests/mssql/mssql.test.ts @@ -0,0 +1,386 @@ +import { sql } from 'drizzle-orm'; +import { relations } from 'drizzle-orm/_relations'; +import { expect, vi } from 'vitest'; +import { reset, seed } from '../../src/index.ts'; +import { mssqlTest as test } from './instrumentation.ts'; +import * as schema from './mssqlSchema.ts'; + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE [customer] ( + [id] varchar(256) NOT NULL, + [company_name] varchar(max) NOT NULL, + [contact_name] varchar(max) NOT NULL, + [contact_title] varchar(max) NOT NULL, + [address] varchar(max) NOT NULL, + [city] varchar(max) NOT NULL, + [postal_code] varchar(max), + [region] varchar(max), + [country] varchar(max) NOT NULL, + [phone] varchar(max) NOT NULL, + [fax] varchar(max), + CONSTRAINT [customer_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [order_detail] ( + [unit_price] float NOT NULL, + [quantity] int NOT NULL, + [discount] float NOT NULL, + [order_id] int NOT NULL, + [product_id] int NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [employee] ( + [id] int NOT NULL, + [last_name] varchar(max) NOT NULL, + [first_name] varchar(max), + [title] varchar(max) NOT NULL, + [title_of_courtesy] varchar(max) NOT NULL, + [birth_date] datetime NOT NULL, + [hire_date] datetime NOT NULL, + [address] varchar(max) NOT NULL, + [city] varchar(max) NOT NULL, + [postal_code] varchar(max) NOT NULL, + [country] varchar(max) NOT NULL, + [home_phone] varchar(max) NOT NULL, + [extension] int NOT NULL, + [notes] varchar(max) NOT NULL, + [reports_to] int, + [photo_path] varchar(max), + CONSTRAINT [employee_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [order] ( + [id] int NOT NULL, + [order_date] datetime NOT NULL, + [required_date] datetime NOT NULL, + [shipped_date] datetime, + [ship_via] int NOT NULL, + [freight] float NOT NULL, + [ship_name] varchar(max) NOT NULL, + [ship_city] varchar(max) NOT NULL, + [ship_region] varchar(max), + [ship_postal_code] varchar(max), + [ship_country] varchar(max) NOT NULL, + [customer_id] varchar(256) NOT NULL, + [employee_id] int NOT NULL, + CONSTRAINT [order_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [product] ( + [id] int NOT NULL, + [name] varchar(max) NOT NULL, + [quantity_per_unit] varchar(max) NOT NULL, + [unit_price] float NOT NULL, + [units_in_stock] int NOT NULL, + [units_on_order] int NOT NULL, + [reorder_level] int NOT NULL, + [discontinued] int NOT NULL, + [supplier_id] int NOT NULL, + CONSTRAINT [product_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [supplier] ( + [id] int NOT NULL, + [company_name] varchar(max) NOT NULL, + [contact_name] varchar(max) NOT NULL, + [contact_title] varchar(max) NOT NULL, + [address] varchar(max) NOT NULL, + [city] varchar(max) NOT NULL, + [region] varchar(max), + [postal_code] varchar(max) NOT NULL, + [country] varchar(max) NOT NULL, + [phone] varchar(max) NOT NULL, + CONSTRAINT [supplier_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [users] ( + [id] int, + [name] varchar(max), + [invitedBy] int, + CONSTRAINT [users_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [posts] ( + [id] int, + [name] varchar(max), + [content] varchar(max), + [userId] int, + CONSTRAINT [posts_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + ALTER TABLE [order_detail] ADD CONSTRAINT [order_detail_order_id_order_id_fk] FOREIGN KEY ([order_id]) REFERENCES [order]([id]) ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE [order_detail] ADD CONSTRAINT [order_detail_product_id_product_id_fk] FOREIGN KEY ([product_id]) REFERENCES [product]([id]) ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE [employee] ADD CONSTRAINT [employee_reports_to_employee_id_fk] FOREIGN KEY ([reports_to]) REFERENCES [employee]([id]) ON DELETE no action ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE [order] ADD CONSTRAINT [order_customer_id_customer_id_fk] FOREIGN KEY ([customer_id]) REFERENCES [customer]([id]) ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE [order] ADD CONSTRAINT [order_employee_id_employee_id_fk] FOREIGN KEY ([employee_id]) REFERENCES [employee]([id]) ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE [product] ADD CONSTRAINT [product_supplier_id_supplier_id_fk] FOREIGN KEY ([supplier_id]) REFERENCES [supplier]([id]) ON DELETE cascade ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE [users] ADD CONSTRAINT [users_invitedBy_users_id_fk] FOREIGN KEY ([invitedBy]) REFERENCES [users]([id]) ON DELETE no action ON UPDATE no action; + `, + ); + + await db.execute( + sql` + ALTER TABLE [posts] ADD CONSTRAINT [posts_userId_users_id_fk] FOREIGN KEY ([userId]) REFERENCES [users]([id]) ON DELETE cascade ON UPDATE no action; + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test('basic seed test', async ({ db }) => { + await seed(db, schema); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(10); + expect(details.length).toBe(10); + expect(employees.length).toBe(10); + expect(orders.length).toBe(10); + expect(products.length).toBe(10); + expect(suppliers.length).toBe(10); +}); + +test('seed with options.count:11 test', async ({ db }) => { + await seed(db, schema, { count: 11 }); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(11); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(11); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); +}); + +test('redefine(refine) customers count', async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 12, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(12); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(11); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); +}); + +test('redefine(refine) all tables count', async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 12, + }, + details: { + count: 13, + }, + employees: { + count: 14, + }, + orders: { + count: 15, + }, + products: { + count: 16, + }, + suppliers: { + count: 17, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(12); + expect(details.length).toBe(13); + expect(employees.length).toBe(14); + expect(orders.length).toBe(15); + expect(products.length).toBe(16); + expect(suppliers.length).toBe(17); +}); + +test("redefine(refine) orders count using 'with' in customers", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 13, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); +}); + +test("sequential using of 'with'", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 12, + with: { + details: 3, + }, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(24); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); +}); + +test('overlapping a foreign key constraint with a one-to-many relation', async ({ db }) => { + const postsRelation = relations(schema.posts, ({ one }) => ({ + user: one(schema.users, { fields: [schema.posts.userId], references: [schema.users.id] }), + })); + + const consoleMock = vi.spyOn(console, 'warn').mockImplementation(() => {}); + + await reset(db, { users: schema.users, posts: schema.posts, postsRelation }); + await seed(db, { users: schema.users, posts: schema.posts, postsRelation }); + // expecting to get a warning + expect(consoleMock).toBeCalled(); + expect(consoleMock).toBeCalledWith(expect.stringMatching(/^You are providing a one-to-many relation.+/)); + + const users = await db.select().from(schema.users); + const posts = await db.select().from(schema.posts); + + expect(users.length).toBe(10); + let predicate = users.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(posts.length).toBe(10); + predicate = posts.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/mssql/mssqlSchema.ts b/drizzle-seed/tests/mssql/mssqlSchema.ts new file mode 100644 index 0000000000..f1f836978d --- /dev/null +++ b/drizzle-seed/tests/mssql/mssqlSchema.ts @@ -0,0 +1,121 @@ +import type { AnyMsSqlColumn } from 'drizzle-orm/mssql-core'; +import { datetime, float, int, mssqlTable, text, varchar } from 'drizzle-orm/mssql-core'; + +export const customers = mssqlTable('customer', { + id: varchar('id', { length: 256 }).primaryKey(), + companyName: text('company_name').notNull(), + contactName: text('contact_name').notNull(), + contactTitle: text('contact_title').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + postalCode: text('postal_code'), + region: text('region'), + country: text('country').notNull(), + phone: text('phone').notNull(), + fax: text('fax'), +}); + +export const employees = mssqlTable( + 'employee', + { + id: int('id').primaryKey(), + lastName: text('last_name').notNull(), + firstName: text('first_name'), + title: text('title').notNull(), + titleOfCourtesy: text('title_of_courtesy').notNull(), + birthDate: datetime('birth_date').notNull(), + hireDate: datetime('hire_date').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + postalCode: text('postal_code').notNull(), + country: text('country').notNull(), + homePhone: text('home_phone').notNull(), + extension: int('extension').notNull(), + notes: text('notes').notNull(), + reportsTo: int('reports_to').references((): AnyMsSqlColumn => employees.id), + photoPath: text('photo_path'), + }, +); + +export const orders = mssqlTable('order', { + id: int('id').primaryKey(), + orderDate: datetime('order_date').notNull(), + requiredDate: datetime('required_date').notNull(), + shippedDate: datetime('shipped_date'), + shipVia: int('ship_via').notNull(), + freight: float('freight').notNull(), + shipName: text('ship_name').notNull(), + shipCity: text('ship_city').notNull(), + shipRegion: text('ship_region'), + shipPostalCode: text('ship_postal_code'), + shipCountry: text('ship_country').notNull(), + + customerId: varchar('customer_id', { length: 256 }) + .notNull() + .references(() => customers.id, { onDelete: 'cascade' }), + + employeeId: int('employee_id') + .notNull() + .references(() => employees.id, { onDelete: 'cascade' }), +}); + +export const suppliers = mssqlTable('supplier', { + id: int('id').primaryKey(), + companyName: text('company_name').notNull(), + contactName: text('contact_name').notNull(), + contactTitle: text('contact_title').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + region: text('region'), + postalCode: text('postal_code').notNull(), + country: text('country').notNull(), + phone: text('phone').notNull(), +}); + +export const products = mssqlTable('product', { + id: int('id').primaryKey(), + name: text('name').notNull(), + quantityPerUnit: text('quantity_per_unit').notNull(), + unitPrice: float('unit_price').notNull(), + unitsInStock: int('units_in_stock').notNull(), + unitsOnOrder: int('units_on_order').notNull(), + reorderLevel: int('reorder_level').notNull(), + discontinued: int('discontinued').notNull(), + + supplierId: int('supplier_id') + .notNull() + .references(() => suppliers.id, { onDelete: 'cascade' }), +}); + +export const details = mssqlTable('order_detail', { + unitPrice: float('unit_price').notNull(), + quantity: int('quantity').notNull(), + discount: float('discount').notNull(), + + orderId: int('order_id') + .notNull() + .references(() => orders.id, { onDelete: 'cascade' }), + + productId: int('product_id') + .notNull() + .references(() => products.id, { onDelete: 'cascade' }), +}); + +export const users = mssqlTable( + 'users', + { + id: int().primaryKey(), + name: text(), + invitedBy: int().references((): AnyMsSqlColumn => users.id), + }, +); + +export const posts = mssqlTable( + 'posts', + { + id: int().primaryKey(), + name: text(), + content: text(), + userId: int().references(() => users.id), + }, +); diff --git a/drizzle-seed/tests/mssql/softRelationsTest/mssqlSchema.ts b/drizzle-seed/tests/mssql/softRelationsTest/mssqlSchema.ts new file mode 100644 index 0000000000..823da6727a --- /dev/null +++ b/drizzle-seed/tests/mssql/softRelationsTest/mssqlSchema.ts @@ -0,0 +1,128 @@ +import { relations } from 'drizzle-orm/_relations'; +import { datetime, float, int, mssqlTable, text, varchar } from 'drizzle-orm/mssql-core'; + +export const customers = mssqlTable('customer', { + id: varchar('id', { length: 256 }).primaryKey(), + companyName: text('company_name').notNull(), + contactName: text('contact_name').notNull(), + contactTitle: text('contact_title').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + postalCode: text('postal_code'), + region: text('region'), + country: text('country').notNull(), + phone: text('phone').notNull(), + fax: text('fax'), +}); + +export const employees = mssqlTable( + 'employee', + { + id: int('id').primaryKey(), + lastName: text('last_name').notNull(), + firstName: text('first_name'), + title: text('title').notNull(), + titleOfCourtesy: text('title_of_courtesy').notNull(), + birthDate: datetime('birth_date').notNull(), + hireDate: datetime('hire_date').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + postalCode: text('postal_code').notNull(), + country: text('country').notNull(), + homePhone: text('home_phone').notNull(), + extension: int('extension').notNull(), + notes: text('notes').notNull(), + reportsTo: int('reports_to'), + photoPath: text('photo_path'), + }, +); + +export const employeesRelations = relations(employees, ({ one }) => ({ + employee: one(employees, { + fields: [employees.reportsTo], + references: [employees.id], + }), +})); + +export const orders = mssqlTable('order', { + id: int('id').primaryKey(), + orderDate: datetime('order_date').notNull(), + requiredDate: datetime('required_date').notNull(), + shippedDate: datetime('shipped_date'), + shipVia: int('ship_via').notNull(), + freight: float('freight').notNull(), + shipName: text('ship_name').notNull(), + shipCity: text('ship_city').notNull(), + shipRegion: text('ship_region'), + shipPostalCode: text('ship_postal_code'), + shipCountry: text('ship_country').notNull(), + + customerId: varchar('customer_id', { length: 256 }).notNull(), + + employeeId: int('employee_id').notNull(), +}); + +export const ordersRelations = relations(orders, ({ one }) => ({ + customer: one(customers, { + fields: [orders.customerId], + references: [customers.id], + }), + employee: one(employees, { + fields: [orders.employeeId], + references: [employees.id], + }), +})); + +export const suppliers = mssqlTable('supplier', { + id: int('id').primaryKey(), + companyName: text('company_name').notNull(), + contactName: text('contact_name').notNull(), + contactTitle: text('contact_title').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + region: text('region'), + postalCode: text('postal_code').notNull(), + country: text('country').notNull(), + phone: text('phone').notNull(), +}); + +export const products = mssqlTable('product', { + id: int('id').primaryKey(), + name: text('name').notNull(), + quantityPerUnit: text('quantity_per_unit').notNull(), + unitPrice: float('unit_price').notNull(), + unitsInStock: int('units_in_stock').notNull(), + unitsOnOrder: int('units_on_order').notNull(), + reorderLevel: int('reorder_level').notNull(), + discontinued: int('discontinued').notNull(), + + supplierId: int('supplier_id').notNull(), +}); + +export const productsRelations = relations(products, ({ one }) => ({ + supplier: one(suppliers, { + fields: [products.supplierId], + references: [suppliers.id], + }), +})); + +export const details = mssqlTable('order_detail', { + unitPrice: float('unit_price').notNull(), + quantity: int('quantity').notNull(), + discount: float('discount').notNull(), + + orderId: int('order_id').notNull(), + + productId: int('product_id').notNull(), +}); + +export const detailsRelations = relations(details, ({ one }) => ({ + order: one(orders, { + fields: [details.orderId], + references: [orders.id], + }), + product: one(products, { + fields: [details.productId], + references: [products.id], + }), +})); diff --git a/drizzle-seed/tests/mssql/softRelationsTest/softRelations.test.ts b/drizzle-seed/tests/mssql/softRelationsTest/softRelations.test.ts new file mode 100644 index 0000000000..d5ceac6f45 --- /dev/null +++ b/drizzle-seed/tests/mssql/softRelationsTest/softRelations.test.ts @@ -0,0 +1,258 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { mssqlTest as test } from '../instrumentation.ts'; +import * as schema from './mssqlSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE [customer] ( + [id] varchar(256) NOT NULL, + [company_name] text NOT NULL, + [contact_name] text NOT NULL, + [contact_title] text NOT NULL, + [address] text NOT NULL, + [city] text NOT NULL, + [postal_code] text, + [region] text, + [country] text NOT NULL, + [phone] text NOT NULL, + [fax] text, + CONSTRAINT [customer_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [order_detail] ( + [unit_price] float NOT NULL, + [quantity] int NOT NULL, + [discount] float NOT NULL, + [order_id] int NOT NULL, + [product_id] int NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [employee] ( + [id] int NOT NULL, + [last_name] text NOT NULL, + [first_name] text, + [title] text NOT NULL, + [title_of_courtesy] text NOT NULL, + [birth_date] datetime NOT NULL, + [hire_date] datetime NOT NULL, + [address] text NOT NULL, + [city] text NOT NULL, + [postal_code] text NOT NULL, + [country] text NOT NULL, + [home_phone] text NOT NULL, + [extension] int NOT NULL, + [notes] text NOT NULL, + [reports_to] int, + [photo_path] text, + CONSTRAINT [employee_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [order] ( + [id] int NOT NULL, + [order_date] datetime NOT NULL, + [required_date] datetime NOT NULL, + [shipped_date] datetime, + [ship_via] int NOT NULL, + [freight] float NOT NULL, + [ship_name] text NOT NULL, + [ship_city] text NOT NULL, + [ship_region] text, + [ship_postal_code] text, + [ship_country] text NOT NULL, + [customer_id] varchar(256) NOT NULL, + [employee_id] int NOT NULL, + CONSTRAINT [order_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [product] ( + [id] int NOT NULL, + [name] text NOT NULL, + [quantity_per_unit] text NOT NULL, + [unit_price] float NOT NULL, + [units_in_stock] int NOT NULL, + [units_on_order] int NOT NULL, + [reorder_level] int NOT NULL, + [discontinued] int NOT NULL, + [supplier_id] int NOT NULL, + CONSTRAINT [product_id] PRIMARY KEY([id]) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE [supplier] ( + [id] int NOT NULL, + [company_name] text NOT NULL, + [contact_name] text NOT NULL, + [contact_title] text NOT NULL, + [address] text NOT NULL, + [city] text NOT NULL, + [region] text, + [postal_code] text NOT NULL, + [country] text NOT NULL, + [phone] text NOT NULL, + CONSTRAINT [supplier_id] PRIMARY KEY([id]) + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +const checkSoftRelations = ( + customers: (typeof schema.customers.$inferSelect)[], + details: (typeof schema.details.$inferSelect)[], + employees: (typeof schema.employees.$inferSelect)[], + orders: (typeof schema.orders.$inferSelect)[], + products: (typeof schema.products.$inferSelect)[], + suppliers: (typeof schema.suppliers.$inferSelect)[], +) => { + // employees soft relations check + const employeeIds = new Set(employees.map((employee) => employee.id)); + const employeesPredicate = employees.every((employee) => + employee.reportsTo !== null && employeeIds.has(employee.reportsTo) + ); + expect(employeesPredicate).toBe(true); + + // orders soft relations check + const customerIds = new Set(customers.map((customer) => customer.id)); + const ordersPredicate1 = orders.every((order) => order.customerId !== null && customerIds.has(order.customerId)); + expect(ordersPredicate1).toBe(true); + + const ordersPredicate2 = orders.every((order) => order.employeeId !== null && employeeIds.has(order.employeeId)); + expect(ordersPredicate2).toBe(true); + + // product soft relations check + const supplierIds = new Set(suppliers.map((supplier) => supplier.id)); + const productsPredicate = products.every((product) => + product.supplierId !== null && supplierIds.has(product.supplierId) + ); + expect(productsPredicate).toBe(true); + + // details soft relations check + const orderIds = new Set(orders.map((order) => order.id)); + const detailsPredicate1 = details.every((detail) => detail.orderId !== null && orderIds.has(detail.orderId)); + expect(detailsPredicate1).toBe(true); + + const productIds = new Set(products.map((product) => product.id)); + const detailsPredicate2 = details.every((detail) => detail.productId !== null && productIds.has(detail.productId)); + expect(detailsPredicate2).toBe(true); +}; + +test('basic seed, soft relations test', async ({ db }) => { + await seed(db, schema); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(10); + expect(details.length).toBe(10); + expect(employees.length).toBe(10); + expect(orders.length).toBe(10); + expect(products.length).toBe(10); + expect(suppliers.length).toBe(10); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test("redefine(refine) orders count using 'with' in customers, soft relations test", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 13, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test("sequential using of 'with', soft relations test", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 12, + with: { + details: 3, + }, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(24); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); diff --git a/drizzle-seed/tests/mssql/utils.ts b/drizzle-seed/tests/mssql/utils.ts new file mode 100644 index 0000000000..1d31b37366 --- /dev/null +++ b/drizzle-seed/tests/mssql/utils.ts @@ -0,0 +1,64 @@ +import Docker from 'dockerode'; +import getPort from 'get-port'; +import type { config } from 'mssql'; +import { v4 as uuid } from 'uuid'; + +export async function createDockerDB(suffix?: string): Promise< + { container: Docker.Container; connectionString: string } +> { + const docker = new Docker(); + const port1433 = await getPort(); + // const port1431 = await getPort(); + const image = 'mcr.microsoft.com/azure-sql-edge'; + + const pullStream = await docker.pull(image); // { platform: 'linux/amd64' }); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + const password = 'drizzle123PASSWORD!'; + const createOptions: Docker.ContainerCreateOptions = { + Image: image, + // platform: 'linux/amd64', + Env: ['ACCEPT_EULA=1', `MSSQL_SA_PASSWORD=${password}`], // , 'MSSQL_TCP_PORT=1433'], + name: `drizzle-seed-tests-${suffix}-${uuid()}`, + // ExposedPorts: { '1433/tcp': {}, '1431/tcp': {} }, + HostConfig: { + AutoRemove: true, + PortBindings: { + '1433/tcp': [{ HostPort: `${port1433}` }], + }, + // CapAdd: ['SYS_PTRACE'], + }, + }; + + // createOptions.Platform = 'linux/amd64'; + + const mssqlContainer = await docker.createContainer(createOptions); + + await mssqlContainer.start(); + + return { + // real connection string + // connectionString: `Server=localhost,${port1433};User Id=SA;Password=${password};TrustServerCertificate=True;` + + // connection string to parse options + connectionString: `mssql://SA:${password}@localhost:${port1433}?encrypt=true&trustServerCertificate=true`, + container: mssqlContainer, + }; +} + +export const parseMssqlUrl = (urlString: string): config => { + const url = new URL(urlString); + return { + user: url.username, + password: url.password, + server: url.hostname, + port: Number.parseInt(url.port, 10), + database: url.pathname.replace(/^\//, ''), + options: { + encrypt: url.searchParams.get('encrypt') === 'true', + trustServerCertificate: url.searchParams.get('trustServerCertificate') === 'true', + }, + }; +}; diff --git a/drizzle-seed/tests/mysql/allDataTypesTest/mysql_all_data_types.test.ts b/drizzle-seed/tests/mysql/allDataTypesTest/mysql_all_data_types.test.ts index f39a55fef1..a01793fa47 100644 --- a/drizzle-seed/tests/mysql/allDataTypesTest/mysql_all_data_types.test.ts +++ b/drizzle-seed/tests/mysql/allDataTypesTest/mysql_all_data_types.test.ts @@ -1,76 +1,20 @@ -import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; -import getPort from 'get-port'; -import type { Connection } from 'mysql2/promise'; -import { createConnection } from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, expect, test } from 'vitest'; +import { expect } from 'vitest'; import { seed } from '../../../src/index.ts'; +import { mysqlTest as test } from '../instrumentation.ts'; import * as schema from './mysqlSchema.ts'; -let mysqlContainer: Docker.Container; -let client: Connection | undefined; -let db: MySql2Database; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -beforeAll(async () => { - const connectionString = await createDockerDB(); - - const sleep = 1000; - let timeLeft = 40000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - const client = await createConnection(connectionString); - await client.connect(); - db = drizzle(client); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); - throw lastError; - } +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`all_data_types\` ( \`integer\` int, \`tinyint\` tinyint, @@ -101,15 +45,15 @@ beforeAll(async () => { \`popularity\` enum('unknown','known','popular') ); `, - ); -}); + ); + + resolveFunc(''); + } -afterAll(async () => { - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); + await promise; }); -test('basic seed test', async () => { +test('basic seed test', async ({ db }) => { await seed(db, schema, { count: 10000 }); const allDataTypes = await db.select().from(schema.allDataTypes); diff --git a/drizzle-seed/tests/mysql/compositeUniqueKey/mysql.test.ts b/drizzle-seed/tests/mysql/compositeUniqueKey/mysql.test.ts new file mode 100644 index 0000000000..2d815c56e8 --- /dev/null +++ b/drizzle-seed/tests/mysql/compositeUniqueKey/mysql.test.ts @@ -0,0 +1,190 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { mysqlTest as test } from '../instrumentation.ts'; +import * as schema from './mysqlSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS composite_example ( + id integer not null, + name varchar(8) not null, + CONSTRAINT composite_example_id_name_unique UNIQUE(id,name), + CONSTRAINT custom_name UNIQUE(id,name) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS unique_column_in_composite_of_two_0 ( + id integer not null unique, + name varchar(8) not null, + CONSTRAINT custom_name0 UNIQUE(id,name) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS unique_column_in_composite_of_two_1 ( + id integer not null, + name varchar(8) not null, + CONSTRAINT custom_name1 UNIQUE(id,name), + CONSTRAINT custom_name1_id UNIQUE(id) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS unique_column_in_composite_of_three_0 ( + id integer not null unique, + name varchar(8) not null, + slug varchar(8) not null, + CONSTRAINT custom_name2 UNIQUE(id,name,slug) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS unique_column_in_composite_of_three_1 ( + id integer not null, + name varchar(8) not null, + slug varchar(8) not null, + CONSTRAINT custom_name3 UNIQUE(id,name,slug), + CONSTRAINT custom_name3_id UNIQUE(id) + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('basic seed test', async ({ db }) => { + const currSchema = { composite: schema.composite }; + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + let composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 17 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(17); + await reset(db, currSchema); +}); + +test('unique column in composite of 2 columns', async ({ db }) => { + const currSchema0 = { uniqueColumnInCompositeOfTwo0: schema.uniqueColumnInCompositeOfTwo0 }; + await seed(db, currSchema0, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo0); + + expect(composite.length).toBe(4); + await reset(db, currSchema0); + + const currSchema1 = { uniqueColumnInCompositeOfTwo1: schema.uniqueColumnInCompositeOfTwo1 }; + await seed(db, currSchema1, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo1); + + expect(composite.length).toBe(4); + await reset(db, currSchema1); +}); + +test('unique column in composite of 3 columns', async ({ db }) => { + const currSchema0 = { uniqueColumnInCompositeOfThree0: schema.uniqueColumnInCompositeOfThree0 }; + await seed(db, currSchema0, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfThree0); + + expect(composite.length).toBe(16); + await reset(db, currSchema0); + + const currSchema1 = { uniqueColumnInCompositeOfThree1: schema.uniqueColumnInCompositeOfThree1 }; + await seed(db, currSchema1, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfThree1); + + expect(composite.length).toBe(16); + await reset(db, currSchema1); +}); diff --git a/drizzle-seed/tests/mysql/compositeUniqueKey/mysqlSchema.ts b/drizzle-seed/tests/mysql/compositeUniqueKey/mysqlSchema.ts new file mode 100644 index 0000000000..18affec4cc --- /dev/null +++ b/drizzle-seed/tests/mysql/compositeUniqueKey/mysqlSchema.ts @@ -0,0 +1,40 @@ +import { int, mysqlTable, unique, varchar } from 'drizzle-orm/mysql-core'; + +export const composite = mysqlTable('composite_example', { + id: int().notNull(), + name: varchar({ length: 8 }).notNull(), +}, (t) => [ + unique('custom_name').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo0 = mysqlTable('unique_column_in_composite_of_two_0', { + id: int().notNull().unique(), + name: varchar({ length: 8 }).notNull(), +}, (t) => [ + unique('custom_name0').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo1 = mysqlTable('unique_column_in_composite_of_two_1', { + id: int().notNull(), + name: varchar({ length: 8 }).notNull(), +}, (t) => [ + unique('custom_name1').on(t.id, t.name), + unique('custom_name1_id').on(t.id), +]); + +export const uniqueColumnInCompositeOfThree0 = mysqlTable('unique_column_in_composite_of_three_0', { + id: int().notNull().unique(), + name: varchar({ length: 8 }).notNull(), + slug: varchar({ length: 8 }).notNull(), +}, (t) => [ + unique('custom_name2').on(t.id, t.name, t.slug), +]); + +export const uniqueColumnInCompositeOfThree1 = mysqlTable('unique_column_in_composite_of_three_1', { + id: int().notNull(), + name: varchar({ length: 8 }).notNull(), + slug: varchar({ length: 8 }).notNull(), +}, (t) => [ + unique('custom_name3').on(t.id, t.name, t.slug), + unique('custom_name3_id').on(t.id), +]); diff --git a/drizzle-seed/tests/mysql/cyclicTables/cyclicTables.test.ts b/drizzle-seed/tests/mysql/cyclicTables/cyclicTables.test.ts index 08fb7a0fe9..7483fa5205 100644 --- a/drizzle-seed/tests/mysql/cyclicTables/cyclicTables.test.ts +++ b/drizzle-seed/tests/mysql/cyclicTables/cyclicTables.test.ts @@ -1,76 +1,20 @@ -import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; -import getPort from 'get-port'; -import type { Connection } from 'mysql2/promise'; -import { createConnection } from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; +import { expect } from 'vitest'; import { reset, seed } from '../../../src/index.ts'; +import { mysqlTest as test } from '../instrumentation.ts'; import * as schema from './mysqlSchema.ts'; -let mysqlContainer: Docker.Container; -let client: Connection; -let db: MySql2Database; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -beforeAll(async () => { - const connectionString = await createDockerDB(); - - const sleep = 1000; - let timeLeft = 40000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await createConnection(connectionString); - await client.connect(); - db = drizzle(client); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); - throw lastError; - } +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; - await db.execute( - sql` + await db.execute( + sql` create table model ( id int not null @@ -79,10 +23,10 @@ beforeAll(async () => { defaultImageId int null ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` create table model_image ( id int not null @@ -94,19 +38,19 @@ beforeAll(async () => { foreign key (modelId) references model (id) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` alter table model add constraint model_defaultImageId_model_image_id_fk foreign key (defaultImageId) references model_image (id); `, - ); + ); - // 3 tables case - await db.execute( - sql` + // 3 tables case + await db.execute( + sql` create table model1 ( id int not null @@ -116,10 +60,10 @@ beforeAll(async () => { defaultImageId int null ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` create table model_image1 ( id int not null @@ -131,10 +75,10 @@ beforeAll(async () => { foreign key (modelId) references model1 (id) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` create table user ( id int not null @@ -148,27 +92,27 @@ beforeAll(async () => { foreign key (invitedBy) references user (id) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` alter table model1 add constraint model1_userId_user_id_fk foreign key (userId) references user (id); `, - ); -}); + ); + + resolveFunc(''); + } -afterAll(async () => { - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); + await promise; }); -afterEach(async () => { +test.afterEach(async ({ db }) => { await reset(db, schema); }); -test('2 cyclic tables test', async () => { +test('2 cyclic tables test', async ({ db }) => { await seed(db, { modelTable: schema.modelTable, modelImageTable: schema.modelImageTable, @@ -186,7 +130,7 @@ test('2 cyclic tables test', async () => { expect(predicate).toBe(true); }); -test('3 cyclic tables test', async () => { +test('3 cyclic tables test', async ({ db }) => { await seed(db, { modelTable1: schema.modelTable1, modelImageTable1: schema.modelImageTable1, diff --git a/drizzle-seed/tests/mysql/generatorsTest/generators.test.ts b/drizzle-seed/tests/mysql/generatorsTest/generators.test.ts index 2bef885daf..64cc7b154c 100644 --- a/drizzle-seed/tests/mysql/generatorsTest/generators.test.ts +++ b/drizzle-seed/tests/mysql/generatorsTest/generators.test.ts @@ -1,99 +1,42 @@ -import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; -import getPort from 'get-port'; -import type { Connection } from 'mysql2/promise'; -import { createConnection } from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, expect, test } from 'vitest'; +import { expect } from 'vitest'; import { seed } from '../../../src/index.ts'; +import { mysqlTest as test } from '../instrumentation.ts'; import * as schema from './mysqlSchema.ts'; -let mysqlContainer: Docker.Container; -let client: Connection; -let db: MySql2Database; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -beforeAll(async () => { - const connectionString = await createDockerDB(); - - const sleep = 1000; - let timeLeft = 40000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await createConnection(connectionString); - await client.connect(); - db = drizzle(client); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); - throw lastError; - } +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`datetime_table\` ( \`datetime\` datetime ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`year_table\` ( \`year\` year ); `, - ); -}); + ); -afterAll(async () => { - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); + resolveFunc(''); + } + + await promise; }); const count = 10000; - -test('datetime generator test', async () => { +test('datetime generator test', async ({ db }) => { await seed(db, { datetimeTable: schema.datetimeTable }).refine((funcs) => ({ datetimeTable: { count, @@ -110,7 +53,7 @@ test('datetime generator test', async () => { expect(predicate).toBe(true); }); -test('year generator test', async () => { +test('year generator test', async ({ db }) => { await seed(db, { yearTable: schema.yearTable }).refine((funcs) => ({ yearTable: { count, diff --git a/drizzle-seed/tests/mysql/instrumentation.ts b/drizzle-seed/tests/mysql/instrumentation.ts new file mode 100644 index 0000000000..a4eb4128a6 --- /dev/null +++ b/drizzle-seed/tests/mysql/instrumentation.ts @@ -0,0 +1,84 @@ +import type { MySqlDatabase } from 'drizzle-orm/mysql-core'; +import type { AnyMySql2Connection } from 'drizzle-orm/mysql2'; +import { drizzle } from 'drizzle-orm/mysql2'; +import { createConnection } from 'mysql2/promise'; +import { test as base } from 'vitest'; + +const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, +) => { + const { diff } = await import('../../../drizzle-kit/tests/mysql/mocks' as string); + + const res = await diff({}, schema, []); + for (const s of res.sqlStatements) { + await query(s, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +const prepareTest = () => { + return base.extend< + { + client: { + client: AnyMySql2Connection; + query: (sql: string, params: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + db: MySqlDatabase; + push: (schema: any) => Promise; + } + >({ + client: [ + // oxlint-disable-next-line + async ({}, use) => { + const envurl = process.env['MYSQL_CONNECTION_STRING']; + if (!envurl) throw new Error('No mysql url provided'); + const client = await createConnection({ + uri: envurl, + supportBigNumbers: true, + multipleStatements: true, + }); + await client.connect(); + await client.query('drop database if exists drizzle; create database drizzle; use drizzle;'); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res[0] as any[]; + }; + const batch = async (statements: string[]) => { + return client.query(statements.map((x) => x.endsWith(';') ? x : `${x};`).join('\n')).then(() => '' as any); + }; + + await use({ client, query, batch }); + await client.end(); + client.destroy(); + }, + { scope: 'worker' }, + ], + db: [ + async ({ client }, use) => { + const db = drizzle({ client: client.client as AnyMySql2Connection }); + await use(db as any); + }, + { scope: 'worker' }, + ], + push: [ + async ({ client }, use) => { + const { query } = client; + const push = ( + schema: any, + ) => _push(query, schema); + + await use(push); + }, + { scope: 'worker' }, + ], + }); +}; + +export const mysqlTest = prepareTest(); +export type Test = ReturnType; diff --git a/drizzle-seed/tests/mysql/mysql.test.ts b/drizzle-seed/tests/mysql/mysql.test.ts index db1471293e..eac341b410 100644 --- a/drizzle-seed/tests/mysql/mysql.test.ts +++ b/drizzle-seed/tests/mysql/mysql.test.ts @@ -1,77 +1,21 @@ -import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import { relations } from 'drizzle-orm/_relations'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; -import getPort from 'get-port'; -import type { Connection } from 'mysql2/promise'; -import { createConnection } from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { afterAll, afterEach, beforeAll, expect, test, vi } from 'vitest'; +import { expect, vi } from 'vitest'; import { reset, seed } from '../../src/index.ts'; +import { mysqlTest as test } from './instrumentation.ts'; import * as schema from './mysqlSchema.ts'; -let mysqlContainer: Docker.Container; -let client: Connection; -let db: MySql2Database; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -beforeAll(async () => { - const connectionString = await createDockerDB(); - - const sleep = 1000; - let timeLeft = 40000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await createConnection(connectionString); - await client.connect(); - db = drizzle(client); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); - throw lastError; - } +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`customer\` ( \`id\` varchar(256) NOT NULL, \`company_name\` text NOT NULL, @@ -87,10 +31,10 @@ beforeAll(async () => { CONSTRAINT \`customer_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`order_detail\` ( \`unit_price\` float NOT NULL, \`quantity\` int NOT NULL, @@ -99,10 +43,10 @@ beforeAll(async () => { \`product_id\` int NOT NULL ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`employee\` ( \`id\` int NOT NULL, \`last_name\` text NOT NULL, @@ -123,10 +67,10 @@ beforeAll(async () => { CONSTRAINT \`employee_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`order\` ( \`id\` int NOT NULL, \`order_date\` timestamp NOT NULL, @@ -144,10 +88,10 @@ beforeAll(async () => { CONSTRAINT \`order_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`product\` ( \`id\` int NOT NULL, \`name\` text NOT NULL, @@ -161,10 +105,10 @@ beforeAll(async () => { CONSTRAINT \`product_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`supplier\` ( \`id\` int NOT NULL, \`company_name\` text NOT NULL, @@ -179,10 +123,10 @@ beforeAll(async () => { CONSTRAINT \`supplier_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`users\` ( \`id\` int, \`name\` text, @@ -190,10 +134,10 @@ beforeAll(async () => { CONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`posts\` ( \`id\` int, \`name\` text, @@ -202,67 +146,67 @@ beforeAll(async () => { CONSTRAINT \`posts_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` ALTER TABLE \`order_detail\` ADD CONSTRAINT \`order_detail_order_id_order_id_fk\` FOREIGN KEY (\`order_id\`) REFERENCES \`order\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` ALTER TABLE \`order_detail\` ADD CONSTRAINT \`order_detail_product_id_product_id_fk\` FOREIGN KEY (\`product_id\`) REFERENCES \`product\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` ALTER TABLE \`employee\` ADD CONSTRAINT \`employee_reports_to_employee_id_fk\` FOREIGN KEY (\`reports_to\`) REFERENCES \`employee\`(\`id\`) ON DELETE no action ON UPDATE no action; `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` ALTER TABLE \`order\` ADD CONSTRAINT \`order_customer_id_customer_id_fk\` FOREIGN KEY (\`customer_id\`) REFERENCES \`customer\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` ALTER TABLE \`order\` ADD CONSTRAINT \`order_employee_id_employee_id_fk\` FOREIGN KEY (\`employee_id\`) REFERENCES \`employee\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` ALTER TABLE \`product\` ADD CONSTRAINT \`product_supplier_id_supplier_id_fk\` FOREIGN KEY (\`supplier_id\`) REFERENCES \`supplier\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` ALTER TABLE \`users\` ADD CONSTRAINT \`users_invitedBy_users_id_fk\` FOREIGN KEY (\`invitedBy\`) REFERENCES \`users\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` ALTER TABLE \`posts\` ADD CONSTRAINT \`posts_userId_users_id_fk\` FOREIGN KEY (\`userId\`) REFERENCES \`users\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, - ); -}); + ); + + resolveFunc(''); + } -afterAll(async () => { - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); + await promise; }); -afterEach(async () => { +test.afterEach(async ({ db }) => { await reset(db, schema); }); -test('basic seed test', async () => { +test('basic seed test', async ({ db }) => { await seed(db, schema); const customers = await db.select().from(schema.customers); @@ -280,7 +224,7 @@ test('basic seed test', async () => { expect(suppliers.length).toBe(10); }); -test('seed with options.count:11 test', async () => { +test('seed with options.count:11 test', async ({ db }) => { await seed(db, schema, { count: 11 }); const customers = await db.select().from(schema.customers); @@ -298,7 +242,7 @@ test('seed with options.count:11 test', async () => { expect(suppliers.length).toBe(11); }); -test('redefine(refine) customers count', async () => { +test('redefine(refine) customers count', async ({ db }) => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, @@ -320,7 +264,7 @@ test('redefine(refine) customers count', async () => { expect(suppliers.length).toBe(11); }); -test('redefine(refine) all tables count', async () => { +test('redefine(refine) all tables count', async ({ db }) => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, @@ -357,7 +301,7 @@ test('redefine(refine) all tables count', async () => { expect(suppliers.length).toBe(17); }); -test("redefine(refine) orders count using 'with' in customers", async () => { +test("redefine(refine) orders count using 'with' in customers", async ({ db }) => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, @@ -385,7 +329,7 @@ test("redefine(refine) orders count using 'with' in customers", async () => { expect(suppliers.length).toBe(11); }); -test("sequential using of 'with'", async () => { +test("sequential using of 'with'", async ({ db }) => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, @@ -416,7 +360,7 @@ test("sequential using of 'with'", async () => { expect(suppliers.length).toBe(11); }); -test('overlapping a foreign key constraint with a one-to-many relation', async () => { +test('overlapping a foreign key constraint with a one-to-many relation', async ({ db }) => { const postsRelation = relations(schema.posts, ({ one }) => ({ user: one(schema.users, { fields: [schema.posts.userId], references: [schema.users.id] }), })); diff --git a/drizzle-seed/tests/mysql/softRelationsTest/softRelations.test.ts b/drizzle-seed/tests/mysql/softRelationsTest/softRelations.test.ts index 7f61b80eb0..28a31a398e 100644 --- a/drizzle-seed/tests/mysql/softRelationsTest/softRelations.test.ts +++ b/drizzle-seed/tests/mysql/softRelationsTest/softRelations.test.ts @@ -1,76 +1,20 @@ -import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; -import getPort from 'get-port'; -import type { Connection } from 'mysql2/promise'; -import { createConnection } from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; +import { expect } from 'vitest'; import { reset, seed } from '../../../src/index.ts'; +import { mysqlTest as test } from '../instrumentation.ts'; import * as schema from './mysqlSchema.ts'; -let mysqlContainer: Docker.Container; -let client: Connection; -let db: MySql2Database; - -async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -beforeAll(async () => { - const connectionString = await createDockerDB(); - - const sleep = 1000; - let timeLeft = 40000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await createConnection(connectionString); - await client.connect(); - db = drizzle(client); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); - throw lastError; - } +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`customer\` ( \`id\` varchar(256) NOT NULL, \`company_name\` text NOT NULL, @@ -86,10 +30,10 @@ beforeAll(async () => { CONSTRAINT \`customer_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`order_detail\` ( \`unit_price\` float NOT NULL, \`quantity\` int NOT NULL, @@ -98,10 +42,10 @@ beforeAll(async () => { \`product_id\` int NOT NULL ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`employee\` ( \`id\` int NOT NULL, \`last_name\` text NOT NULL, @@ -122,10 +66,10 @@ beforeAll(async () => { CONSTRAINT \`employee_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`order\` ( \`id\` int NOT NULL, \`order_date\` timestamp NOT NULL, @@ -143,10 +87,10 @@ beforeAll(async () => { CONSTRAINT \`order_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`product\` ( \`id\` int NOT NULL, \`name\` text NOT NULL, @@ -160,10 +104,10 @@ beforeAll(async () => { CONSTRAINT \`product_id\` PRIMARY KEY(\`id\`) ); `, - ); + ); - await db.execute( - sql` + await db.execute( + sql` CREATE TABLE \`supplier\` ( \`id\` int NOT NULL, \`company_name\` text NOT NULL, @@ -178,15 +122,15 @@ beforeAll(async () => { CONSTRAINT \`supplier_id\` PRIMARY KEY(\`id\`) ); `, - ); -}); + ); + + resolveFunc(''); + } -afterAll(async () => { - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); + await promise; }); -afterEach(async () => { +test.afterEach(async ({ db }) => { await reset(db, schema); }); @@ -230,7 +174,7 @@ const checkSoftRelations = ( expect(detailsPredicate2).toBe(true); }; -test('basic seed, soft relations test', async () => { +test('basic seed, soft relations test', async ({ db }) => { await seed(db, schema); const customers = await db.select().from(schema.customers); @@ -250,7 +194,7 @@ test('basic seed, soft relations test', async () => { checkSoftRelations(customers, details, employees, orders, products, suppliers); }); -test("redefine(refine) orders count using 'with' in customers, soft relations test", async () => { +test("redefine(refine) orders count using 'with' in customers, soft relations test", async ({ db }) => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, @@ -280,7 +224,7 @@ test("redefine(refine) orders count using 'with' in customers, soft relations te checkSoftRelations(customers, details, employees, orders, products, suppliers); }); -test("sequential using of 'with', soft relations test", async () => { +test("sequential using of 'with', soft relations test", async ({ db }) => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, diff --git a/drizzle-seed/tests/northwind/mysqlTest.ts b/drizzle-seed/tests/northwind/mysqlTest.ts index 1cbdb77044..848ee8d6fd 100644 --- a/drizzle-seed/tests/northwind/mysqlTest.ts +++ b/drizzle-seed/tests/northwind/mysqlTest.ts @@ -20,7 +20,7 @@ const mysqlPool = mysql.createPool({ // ssl: { rejectUnauthorized: false } }); -const db = drizzle(mysqlPool); +const db = drizzle({ client: mysqlPool }); console.log('database connection was established successfully.'); diff --git a/drizzle-seed/tests/northwind/pgTest.ts b/drizzle-seed/tests/northwind/pgTest.ts index 84c366b6c9..ac56dbb308 100644 --- a/drizzle-seed/tests/northwind/pgTest.ts +++ b/drizzle-seed/tests/northwind/pgTest.ts @@ -19,7 +19,7 @@ const pgPool = new PgPool({ // ssl: true }); -const db = drizzle(pgPool); +const db = drizzle({ client: pgPool }); console.log('database connection was established successfully.'); diff --git a/drizzle-seed/tests/northwind/sqliteSchema.ts b/drizzle-seed/tests/northwind/sqliteSchema.ts index fa00dd3651..494893ea05 100644 --- a/drizzle-seed/tests/northwind/sqliteSchema.ts +++ b/drizzle-seed/tests/northwind/sqliteSchema.ts @@ -34,12 +34,10 @@ export const employees = sqliteTable( reportsTo: integer('reports_to'), photoPath: text('photo_path'), }, - (table) => ({ - reportsToFk: foreignKey(() => ({ - columns: [table.reportsTo], - foreignColumns: [table.id], - })), - }), + (table) => [foreignKey({ + columns: [table.reportsTo], + foreignColumns: [table.id], + })], ); export const orders = sqliteTable('order', { diff --git a/drizzle-seed/tests/northwind/sqliteTest.ts b/drizzle-seed/tests/northwind/sqliteTest.ts index 0267bc2887..096ab896f8 100644 --- a/drizzle-seed/tests/northwind/sqliteTest.ts +++ b/drizzle-seed/tests/northwind/sqliteTest.ts @@ -10,7 +10,7 @@ import * as schema from './sqliteSchema.ts'; const { Sqlite_PATH } = process.env; const sqliteDb = betterSqlite3(Sqlite_PATH); -const db = drizzle(sqliteDb); +const db = drizzle({ client: sqliteDb }); console.log('database connection was established successfully.'); diff --git a/drizzle-seed/tests/pg/allDataTypesTest/pgPostgisSchema.ts b/drizzle-seed/tests/pg/allDataTypesTest/pgPostgisSchema.ts new file mode 100644 index 0000000000..74c0904b59 --- /dev/null +++ b/drizzle-seed/tests/pg/allDataTypesTest/pgPostgisSchema.ts @@ -0,0 +1,9 @@ +import { geometry, pgTable } from 'drizzle-orm/pg-core'; + +export const allDataTypes = pgTable('postgis_data_types', { + geometry: geometry('geometry', { type: 'point', mode: 'tuple', srid: 0 }), +}); + +export const allArrayDataTypes = pgTable('postgis_array_data_types', { + geometryArray: geometry('geometry_array', { type: 'point', mode: 'tuple', srid: 0 }).array(1), +}); diff --git a/drizzle-seed/tests/pg/allDataTypesTest/pgSchema.ts b/drizzle-seed/tests/pg/allDataTypesTest/pgSchema.ts index 16a55baf4d..9a48137a75 100644 --- a/drizzle-seed/tests/pg/allDataTypesTest/pgSchema.ts +++ b/drizzle-seed/tests/pg/allDataTypesTest/pgSchema.ts @@ -1,11 +1,13 @@ import { bigint, bigserial, + bit, boolean, char, date, decimal, doublePrecision, + inet, integer, interval, json, @@ -13,7 +15,7 @@ import { line, numeric, pgEnum, - pgSchema, + pgTable, point, real, serial, @@ -24,13 +26,12 @@ import { timestamp, uuid, varchar, + vector, } from 'drizzle-orm/pg-core'; -export const schema = pgSchema('seeder_lib_pg'); - export const moodEnum = pgEnum('mood_enum', ['sad', 'ok', 'happy']); -export const allDataTypes = schema.table('all_data_types', { +export const allDataTypes = pgTable('all_data_types', { integer: integer('integer'), smallint: smallint('smallint'), biginteger: bigint('bigint', { mode: 'bigint' }), @@ -39,14 +40,15 @@ export const allDataTypes = schema.table('all_data_types', { smallserial: smallserial('smallserial'), bigserial: bigserial('bigserial', { mode: 'bigint' }), bigserialNumber: bigserial('bigserial_number', { mode: 'number' }), - boolean: boolean('boolean'), - text: text('text'), - varchar: varchar('varchar', { length: 256 }), - char: char('char', { length: 256 }), numeric: numeric('numeric'), decimal: decimal('decimal'), real: real('real'), doublePrecision: doublePrecision('double_precision'), + boolean: boolean('boolean'), + text: text('text'), + char: char('char', { length: 256 }), + varchar: varchar('varchar', { length: 256 }), + bit: bit('bit', { dimensions: 11 }), json: json('json'), jsonb: jsonb('jsonb'), time: time('time'), @@ -61,21 +63,25 @@ export const allDataTypes = schema.table('all_data_types', { lineTuple: line('line_tuple', { mode: 'tuple' }), moodEnum: moodEnum('mood_enum'), uuid: uuid('uuid'), + inet: inet('inet'), + // geometry: geometry('geometry', { type: 'point', mode: 'tuple', srid: 0 }), + vector: vector('vector', { dimensions: 3 }), }); -export const allArrayDataTypes = schema.table('all_array_data_types', { +export const allArrayDataTypes = pgTable('all_array_data_types', { integerArray: integer('integer_array').array(), smallintArray: smallint('smallint_array').array(), bigintegerArray: bigint('bigint_array', { mode: 'bigint' }).array(), bigintNumberArray: bigint('bigint_number_array', { mode: 'number' }).array(), - booleanArray: boolean('boolean_array').array(), - textArray: text('text_array').array(), - varcharArray: varchar('varchar_array', { length: 256 }).array(), - charArray: char('char_array', { length: 256 }).array(), numericArray: numeric('numeric_array').array(), decimalArray: decimal('decimal_array').array(), realArray: real('real_array').array(), doublePrecisionArray: doublePrecision('double_precision_array').array(), + booleanArray: boolean('boolean_array').array(), + charArray: char('char_array', { length: 256 }).array(), + varcharArray: varchar('varchar_array', { length: 256 }).array(), + textArray: text('text_array').array(), + bitArray: bit('bit_array', { dimensions: 11 }).array(), jsonArray: json('json_array').array(), jsonbArray: jsonb('jsonb_array').array(), timeArray: time('time_array').array(), @@ -89,16 +95,19 @@ export const allArrayDataTypes = schema.table('all_array_data_types', { lineArray: line('line_array', { mode: 'abc' }).array(), lineTupleArray: line('line_tuple_array', { mode: 'tuple' }).array(), moodEnumArray: moodEnum('mood_enum_array').array(), + uuidArray: uuid('uuid_array').array(), + inetArray: inet('inet_array').array(), + // geometryArray: geometry('geometry_array', { type: 'point', mode: 'tuple', srid: 0 }).array(1), }); -export const ndArrays = schema.table('nd_arrays', { +export const ndArrays = pgTable('nd_arrays', { integer1DArray: integer('integer_1d_array').array(3), integer2DArray: integer('integer_2d_array').array(3).array(4), integer3DArray: integer('integer_3d_array').array(3).array(4).array(5), integer4DArray: integer('integer_4d_array').array(3).array(4).array(5).array(6), }); -export const intervals = schema.table('intervals', { +export const intervals = pgTable('intervals', { intervalYear: interval({ fields: 'year' }), intervalYearToMonth: interval({ fields: 'year to month' }), intervalMonth: interval({ fields: 'month' }), diff --git a/drizzle-seed/tests/pg/allDataTypesTest/pg_all_data_types.test.ts b/drizzle-seed/tests/pg/allDataTypesTest/pg_all_data_types.test.ts index 62d0895c0e..b434dcf1f8 100644 --- a/drizzle-seed/tests/pg/allDataTypesTest/pg_all_data_types.test.ts +++ b/drizzle-seed/tests/pg/allDataTypesTest/pg_all_data_types.test.ts @@ -1,4 +1,5 @@ import { PGlite } from '@electric-sql/pglite'; +import { vector } from '@electric-sql/pglite/vector'; import { sql } from 'drizzle-orm'; import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; @@ -10,16 +11,18 @@ let client: PGlite; let db: PgliteDatabase; beforeAll(async () => { - client = new PGlite(); + client = new PGlite({ + extensions: { vector }, + }); - db = drizzle(client); + await client.query(`CREATE EXTENSION vector;`); - await db.execute(sql`CREATE SCHEMA if not exists "seeder_lib_pg";`); + db = drizzle({ client }); await db.execute( sql` DO $$ BEGIN - CREATE TYPE "seeder_lib_pg"."mood_enum" AS ENUM('sad', 'ok', 'happy'); + CREATE TYPE "mood_enum" AS ENUM('sad', 'ok', 'happy'); EXCEPTION WHEN duplicate_object THEN null; END $$; @@ -28,7 +31,7 @@ beforeAll(async () => { await db.execute( sql` - CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."all_data_types" ( + CREATE TABLE IF NOT EXISTS "all_data_types" ( "integer" integer, "smallint" smallint, "bigint" bigint, @@ -37,14 +40,15 @@ beforeAll(async () => { "smallserial" smallserial, "bigserial" bigserial, "bigserial_number" bigserial, - "boolean" boolean, - "text" text, - "varchar" varchar(256), - "char" char(256), "numeric" numeric, "decimal" numeric, "real" real, "double_precision" double precision, + "boolean" boolean, + "char" char(256), + "varchar" varchar(256), + "text" text, + "bit" bit(11), "json" json, "jsonb" jsonb, "time" time, @@ -57,27 +61,30 @@ beforeAll(async () => { "point_tuple" "point", "line" "line", "line_tuple" "line", - "mood_enum" "seeder_lib_pg"."mood_enum", - "uuid" "uuid" + "mood_enum" "mood_enum", + "uuid" "uuid", + "inet" inet, + "vector" vector(3) ); `, ); await db.execute( sql` - CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."all_array_data_types" ( + CREATE TABLE IF NOT EXISTS "all_array_data_types" ( "integer_array" integer[], "smallint_array" smallint[], "bigint_array" bigint[], "bigint_number_array" bigint[], - "boolean_array" boolean[], - "text_array" text[], - "varchar_array" varchar(256)[], - "char_array" char(256)[], "numeric_array" numeric[], "decimal_array" numeric[], "real_array" real[], "double_precision_array" double precision[], + "boolean_array" boolean[], + "char_array" char(256)[], + "varchar_array" varchar(256)[], + "text_array" text[], + "bit_array" bit(11)[], "json_array" json[], "jsonb_array" jsonb[], "time_array" time[], @@ -90,14 +97,16 @@ beforeAll(async () => { "point_tuple_array" "point"[], "line_array" "line"[], "line_tuple_array" "line"[], - "mood_enum_array" "seeder_lib_pg"."mood_enum"[] + "mood_enum_array" "mood_enum"[], + "uuid_array" uuid[], + "inet_array" inet[] ); `, ); await db.execute( sql` - CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."nd_arrays" ( + CREATE TABLE IF NOT EXISTS "nd_arrays" ( "integer_1d_array" integer[3], "integer_2d_array" integer[3][4], "integer_3d_array" integer[3][4][5], @@ -108,7 +117,7 @@ beforeAll(async () => { await db.execute( sql` - CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."intervals" ( + CREATE TABLE IF NOT EXISTS "intervals" ( "intervalYear" interval year, "intervalYearToMonth" interval year to month, "intervalMonth" interval month, diff --git a/drizzle-seed/tests/pg/allDataTypesTest/postgis_data_types.test.ts b/drizzle-seed/tests/pg/allDataTypesTest/postgis_data_types.test.ts new file mode 100644 index 0000000000..0454079410 --- /dev/null +++ b/drizzle-seed/tests/pg/allDataTypesTest/postgis_data_types.test.ts @@ -0,0 +1,47 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { seed } from '../../../src/index.ts'; +import { pgPostgisTest as test } from '../instrumentation.ts'; +import * as schema from './pgPostgisSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute(sql`CREATE EXTENSION IF NOT EXISTS postgis;`); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "postgis_data_types" ( + "geometry" geometry(point, 0) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "postgis_array_data_types" ( + "geometry_array" geometry(point, 0)[] + ); + `, + ); + + resolveFunc(''); + } + await promise; +}); + +test('postgis data types test', async ({ db }) => { + await seed(db, { allDataTypes: schema.allDataTypes }, { count: 10000 }); + + const allDataTypes = await db.select().from(schema.allDataTypes); + // every value in each rows does not equal undefined. + const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/pg/compositeUniqueKey/pg.test.ts b/drizzle-seed/tests/pg/compositeUniqueKey/pg.test.ts new file mode 100644 index 0000000000..8ef02ac3fa --- /dev/null +++ b/drizzle-seed/tests/pg/compositeUniqueKey/pg.test.ts @@ -0,0 +1,197 @@ +import { PGlite } from '@electric-sql/pglite'; +import { sql } from 'drizzle-orm'; +import type { PgliteDatabase } from 'drizzle-orm/pglite'; +import { drizzle } from 'drizzle-orm/pglite'; +import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import * as schema from './pgSchema.ts'; + +let client: PGlite; +let db: PgliteDatabase; + +beforeAll(async () => { + client = new PGlite(); + + db = drizzle({ client }); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "composite_example" ( + "id" integer not null, + "name" text not null, + CONSTRAINT "composite_example_id_name_unique" UNIQUE("id","name"), + CONSTRAINT "custom_name" UNIQUE("id","name") + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "unique_column_in_composite_of_two_0" ( + "id" integer not null unique, + "name" text not null, + CONSTRAINT "custom_name0" UNIQUE("id","name") + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "unique_column_in_composite_of_two_1" ( + "id" integer not null, + "name" text not null, + CONSTRAINT "custom_name1" UNIQUE("id","name"), + CONSTRAINT "custom_name1_id" UNIQUE("id") + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "unique_column_in_composite_of_three_0" ( + "id" integer not null unique, + "name" text not null, + "slug" text not null, + CONSTRAINT "custom_name2" UNIQUE("id","name","slug") + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "unique_column_in_composite_of_three_1" ( + "id" integer not null, + "name" text not null, + "slug" text not null, + CONSTRAINT "custom_name3" UNIQUE("id","name","slug"), + CONSTRAINT "custom_name3_id" UNIQUE("id") + ); + `, + ); +}); + +afterEach(async () => { + await reset(db, schema); +}); + +afterAll(async () => { + await client.close(); +}); + +test('basic seed test', async () => { + const currSchema = { composite: schema.composite }; + await seed(db, currSchema, { count: 16 }); + + let composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 17 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(17); + await reset(db, currSchema); +}); + +test('unique column in composite of 2 columns', async () => { + const currSchema0 = { uniqueColumnInCompositeOfTwo0: schema.uniqueColumnInCompositeOfTwo0 }; + await seed(db, currSchema0, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo0); + + expect(composite.length).toBe(4); + await reset(db, currSchema0); + + const currSchema1 = { uniqueColumnInCompositeOfTwo1: schema.uniqueColumnInCompositeOfTwo1 }; + await seed(db, currSchema1, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo1); + + expect(composite.length).toBe(4); + await reset(db, currSchema1); +}); + +test('unique column in composite of 3 columns', async () => { + const currSchema0 = { uniqueColumnInCompositeOfThree0: schema.uniqueColumnInCompositeOfThree0 }; + await seed(db, currSchema0, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfThree0); + + expect(composite.length).toBe(16); + await reset(db, currSchema0); + + const currSchema1 = { uniqueColumnInCompositeOfThree1: schema.uniqueColumnInCompositeOfThree1 }; + await seed(db, currSchema1, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfThree1); + + expect(composite.length).toBe(16); + await reset(db, currSchema1); +}); diff --git a/drizzle-seed/tests/pg/compositeUniqueKey/pgSchema.ts b/drizzle-seed/tests/pg/compositeUniqueKey/pgSchema.ts new file mode 100644 index 0000000000..8e70ca4c6a --- /dev/null +++ b/drizzle-seed/tests/pg/compositeUniqueKey/pgSchema.ts @@ -0,0 +1,40 @@ +import { integer, pgTable, text, unique, varchar } from 'drizzle-orm/pg-core'; + +export const composite = pgTable('composite_example', { + id: integer('id').notNull(), + name: text('name').notNull(), +}, (t) => [ + unique('custom_name').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo0 = pgTable('unique_column_in_composite_of_two_0', { + id: integer('id').notNull().unique(), + name: varchar('name', { length: 8 }).notNull(), +}, (t) => [ + unique('custom_name0').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo1 = pgTable('unique_column_in_composite_of_two_1', { + id: integer('id').notNull(), + name: text('name').notNull(), +}, (t) => [ + unique('custom_name1').on(t.id, t.name), + unique('custom_name1_id').on(t.id), +]); + +export const uniqueColumnInCompositeOfThree0 = pgTable('unique_column_in_composite_of_three_0', { + id: integer('id').notNull().unique(), + name: text('name').notNull(), + slug: varchar('slug').notNull(), +}, (t) => [ + unique('custom_name2').on(t.id, t.name, t.slug), +]); + +export const uniqueColumnInCompositeOfThree1 = pgTable('unique_column_in_composite_of_three_1', { + id: integer('id').notNull(), + name: text('name').notNull(), + slug: varchar('slug').notNull(), +}, (t) => [ + unique('custom_name3').on(t.id, t.name, t.slug), + unique('custom_name3_id').on(t.id), +]); diff --git a/drizzle-seed/tests/pg/cyclicTables/cyclicTables.test.ts b/drizzle-seed/tests/pg/cyclicTables/cyclicTables.test.ts index c4be3509e8..eaf57f209e 100644 --- a/drizzle-seed/tests/pg/cyclicTables/cyclicTables.test.ts +++ b/drizzle-seed/tests/pg/cyclicTables/cyclicTables.test.ts @@ -12,7 +12,7 @@ let db: PgliteDatabase; beforeAll(async () => { client = new PGlite(); - db = drizzle(client); + db = drizzle({ client }); await db.execute( sql` diff --git a/drizzle-seed/tests/pg/generatorsTest/generators.test.ts b/drizzle-seed/tests/pg/generatorsTest/generators.test.ts index 3de2ce99ec..b049b5d874 100644 --- a/drizzle-seed/tests/pg/generatorsTest/generators.test.ts +++ b/drizzle-seed/tests/pg/generatorsTest/generators.test.ts @@ -1,6 +1,7 @@ import { afterAll, beforeAll, expect, test } from 'vitest'; import { PGlite } from '@electric-sql/pglite'; +import { vector } from '@electric-sql/pglite/vector'; import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; @@ -17,9 +18,11 @@ let client: PGlite; let db: PgliteDatabase; beforeAll(async () => { - client = new PGlite(); + client = new PGlite({ extensions: { vector } }); - db = drizzle(client); + await client.query('CREATE EXTENSION IF NOT EXISTS vector;'); + + db = drizzle({ client }); await db.execute(sql`CREATE SCHEMA "seeder_lib_pg";`); @@ -631,6 +634,106 @@ beforeAll(async () => { ); `, ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."bit_string_table" ( + "bit" bit(12) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."bit_string_unique_table" ( + "bit" bit(12) unique + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."bit_string_array_table" ( + "bit" bit(12)[] + ); + `, + ); + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."inet_table" ( + "inet" inet + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."inet_unique_table" ( + "inet" inet unique + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."inet_array_table" ( + "inet" inet[] + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."vector_table" ( + "vector" vector(12) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."vector_unique_table" ( + "vector" vector(12) unique + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."vector_array_table" ( + "vector" vector(12)[] + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."composite_unique_key_table" ( + "number" real, + "int" integer, + "interval" interval, + "string" varchar(256), + "first_name" varchar(256), + "last_name" varchar(256), + "full_name" varchar(256), + "country" varchar(256), + "city" varchar(256), + "street_address" varchar(256), + "postcode" varchar(256), + "company_name" varchar(256), + "phone_number" varchar(256), + "email" varchar(256), + "uuid" uuid, + "bit" bit(12), + "inet" inet, + "vector" vector(12), + "values_from_array" varchar(256), + -- "point" "point", + -- "line" "line", + CONSTRAINT "custom_name" UNIQUE("number","int","interval","string","first_name","last_name","full_name","country","city","street_address","postcode","company_name","phone_number","email","uuid","bit","inet","vector","values_from_array") + ); + `, + ); }); afterAll(async () => { @@ -727,8 +830,8 @@ test('valuesFromArray unique generator test', async () => { }), valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ - { values: lastNames.slice(0, 14920), weight: 0.3 }, - { values: lastNames.slice(14920), weight: 0.7 }, + { values: lastNames.slice(0, 14894), weight: 0.3 }, + { values: lastNames.slice(14894), weight: 0.7 }, ], isUnique: true, }), @@ -2095,3 +2198,238 @@ test('uuid array generator test', async () => { && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); + +test('bitString generator test', async () => { + await reset(db, { bitStringTable: schema.bitStringTable }); + await seed(db, { bitStringTable: schema.bitStringTable }).refine((funcs) => ({ + bitStringTable: { + count, + columns: { + bit: funcs.bitString({ + dimensions: 12, + }), + }, + }, + })); + + const data = await db.select().from(schema.bitStringTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('bitString unique generator test', async () => { + await reset(db, { bitStringUniqueTable: schema.bitStringUniqueTable }); + await seed(db, { bitStringUniqueTable: schema.bitStringUniqueTable }).refine((funcs) => ({ + bitStringUniqueTable: { + count, + columns: { + bit: funcs.bitString({ + isUnique: true, + dimensions: 12, + }), + }, + }, + })); + + const data = await db.select().from(schema.bitStringUniqueTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('bitString array generator test', async () => { + await reset(db, { bitStringArrayTable: schema.bitStringArrayTable }); + await seed(db, { bitStringArrayTable: schema.bitStringArrayTable }).refine((funcs) => ({ + bitStringArrayTable: { + count, + columns: { + bit: funcs.bitString({ + arraySize: 4, + dimensions: 12, + }), + }, + }, + })); + + const data = await db.select().from(schema.bitStringArrayTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('inet generator test', async () => { + await reset(db, { inetTable: schema.inetTable }); + await seed(db, { inetTable: schema.inetTable }).refine((funcs) => ({ + inetTable: { + count, + columns: { + inet: funcs.inet({ + ipAddress: 'ipv4', + includeCidr: true, + }), + }, + }, + })); + + const data = await db.select().from(schema.inetTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('inet unique generator test', async () => { + await reset(db, { inetUniqueTable: schema.inetUniqueTable }); + await seed(db, { inetUniqueTable: schema.inetUniqueTable }).refine((funcs) => ({ + inetUniqueTable: { + count, + columns: { + inet: funcs.inet({ + isUnique: true, + ipAddress: 'ipv4', + includeCidr: true, + }), + }, + }, + })); + + const data = await db.select().from(schema.inetUniqueTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('inet array generator test', async () => { + await reset(db, { inetArrayTable: schema.inetArrayTable }); + await seed(db, { inetArrayTable: schema.inetArrayTable }).refine((funcs) => ({ + inetArrayTable: { + count, + columns: { + inet: funcs.inet({ + arraySize: 4, + ipAddress: 'ipv4', + includeCidr: true, + }), + }, + }, + })); + + const data = await db.select().from(schema.inetArrayTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('vector generator test', async () => { + await reset(db, { vectorTable: schema.vectorTable }); + await seed(db, { vectorTable: schema.vectorTable }).refine((funcs) => ({ + vectorTable: { + count, + columns: { + vector: funcs.vector({ + decimalPlaces: 5, + dimensions: 12, + minValue: -100, + maxValue: 100, + }), + }, + }, + })); + + const data = await db.select().from(schema.vectorTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('vector unique generator test', async () => { + await reset(db, { vectorUniqueTable: schema.vectorUniqueTable }); + await seed(db, { vectorUniqueTable: schema.vectorUniqueTable }).refine((funcs) => ({ + vectorUniqueTable: { + count, + columns: { + vector: funcs.vector({ + isUnique: true, + decimalPlaces: 5, + dimensions: 12, + minValue: -100, + maxValue: 100, + }), + }, + }, + })); + + const data = await db.select().from(schema.vectorUniqueTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('vector array generator test', async () => { + await reset(db, { vectorArrayTable: schema.vectorArrayTable }); + await seed(db, { vectorArrayTable: schema.vectorArrayTable }).refine((funcs) => ({ + vectorArrayTable: { + count, + columns: { + vector: funcs.vector({ + arraySize: 4, + decimalPlaces: 5, + dimensions: 12, + minValue: -100, + maxValue: 100, + }), + }, + }, + })); + + const data = await db.select().from(schema.vectorArrayTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('composite unique key generator test', async () => { + await reset(db, { compositeUniqueKeyTable: schema.compositeUniqueKeyTable }); + await seed(db, { compositeUniqueKeyTable: schema.compositeUniqueKeyTable }, { count: 10000 }).refine((funcs) => ({ + compositeUniqueKeyTable: { + columns: { + number: funcs.number(), + int: funcs.int(), + interval: funcs.interval(), + string: funcs.string(), + firstName: funcs.firstName(), + lastName: funcs.lastName(), + fullName: funcs.fullName(), + country: funcs.country(), + city: funcs.city(), + streetAddress: funcs.streetAddress(), + postcode: funcs.postcode(), + companyName: funcs.companyName(), + phoneNumber: funcs.phoneNumber(), + email: funcs.email(), + uuid: funcs.uuid(), + bit: funcs.bitString(), + inet: funcs.inet(), + vector: funcs.vector(), + valuesFromArray: funcs.valuesFromArray({ values: Array.from({ length: 20 }, (_, i) => String(i + 1)) }), + // point: funcs.point(), + // line: funcs.line(), + }, + }, + })); + + const data = await db.select().from(schema.compositeUniqueKeyTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/pg/generatorsTest/pgPostgisSchema.ts b/drizzle-seed/tests/pg/generatorsTest/pgPostgisSchema.ts new file mode 100644 index 0000000000..b150c3f1de --- /dev/null +++ b/drizzle-seed/tests/pg/generatorsTest/pgPostgisSchema.ts @@ -0,0 +1,24 @@ +import { geometry, integer, pgTable, unique } from 'drizzle-orm/pg-core'; + +export const geometryTable = pgTable('geometry_table', { + geometryPointTuple: geometry('geometry_point_tuple', { type: 'point', mode: 'tuple', srid: 0 }), + geometryPointXy: geometry('geometry_point_xy', { type: 'point', mode: 'xy', srid: 0 }), +}); +export const geometryUniqueTable = pgTable('geometry_unique_table', { + geometryPointTuple: geometry('geometry_point_tuple', { type: 'point', mode: 'tuple', srid: 0 }).unique(), + geometryPointXy: geometry('geometry_point_xy', { type: 'point', mode: 'xy', srid: 0 }).unique(), +}); +export const geometryArrayTable = pgTable('geometry_array_table', { + geometryPointTuple: geometry('geometry_point_tuple', { type: 'point', mode: 'tuple', srid: 0 }).array(), + geometryPointXy: geometry('geometry_point_xy', { type: 'point', mode: 'xy', srid: 0 }).array(), +}); + +export const compositeUniqueKeyTable = pgTable('composite_unique_key_table', { + id: integer('id'), + geometryPoint: geometry('geometry_point', { type: 'point' }), +}, (table) => [ + unique().on( + table.id, + table.geometryPoint, + ), +]); diff --git a/drizzle-seed/tests/pg/generatorsTest/pgSchema.ts b/drizzle-seed/tests/pg/generatorsTest/pgSchema.ts index 48902ac6e3..a5b9aaeb97 100644 --- a/drizzle-seed/tests/pg/generatorsTest/pgSchema.ts +++ b/drizzle-seed/tests/pg/generatorsTest/pgSchema.ts @@ -1,6 +1,8 @@ import { + bit, boolean, date, + inet, integer, interval, json, @@ -11,8 +13,10 @@ import { text, time, timestamp, + unique, uuid, varchar, + vector, } from 'drizzle-orm/pg-core'; export const schema = pgSchema('seeder_lib_pg'); @@ -320,3 +324,81 @@ export const uuidTable = schema.table('uuid_table', { export const uuidArrayTable = schema.table('uuid_array_table', { uuid: uuid('uuid').array(), }); + +export const bitStringTable = schema.table('bit_string_table', { + bit: bit('bit', { dimensions: 12 }), +}); +export const bitStringUniqueTable = schema.table('bit_string_unique_table', { + bit: bit('bit', { dimensions: 12 }).unique(), +}); +export const bitStringArrayTable = schema.table('bit_string_array_table', { + bit: bit('bit', { dimensions: 12 }).array(), +}); + +export const inetTable = schema.table('inet_table', { + inet: inet('inet'), +}); +export const inetUniqueTable = schema.table('inet_unique_table', { + inet: inet('inet').unique(), +}); +export const inetArrayTable = schema.table('inet_array_table', { + inet: inet('inet').array(), +}); + +export const vectorTable = schema.table('vector_table', { + vector: vector('vector', { dimensions: 12 }), +}); +export const vectorUniqueTable = schema.table('vector_unique_table', { + vector: vector('vector', { dimensions: 12 }).unique(), +}); +export const vectorArrayTable = schema.table('vector_array_table', { + vector: vector('vector', { dimensions: 12 }).array(), +}); + +export const compositeUniqueKeyTable = schema.table('composite_unique_key_table', { + number: real('number'), + int: integer('int'), + interval: interval('interval'), + string: varchar('string', { length: 256 }), + firstName: varchar('first_name', { length: 256 }), + lastName: varchar('last_name', { length: 256 }), + fullName: varchar('full_name', { length: 256 }), + country: varchar('country', { length: 256 }), + city: varchar('city', { length: 256 }), + streetAddress: varchar('street_address', { length: 256 }), + postcode: varchar('postcode', { length: 256 }), + companyName: varchar('company_name', { length: 256 }), + phoneNumber: varchar('phone_number', { length: 256 }), + email: varchar('email', { length: 256 }), + uuid: uuid('uuid'), + bit: bit('bit', { dimensions: 12 }), + inet: inet('inet'), + vector: vector('vector', { dimensions: 12 }), + valuesFromArray: varchar('values_from_array', { length: 256 }), + // point: point('point'), + // line: line('line'), +}, (table) => [ + unique().on( + table.number, + table.int, + table.interval, + table.string, + table.firstName, + table.lastName, + table.fullName, + table.country, + table.city, + table.streetAddress, + table.postcode, + table.companyName, + table.phoneNumber, + table.email, + table.uuid, + table.bit, + table.inet, + table.vector, + table.valuesFromArray, + // table.point, + // table.line, + ), +]); diff --git a/drizzle-seed/tests/pg/generatorsTest/postgisGenerators.test.ts b/drizzle-seed/tests/pg/generatorsTest/postgisGenerators.test.ts new file mode 100644 index 0000000000..322abab317 --- /dev/null +++ b/drizzle-seed/tests/pg/generatorsTest/postgisGenerators.test.ts @@ -0,0 +1,162 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { pgPostgisTest as test } from '../instrumentation.ts'; +import * as schema from './pgPostgisSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "geometry_table" ( + "geometry_point_tuple" geometry(point, 0), + "geometry_point_xy" geometry(point, 0) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "geometry_unique_table" ( + "geometry_point_tuple" geometry(point, 0) unique, + "geometry_point_xy" geometry(point, 0) unique + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "geometry_array_table" ( + "geometry_point_tuple" geometry(point, 0)[], + "geometry_point_xy" geometry(point, 0)[] + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE IF NOT EXISTS "composite_unique_key_table" ( + "id" integer, + "geometry_point" geometry(point, 0), + CONSTRAINT "custom_name" UNIQUE("id","geometry_point") + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +const count = 1000; + +test('geometry generator test', async ({ db }) => { + await reset(db, { geometryTable: schema.geometryTable }); + await seed(db, { geometryTable: schema.geometryTable }).refine((funcs) => ({ + geometryTable: { + count, + columns: { + geometryPointTuple: funcs.geometry({ + type: 'point', + srid: 4326, + decimalPlaces: 5, + }), + geometryPointXy: funcs.geometry({ + type: 'point', + srid: 4326, + decimalPlaces: 5, + }), + }, + }, + })); + + const data = await db.select().from(schema.geometryTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('geometry unique generator test', async ({ db }) => { + await reset(db, { geometryUniqueTable: schema.geometryUniqueTable }); + await seed(db, { geometryUniqueTable: schema.geometryUniqueTable }).refine((funcs) => ({ + geometryUniqueTable: { + count, + columns: { + geometryPointTuple: funcs.geometry({ + isUnique: true, + type: 'point', + srid: 4326, + decimalPlaces: 5, + }), + geometryPointXy: funcs.geometry({ + isUnique: true, + type: 'point', + srid: 4326, + decimalPlaces: 5, + }), + }, + }, + })); + + const data = await db.select().from(schema.geometryUniqueTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('geometry array generator test', async ({ db }) => { + await reset(db, { geometryArrayTable: schema.geometryArrayTable }); + await seed(db, { geometryArrayTable: schema.geometryArrayTable }).refine((funcs) => ({ + geometryArrayTable: { + count, + columns: { + geometryPointTuple: funcs.geometry({ + arraySize: 1, + type: 'point', + srid: 4326, + decimalPlaces: 5, + }), + geometryPointXy: funcs.geometry({ + arraySize: 1, + type: 'point', + srid: 4326, + decimalPlaces: 5, + }), + }, + }, + })); + + const data = await db.select().from(schema.geometryArrayTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('composite unique key generator test', async ({ db }) => { + await reset(db, { compositeUniqueKeyTable: schema.compositeUniqueKeyTable }); + await seed(db, { compositeUniqueKeyTable: schema.compositeUniqueKeyTable }, { count: 10000 }).refine((funcs) => ({ + compositeUniqueKeyTable: { + columns: { + id: funcs.int(), + geometryPoint: funcs.geometry({ type: 'point', srid: 4326 }), + }, + }, + })); + + const data = await db.select().from(schema.compositeUniqueKeyTable); + // every value in each row does not equal undefined. + const predicate = data.length !== 0 + && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/pg/instrumentation.ts b/drizzle-seed/tests/pg/instrumentation.ts new file mode 100644 index 0000000000..523125c213 --- /dev/null +++ b/drizzle-seed/tests/pg/instrumentation.ts @@ -0,0 +1,82 @@ +import { drizzle } from 'drizzle-orm/node-postgres'; +import type { PgDatabase } from 'drizzle-orm/pg-core'; +import { Client } from 'pg'; +import { test as base } from 'vitest'; + +const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, +) => { + const { diff } = await import('../../../drizzle-kit/tests/pg/mocks' as string); + + const res = await diff({}, schema, []); + for (const s of res.sqlStatements) { + await query(s, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +const prepareTest = () => { + return base.extend< + { + client: { + client: Client; + query: (sql: string, params: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + db: PgDatabase; + push: (schema: any) => Promise; + } + >({ + client: [ + // oxlint-disable-next-line + async ({}, use) => { + const envurl = process.env['PG_POSTGIS_CONNECTION_STRING']; + if (!envurl) throw new Error('No pg_postgis url provided'); + + const client = new Client(envurl); + await client.connect(); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res.rows as any[]; + }; + const batch = async (statements: string[]) => { + return client.query(statements.map((x) => x.endsWith(';') ? x : `${x};`).join('\n')).then(() => '' as any); + }; + + await query('drop schema if exists public cascade;'); + await query('create schema public;'); + await query('create extension if not exists postgis;'); + + await use({ client, query, batch }); + await client.end(); + }, + { scope: 'worker' }, + ], + db: [ + async ({ client }, use) => { + const db = drizzle({ client: client.client }); + await use(db as any); + }, + { scope: 'worker' }, + ], + push: [ + async ({ client }, use) => { + const { query } = client; + const push = ( + schema: any, + ) => _push(query, schema); + + await use(push); + }, + { scope: 'worker' }, + ], + }); +}; + +export const pgPostgisTest = prepareTest(); +export type Test = ReturnType; diff --git a/drizzle-seed/tests/pg/pg.test.ts b/drizzle-seed/tests/pg/pg.test.ts index 331cdf39df..1c058b8e5e 100644 --- a/drizzle-seed/tests/pg/pg.test.ts +++ b/drizzle-seed/tests/pg/pg.test.ts @@ -13,7 +13,7 @@ let db: PgliteDatabase; beforeAll(async () => { client = new PGlite(); - db = drizzle(client); + db = drizzle({ client }); await db.execute(sql`CREATE SCHEMA "seeder_lib_pg";`); await db.execute( diff --git a/drizzle-seed/tests/pg/softRelationsTest/softRelations.test.ts b/drizzle-seed/tests/pg/softRelationsTest/softRelations.test.ts index 2056478120..9f116c9575 100644 --- a/drizzle-seed/tests/pg/softRelationsTest/softRelations.test.ts +++ b/drizzle-seed/tests/pg/softRelationsTest/softRelations.test.ts @@ -12,7 +12,7 @@ let db: PgliteDatabase; beforeAll(async () => { client = new PGlite(); - db = drizzle(client); + db = drizzle({ client }); await db.execute(sql`CREATE SCHEMA "seeder_lib_pg";`); await db.execute( diff --git a/drizzle-seed/tests/pg/utils.ts b/drizzle-seed/tests/pg/utils.ts new file mode 100644 index 0000000000..fd9c789a29 --- /dev/null +++ b/drizzle-seed/tests/pg/utils.ts @@ -0,0 +1,33 @@ +import Docker from 'dockerode'; +import getPort from 'get-port'; + +export const createDockerPostgis = async () => { + const docker = new Docker(); + const port = await getPort(); + const image = 'postgis/postgis:16-3.4'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err: any) => err ? reject(err) : resolve(err)) + ); + + const user = 'postgres', password = 'postgres', database = 'postgres'; + const pgContainer = await docker.createContainer({ + Image: image, + Env: [`POSTGRES_USER=${user}`, `POSTGRES_PASSWORD=${password}`, `POSTGRES_DATABASE=${database}`], + name: `drizzle-seed-tests-${crypto.randomUUID()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '5432/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await pgContainer.start(); + + return { + url: `postgresql://postgres:postgres@127.0.0.1:${port}/postgres`, + container: pgContainer, + }; +}; diff --git a/drizzle-seed/tests/singlestore/allDataTypesTest/singlestoreSchema.ts b/drizzle-seed/tests/singlestore/allDataTypesTest/singlestoreSchema.ts new file mode 100644 index 0000000000..088692e325 --- /dev/null +++ b/drizzle-seed/tests/singlestore/allDataTypesTest/singlestoreSchema.ts @@ -0,0 +1,69 @@ +import { + bigint, + binary, + boolean, + char, + date, + datetime, + decimal, + double, + float, + int, + json, + longtext, + mediumint, + mediumtext, + real, + serial, + singlestoreEnum, + singlestoreTable, + smallint, + text, + time, + timestamp, + tinyint, + tinytext, + varbinary, + varchar, + vector, + year, +} from 'drizzle-orm/singlestore-core'; + +export const allDataTypes = singlestoreTable('all_data_types', { + int: int('int'), + tinyint: tinyint('tinyint'), + smallint: smallint('smallint'), + mediumint: mediumint('mediumint'), + biginteger: bigint('bigint', { mode: 'bigint' }), + bigintNumber: bigint('bigint_number', { mode: 'number' }), + real: real('real'), + decimal: decimal('decimal'), + double: double('double'), + float: float('float'), + serial: serial('serial'), + binary: binary('binary', { length: 255 }), + varbinary: varbinary('varbinary', { length: 256 }), + char: char('char', { length: 255 }), + varchar: varchar('varchar', { length: 256 }), + tinytext: tinytext('tinytext'), + mediumtext: mediumtext('mediumtext'), + longtext: longtext('longtext'), + text: text('text'), + boolean: boolean('boolean'), + dateString: date('date_string', { mode: 'string' }), + date: date('date', { mode: 'date' }), + datetime: datetime('datetime', { mode: 'date' }), + datetimeString: datetime('datetimeString', { mode: 'string' }), + time: time('time'), + year: year('year'), + timestampDate: timestamp('timestamp_date', { mode: 'date' }), + timestampString: timestamp('timestamp_string', { mode: 'string' }), + json: json('json'), + mysqlEnum: singlestoreEnum('popularity', ['unknown', 'known', 'popular']), + vectorF32: vector('vector_f32', { dimensions: 12, elementType: 'F32' }), + vectorF64: vector('vector_f64', { dimensions: 12, elementType: 'F64' }), + vectorI8: vector('vector_i8', { dimensions: 12, elementType: 'I8' }), + vectorI16: vector('vector_i16', { dimensions: 12, elementType: 'I16' }), + vectorI32: vector('vector_i32', { dimensions: 12, elementType: 'I32' }), + vectorI64: vector('vector_i64', { dimensions: 12, elementType: 'I64' }), +}); diff --git a/drizzle-seed/tests/singlestore/allDataTypesTest/singlestore_all_data_types.test.ts b/drizzle-seed/tests/singlestore/allDataTypesTest/singlestore_all_data_types.test.ts new file mode 100644 index 0000000000..2e370834f8 --- /dev/null +++ b/drizzle-seed/tests/singlestore/allDataTypesTest/singlestore_all_data_types.test.ts @@ -0,0 +1,76 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { seed } from '../../../src/index.ts'; +import { singlestoreTest as test } from '../instrumentation.ts'; + +import * as schema from './singlestoreSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE \`all_data_types\` ( + \`int\` int, + \`tinyint\` tinyint, + \`smallint\` smallint, + \`mediumint\` mediumint, + \`bigint\` bigint, + \`bigint_number\` bigint, + \`real\` real, + \`decimal\` decimal, + \`double\` double, + \`float\` float, + \`serial\` serial, + \`binary\` binary(255), + \`varbinary\` varbinary(256), + \`char\` char(255), + \`varchar\` varchar(256), + \`tinytext\` tinytext, + \`mediumtext\` mediumtext, + \`text\` text, + \`longtext\` longtext, + \`boolean\` boolean, + \`date_string\` date, + \`date\` date, + \`datetime\` datetime, + \`datetimeString\` datetime, + \`time\` time, + \`year\` year, + \`timestamp_date\` timestamp, + \`timestamp_string\` timestamp, + \`json\` json, + \`popularity\` enum('unknown','known','popular'), + \`vector_f32\` vector(12, F32), + \`vector_f64\` vector(12, F64), + \`vector_i8\` vector(12, I8), + \`vector_i16\` vector(12, I16), + \`vector_i32\` vector(12, I32), + \`vector_i64\` vector(12, I64), + shard key (\`serial\`) + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test('basic seed test', async ({ db }) => { + await seed(db, schema, { count: 1 }); + + const allDataTypes = await db.select().from(schema.allDataTypes); + + // every value in each 10 rows does not equal undefined. + const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/singlestore/compositeUniqueKey/singlestore.test.ts b/drizzle-seed/tests/singlestore/compositeUniqueKey/singlestore.test.ts new file mode 100644 index 0000000000..f2755c8c9b --- /dev/null +++ b/drizzle-seed/tests/singlestore/compositeUniqueKey/singlestore.test.ts @@ -0,0 +1,227 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { singlestoreTest as test } from '../instrumentation.ts'; +import * as schema from './singlestoreSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE \`composite_example0\` ( + \`id\` integer not null, + \`name\` varchar(256) not null, + SHARD(\`id\`,\`name\`), + CONSTRAINT \`composite_example_id_name_unique\` UNIQUE(\`id\`,\`name\`) + ); + `, + ); + + await db.execute( + sql` + CREATE ROWSTORE TABLE \`composite_example\` ( + \`id\` integer not null, + \`name\` varchar(256) not null, + SHARD(\`id\`,\`name\`), + CONSTRAINT \`composite_example_id_name_unique\` UNIQUE(\`id\`,\`name\`), + CONSTRAINT \`custom_name\` UNIQUE(\`id\`,\`name\`) + ); + `, + ); + + await db.execute( + sql` + CREATE ROWSTORE TABLE \`unique_column_in_composite_of_two_0\` ( + \`id\` integer not null unique, + \`name\` varchar(256) not null, + SHARD(\`id\`), + CONSTRAINT \`custom_name0\` UNIQUE(\`id\`,\`name\`) + ); + `, + ); + + await db.execute( + sql` + CREATE ROWSTORE TABLE \`unique_column_in_composite_of_two_1\` ( + \`id\` integer not null, + \`name\` varchar(256) not null, + SHARD(\`id\`), + CONSTRAINT \`custom_name1\` UNIQUE(\`id\`,\`name\`), + CONSTRAINT \`custom_name1_id\` UNIQUE(\`id\`) + ); + `, + ); + + await db.execute( + sql` + CREATE ROWSTORE TABLE \`unique_column_in_composite_of_three_0\` ( + \`id\` integer not null unique, + \`name\` varchar(256) not null, + \`slug\` varchar(256) not null, + SHARD(\`id\`), + CONSTRAINT \`custom_name2\` UNIQUE(\`id\`,\`name\`,\`slug\`) + ); + `, + ); + + await db.execute( + sql` + CREATE ROWSTORE TABLE \`unique_column_in_composite_of_three_1\` ( + \`id\` integer not null, + \`name\` varchar(256) not null, + \`slug\` varchar(256) not null, + SHARD(\`id\`), + CONSTRAINT \`custom_name3\` UNIQUE(\`id\`,\`name\`,\`slug\`), + CONSTRAINT \`custom_name3_id\` UNIQUE(\`id\`) + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('basic seed test', async ({ db }) => { + const currSchema0 = { composite0: schema.composite0 }; + await seed(db, currSchema0, { count: 16 }); + + const composite0 = await db.select().from(schema.composite0); + + expect(composite0.length).toBe(16); + await reset(db, currSchema0); + + // ------------------------------------------------------------ + const currSchema = { composite: schema.composite }; + await seed(db, currSchema, { count: 16 }); + + let composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + // ------------------------------------------------------------ + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + // ------------------------------------------------------------ + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + // ------------------------------------------------------------ + await seed(db, currSchema, { count: 17 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(17); + await reset(db, currSchema); +}); + +test('unique column in composite of 2 columns', async ({ db }) => { + const currSchema0 = { uniqueColumnInCompositeOfTwo0: schema.uniqueColumnInCompositeOfTwo0 }; + await seed(db, currSchema0, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo0); + + expect(composite.length).toBe(4); + await reset(db, currSchema0); + + // ------------------------------------------------------------ + const currSchema1 = { uniqueColumnInCompositeOfTwo1: schema.uniqueColumnInCompositeOfTwo1 }; + await seed(db, currSchema1, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo1); + + expect(composite.length).toBe(4); + await reset(db, currSchema1); +}); + +test('unique column in composite of 3 columns', async ({ db }) => { + const currSchema0 = { uniqueColumnInCompositeOfThree0: schema.uniqueColumnInCompositeOfThree0 }; + await seed(db, currSchema0, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfThree0); + + expect(composite.length).toBe(16); + await reset(db, currSchema0); + + // ------------------------------------------------------------ + const currSchema1 = { uniqueColumnInCompositeOfThree1: schema.uniqueColumnInCompositeOfThree1 }; + await seed(db, currSchema1, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfThree1); + + expect(composite.length).toBe(16); + await reset(db, currSchema1); +}); diff --git a/drizzle-seed/tests/singlestore/compositeUniqueKey/singlestoreSchema.ts b/drizzle-seed/tests/singlestore/compositeUniqueKey/singlestoreSchema.ts new file mode 100644 index 0000000000..0ee724f124 --- /dev/null +++ b/drizzle-seed/tests/singlestore/compositeUniqueKey/singlestoreSchema.ts @@ -0,0 +1,47 @@ +import { int, singlestoreTable, unique, varchar } from 'drizzle-orm/singlestore-core'; + +export const composite0 = singlestoreTable('composite_example0', { + id: int('id').notNull(), + name: varchar('name', { length: 256 }).notNull(), +}, (t) => [ + unique('composite_example_id_name_unique').on(t.id, t.name), +]); + +export const composite = singlestoreTable('composite_example', { + id: int('id').notNull(), + name: varchar('name', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo0 = singlestoreTable('unique_column_in_composite_of_two_0', { + id: int('id').notNull().unique(), + name: varchar('name', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name0').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo1 = singlestoreTable('unique_column_in_composite_of_two_1', { + id: int('id').notNull(), + name: varchar('name', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name1').on(t.id, t.name), + unique('custom_name1_id').on(t.id), +]); + +export const uniqueColumnInCompositeOfThree0 = singlestoreTable('unique_column_in_composite_of_three_0', { + id: int('id').notNull().unique(), + name: varchar('name', { length: 256 }).notNull(), + slug: varchar('slug', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name2').on(t.id, t.name, t.slug), +]); + +export const uniqueColumnInCompositeOfThree1 = singlestoreTable('unique_column_in_composite_of_three_1', { + id: int('id').notNull(), + name: varchar('name', { length: 256 }).notNull(), + slug: varchar('slug', { length: 256 }).notNull(), +}, (t) => [ + unique('custom_name3').on(t.id, t.name, t.slug), + unique('custom_name3_id').on(t.id), +]); diff --git a/drizzle-seed/tests/singlestore/cyclicTables/cyclicTables.test.ts b/drizzle-seed/tests/singlestore/cyclicTables/cyclicTables.test.ts new file mode 100644 index 0000000000..5b52b72c31 --- /dev/null +++ b/drizzle-seed/tests/singlestore/cyclicTables/cyclicTables.test.ts @@ -0,0 +1,131 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { singlestoreTest as test } from '../instrumentation.ts'; +import * as schema from './singlestoreSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + create table model + ( + id int not null + primary key, + name varchar(256) not null, + defaultImageId int null + ); + `, + ); + + await db.execute( + sql` + create table model_image + ( + id int not null + primary key, + url varchar(256) not null, + caption varchar(256) null, + modelId int not null + ); + `, + ); + + // 3 tables case + await db.execute( + sql` + create table model1 + ( + id int not null + primary key, + name varchar(256) not null, + userId int null, + defaultImageId int null + ); + `, + ); + + await db.execute( + sql` + create table model_image1 + ( + id int not null + primary key, + url varchar(256) not null, + caption varchar(256) null, + modelId int not null + ); + `, + ); + + await db.execute( + sql` + create table user + ( + id int not null + primary key, + name text null, + invitedBy int null, + imageId int not null + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +test('2 cyclic tables test', async ({ db }) => { + await seed(db, { + modelTable: schema.modelTable, + modelImageTable: schema.modelImageTable, + }); + + const modelTable = await db.select().from(schema.modelTable); + const modelImageTable = await db.select().from(schema.modelImageTable); + + expect(modelTable.length).toBe(10); + let predicate = modelTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(modelImageTable.length).toBe(10); + predicate = modelImageTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); + +test('3 cyclic tables test', async ({ db }) => { + await seed(db, { + modelTable1: schema.modelTable1, + modelImageTable1: schema.modelImageTable1, + user: schema.user, + }); + + const modelTable1 = await db.select().from(schema.modelTable1); + const modelImageTable1 = await db.select().from(schema.modelImageTable1); + const user = await db.select().from(schema.user); + + expect(modelTable1.length).toBe(10); + let predicate = modelTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(modelImageTable1.length).toBe(10); + predicate = modelImageTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); + + expect(user.length).toBe(10); + predicate = user.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); + expect(predicate).toBe(true); +}); diff --git a/drizzle-seed/tests/singlestore/cyclicTables/singlestoreSchema.ts b/drizzle-seed/tests/singlestore/cyclicTables/singlestoreSchema.ts new file mode 100644 index 0000000000..62b55a30d6 --- /dev/null +++ b/drizzle-seed/tests/singlestore/cyclicTables/singlestoreSchema.ts @@ -0,0 +1,94 @@ +import { relations } from 'drizzle-orm/_relations'; +import { int, serial, singlestoreTable, text, varchar } from 'drizzle-orm/singlestore-core'; + +// MODEL +export const modelTable = singlestoreTable( + 'model', + { + id: serial().primaryKey(), + name: varchar({ length: 256 }).notNull(), + defaultImageId: int(), + }, +); + +export const modelRelations = relations(modelTable, ({ one, many }) => ({ + images: many(modelImageTable), + defaultImage: one(modelImageTable, { + fields: [modelTable.defaultImageId], + references: [modelImageTable.id], + }), +})); + +// MODEL IMAGE +export const modelImageTable = singlestoreTable( + 'model_image', + { + id: serial().primaryKey(), + url: varchar({ length: 256 }).notNull(), + caption: varchar({ length: 256 }), + modelId: int().notNull(), + }, +); + +export const modelImageRelations = relations(modelImageTable, ({ one }) => ({ + model: one(modelTable, { + fields: [modelImageTable.modelId], + references: [modelTable.id], + }), +})); + +// 3 tables case +export const modelTable1 = singlestoreTable( + 'model1', + { + id: serial().primaryKey(), + name: varchar({ length: 256 }).notNull(), + userId: int(), + defaultImageId: int(), + }, +); + +export const modelTable1Relations = relations(modelTable1, ({ one }) => ({ + user: one(user, { + fields: [modelTable1.userId], + references: [user.id], + }), +})); + +export const modelImageTable1 = singlestoreTable( + 'model_image1', + { + id: serial().primaryKey(), + url: varchar({ length: 256 }).notNull(), + caption: varchar({ length: 256 }), + modelId: int().notNull(), + }, +); + +export const modelImageTable1Relations = relations(modelImageTable1, ({ one }) => ({ + user: one(modelTable1, { + fields: [modelImageTable1.modelId], + references: [modelTable1.id], + }), +})); + +export const user = singlestoreTable( + 'user', + { + id: serial().primaryKey(), + name: text(), + invitedBy: int(), + imageId: int().notNull(), + }, +); + +export const userRelations = relations(user, ({ one }) => ({ + intvitedByUser: one(user, { + fields: [user.invitedBy], + references: [user.id], + }), + image: one(modelImageTable1, { + fields: [user.imageId], + references: [modelImageTable1.id], + }), +})); diff --git a/drizzle-seed/tests/singlestore/instrumentation.ts b/drizzle-seed/tests/singlestore/instrumentation.ts new file mode 100644 index 0000000000..cae0999471 --- /dev/null +++ b/drizzle-seed/tests/singlestore/instrumentation.ts @@ -0,0 +1,85 @@ +import type { MySqlDatabase } from 'drizzle-orm/mysql-core'; +import type { AnyMySql2Connection } from 'drizzle-orm/mysql2'; +import { drizzle } from 'drizzle-orm/singlestore'; +import { createConnection } from 'mysql2/promise'; +import { test as base } from 'vitest'; + +const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, +) => { + const { diff } = await import('../../../drizzle-kit/tests/singlestore/mocks' as string); + + const res = await diff({}, schema, []); + for (const s of res.sqlStatements) { + await query(s, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +const prepareTest = () => { + return base.extend< + { + client: { + client: AnyMySql2Connection; + query: (sql: string, params: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + db: MySqlDatabase; + push: (schema: any) => Promise; + } + >({ + client: [ + // oxlint-disable-next-line + async ({}, use) => { + const envurl = process.env['SINGLESTORE_CONNECTION_STRING']; + if (!envurl) throw new Error('No singlestore url provided'); + const client = await createConnection({ + uri: envurl, + supportBigNumbers: true, + multipleStatements: true, + }); + await client.connect(); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res[0] as any[]; + }; + const batch = async (statements: string[]) => { + return client.query(statements.map((x) => x.endsWith(';') ? x : `${x};`).join('\n')).then(() => '' as any); + }; + + await batch(['drop database if exists drizzle;', 'create database drizzle;', 'use drizzle;']); + + await use({ client, query, batch }); + await client.end(); + client.destroy(); + }, + { scope: 'worker' }, + ], + db: [ + async ({ client }, use) => { + const db = drizzle({ client: client.client as AnyMySql2Connection }); + await use(db as any); + }, + { scope: 'worker' }, + ], + push: [ + async ({ client }, use) => { + const { query } = client; + const push = ( + schema: any, + ) => _push(query, schema); + + await use(push); + }, + { scope: 'worker' }, + ], + }); +}; + +export const singlestoreTest = prepareTest(); +export type Test = ReturnType; diff --git a/drizzle-seed/tests/singlestore/softRelationsTest/singlestoreSchema.ts b/drizzle-seed/tests/singlestore/softRelationsTest/singlestoreSchema.ts new file mode 100644 index 0000000000..284e4a5b04 --- /dev/null +++ b/drizzle-seed/tests/singlestore/softRelationsTest/singlestoreSchema.ts @@ -0,0 +1,128 @@ +import { relations } from 'drizzle-orm/_relations'; +import { float, int, singlestoreTable, text, timestamp, varchar } from 'drizzle-orm/singlestore-core'; + +export const customers = singlestoreTable('customer', { + id: varchar('id', { length: 256 }).primaryKey(), + companyName: text('company_name').notNull(), + contactName: text('contact_name').notNull(), + contactTitle: text('contact_title').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + postalCode: text('postal_code'), + region: text('region'), + country: text('country').notNull(), + phone: text('phone').notNull(), + fax: text('fax'), +}); + +export const employees = singlestoreTable( + 'employee', + { + id: int('id').primaryKey(), + lastName: text('last_name').notNull(), + firstName: text('first_name'), + title: text('title').notNull(), + titleOfCourtesy: text('title_of_courtesy').notNull(), + birthDate: timestamp('birth_date').notNull(), + hireDate: timestamp('hire_date').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + postalCode: text('postal_code').notNull(), + country: text('country').notNull(), + homePhone: text('home_phone').notNull(), + extension: int('extension').notNull(), + notes: text('notes').notNull(), + reportsTo: int('reports_to'), + photoPath: text('photo_path'), + }, +); + +export const employeesRelations = relations(employees, ({ one }) => ({ + employee: one(employees, { + fields: [employees.reportsTo], + references: [employees.id], + }), +})); + +export const orders = singlestoreTable('order', { + id: int('id').primaryKey(), + orderDate: timestamp('order_date').notNull(), + requiredDate: timestamp('required_date').notNull(), + shippedDate: timestamp('shipped_date'), + shipVia: int('ship_via').notNull(), + freight: float('freight').notNull(), + shipName: text('ship_name').notNull(), + shipCity: text('ship_city').notNull(), + shipRegion: text('ship_region'), + shipPostalCode: text('ship_postal_code'), + shipCountry: text('ship_country').notNull(), + + customerId: varchar('customer_id', { length: 256 }).notNull(), + + employeeId: int('employee_id').notNull(), +}); + +export const ordersRelations = relations(orders, ({ one }) => ({ + customer: one(customers, { + fields: [orders.customerId], + references: [customers.id], + }), + employee: one(employees, { + fields: [orders.employeeId], + references: [employees.id], + }), +})); + +export const suppliers = singlestoreTable('supplier', { + id: int('id').primaryKey(), + companyName: text('company_name').notNull(), + contactName: text('contact_name').notNull(), + contactTitle: text('contact_title').notNull(), + address: text('address').notNull(), + city: text('city').notNull(), + region: text('region'), + postalCode: text('postal_code').notNull(), + country: text('country').notNull(), + phone: text('phone').notNull(), +}); + +export const products = singlestoreTable('product', { + id: int('id').primaryKey(), + name: text('name').notNull(), + quantityPerUnit: text('quantity_per_unit').notNull(), + unitPrice: float('unit_price').notNull(), + unitsInStock: int('units_in_stock').notNull(), + unitsOnOrder: int('units_on_order').notNull(), + reorderLevel: int('reorder_level').notNull(), + discontinued: int('discontinued').notNull(), + + supplierId: int('supplier_id').notNull(), +}); + +export const productsRelations = relations(products, ({ one }) => ({ + supplier: one(suppliers, { + fields: [products.supplierId], + references: [suppliers.id], + }), +})); + +export const details = singlestoreTable('order_detail', { + unitPrice: float('unit_price').notNull(), + quantity: int('quantity').notNull(), + discount: float('discount').notNull(), + + orderId: int('order_id').notNull(), + + productId: int('product_id').notNull(), +}); + +export const detailsRelations = relations(details, ({ one }) => ({ + order: one(orders, { + fields: [details.orderId], + references: [orders.id], + }), + product: one(products, { + fields: [details.productId], + references: [products.id], + }), +})); diff --git a/drizzle-seed/tests/singlestore/softRelationsTest/softRelations.test.ts b/drizzle-seed/tests/singlestore/softRelationsTest/softRelations.test.ts new file mode 100644 index 0000000000..85673ab4ed --- /dev/null +++ b/drizzle-seed/tests/singlestore/softRelationsTest/softRelations.test.ts @@ -0,0 +1,341 @@ +import { sql } from 'drizzle-orm'; +import { expect } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import { singlestoreTest as test } from '../instrumentation.ts'; +import * as schema from './singlestoreSchema.ts'; + +let firstTime = true; +let resolveFunc: (val: any) => void; +const promise = new Promise((resolve) => { + resolveFunc = resolve; +}); +test.beforeEach(async ({ db }) => { + if (firstTime) { + firstTime = false; + + await db.execute( + sql` + CREATE TABLE \`customer\` ( + \`id\` varchar(256) NOT NULL, + \`company_name\` text NOT NULL, + \`contact_name\` text NOT NULL, + \`contact_title\` text NOT NULL, + \`address\` text NOT NULL, + \`city\` text NOT NULL, + \`postal_code\` text, + \`region\` text, + \`country\` text NOT NULL, + \`phone\` text NOT NULL, + \`fax\` text, + CONSTRAINT \`customer_id\` PRIMARY KEY(\`id\`) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE \`order_detail\` ( + \`unit_price\` float NOT NULL, + \`quantity\` int NOT NULL, + \`discount\` float NOT NULL, + \`order_id\` int NOT NULL, + \`product_id\` int NOT NULL + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE \`employee\` ( + \`id\` int NOT NULL, + \`last_name\` text NOT NULL, + \`first_name\` text, + \`title\` text NOT NULL, + \`title_of_courtesy\` text NOT NULL, + \`birth_date\` timestamp NOT NULL, + \`hire_date\` timestamp NOT NULL, + \`address\` text NOT NULL, + \`city\` text NOT NULL, + \`postal_code\` text NOT NULL, + \`country\` text NOT NULL, + \`home_phone\` text NOT NULL, + \`extension\` int NOT NULL, + \`notes\` text NOT NULL, + \`reports_to\` int, + \`photo_path\` text, + CONSTRAINT \`employee_id\` PRIMARY KEY(\`id\`) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE \`order\` ( + \`id\` int NOT NULL, + \`order_date\` timestamp NOT NULL, + \`required_date\` timestamp NOT NULL, + \`shipped_date\` timestamp, + \`ship_via\` int NOT NULL, + \`freight\` float NOT NULL, + \`ship_name\` text NOT NULL, + \`ship_city\` text NOT NULL, + \`ship_region\` text, + \`ship_postal_code\` text, + \`ship_country\` text NOT NULL, + \`customer_id\` varchar(256) NOT NULL, + \`employee_id\` int NOT NULL, + CONSTRAINT \`order_id\` PRIMARY KEY(\`id\`) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE \`product\` ( + \`id\` int NOT NULL, + \`name\` text NOT NULL, + \`quantity_per_unit\` text NOT NULL, + \`unit_price\` float NOT NULL, + \`units_in_stock\` int NOT NULL, + \`units_on_order\` int NOT NULL, + \`reorder_level\` int NOT NULL, + \`discontinued\` int NOT NULL, + \`supplier_id\` int NOT NULL, + CONSTRAINT \`product_id\` PRIMARY KEY(\`id\`) + ); + `, + ); + + await db.execute( + sql` + CREATE TABLE \`supplier\` ( + \`id\` int NOT NULL, + \`company_name\` text NOT NULL, + \`contact_name\` text NOT NULL, + \`contact_title\` text NOT NULL, + \`address\` text NOT NULL, + \`city\` text NOT NULL, + \`region\` text, + \`postal_code\` text NOT NULL, + \`country\` text NOT NULL, + \`phone\` text NOT NULL, + CONSTRAINT \`supplier_id\` PRIMARY KEY(\`id\`) + ); + `, + ); + + resolveFunc(''); + } + + await promise; +}); + +test.afterEach(async ({ db }) => { + await reset(db, schema); +}); + +const checkSoftRelations = ( + customers: (typeof schema.customers.$inferSelect)[], + details: (typeof schema.details.$inferSelect)[], + employees: (typeof schema.employees.$inferSelect)[], + orders: (typeof schema.orders.$inferSelect)[], + products: (typeof schema.products.$inferSelect)[], + suppliers: (typeof schema.suppliers.$inferSelect)[], +) => { + // employees soft relations check + const employeeIds = new Set(employees.map((employee) => employee.id)); + const employeesPredicate = employees.every((employee) => + employee.reportsTo !== null && employeeIds.has(employee.reportsTo) + ); + expect(employeesPredicate).toBe(true); + + // orders soft relations check + const customerIds = new Set(customers.map((customer) => customer.id)); + const ordersPredicate1 = orders.every((order) => order.customerId !== null && customerIds.has(order.customerId)); + expect(ordersPredicate1).toBe(true); + + const ordersPredicate2 = orders.every((order) => order.employeeId !== null && employeeIds.has(order.employeeId)); + expect(ordersPredicate2).toBe(true); + + // product soft relations check + const supplierIds = new Set(suppliers.map((supplier) => supplier.id)); + const productsPredicate = products.every((product) => + product.supplierId !== null && supplierIds.has(product.supplierId) + ); + expect(productsPredicate).toBe(true); + + // details soft relations check + const orderIds = new Set(orders.map((order) => order.id)); + const detailsPredicate1 = details.every((detail) => detail.orderId !== null && orderIds.has(detail.orderId)); + expect(detailsPredicate1).toBe(true); + + const productIds = new Set(products.map((product) => product.id)); + const detailsPredicate2 = details.every((detail) => detail.productId !== null && productIds.has(detail.productId)); + expect(detailsPredicate2).toBe(true); +}; + +test('basic seed, soft relations test', async ({ db }) => { + await seed(db, schema); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(10); + expect(details.length).toBe(10); + expect(employees.length).toBe(10); + expect(orders.length).toBe(10); + expect(products.length).toBe(10); + expect(suppliers.length).toBe(10); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test('seed with options.count:11, soft relations test', async ({ db }) => { + await seed(db, schema, { count: 11 }); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(11); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(11); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test('redefine(refine) customers count, soft relations test', async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 12, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(12); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(11); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test('redefine(refine) all tables count, soft relations test', async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 12, + }, + details: { + count: 13, + }, + employees: { + count: 14, + }, + orders: { + count: 15, + }, + products: { + count: 16, + }, + suppliers: { + count: 17, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(12); + expect(details.length).toBe(13); + expect(employees.length).toBe(14); + expect(orders.length).toBe(15); + expect(products.length).toBe(16); + expect(suppliers.length).toBe(17); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test("redefine(refine) orders count using 'with' in customers, soft relations test", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 13, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(11); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); + +test("sequential using of 'with', soft relations test", async ({ db }) => { + await seed(db, schema, { count: 11 }).refine(() => ({ + customers: { + count: 4, + with: { + orders: 2, + }, + }, + orders: { + count: 12, + with: { + details: 3, + }, + }, + })); + + const customers = await db.select().from(schema.customers); + const details = await db.select().from(schema.details); + const employees = await db.select().from(schema.employees); + const orders = await db.select().from(schema.orders); + const products = await db.select().from(schema.products); + const suppliers = await db.select().from(schema.suppliers); + + expect(customers.length).toBe(4); + expect(details.length).toBe(24); + expect(employees.length).toBe(11); + expect(orders.length).toBe(8); + expect(products.length).toBe(11); + expect(suppliers.length).toBe(11); + + checkSoftRelations(customers, details, employees, orders, products, suppliers); +}); diff --git a/drizzle-seed/tests/singlestore/utils.ts b/drizzle-seed/tests/singlestore/utils.ts new file mode 100644 index 0000000000..719c4cce07 --- /dev/null +++ b/drizzle-seed/tests/singlestore/utils.ts @@ -0,0 +1,32 @@ +import Docker, { type Container } from 'dockerode'; +import getPort from 'get-port'; +import { v4 as uuid } from 'uuid'; + +export async function createDockerDB(): Promise<{ url: string; container: Container }> { + const docker = new Docker(); + const port = await getPort({ port: 3306 }); + const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + // eslint-disable-next-line @typescript-eslint/no-unsafe-argument + docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) + ); + + const singleStoreContainer = await docker.createContainer({ + Image: image, + Env: ['ROOT_PASSWORD=singlestore'], + name: `drizzle-seed-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await singleStoreContainer.start(); + await new Promise((resolve) => setTimeout(resolve, 4000)); + + return { url: `singlestore://root:singlestore@localhost:${port}/`, container: singleStoreContainer }; +} diff --git a/drizzle-seed/tests/sqlite/allDataTypesTest/sqlite_all_data_types.test.ts b/drizzle-seed/tests/sqlite/allDataTypesTest/sqlite_all_data_types.test.ts index 8282f921d4..aa1241dc03 100644 --- a/drizzle-seed/tests/sqlite/allDataTypesTest/sqlite_all_data_types.test.ts +++ b/drizzle-seed/tests/sqlite/allDataTypesTest/sqlite_all_data_types.test.ts @@ -12,7 +12,7 @@ let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); - db = drizzle(client); + db = drizzle({ client }); db.run( sql.raw(` diff --git a/drizzle-seed/tests/sqlite/compositeUniqueKey/sqlite.test.ts b/drizzle-seed/tests/sqlite/compositeUniqueKey/sqlite.test.ts new file mode 100644 index 0000000000..b4aee225ce --- /dev/null +++ b/drizzle-seed/tests/sqlite/compositeUniqueKey/sqlite.test.ts @@ -0,0 +1,197 @@ +import BetterSqlite3 from 'better-sqlite3'; +import { sql } from 'drizzle-orm'; +import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; +import { drizzle } from 'drizzle-orm/better-sqlite3'; +import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; +import { reset, seed } from '../../../src/index.ts'; +import * as schema from './sqliteSchema.ts'; + +let client: BetterSqlite3.Database; +let db: BetterSQLite3Database; + +beforeAll(async () => { + client = new BetterSqlite3(':memory:'); + + db = drizzle({ client }); + + db.run( + sql` + CREATE TABLE IF NOT EXISTS \`composite_example\` ( + \`id\` integer not null, + \`name\` text not null, + CONSTRAINT \`composite_example_id_name_unique\` UNIQUE(\`id\`,\`name\`), + CONSTRAINT \`custom_name\` UNIQUE(\`id\`,\`name\`) + ); + `, + ); + + db.run( + sql` + CREATE TABLE IF NOT EXISTS \`unique_column_in_composite_of_two_0\` ( + \`id\` integer not null unique, + \`name\` text not null, + CONSTRAINT \`custom_name0\` UNIQUE(\`id\`,\`name\`) + ); + `, + ); + + db.run( + sql` + CREATE TABLE IF NOT EXISTS \`unique_column_in_composite_of_two_1\` ( + \`id\` integer not null, + \`name\` text not null, + CONSTRAINT \`custom_name1\` UNIQUE(\`id\`,\`name\`), + CONSTRAINT \`custom_name1_id\` UNIQUE(\`id\`) + ); + `, + ); + + db.run( + sql` + CREATE TABLE IF NOT EXISTS \`unique_column_in_composite_of_three_0\` ( + \`id\` integer not null unique, + \`name\` text not null, + \`slug\` text not null, + CONSTRAINT \`custom_name2\` UNIQUE(\`id\`,\`name\`,\`slug\`) + ); + `, + ); + + db.run( + sql` + CREATE TABLE IF NOT EXISTS \`unique_column_in_composite_of_three_1\` ( + \`id\` integer not null, + \`name\` text not null, + \`slug\` text not null, + CONSTRAINT \`custom_name3\` UNIQUE(\`id\`,\`name\`,\`slug\`), + CONSTRAINT \`custom_name3_id\` UNIQUE(\`id\`) + ); + `, + ); +}); + +afterAll(async () => { + client.close(); +}); + +afterEach(async () => { + await reset(db, schema); +}); + +test('basic seed test', async () => { + const currSchema = { composite: schema.composite }; + await seed(db, currSchema, { count: 16 }); + + let composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 16 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(16); + await reset(db, currSchema); + + await seed(db, currSchema, { count: 17 }).refine((funcs) => ({ + composite: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.composite); + + expect(composite.length).toBe(17); + await reset(db, currSchema); +}); + +test('unique column in composite of 2 columns', async () => { + const currSchema0 = { uniqueColumnInCompositeOfTwo0: schema.uniqueColumnInCompositeOfTwo0 }; + await seed(db, currSchema0, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo0); + + expect(composite.length).toBe(4); + await reset(db, currSchema0); + + const currSchema1 = { uniqueColumnInCompositeOfTwo1: schema.uniqueColumnInCompositeOfTwo1 }; + await seed(db, currSchema1, { count: 4 }).refine((funcs) => ({ + uniqueColumnInCompositeOfTwo1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3] }), + name: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfTwo1); + + expect(composite.length).toBe(4); + await reset(db, currSchema1); +}); + +test('unique column in composite of 3 columns', async () => { + const currSchema0 = { uniqueColumnInCompositeOfThree0: schema.uniqueColumnInCompositeOfThree0 }; + await seed(db, currSchema0, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree0: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + let composite = await db.select().from(schema.uniqueColumnInCompositeOfThree0); + + expect(composite.length).toBe(16); + await reset(db, currSchema0); + + const currSchema1 = { uniqueColumnInCompositeOfThree1: schema.uniqueColumnInCompositeOfThree1 }; + await seed(db, currSchema1, { count: 16 }).refine((funcs) => ({ + uniqueColumnInCompositeOfThree1: { + columns: { + id: funcs.valuesFromArray({ values: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] }), + name: funcs.valuesFromArray({ values: ['a', 'b'] }), + slug: funcs.valuesFromArray({ values: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i'] }), + }, + }, + })); + + composite = await db.select().from(schema.uniqueColumnInCompositeOfThree1); + + expect(composite.length).toBe(16); + await reset(db, currSchema1); +}); diff --git a/drizzle-seed/tests/sqlite/compositeUniqueKey/sqliteSchema.ts b/drizzle-seed/tests/sqlite/compositeUniqueKey/sqliteSchema.ts new file mode 100644 index 0000000000..549c987550 --- /dev/null +++ b/drizzle-seed/tests/sqlite/compositeUniqueKey/sqliteSchema.ts @@ -0,0 +1,40 @@ +import { integer, sqliteTable, text, unique } from 'drizzle-orm/sqlite-core'; + +export const composite = sqliteTable('composite_example', { + id: integer('id').notNull(), + name: text('name').notNull(), +}, (t) => [ + unique('custom_name').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo0 = sqliteTable('unique_column_in_composite_of_two_0', { + id: integer('id').notNull().unique(), + name: text('name').notNull(), +}, (t) => [ + unique('custom_name0').on(t.id, t.name), +]); + +export const uniqueColumnInCompositeOfTwo1 = sqliteTable('unique_column_in_composite_of_two_1', { + id: integer('id').notNull(), + name: text('name').notNull(), +}, (t) => [ + unique('custom_name1').on(t.id, t.name), + unique('custom_name1_id').on(t.id), +]); + +export const uniqueColumnInCompositeOfThree0 = sqliteTable('unique_column_in_composite_of_three_0', { + id: integer('id').notNull().unique(), + name: text('name').notNull(), + slug: text('slug').notNull(), +}, (t) => [ + unique('custom_name2').on(t.id, t.name, t.slug), +]); + +export const uniqueColumnInCompositeOfThree1 = sqliteTable('unique_column_in_composite_of_three_1', { + id: integer('id').notNull(), + name: text('name').notNull(), + slug: text('slug').notNull(), +}, (t) => [ + unique('custom_name3').on(t.id, t.name, t.slug), + unique('custom_name3_id').on(t.id), +]); diff --git a/drizzle-seed/tests/sqlite/cyclicTables/cyclicTables.test.ts b/drizzle-seed/tests/sqlite/cyclicTables/cyclicTables.test.ts index d404072ebf..18af62b852 100644 --- a/drizzle-seed/tests/sqlite/cyclicTables/cyclicTables.test.ts +++ b/drizzle-seed/tests/sqlite/cyclicTables/cyclicTables.test.ts @@ -12,7 +12,7 @@ let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); - db = drizzle(client); + db = drizzle({ client }); db.run( sql` diff --git a/drizzle-seed/tests/sqlite/softRelationsTest/softRelations.test.ts b/drizzle-seed/tests/sqlite/softRelationsTest/softRelations.test.ts index 124ac8ee15..23b5148087 100644 --- a/drizzle-seed/tests/sqlite/softRelationsTest/softRelations.test.ts +++ b/drizzle-seed/tests/sqlite/softRelationsTest/softRelations.test.ts @@ -12,7 +12,7 @@ let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); - db = drizzle(client); + db = drizzle({ client }); db.run( sql.raw(` diff --git a/drizzle-seed/tests/sqlite/sqlite.test.ts b/drizzle-seed/tests/sqlite/sqlite.test.ts index 51322bbf46..3bdfb953d0 100644 --- a/drizzle-seed/tests/sqlite/sqlite.test.ts +++ b/drizzle-seed/tests/sqlite/sqlite.test.ts @@ -13,7 +13,7 @@ let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); - db = drizzle(client); + db = drizzle({ client }); db.run( sql.raw(` diff --git a/drizzle-seed/tests/sqlite/sqliteSchema.ts b/drizzle-seed/tests/sqlite/sqliteSchema.ts index fe508321b5..ea88228c41 100644 --- a/drizzle-seed/tests/sqlite/sqliteSchema.ts +++ b/drizzle-seed/tests/sqlite/sqliteSchema.ts @@ -34,12 +34,10 @@ export const employees = sqliteTable( reportsTo: integer('reports_to'), photoPath: text('photo_path'), }, - (table) => ({ - reportsToFk: foreignKey(() => ({ - columns: [table.reportsTo], - foreignColumns: [table.id], - })), - }), + (table) => [foreignKey({ + columns: [table.reportsTo], + foreignColumns: [table.id], + })], ); export const orders = sqliteTable('order', { @@ -113,12 +111,10 @@ export const users = sqliteTable( name: text(), invitedBy: integer(), }, - (table) => ({ - reportsToFk: foreignKey(() => ({ - columns: [table.invitedBy], - foreignColumns: [table.id], - })), - }), + (table) => [foreignKey({ + columns: [table.invitedBy], + foreignColumns: [table.id], + })], ); export const posts = sqliteTable( diff --git a/drizzle-seed/tsconfig.json b/drizzle-seed/tsconfig.json index f32902e108..42e23b1642 100644 --- a/drizzle-seed/tsconfig.json +++ b/drizzle-seed/tsconfig.json @@ -44,5 +44,5 @@ } }, "exclude": ["**/dist", "src/dev"], - "include": ["src", "*.ts", "tests"] + "include": ["src", "tests", "type-tests"] } diff --git a/drizzle-seed/type-tests/cockroach.ts b/drizzle-seed/type-tests/cockroach.ts new file mode 100644 index 0000000000..caf60929ad --- /dev/null +++ b/drizzle-seed/type-tests/cockroach.ts @@ -0,0 +1,36 @@ +import { drizzle } from 'drizzle-orm/cockroach'; +import type { CockroachColumn } from 'drizzle-orm/cockroach-core'; +import { cockroachTable, int4, text } from 'drizzle-orm/cockroach-core'; +import { reset, seed } from '../src/index.ts'; + +const cockroachUsers = cockroachTable('users', { + id: int4().primaryKey(), + name: text(), + inviteId: int4('invite_id').references((): CockroachColumn => cockroachUsers.id), +}); + +{ + const db0 = drizzle('', { schema: { users: cockroachUsers } }); + + await seed(db0, { users: cockroachUsers }); + await seed(db0, { users: cockroachUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db0, { users: cockroachUsers }); + + const db1 = drizzle(''); + + await seed(db1, { users: cockroachUsers }); + await seed(db1, { users: cockroachUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db1, { users: cockroachUsers }); +} diff --git a/drizzle-seed/type-tests/mssql.ts b/drizzle-seed/type-tests/mssql.ts new file mode 100644 index 0000000000..a34cc56f1e --- /dev/null +++ b/drizzle-seed/type-tests/mssql.ts @@ -0,0 +1,36 @@ +import type { MsSqlColumn } from 'drizzle-orm/mssql-core'; +import { int, mssqlTable, text } from 'drizzle-orm/mssql-core'; +import { drizzle } from 'drizzle-orm/node-mssql'; +import { reset, seed } from '../src/index.ts'; + +const mssqlUsers = mssqlTable('users', { + id: int().primaryKey(), + name: text(), + inviteId: int('invite_id').references((): MsSqlColumn => mssqlUsers.id), +}); + +{ + const db0 = drizzle('', { schema: { users: mssqlUsers } }); + + await seed(db0, { users: mssqlUsers }); + await seed(db0, { users: mssqlUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db0, { users: mssqlUsers }); + + const db1 = drizzle(''); + + await seed(db1, { users: mssqlUsers }); + await seed(db1, { users: mssqlUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db1, { users: mssqlUsers }); +} diff --git a/drizzle-seed/type-tests/mysql.ts b/drizzle-seed/type-tests/mysql.ts index ffd42726de..7ac4277342 100644 --- a/drizzle-seed/type-tests/mysql.ts +++ b/drizzle-seed/type-tests/mysql.ts @@ -1,6 +1,7 @@ import type { MySqlColumn } from 'drizzle-orm/mysql-core'; import { int, mysqlTable, text } from 'drizzle-orm/mysql-core'; import { drizzle as mysql2Drizzle } from 'drizzle-orm/mysql2'; +import { drizzle as planetscaleDrizzle } from 'drizzle-orm/planetscale-serverless'; import { reset, seed } from '../src/index.ts'; const mysqlUsers = mysqlTable('users', { @@ -9,9 +10,68 @@ const mysqlUsers = mysqlTable('users', { inviteId: int('invite_id').references((): MySqlColumn => mysqlUsers.id), }); +// mysql2 { - const db = mysql2Drizzle(''); + const db0 = mysql2Drizzle('', { schema: { users: mysqlUsers }, mode: 'default' }); - await seed(db, { users: mysqlUsers }); - await reset(db, { users: mysqlUsers }); + await seed(db0, { users: mysqlUsers }); + await seed(db0, { users: mysqlUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db0, { users: mysqlUsers }); + + const db1 = mysql2Drizzle('', { schema: { users: mysqlUsers }, mode: 'planetscale' }); + + await seed(db1, { users: mysqlUsers }); + await seed(db1, { users: mysqlUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db1, { users: mysqlUsers }); + + const db2 = mysql2Drizzle(''); + + await seed(db2, { users: mysqlUsers }); + await seed(db2, { users: mysqlUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db2, { users: mysqlUsers }); +} + +// planetscale +{ + const db0 = planetscaleDrizzle('', { schema: { users: mysqlUsers } }); + + await seed(db0, { users: mysqlUsers }); + await seed(db0, { users: mysqlUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db0, { users: mysqlUsers }); + + const db1 = planetscaleDrizzle(''); + + await seed(db1, { users: mysqlUsers }); + await seed(db1, { users: mysqlUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db1, { users: mysqlUsers }); } diff --git a/drizzle-seed/type-tests/pg.ts b/drizzle-seed/type-tests/pg.ts index 3bec9989ff..64489c4019 100644 --- a/drizzle-seed/type-tests/pg.ts +++ b/drizzle-seed/type-tests/pg.ts @@ -11,38 +11,83 @@ const pgUsers = pgTable('users', { inviteId: integer('invite_id').references((): PgColumn => pgUsers.id), }); +// node-postgres { const db0 = nodePostgresDrizzle('', { schema: { users: pgUsers } }); await seed(db0, { users: pgUsers }); + await seed(db0, { users: pgUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); await reset(db0, { users: pgUsers }); const db1 = nodePostgresDrizzle(''); await seed(db1, { users: pgUsers }); + await seed(db1, { users: pgUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); await reset(db1, { users: pgUsers }); } +// pglite { const db0 = pgliteDrizzle('', { schema: { users: pgUsers } }); await seed(db0, { users: pgUsers }); + await seed(db0, { users: pgUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); await reset(db0, { users: pgUsers }); const db1 = pgliteDrizzle(''); await seed(db1, { users: pgUsers }); + await seed(db1, { users: pgUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); await reset(db1, { users: pgUsers }); } +// postgres-js { const db0 = postgresJsDrizzle('', { schema: { users: pgUsers } }); await seed(db0, { users: pgUsers }); + await seed(db0, { users: pgUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); await reset(db0, { users: pgUsers }); const db1 = postgresJsDrizzle(''); await seed(db1, { users: pgUsers }); + await seed(db1, { users: pgUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); await reset(db1, { users: pgUsers }); } diff --git a/drizzle-seed/type-tests/singlestore.ts b/drizzle-seed/type-tests/singlestore.ts new file mode 100644 index 0000000000..0085c8adfa --- /dev/null +++ b/drizzle-seed/type-tests/singlestore.ts @@ -0,0 +1,35 @@ +import { drizzle } from 'drizzle-orm/singlestore'; +import { int, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; +import { reset, seed } from '../src/index.ts'; + +const singlestoreUsers = singlestoreTable('users', { + id: int().primaryKey(), + name: text(), + inviteId: int('invite_id'), +}); + +{ + const db0 = drizzle('', { schema: { users: singlestoreUsers } }); + + await seed(db0, { users: singlestoreUsers }); + await seed(db0, { users: singlestoreUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db0, { users: singlestoreUsers }); + + const db1 = drizzle(''); + + await seed(db1, { users: singlestoreUsers }); + await seed(db1, { users: singlestoreUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db1, { users: singlestoreUsers }); +} diff --git a/drizzle-seed/type-tests/sqlite.ts b/drizzle-seed/type-tests/sqlite.ts index c9fa3d23bc..3228609c5c 100644 --- a/drizzle-seed/type-tests/sqlite.ts +++ b/drizzle-seed/type-tests/sqlite.ts @@ -1,17 +1,63 @@ import { drizzle as betterSqlite3Drizzle } from 'drizzle-orm/better-sqlite3'; +import { drizzle as libsqlDrizzle } from 'drizzle-orm/libsql'; import type { SQLiteColumn } from 'drizzle-orm/sqlite-core'; import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { reset, seed } from '../src/index.ts'; -const mysqlUsers = sqliteTable('users', { +const sqliteUsers = sqliteTable('users', { id: int().primaryKey(), name: text(), - inviteId: int('invite_id').references((): SQLiteColumn => mysqlUsers.id), + inviteId: int('invite_id').references((): SQLiteColumn => sqliteUsers.id), }); { - const db = betterSqlite3Drizzle(''); + const db0 = betterSqlite3Drizzle('', { schema: { users: sqliteUsers } }); - await seed(db, { users: mysqlUsers }); - await reset(db, { users: mysqlUsers }); + await seed(db0, { users: sqliteUsers }); + await seed(db0, { users: sqliteUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db0, { users: sqliteUsers }); + + const db1 = betterSqlite3Drizzle(''); + + await seed(db1, { users: sqliteUsers }); + await seed(db1, { users: sqliteUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db1, { users: sqliteUsers }); +} + +{ + const db0 = libsqlDrizzle('', { schema: { users: sqliteUsers } }); + + await seed(db0, { users: sqliteUsers }); + await seed(db0, { users: sqliteUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db0, { users: sqliteUsers }); + + const db1 = libsqlDrizzle(''); + + await seed(db1, { users: sqliteUsers }); + await seed(db1, { users: sqliteUsers }).refine((funcs) => ({ + users: { + columns: { + id: funcs.intPrimaryKey(), + }, + }, + })); + await reset(db1, { users: sqliteUsers }); } diff --git a/drizzle-seed/vitest.config.ts b/drizzle-seed/vitest.config.ts index 5489010bde..3109f9f53a 100644 --- a/drizzle-seed/vitest.config.ts +++ b/drizzle-seed/vitest.config.ts @@ -1,8 +1,12 @@ +// oxlint-disable-next-line extensions import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ + './tests/singlestore/**/*.test.ts', + './tests/cockroach/**/*.test.ts', + './tests/mssql/**/*.test.ts', './tests/pg/**/*.test.ts', './tests/mysql/**/*.test.ts', './tests/sqlite/**/*.test.ts', diff --git a/drizzle-typebox/package.json b/drizzle-typebox/package.json index d35944edef..1c9bd779f8 100644 --- a/drizzle-typebox/package.json +++ b/drizzle-typebox/package.json @@ -1,13 +1,15 @@ { "name": "drizzle-typebox", - "version": "0.3.3", + "version": "1.0.0-beta.2", "description": "Generate Typebox schemas from Drizzle ORM schemas", "type": "module", "scripts": { "build": "tsx scripts/build.ts", + "build:artifact": "pnpm run build", "b": "pnpm build", "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", + "pack:artifact": "pnpm run pack", "publish": "npm publish package.tgz", "test": "vitest run" }, @@ -56,7 +58,7 @@ "license": "Apache-2.0", "peerDependencies": { "@sinclair/typebox": ">=0.34.8", - "drizzle-orm": ">=1.0.0-beta.1" + "drizzle-orm": ">=1.0.0-beta.2" }, "devDependencies": { "@rollup/plugin-typescript": "^11.1.0", @@ -67,8 +69,6 @@ "json-rules-engine": "^7.3.1", "rimraf": "^5.0.0", "rollup": "^3.29.5", - "vite-tsconfig-paths": "^4.3.2", - "vitest": "^3.1.3", "zx": "^7.2.2" } } diff --git a/drizzle-typebox/src/column.ts b/drizzle-typebox/src/column.ts index 14ae2aa779..ab251c82c5 100644 --- a/drizzle-typebox/src/column.ts +++ b/drizzle-typebox/src/column.ts @@ -12,11 +12,11 @@ import { getTableName, } from 'drizzle-orm'; import { CONSTANTS } from './constants.ts'; -import type { BufferSchema, JsonSchema } from './utils.ts'; +import type { BigIntStringModeSchema, BufferSchema, JsonSchema } from './utils.ts'; export const literalSchema = t.Union([t.String(), t.Number(), t.Boolean(), t.Null()]); export const jsonSchema: JsonSchema = t.Union([literalSchema, t.Array(t.Any()), t.Record(t.String(), t.Any())]) as any; -TypeRegistry.Set('Buffer', (_, value) => value instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof +TypeRegistry.Set('Buffer', (_, value) => value instanceof Buffer); export const bufferSchema: BufferSchema = { [Kind]: 'Buffer', type: 'buffer' } as any; export function mapEnumValues(values: string[]) { @@ -161,6 +161,11 @@ function numberColumnToSchema( integer = true; break; } + case 'unsigned': { + min = 0; + max = Number.MAX_SAFE_INTEGER; + break; + } default: { min = Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; @@ -175,6 +180,43 @@ function numberColumnToSchema( }); } +TypeRegistry.Set('BigIntStringMode', (_, value) => { + if (typeof value !== 'string' || !(/^-?\d+$/.test(value))) { + return false; + } + + const bigint = BigInt(value); + if (bigint < CONSTANTS.INT64_MIN || bigint > CONSTANTS.INT64_MAX) { + return false; + } + + return true; +}); + +TypeRegistry.Set('UnsignedBigIntStringMode', (_, value) => { + if (typeof value !== 'string' || !(/^\d+$/.test(value))) { + return false; + } + + const bigint = BigInt(value); + if (bigint < 0 || bigint > CONSTANTS.INT64_MAX) { + return false; + } + + return true; +}); +/** @internal */ +export const bigintStringModeSchema: BigIntStringModeSchema = { + [Kind]: 'BigIntStringMode', + type: 'string', +} as any; + +/** @internal */ +export const unsignedBigintStringModeSchema: BigIntStringModeSchema = { + [Kind]: 'UnsignedBigIntStringMode', + type: 'string', +} as any; + function arrayColumnToSchema( column: Column, constraint: ColumnDataArrayConstraint | undefined, @@ -199,6 +241,22 @@ function arrayColumnToSchema( : undefined; return t.Array(t.Number(), sizeParam); } + case 'int64vector': { + const length = column.length; + const sizeParam = length + ? { + minItems: length, + maxItems: length, + } + : undefined; + return t.Array( + t.BigInt({ + minimum: CONSTANTS.INT64_MIN, + maximum: CONSTANTS.INT64_MAX, + }), + sizeParam, + ); + } case 'basecolumn': { const size = column.length; const sizeParam = size @@ -316,6 +374,12 @@ function stringColumnToSchema( } return t.Enum(mapEnumValues(enumValues)); } + if (constraint === 'int64') { + return bigintStringModeSchema; + } + if (constraint === 'uint64') { + return unsignedBigintStringModeSchema; + } const options: Partial = {}; diff --git a/drizzle-typebox/src/column.types.ts b/drizzle-typebox/src/column.types.ts index 63ca38e9af..6dc4a0bb7a 100644 --- a/drizzle-typebox/src/column.types.ts +++ b/drizzle-typebox/src/column.types.ts @@ -1,10 +1,11 @@ import type * as t from '@sinclair/typebox'; import type { Assume, Column, ColumnTypeData, ExtractColumnTypeData } from 'drizzle-orm'; +import type { bigintStringModeSchema, unsignedBigintStringModeSchema } from './column.ts'; import type { BufferSchema, JsonSchema } from './utils.ts'; export type EnumValuesToEnum = { [K in TEnumValues[number]]: K }; -export interface GenericSchema extends t.TSchema { +export interface GenericSchema extends t.TSchema { // oxlint-disable-line import/namespace false-positive static: T; } @@ -17,6 +18,7 @@ export type GetTypeboxType< : TType['constraint'] extends 'geometry' | 'point' ? t.TTuple<[t.TNumber, t.TNumber]> : TType['constraint'] extends 'line' ? t.TTuple<[t.TNumber, t.TNumber, t.TNumber]> : TType['constraint'] extends 'vector' | 'halfvector' ? t.TArray + : TType['constraint'] extends 'int64vector' ? t.TArray : t.TArray : TType['type'] extends 'object' ? TType['constraint'] extends 'geometry' | 'point' ? t.TObject<{ x: t.TNumber; y: t.TNumber }> @@ -35,6 +37,8 @@ export type GetTypeboxType< : TType['type'] extends 'bigint' ? t.TBigInt : TType['type'] extends 'boolean' ? t.TBoolean : TType['type'] extends 'string' ? TType['constraint'] extends 'binary' | 'varbinary' ? t.TRegExp + : TType['constraint'] extends 'int64' ? typeof bigintStringModeSchema + : TType['constraint'] extends 'uint64' ? typeof unsignedBigintStringModeSchema : TType['constraint'] extends 'enum' ? t.TEnum<{ [K in Assume[number]]: K }> : t.TString : t.TAny; diff --git a/drizzle-typebox/src/schema.ts b/drizzle-typebox/src/schema.ts index 266fe77400..0918d33641 100644 --- a/drizzle-typebox/src/schema.ts +++ b/drizzle-typebox/src/schema.ts @@ -74,13 +74,17 @@ const selectConditions: Conditions = { }; const insertConditions: Conditions = { - never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', + never: (column) => + column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always' + || ('identity' in (column ?? {}) && typeof (column as any)?.identity !== 'undefined'), optional: (column) => !column.notNull || (column.notNull && column.hasDefault), nullable: (column) => !column.notNull, }; const updateConditions: Conditions = { - never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', + never: (column) => + column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always' + || ('identity' in (column ?? {}) && typeof (column as any)?.identity !== 'undefined'), optional: () => true, nullable: (column) => !column.notNull, }; diff --git a/drizzle-typebox/src/schema.types.internal.ts b/drizzle-typebox/src/schema.types.internal.ts index 9cd1b9a7c9..2af9541b62 100644 --- a/drizzle-typebox/src/schema.types.internal.ts +++ b/drizzle-typebox/src/schema.types.internal.ts @@ -43,8 +43,11 @@ export type BuildSchema< > = t.TObject< Simplify< { - [K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? never : K]: TColumns[K] extends - infer TColumn extends Column + [ + K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? TType extends 'select' ? K + : never + : K + ]: TColumns[K] extends infer TColumn extends Column ? IsRefinementDefined extends true ? Assume, t.TSchema> : HandleColumn diff --git a/drizzle-typebox/src/schema.types.ts b/drizzle-typebox/src/schema.types.ts index 75e6759cde..f15c46de85 100644 --- a/drizzle-typebox/src/schema.types.ts +++ b/drizzle-typebox/src/schema.types.ts @@ -1,5 +1,6 @@ import type * as t from '@sinclair/typebox'; import type { InferInsertModel, InferSelectModel, Table, View } from 'drizzle-orm'; +import type { CockroachEnum } from 'drizzle-orm/cockroach-core'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { EnumValuesToEnum } from './column.types.ts'; import type { BuildRefine, BuildSchema, NoUnknownKeys } from './schema.types.internal.ts'; @@ -23,7 +24,7 @@ export interface CreateSelectSchema { refine: NoUnknownKeys, ): BuildSchema<'select', TView['_']['selectedFields'], TRefine>; - >(enum_: TEnum): t.TEnum>; + | CockroachEnum>(enum_: TEnum): t.TEnum>; } export interface CreateInsertSchema { diff --git a/drizzle-typebox/src/utils.ts b/drizzle-typebox/src/utils.ts index b230c88a67..bc972fa2df 100644 --- a/drizzle-typebox/src/utils.ts +++ b/drizzle-typebox/src/utils.ts @@ -25,6 +25,11 @@ export interface BufferSchema extends TSchema { static: Buffer; type: 'buffer'; } +export interface BigIntStringModeSchema extends TSchema { + [Kind]: 'BigIntStringMode'; + static: string; + type: 'string'; +} export type IsNever = [T] extends [never] ? true : false; diff --git a/drizzle-typebox/tests/cockroach.test.ts b/drizzle-typebox/tests/cockroach.test.ts new file mode 100644 index 0000000000..d2a7199054 --- /dev/null +++ b/drizzle-typebox/tests/cockroach.test.ts @@ -0,0 +1,560 @@ +import { type Static, Type as t } from '@sinclair/typebox'; +import { type Equal, sql } from 'drizzle-orm'; +import { + cockroachEnum, + cockroachMaterializedView, + cockroachSchema, + cockroachTable, + cockroachView, + customType, + int4, + jsonb, + text, +} from 'drizzle-orm/cockroach-core'; +import type { TopLevelCondition } from 'json-rules-engine'; +import { test } from 'vitest'; +import { jsonSchema } from '~/column.ts'; +import { CONSTANTS } from '~/constants.ts'; +import { createInsertSchema, createSelectSchema, createUpdateSchema, type GenericSchema } from '../src'; +import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; + +const int4Schema = t.Integer({ + minimum: CONSTANTS.INT32_MIN, + maximum: CONSTANTS.INT32_MAX, +}); +const int4NullableSchema = t.Union([int4Schema, t.Null()]); +const int4OptionalSchema = t.Optional(int4Schema); +const int4NullableOptionalSchema = t.Optional(t.Union([int4Schema, t.Null()])); + +const textSchema = t.String(); +const textOptionalSchema = t.Optional(textSchema); + +const anySchema = t.Any(); + +const extendedSchema = t.Integer({ + minimum: CONSTANTS.INT32_MIN, + maximum: 1000, +}); +const extendedNullableSchema = t.Union([extendedSchema, t.Null()]); +const extendedOptionalSchema = t.Optional(extendedSchema); + +const customSchema = t.Integer({ minimum: 1, maximum: 10 }); + +test('table - select', (tc) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + generated: int4().generatedAlwaysAsIdentity(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = t.Object({ id: int4Schema, generated: int4Schema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table in schema - select', (tc) => { + const schema = cockroachSchema('test'); + const table = schema.table('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = t.Object({ id: int4Schema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - insert', (tc) => { + const table = cockroachTable('test', { + id: int4().generatedAlwaysAsIdentity().primaryKey(), + name: text().notNull(), + age: int4(), + }); + + const result = createInsertSchema(table); + const expected = t.Object({ name: textSchema, age: int4NullableOptionalSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - update', (tc) => { + const table = cockroachTable('test', { + id: int4().generatedAlwaysAsIdentity().primaryKey(), + name: text().notNull(), + age: int4(), + }); + + const result = createUpdateSchema(table); + const expected = t.Object({ + name: textOptionalSchema, + age: int4NullableOptionalSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('view qb - select', (tc) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); + + const result = createSelectSchema(view); + const expected = t.Object({ id: int4Schema, age: anySchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('view columns - select', (tc) => { + const view = cockroachView('test', { + id: int4().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = t.Object({ id: int4Schema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('materialized view qb - select', (tc) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachMaterializedView('test').as((qb) => + qb.select({ id: table.id, age: sql``.as('age') }).from(table) + ); + + const result = createSelectSchema(view); + const expected = t.Object({ id: int4Schema, age: anySchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('materialized view columns - select', (tc) => { + const view = cockroachMaterializedView('test', { + id: int4().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = t.Object({ id: int4Schema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('view with nested fields - select', (tc) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachView('test').as((qb) => + qb.select({ + id: table.id, + nested: { + name: table.name, + age: sql``.as('age'), + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view); + const expected = t.Object({ + id: int4Schema, + nested: t.Object({ name: textSchema, age: anySchema }), + table: t.Object({ id: int4Schema, name: textSchema }), + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('enum - select', (tc) => { + const enum_ = cockroachEnum('test', ['a', 'b', 'c']); + + const result = createSelectSchema(enum_); + const expected = t.Enum({ a: 'a', b: 'b', c: 'c' }); + expectEnumValues(tc, expected).from(result); + Expect>(); +}); + +test('nullability - select', (tc) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + }); + + const result = createSelectSchema(table); + const expected = t.Object({ + c1: int4NullableSchema, + c2: int4Schema, + c3: int4NullableSchema, + c4: int4Schema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('nullability - insert', (tc) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + c5: int4().generatedAlwaysAs(1), + c6: int4().generatedAlwaysAsIdentity(), + c7: int4().generatedByDefaultAsIdentity(), + }); + + const result = createInsertSchema(table); + const expected = t.Object({ + c1: int4NullableOptionalSchema, + c2: int4Schema, + c3: int4NullableOptionalSchema, + c4: int4OptionalSchema, + c7: int4OptionalSchema, + }); + expectSchemaShape(tc, expected).from(result); +}); + +test('nullability - update', (tc) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + c5: int4().generatedAlwaysAs(1), + c6: int4().generatedAlwaysAsIdentity(), + c7: int4().generatedByDefaultAsIdentity(), + }); + + const result = createUpdateSchema(table); + const expected = t.Object({ + c1: int4NullableOptionalSchema, + c2: int4OptionalSchema, + c3: int4NullableOptionalSchema, + c4: int4OptionalSchema, + c7: int4OptionalSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine table - select', (tc) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + }); + + const result = createSelectSchema(table, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + }); + const expected = t.Object({ + c1: int4NullableSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine table - select with custom data type', (tc) => { + const customText = customType({ dataType: () => 'text' }); + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: customText(), + }); + + const customTextSchema = t.String({ minLength: 1, maxLength: 100 }); + const result = createSelectSchema(table, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + c4: customTextSchema, + }); + const expected = t.Object({ + c1: int4NullableSchema, + c2: extendedSchema, + c3: customSchema, + c4: customTextSchema, + }); + + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine table - insert', (tc) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: int4().generatedAlwaysAs(1), + }); + + const result = createInsertSchema(table, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + }); + const expected = t.Object({ + c1: int4NullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine table - update', (tc) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: int4().generatedAlwaysAs(1), + }); + + const result = createUpdateSchema(table, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + }); + const expected = t.Object({ + c1: int4NullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine view - select', (tc) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4(), + c3: int4(), + c4: int4(), + c5: int4(), + c6: int4(), + }); + const view = cockroachView('test').as((qb) => + qb.select({ + c1: table.c1, + c2: table.c2, + c3: table.c3, + nested: { + c4: table.c4, + c5: table.c5, + c6: table.c6, + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + nested: { + c5: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c6: t.Integer({ minimum: 1, maximum: 10 }), + }, + table: { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + }, + }); + const expected = t.Object({ + c1: int4NullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + nested: t.Object({ + c4: int4NullableSchema, + c5: extendedNullableSchema, + c6: customSchema, + }), + table: t.Object({ + c1: int4NullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: int4NullableSchema, + c5: int4NullableSchema, + c6: int4NullableSchema, + }), + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('all data types', (tc) => { + const table = cockroachTable('test', ({ + bigint, + bit, + bool, + char, + date, + decimal, + float, + doublePrecision, + geometry, + inet, + int2, + int4, + int8, + interval, + jsonb, + numeric, + real, + smallint, + string, + text, + time, + timestamp, + uuid, + varchar, + vector, + }) => ({ + bigint1: bigint({ mode: 'number' }).notNull(), + bigint2: bigint({ mode: 'bigint' }).notNull(), + bit: bit({ length: 5 }).notNull(), + boolean: bool().notNull(), + char1: char({ length: 10 }).notNull(), + char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + date1: date({ mode: 'date' }).notNull(), + date2: date({ mode: 'string' }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'bigint' }).notNull(), + decimal3: decimal({ mode: 'string' }).notNull(), + float: float().notNull(), + doublePrecision: doublePrecision().notNull(), + geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(), + geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(), + inet: inet().notNull(), + int2: int2().notNull(), + int4: int4().notNull(), + int8_1: int8({ mode: 'number' }).notNull(), + int8_2: int8({ mode: 'bigint' }).notNull(), + interval: interval().notNull(), + jsonb: jsonb().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), + real: real().notNull(), + smallint: smallint().notNull(), + string1: string().notNull(), + string2: string({ enum: ['a', 'b', 'c'] }).notNull(), + text1: text().notNull(), + text2: text({ enum: ['a', 'b', 'c'] }).notNull(), + time: time().notNull(), + timestamp1: timestamp({ mode: 'date' }).notNull(), + timestamp2: timestamp({ mode: 'string' }).notNull(), + uuid: uuid().notNull(), + varchar1: varchar({ length: 10 }).notNull(), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + vector: vector({ dimensions: 3 }).notNull(), + array: int4().array().notNull(), + })); + + const result = createSelectSchema(table); + const expected = t.Object({ + bigint1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + bigint2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + bit: t.RegExp(/^[01]*$/, { minLength: 5, maxLength: 5 }), + boolean: t.Boolean(), + char1: t.String({ maxLength: 10 }), + char2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + date1: t.Date(), + date2: t.String(), + decimal1: t.Number({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + decimal2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + decimal3: t.String(), + float: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), + doublePrecision: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), + geometry1: t.Tuple([t.Number(), t.Number()]), + geometry2: t.Object({ x: t.Number(), y: t.Number() }), + inet: t.String(), + int2: t.Integer({ minimum: CONSTANTS.INT16_MIN, maximum: CONSTANTS.INT16_MAX }), + int4: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX }), + int8_1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + int8_2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + interval: t.String(), + jsonb: jsonSchema, + numeric1: t.Number({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + numeric2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + numeric3: t.String(), + real: t.Number({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), + smallint: t.Integer({ minimum: CONSTANTS.INT16_MIN, maximum: CONSTANTS.INT16_MAX }), + string1: t.String(), + string2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + text1: t.String(), + text2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + time: t.String(), + timestamp1: t.Date(), + timestamp2: t.String(), + uuid: t.String({ format: 'uuid' }), + varchar1: t.String({ maxLength: 10 }), + varchar2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + vector: t.Array(t.Number(), { minItems: 3, maxItems: 3 }), + array: t.Array(int4Schema), + }); + + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +/* Infinitely recursive type */ { + const TopLevelCondition: GenericSchema = t.Any() as any; + const table = cockroachTable('test', { + jsonb: jsonb().$type(), + }); + const result = createSelectSchema(table); + const expected = t.Object({ + jsonb: t.Union([TopLevelCondition, t.Null()]), + }); + Expect, Static>>(); +} + +/* Disallow unknown keys in table refinement - select */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createSelectSchema(table, { unknown: t.String() }); +} + +/* Disallow unknown keys in table refinement - insert */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createInsertSchema(table, { unknown: t.String() }); +} + +/* Disallow unknown keys in table refinement - update */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createUpdateSchema(table, { unknown: t.String() }); +} + +/* Disallow unknown keys in view qb - select */ { + const table = cockroachTable('test', { id: int4() }); + const view = cockroachView('test').as((qb) => qb.select().from(table)); + const mView = cockroachMaterializedView('test').as((qb) => qb.select().from(table)); + const nestedSelect = cockroachView('test').as((qb) => qb.select({ table }).from(table)); + // @ts-expect-error + createSelectSchema(view, { unknown: t.String() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: t.String() }); + // @ts-expect-error + createSelectSchema(nestedSelect, { table: { unknown: t.String() } }); +} + +/* Disallow unknown keys in view columns - select */ { + const view = cockroachView('test', { id: int4() }).as(sql``); + const mView = cockroachView('test', { id: int4() }).as(sql``); + // @ts-expect-error + createSelectSchema(view, { unknown: t.String() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: t.String() }); +} diff --git a/drizzle-typebox/tests/mssql.test.ts b/drizzle-typebox/tests/mssql.test.ts new file mode 100644 index 0000000000..1111618919 --- /dev/null +++ b/drizzle-typebox/tests/mssql.test.ts @@ -0,0 +1,499 @@ +import { Type as t } from '@sinclair/typebox'; +import { type Equal, sql } from 'drizzle-orm'; +import { customType, int, mssqlSchema, mssqlTable, mssqlView, text } from 'drizzle-orm/mssql-core'; +import { test } from 'vitest'; +import { bigintStringModeSchema, bufferSchema } from '~/column.ts'; +import { CONSTANTS } from '~/constants.ts'; +import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src/index.ts'; +import { Expect, expectSchemaShape } from './utils.ts'; + +const integerSchema = t.Integer({ + minimum: CONSTANTS.INT32_MIN, + maximum: CONSTANTS.INT32_MAX, +}); +const integerNullableSchema = t.Union([integerSchema, t.Null()]); +const integerOptionalSchema = t.Optional(integerSchema); +const integerNullableOptionalSchema = t.Optional(t.Union([integerSchema, t.Null()])); + +const textSchema = t.String(); +const textOptionalSchema = t.Optional(textSchema); + +const anySchema = t.Any(); + +const extendedSchema = t.Integer({ + minimum: CONSTANTS.INT32_MIN, + maximum: 1000, +}); +const extendedNullableSchema = t.Union([extendedSchema, t.Null()]); +const extendedOptionalSchema = t.Optional(extendedSchema); + +const customSchema = t.Integer({ minimum: 1, maximum: 10 }); + +test('table - select', (tc) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + generated: int().identity(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = t.Object({ id: integerSchema, generated: integerSchema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table in schema - select', (tc) => { + const schema = mssqlSchema('test'); + const table = schema.table('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = t.Object({ id: integerSchema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - insert', (tc) => { + const table = mssqlTable('test', { + id: int().identity().primaryKey(), + name: text().notNull(), + age: int(), + }); + + const result = createInsertSchema(table); + const expected = t.Object({ name: textSchema, age: integerNullableOptionalSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - update', (tc) => { + const table = mssqlTable('test', { + id: int().identity().primaryKey(), + name: text().notNull(), + age: int(), + }); + + const result = createUpdateSchema(table); + const expected = t.Object({ + name: textOptionalSchema, + age: integerNullableOptionalSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('view qb - select', (tc) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + const view = mssqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); + + const result = createSelectSchema(view); + const expected = t.Object({ id: integerSchema, age: anySchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('view columns - select', (tc) => { + const view = mssqlView('test', { + id: int().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = t.Object({ id: integerSchema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('view with nested fields - select', (tc) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + const view = mssqlView('test').as((qb) => + qb.select({ + id: table.id, + nested: { + name: table.name, + age: sql``.as('age'), + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view); + const expected = t.Object({ + id: integerSchema, + nested: t.Object({ name: textSchema, age: anySchema }), + table: t.Object({ id: integerSchema, name: textSchema }), + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('nullability - select', (tc) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + }); + + const result = createSelectSchema(table); + const expected = t.Object({ + c1: integerNullableSchema, + c2: integerSchema, + c3: integerNullableSchema, + c4: integerSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('nullability - insert', (tc) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + c5: int().generatedAlwaysAs(1), + c6: int().identity(), + }); + + const result = createInsertSchema(table); + const expected = t.Object({ + c1: integerNullableOptionalSchema, + c2: integerSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + }); + expectSchemaShape(tc, expected).from(result); +}); + +test('nullability - update', (tc) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + c5: int().generatedAlwaysAs(1), + c6: int().identity(), + }); + + const result = createUpdateSchema(table); + const expected = t.Object({ + c1: integerNullableOptionalSchema, + c2: integerOptionalSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine table - select', (tc) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + }); + + const result = createSelectSchema(table, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + }); + const expected = t.Object({ + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine table - select with custom data type', (tc) => { + const customText = customType({ dataType: () => 'text' }); + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: customText(), + }); + + const customTextSchema = t.String({ minLength: 1, maxLength: 100 }); + const result = createSelectSchema(table, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + c4: customTextSchema, + }); + const expected = t.Object({ + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, + c4: customTextSchema, + }); + + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine table - insert', (tc) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: int().generatedAlwaysAs(1), + }); + + const result = createInsertSchema(table, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + }); + const expected = t.Object({ + c1: integerNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine table - update', (tc) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: int().generatedAlwaysAs(1), + }); + + const result = createUpdateSchema(table, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + }); + const expected = t.Object({ + c1: integerNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('refine view - select', (tc) => { + const table = mssqlTable('test', { + c1: int(), + c2: int(), + c3: int(), + c4: int(), + c5: int(), + c6: int(), + }); + const view = mssqlView('test').as((qb) => + qb.select({ + c1: table.c1, + c2: table.c2, + c3: table.c3, + nested: { + c4: table.c4, + c5: table.c5, + c6: table.c6, + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view, { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + nested: { + c5: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c6: t.Integer({ minimum: 1, maximum: 10 }), + }, + table: { + c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c3: t.Integer({ minimum: 1, maximum: 10 }), + }, + }); + const expected = t.Object({ + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + nested: t.Object({ + c4: integerNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, + }), + table: t.Object({ + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: integerNullableSchema, + c5: integerNullableSchema, + c6: integerNullableSchema, + }), + }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('all data types', (tc) => { + const table = mssqlTable('test', ({ + bigint, + binary, + bit, + char, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + int, + numeric, + real, + smallint, + text, + time, + tinyint, + varbinary, + varchar, + ntext, + nvarchar, + }) => ({ + bigint1: bigint({ mode: 'number' }).notNull(), + bigint2: bigint({ mode: 'bigint' }).notNull(), + bigint3: bigint({ mode: 'string' }).notNull(), + binary: binary({ length: 10 }).notNull(), + bit: bit().notNull(), + char1: char({ length: 10 }).notNull(), + char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + date1: date({ mode: 'date' }).notNull(), + date2: date({ mode: 'string' }).notNull(), + datetime1: datetime({ mode: 'date' }).notNull(), + datetime2: datetime({ mode: 'string' }).notNull(), + datetime2_1: datetime2({ mode: 'date' }).notNull(), + datetime2_2: datetime2({ mode: 'string' }).notNull(), + datetimeoffset1: datetimeoffset({ mode: 'date' }).notNull(), + datetimeoffset2: datetimeoffset({ mode: 'string' }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'bigint' }).notNull(), + decimal3: decimal({ mode: 'string' }).notNull(), + float: float().notNull(), + int: int().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), + real: real().notNull(), + smallint: smallint().notNull(), + text1: text().notNull(), + text2: text({ enum: ['a', 'b', 'c'] }).notNull(), + time1: time({ mode: 'date' }).notNull(), + time2: time({ mode: 'string' }).notNull(), + tinyint: tinyint().notNull(), + varbinary: varbinary({ length: 10 }).notNull(), + varchar1: varchar({ length: 10 }).notNull(), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + ntext1: ntext().notNull(), + ntext2: ntext({ enum: ['a', 'b', 'c'] }).notNull(), + nvarchar1: nvarchar({ length: 10 }).notNull(), + nvarchar2: nvarchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + })); + + const result = createSelectSchema(table); + const expected = t.Object({ + bigint1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + bigint2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + bigint3: bigintStringModeSchema, + binary: bufferSchema, + bit: t.Boolean(), + char1: t.String({ maxLength: 10 }), + char2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + date1: t.Date(), + date2: t.String(), + datetime1: t.Date(), + datetime2: t.String(), + datetime2_1: t.Date(), + datetime2_2: t.String(), + datetimeoffset1: t.Date(), + datetimeoffset2: t.String(), + decimal1: t.Number({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + decimal2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + decimal3: t.String(), + float: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), + int: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX }), + numeric1: t.Number({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + numeric2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + numeric3: t.String(), + real: t.Number({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), + smallint: t.Integer({ minimum: CONSTANTS.INT16_MIN, maximum: CONSTANTS.INT16_MAX }), + text1: t.String(), + text2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + time1: t.Date(), + time2: t.String(), + tinyint: t.Integer({ minimum: 0, maximum: CONSTANTS.INT8_UNSIGNED_MAX }), + varbinary: bufferSchema, + varchar1: t.String({ maxLength: 10 }), + varchar2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + ntext1: t.String(), + ntext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + nvarchar1: t.String({ maxLength: 10 }), + nvarchar2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + }); + + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +// MSSQL doesn't support JSON data type +// /* Infinitely recursive type */ { +// const TopLevelCondition: GenericSchema = t.Any() as any; +// const table = mssqlTable('test', { +// json: json().$type(), +// }); +// const result = createSelectSchema(table); +// const expected = t.Object({ +// json: t.Union([TopLevelCondition, t.Null()]), +// }); +// Expect, Static>>(); +// } + +/* Disallow unknown keys in table refinement - select */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createSelectSchema(table, { unknown: t.String() }); +} + +/* Disallow unknown keys in table refinement - insert */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createInsertSchema(table, { unknown: t.String() }); +} + +/* Disallow unknown keys in table refinement - update */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createUpdateSchema(table, { unknown: t.String() }); +} + +/* Disallow unknown keys in view qb - select */ { + const table = mssqlTable('test', { id: int() }); + const view = mssqlView('test').as((qb) => qb.select().from(table)); + const nestedSelect = mssqlView('test').as((qb) => qb.select({ table }).from(table)); + // @ts-expect-error + createSelectSchema(view, { unknown: t.String() }); + // @ts-expect-error + createSelectSchema(nestedSelect, { table: { unknown: t.String() } }); +} + +/* Disallow unknown keys in view columns - select */ { + const view = mssqlView('test', { id: int() }).as(sql``); + const mView = mssqlView('test', { id: int() }).as(sql``); + // @ts-expect-error + createSelectSchema(view, { unknown: t.String() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: t.String() }); +} diff --git a/drizzle-typebox/tests/mysql.test.ts b/drizzle-typebox/tests/mysql.test.ts index 15b682902a..c8fde3222f 100644 --- a/drizzle-typebox/tests/mysql.test.ts +++ b/drizzle-typebox/tests/mysql.test.ts @@ -3,7 +3,7 @@ import { type Equal, sql } from 'drizzle-orm'; import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text } from 'drizzle-orm/mysql-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema, unsignedBigintStringModeSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema, type GenericSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; @@ -12,11 +12,26 @@ const intSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX, }); -const serialNumberModeSchema = t.Integer({ +const intNullableSchema = t.Union([intSchema, t.Null()]); +const intOptionalSchema = t.Optional(intSchema); +const intNullableOptionalSchema = t.Optional(t.Union([intSchema, t.Null()])); + +const serialSchema = t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER, }); +const serialOptionalSchema = t.Optional(serialSchema); + const textSchema = t.String({ maxLength: CONSTANTS.INT16_UNSIGNED_MAX }); +const textOptionalSchema = t.Optional(textSchema); + +const anySchema = t.Any(); + +const extendedSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }); +const extendedNullableSchema = t.Union([extendedSchema, t.Null()]); +const extendedOptionalSchema = t.Optional(extendedSchema); + +const customSchema = t.Integer({ minimum: 1, maximum: 10 }); test('table - select', (tc) => { const table = mysqlTable('test', { @@ -25,7 +40,7 @@ test('table - select', (tc) => { }); const result = createSelectSchema(table); - const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); + const expected = t.Object({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -38,7 +53,7 @@ test('table in schema - select', (tc) => { }); const result = createSelectSchema(table); - const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); + const expected = t.Object({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -52,9 +67,9 @@ test('table - insert', (tc) => { const result = createInsertSchema(table); const expected = t.Object({ - id: t.Optional(serialNumberModeSchema), + id: serialOptionalSchema, name: textSchema, - age: t.Optional(t.Union([intSchema, t.Null()])), + age: intNullableOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -69,9 +84,9 @@ test('table - update', (tc) => { const result = createUpdateSchema(table); const expected = t.Object({ - id: t.Optional(serialNumberModeSchema), - name: t.Optional(textSchema), - age: t.Optional(t.Union([intSchema, t.Null()])), + id: serialOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -85,7 +100,7 @@ test('view qb - select', (tc) => { const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = t.Object({ id: serialNumberModeSchema, age: t.Any() }); + const expected = t.Object({ id: serialSchema, age: anySchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -97,7 +112,7 @@ test('view columns - select', (tc) => { }).as(sql``); const result = createSelectSchema(view); - const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); + const expected = t.Object({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -120,9 +135,9 @@ test('view with nested fields - select', (tc) => { const result = createSelectSchema(view); const expected = t.Object({ - id: serialNumberModeSchema, - nested: t.Object({ name: textSchema, age: t.Any() }), - table: t.Object({ id: serialNumberModeSchema, name: textSchema }), + id: serialSchema, + nested: t.Object({ name: textSchema, age: anySchema }), + table: t.Object({ id: serialSchema, name: textSchema }), }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -138,9 +153,9 @@ test('nullability - select', (tc) => { const result = createSelectSchema(table); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), + c1: intNullableSchema, c2: intSchema, - c3: t.Union([intSchema, t.Null()]), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(tc, expected).from(result); @@ -158,10 +173,10 @@ test('nullability - insert', (tc) => { const result = createInsertSchema(table); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), + c1: intNullableOptionalSchema, c2: intSchema, - c3: t.Optional(t.Union([intSchema, t.Null()])), - c4: t.Optional(intSchema), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -178,10 +193,10 @@ test('nullability - update', (tc) => { const result = createUpdateSchema(table); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Optional(intSchema), - c3: t.Optional(t.Union([intSchema, t.Null()])), - c4: t.Optional(intSchema), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -199,9 +214,9 @@ test('refine table - select', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); @@ -224,9 +239,9 @@ test('refine table - select with custom data type', (tc) => { c4: customTextSchema, }); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -247,9 +262,9 @@ test('refine table - insert', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -268,9 +283,9 @@ test('refine table - update', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Optional(t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 })), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -312,21 +327,21 @@ test('refine view - select', (tc) => { }, }); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: t.Object({ - c4: t.Union([intSchema, t.Null()]), - c5: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), - c6: t.Integer({ minimum: 1, maximum: 10 }), + c4: intNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), - c3: t.Integer({ minimum: 1, maximum: 10 }), - c4: t.Union([intSchema, t.Null()]), - c5: t.Union([intSchema, t.Null()]), - c6: t.Union([intSchema, t.Null()]), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: intNullableSchema, + c5: intNullableSchema, + c6: intNullableSchema, }), }); expectSchemaShape(tc, expected).from(result); @@ -376,8 +391,12 @@ test('all data types', (tc) => { date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), - decimal1: decimal().notNull(), - decimal2: decimal({ unsigned: true }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'number', unsigned: true }).notNull(), + decimal3: decimal({ mode: 'bigint' }).notNull(), + decimal4: decimal({ mode: 'bigint', unsigned: true }).notNull(), + decimal5: decimal({ mode: 'string' }).notNull(), + decimal6: decimal({ mode: 'string', unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), @@ -417,18 +436,22 @@ test('all data types', (tc) => { bigint2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), bigint3: t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER }), bigint4: t.BigInt({ minimum: 0n, maximum: CONSTANTS.INT64_UNSIGNED_MAX }), - bigint5: t.String(), - bigint6: t.String(), - binary: t.RegExp(/^[01]*$/, { minLength: 10, maxLength: 10 }), + bigint5: bigintStringModeSchema, + bigint6: unsignedBigintStringModeSchema, + binary: t.RegExp(/^[01]*$/, { maxLength: 10 }), boolean: t.Boolean(), - char1: t.String({ minLength: 10, maxLength: 10 }), + char1: t.String({ maxLength: 10 }), char2: t.Enum({ a: 'a', b: 'b', c: 'c' }), date1: t.Date(), date2: t.String(), datetime1: t.Date(), datetime2: t.String(), - decimal1: t.String(), - decimal2: t.String(), + decimal1: t.Number({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + decimal2: t.Number({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER }), + decimal3: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + decimal4: t.BigInt({ minimum: 0n, maximum: CONSTANTS.INT64_UNSIGNED_MAX }), + decimal5: t.String(), + decimal6: t.String(), double1: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), double2: t.Number({ minimum: 0, maximum: CONSTANTS.INT48_UNSIGNED_MAX }), float1: t.Number({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), diff --git a/drizzle-typebox/tests/pg.test.ts b/drizzle-typebox/tests/pg.test.ts index 35cc9874ef..f461f99556 100644 --- a/drizzle-typebox/tests/pg.test.ts +++ b/drizzle-typebox/tests/pg.test.ts @@ -15,13 +15,26 @@ import { } from 'drizzle-orm/pg-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema, type GenericSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX }); +const integerNullableSchema = t.Union([integerSchema, t.Null()]); +const integerOptionalSchema = t.Optional(integerSchema); +const integerNullableOptionalSchema = t.Optional(t.Union([integerSchema, t.Null()])); + const textSchema = t.String(); +const textOptionalSchema = t.Optional(textSchema); + +const anySchema = t.Any(); + +const extendedSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }); +const extendedNullableSchema = t.Union([extendedSchema, t.Null()]); +const extendedOptionalSchema = t.Optional(extendedSchema); + +const customSchema = t.Integer({ minimum: 1, maximum: 10 }); test('table - select', (tc) => { const table = pgTable('test', { @@ -56,7 +69,7 @@ test('table - insert', (tc) => { }); const result = createInsertSchema(table); - const expected = t.Object({ name: textSchema, age: t.Optional(t.Union([integerSchema, t.Null()])) }); + const expected = t.Object({ name: textSchema, age: integerNullableOptionalSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -70,8 +83,8 @@ test('table - update', (tc) => { const result = createUpdateSchema(table); const expected = t.Object({ - name: t.Optional(textSchema), - age: t.Optional(t.Union([integerSchema, t.Null()])), + name: textOptionalSchema, + age: integerNullableOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -85,7 +98,7 @@ test('view qb - select', (tc) => { const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = t.Object({ id: integerSchema, age: t.Any() }); + const expected = t.Object({ id: integerSchema, age: anySchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -110,13 +123,13 @@ test('materialized view qb - select', (tc) => { const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = t.Object({ id: integerSchema, age: t.Any() }); + const expected = t.Object({ id: integerSchema, age: anySchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('materialized view columns - select', (tc) => { - const view = pgView('test', { + const view = pgMaterializedView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); @@ -132,7 +145,7 @@ test('view with nested fields - select', (tc) => { id: serial().primaryKey(), name: text().notNull(), }); - const view = pgMaterializedView('test').as((qb) => + const view = pgView('test').as((qb) => qb.select({ id: table.id, nested: { @@ -146,7 +159,7 @@ test('view with nested fields - select', (tc) => { const result = createSelectSchema(view); const expected = t.Object({ id: integerSchema, - nested: t.Object({ name: textSchema, age: t.Any() }), + nested: t.Object({ name: textSchema, age: anySchema }), table: t.Object({ id: integerSchema, name: textSchema }), }); expectSchemaShape(tc, expected).from(result); @@ -172,9 +185,9 @@ test('nullability - select', (tc) => { const result = createSelectSchema(table); const expected = t.Object({ - c1: t.Union([integerSchema, t.Null()]), + c1: integerNullableSchema, c2: integerSchema, - c3: t.Union([integerSchema, t.Null()]), + c3: integerNullableSchema, c4: integerSchema, }); expectSchemaShape(tc, expected).from(result); @@ -194,11 +207,11 @@ test('nullability - insert', (tc) => { const result = createInsertSchema(table); const expected = t.Object({ - c1: t.Optional(t.Union([integerSchema, t.Null()])), + c1: integerNullableOptionalSchema, c2: integerSchema, - c3: t.Optional(t.Union([integerSchema, t.Null()])), - c4: t.Optional(integerSchema), - c7: t.Optional(integerSchema), + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + c7: integerOptionalSchema, }); expectSchemaShape(tc, expected).from(result); }); @@ -216,11 +229,11 @@ test('nullability - update', (tc) => { const result = createUpdateSchema(table); const expected = t.Object({ - c1: t.Optional(t.Union([integerSchema, t.Null()])), - c2: t.Optional(integerSchema), - c3: t.Optional(t.Union([integerSchema, t.Null()])), - c4: t.Optional(integerSchema), - c7: t.Optional(integerSchema), + c1: integerNullableOptionalSchema, + c2: integerOptionalSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + c7: integerOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -238,9 +251,9 @@ test('refine table - select', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Union([integerSchema, t.Null()]), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -262,9 +275,9 @@ test('refine table - select with custom data type', (tc) => { c4: customTextSchema, }); const expected = t.Object({ - c1: t.Union([integerSchema, t.Null()]), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -285,9 +298,9 @@ test('refine table - insert', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Optional(t.Union([integerSchema, t.Null()])), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: integerNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -306,9 +319,9 @@ test('refine table - update', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Optional(t.Union([integerSchema, t.Null()])), - c2: t.Optional(t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 })), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: integerNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -350,21 +363,21 @@ test('refine view - select', (tc) => { }, }); const expected = t.Object({ - c1: t.Union([integerSchema, t.Null()]), - c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: t.Object({ - c4: t.Union([integerSchema, t.Null()]), - c5: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), - c6: t.Integer({ minimum: 1, maximum: 10 }), + c4: integerNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: t.Object({ - c1: t.Union([integerSchema, t.Null()]), - c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), - c3: t.Integer({ minimum: 1, maximum: 10 }), - c4: t.Union([integerSchema, t.Null()]), - c5: t.Union([integerSchema, t.Null()]), - c6: t.Union([integerSchema, t.Null()]), + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: integerNullableSchema, + c5: integerNullableSchema, + c6: integerNullableSchema, }), }); expectSchemaShape(tc, expected).from(result); @@ -430,7 +443,9 @@ test('all data types', (tc) => { line2: line({ mode: 'tuple' }).notNull(), macaddr: macaddr().notNull(), macaddr8: macaddr8().notNull(), - numeric: numeric().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), point1: point({ mode: 'xy' }).notNull(), point2: point({ mode: 'tuple' }).notNull(), real: real().notNull(), @@ -456,14 +471,14 @@ test('all data types', (tc) => { const expected = t.Object({ bigint1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), bigint2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), - bigint3: t.String(), + bigint3: bigintStringModeSchema, bigserial1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), bigserial2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), bit: t.RegExp(/^[01]*$/, { minLength: 5, maxLength: 5 }), boolean: t.Boolean(), date1: t.Date(), date2: t.String(), - char1: t.String({ minLength: 10, maxLength: 10 }), + char1: t.String({ maxLength: 10 }), char2: t.Enum({ a: 'a', b: 'b', c: 'c' }), cidr: t.String(), doublePrecision: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), @@ -479,7 +494,9 @@ test('all data types', (tc) => { line2: t.Tuple([t.Number(), t.Number(), t.Number()]), macaddr: t.String(), macaddr8: t.String(), - numeric: t.String(), + numeric1: t.Number({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + numeric2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + numeric3: t.String(), point1: t.Object({ x: t.Number(), y: t.Number() }), point2: t.Tuple([t.Number(), t.Number()]), real: t.Number({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), diff --git a/drizzle-typebox/tests/singlestore.test.ts b/drizzle-typebox/tests/singlestore.test.ts index 2aec096737..730b2e3c46 100644 --- a/drizzle-typebox/tests/singlestore.test.ts +++ b/drizzle-typebox/tests/singlestore.test.ts @@ -3,7 +3,7 @@ import type { Equal } from 'drizzle-orm'; import { customType, int, json, serial, singlestoreSchema, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema, unsignedBigintStringModeSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema, type GenericSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; @@ -12,11 +12,26 @@ const intSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX, }); -const serialNumberModeSchema = t.Integer({ +const intNullableSchema = t.Union([intSchema, t.Null()]); +const intOptionalSchema = t.Optional(intSchema); +const intNullableOptionalSchema = t.Optional(t.Union([intSchema, t.Null()])); + +const serialSchema = t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER, }); +const serialOptionalSchema = t.Optional(serialSchema); + const textSchema = t.String({ maxLength: CONSTANTS.INT16_UNSIGNED_MAX }); +const textOptionalSchema = t.Optional(textSchema); + +// const anySchema = t.Any(); + +const extendedSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }); +// const extendedNullableSchema = t.Union([extendedSchema, t.Null()]); +const extendedOptionalSchema = t.Optional(extendedSchema); + +const customSchema = t.Integer({ minimum: 1, maximum: 10 }); test('table - select', (tc) => { const table = singlestoreTable('test', { @@ -25,7 +40,7 @@ test('table - select', (tc) => { }); const result = createSelectSchema(table); - const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); + const expected = t.Object({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -38,7 +53,7 @@ test('table in schema - select', (tc) => { }); const result = createSelectSchema(table); - const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); + const expected = t.Object({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -52,9 +67,9 @@ test('table - insert', (tc) => { const result = createInsertSchema(table); const expected = t.Object({ - id: t.Optional(serialNumberModeSchema), + id: serialOptionalSchema, name: textSchema, - age: t.Optional(t.Union([intSchema, t.Null()])), + age: intNullableOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -69,9 +84,9 @@ test('table - update', (tc) => { const result = createUpdateSchema(table); const expected = t.Object({ - id: t.Optional(serialNumberModeSchema), - name: t.Optional(textSchema), - age: t.Optional(t.Union([intSchema, t.Null()])), + id: serialOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -87,7 +102,7 @@ test('table - update', (tc) => { // const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); // const result = createSelectSchema(view); -// const expected = t.Object({ id: serialNumberModeSchema, age: t.Any() }); +// const expected = t.Object({ id: serialSchema, age: anySchema }); // expectSchemaShape(tc, expected).from(result); // Expect>(); // }); @@ -99,7 +114,7 @@ test('table - update', (tc) => { // }).as(sql``); // const result = createSelectSchema(view); -// const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); +// const expected = t.Object({ id: serialSchema, name: textSchema }); // expectSchemaShape(tc, expected).from(result); // Expect>(); // }); @@ -123,8 +138,8 @@ test('table - update', (tc) => { // const result = createSelectSchema(view); // const expected = t.Object({ // id: serialNumberModeSchema, -// nested: t.Object({ name: textSchema, age: t.Any() }), -// table: t.Object({ id: serialNumberModeSchema, name: textSchema }), +// nested: t.Object({ name: textSchema, age: anySchema }), +// table: t.Object({ id: serialSchema, name: textSchema }), // }); // expectSchemaShape(tc, expected).from(result); // Expect>(); @@ -140,9 +155,9 @@ test('nullability - select', (tc) => { const result = createSelectSchema(table); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), + c1: intNullableSchema, c2: intSchema, - c3: t.Union([intSchema, t.Null()]), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(tc, expected).from(result); @@ -160,10 +175,10 @@ test('nullability - insert', (tc) => { const result = createInsertSchema(table); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), + c1: intNullableOptionalSchema, c2: intSchema, - c3: t.Optional(t.Union([intSchema, t.Null()])), - c4: t.Optional(intSchema), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -180,10 +195,10 @@ test('nullability - update', (tc) => { const result = createUpdateSchema(table); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Optional(intSchema), - c3: t.Optional(t.Union([intSchema, t.Null()])), - c4: t.Optional(intSchema), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -201,9 +216,9 @@ test('refine table - select', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); @@ -226,9 +241,9 @@ test('refine table - select with custom data type', (tc) => { c4: customTextSchema, }); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -249,9 +264,9 @@ test('refine table - insert', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -270,9 +285,9 @@ test('refine table - update', (tc) => { c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Optional(t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 })), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -314,21 +329,21 @@ test('refine table - update', (tc) => { // }, // }); // const expected = t.Object({ -// c1: t.Union([intSchema, t.Null()]), -// c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), -// c3: t.Integer({ minimum: 1, maximum: 10 }), +// c1: intNullableSchema, +// c2: extendedNullableSchema, +// c3: customSchema, // nested: t.Object({ -// c4: t.Union([intSchema, t.Null()]), -// c5: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), -// c6: t.Integer({ minimum: 1, maximum: 10 }), +// c4: intNullableSchema, +// c5: extendedNullableSchema, +// c6: customSchema, // }), // table: t.Object({ -// c1: t.Union([intSchema, t.Null()]), -// c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), -// c3: t.Integer({ minimum: 1, maximum: 10 }), -// c4: t.Union([intSchema, t.Null()]), -// c5: t.Union([intSchema, t.Null()]), -// c6: t.Union([intSchema, t.Null()]), +// c1: intNullableSchema, +// c2: extendedNullableSchema, +// c3: customSchema, +// c4: intNullableSchema, +// c5: intNullableSchema, +// c6: intNullableSchema, // }), // }); // expectSchemaShape(tc, expected).from(result); @@ -363,6 +378,7 @@ test('all data types', (tc) => { longtext, mediumtext, tinytext, + vector, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), @@ -378,8 +394,12 @@ test('all data types', (tc) => { date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), - decimal1: decimal().notNull(), - decimal2: decimal({ unsigned: true }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'number', unsigned: true }).notNull(), + decimal3: decimal({ mode: 'bigint' }).notNull(), + decimal4: decimal({ mode: 'bigint', unsigned: true }).notNull(), + decimal5: decimal({ mode: 'string' }).notNull(), + decimal6: decimal({ mode: 'string', unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), @@ -411,6 +431,14 @@ test('all data types', (tc) => { mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), + vector: vector({ + dimensions: 3, + elementType: 'F32', + }).notNull(), + vector2: vector({ + dimensions: 2, + elementType: 'I64', + }).notNull(), })); const result = createSelectSchema(table); @@ -419,18 +447,22 @@ test('all data types', (tc) => { bigint2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), bigint3: t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER }), bigint4: t.BigInt({ minimum: 0n, maximum: CONSTANTS.INT64_UNSIGNED_MAX }), - bigint5: t.String(), - bigint6: t.String(), - binary: t.RegExp(/^[01]*$/, { minLength: 10, maxLength: 10 }), + bigint5: bigintStringModeSchema, + bigint6: unsignedBigintStringModeSchema, + binary: t.RegExp(/^[01]*$/, { maxLength: 10 }), boolean: t.Boolean(), - char1: t.String({ minLength: 10, maxLength: 10 }), + char1: t.String({ maxLength: 10 }), char2: t.Enum({ a: 'a', b: 'b', c: 'c' }), date1: t.Date(), date2: t.String(), datetime1: t.Date(), datetime2: t.String(), - decimal1: t.String(), - decimal2: t.String(), + decimal1: t.Number({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + decimal2: t.Number({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER }), + decimal3: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + decimal4: t.BigInt({ minimum: 0n, maximum: CONSTANTS.INT64_UNSIGNED_MAX }), + decimal5: t.String(), + decimal6: t.String(), double1: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), double2: t.Number({ minimum: 0, maximum: CONSTANTS.INT48_UNSIGNED_MAX }), float1: t.Number({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), @@ -462,6 +494,11 @@ test('all data types', (tc) => { mediumtext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), tinytext1: t.String({ maxLength: CONSTANTS.INT8_UNSIGNED_MAX }), tinytext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), + vector: t.Array(t.Number(), { minItems: 3, maxItems: 3 }), + vector2: t.Array(t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), { + minItems: 2, + maxItems: 2, + }), }); expectSchemaShape(tc, expected).from(result); Expect>(); diff --git a/drizzle-typebox/tests/sqlite.test.ts b/drizzle-typebox/tests/sqlite.test.ts index b4db9b90ec..a8fc0ea798 100644 --- a/drizzle-typebox/tests/sqlite.test.ts +++ b/drizzle-typebox/tests/sqlite.test.ts @@ -9,7 +9,20 @@ import { createInsertSchema, createSelectSchema, createUpdateSchema, type Generi import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }); +const intNullableSchema = t.Union([intSchema, t.Null()]); +const intOptionalSchema = t.Optional(intSchema); +const intNullableOptionalSchema = t.Optional(t.Union([intSchema, t.Null()])); + const textSchema = t.String(); +const textOptionalSchema = t.Optional(textSchema); + +const anySchema = t.Any(); + +const extendedSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }); +const extendedNullableSchema = t.Union([extendedSchema, t.Null()]); +const extendedOptionalSchema = t.Optional(extendedSchema); + +const customSchema = t.Integer({ minimum: 1, maximum: 10 }); test('table - select', (tc) => { const table = sqliteTable('test', { @@ -32,9 +45,9 @@ test('table - insert', (tc) => { const result = createInsertSchema(table); const expected = t.Object({ - id: t.Optional(intSchema), + id: intOptionalSchema, name: textSchema, - age: t.Optional(t.Union([intSchema, t.Null()])), + age: intNullableOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -49,9 +62,9 @@ test('table - update', (tc) => { const result = createUpdateSchema(table); const expected = t.Object({ - id: t.Optional(intSchema), - name: t.Optional(textSchema), - age: t.Optional(t.Union([intSchema, t.Null()])), + id: intOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -65,7 +78,7 @@ test('view qb - select', (tc) => { const view = sqliteView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = t.Object({ id: intSchema, age: t.Any() }); + const expected = t.Object({ id: intSchema, age: anySchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -101,7 +114,7 @@ test('view with nested fields - select', (tc) => { const result = createSelectSchema(view); const expected = t.Object({ id: intSchema, - nested: t.Object({ name: textSchema, age: t.Any() }), + nested: t.Object({ name: textSchema, age: anySchema }), table: t.Object({ id: intSchema, name: textSchema }), }); expectSchemaShape(tc, expected).from(result); @@ -118,9 +131,9 @@ test('nullability - select', (tc) => { const result = createSelectSchema(table); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), + c1: intNullableSchema, c2: intSchema, - c3: t.Union([intSchema, t.Null()]), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(tc, expected).from(result); @@ -138,10 +151,10 @@ test('nullability - insert', (tc) => { const result = createInsertSchema(table); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), + c1: intNullableOptionalSchema, c2: intSchema, - c3: t.Optional(t.Union([intSchema, t.Null()])), - c4: t.Optional(intSchema), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -158,10 +171,10 @@ test('nullability - update', (tc) => { const result = createUpdateSchema(table); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Optional(intSchema), - c3: t.Optional(t.Union([intSchema, t.Null()])), - c4: t.Optional(intSchema), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -175,13 +188,13 @@ test('refine table - select', (tc) => { }); const result = createSelectSchema(table, { - c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c2: () => t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -198,14 +211,14 @@ test('refine table - select with custom data type', (tc) => { const customTextSchema = t.String({ minLength: 1, maxLength: 100 }); const result = createSelectSchema(table, { - c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c2: () => t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -222,13 +235,13 @@ test('refine table - insert', (tc) => { }); const result = createInsertSchema(table, { - c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c2: () => t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -243,13 +256,13 @@ test('refine table - update', (tc) => { }); const result = createUpdateSchema(table, { - c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c2: () => t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ - c1: t.Optional(t.Union([intSchema, t.Null()])), - c2: t.Optional(t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 })), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); @@ -279,33 +292,33 @@ test('refine view - select', (tc) => { ); const result = createSelectSchema(view, { - c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c2: () => t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), nested: { - c5: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c5: () => t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c6: t.Integer({ minimum: 1, maximum: 10 }), }, table: { - c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), + c2: () => t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }, }); const expected = t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Union([t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), t.Null()]), - c3: t.Integer({ minimum: 1, maximum: 10 }), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: t.Object({ - c4: t.Union([intSchema, t.Null()]), - c5: t.Union([t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), t.Null()]), - c6: t.Integer({ minimum: 1, maximum: 10 }), + c4: intNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: t.Object({ - c1: t.Union([intSchema, t.Null()]), - c2: t.Union([t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), t.Null()]), - c3: t.Integer({ minimum: 1, maximum: 10 }), - c4: t.Union([intSchema, t.Null()]), - c5: t.Union([intSchema, t.Null()]), - c6: t.Union([intSchema, t.Null()]), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: intNullableSchema, + c5: intNullableSchema, + c6: intNullableSchema, }), }); expectSchemaShape(tc, expected).from(result); @@ -327,7 +340,9 @@ test('all data types', (tc) => { integer2: integer({ mode: 'boolean' }).notNull(), integer3: integer({ mode: 'timestamp' }).notNull(), integer4: integer({ mode: 'timestamp_ms' }).notNull(), - numeric: numeric().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), real: real().notNull(), text1: text({ mode: 'text' }).notNull(), text2: text({ mode: 'text', length: 10 }).notNull(), @@ -344,7 +359,9 @@ test('all data types', (tc) => { integer2: t.Boolean(), integer3: t.Date(), integer4: t.Date(), - numeric: t.String(), + numeric1: t.Number({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), + numeric2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), + numeric3: t.String(), real: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), text1: t.String(), text2: t.String({ maxLength: 10 }), diff --git a/drizzle-typebox/tests/utils.ts b/drizzle-typebox/tests/utils.ts index 46cd16a32f..71082869f6 100644 --- a/drizzle-typebox/tests/utils.ts +++ b/drizzle-typebox/tests/utils.ts @@ -1,5 +1,5 @@ import type * as t from '@sinclair/typebox'; -import { expect, type TaskContext } from 'vitest'; +import { expect, type TestContext } from 'vitest'; function removeKeysFromObject(obj: Record, keys: string[]) { for (const key of keys) { @@ -8,7 +8,7 @@ function removeKeysFromObject(obj: Record, keys: string[]) { return obj; } -export function expectSchemaShape(t: TaskContext, expected: T) { +export function expectSchemaShape(t: TestContext, expected: T) { return { from(actual: T) { expect(Object.keys(actual.properties)).toStrictEqual(Object.keys(expected.properties)); @@ -23,7 +23,7 @@ export function expectSchemaShape(t: TaskContext, expected: }; } -export function expectEnumValues>(t: TaskContext, expected: T) { +export function expectEnumValues>(t: TestContext, expected: T) { return { from(actual: T) { expect(actual.anyOf).toStrictEqual(expected.anyOf); diff --git a/drizzle-typebox/tsconfig.json b/drizzle-typebox/tsconfig.json index c25379c37b..3f051aa73e 100644 --- a/drizzle-typebox/tsconfig.json +++ b/drizzle-typebox/tsconfig.json @@ -5,6 +5,7 @@ "baseUrl": ".", "declaration": true, "noEmit": true, + "allowImportingTsExtensions": true, "paths": { "~/*": ["src/*"] } diff --git a/drizzle-typebox/vitest.config.ts b/drizzle-typebox/vitest.config.ts index 1f0eb7ad9a..d767b570bd 100644 --- a/drizzle-typebox/vitest.config.ts +++ b/drizzle-typebox/vitest.config.ts @@ -1,4 +1,5 @@ import tsconfigPaths from 'vite-tsconfig-paths'; +// oxlint-disable-next-line extensions import { defineConfig } from 'vitest/config'; export default defineConfig({ diff --git a/drizzle-valibot/package.json b/drizzle-valibot/package.json index bce4659f0e..8f5693d99d 100644 --- a/drizzle-valibot/package.json +++ b/drizzle-valibot/package.json @@ -1,13 +1,15 @@ { "name": "drizzle-valibot", - "version": "0.4.2", + "version": "1.0.0-beta.2", "description": "Generate valibot schemas from Drizzle ORM schemas", "type": "module", "scripts": { + "build:artifact": "pnpm run build", "build": "tsx scripts/build.ts", "b": "pnpm build", "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", + "pack:artifact": "pnpm run pack", "publish": "npm publish package.tgz", "test": "vitest run" }, @@ -55,7 +57,7 @@ "author": "Drizzle Team", "license": "Apache-2.0", "peerDependencies": { - "drizzle-orm": ">=1.0.0-beta.1", + "drizzle-orm": ">=1.0.0-beta.2", "valibot": ">=1.0.0-beta.7" }, "devDependencies": { @@ -67,8 +69,6 @@ "rimraf": "^5.0.0", "rollup": "^3.29.5", "valibot": "1.0.0-beta.7", - "vite-tsconfig-paths": "^4.3.2", - "vitest": "^3.1.3", "zx": "^7.2.2" } } diff --git a/drizzle-valibot/src/column.ts b/drizzle-valibot/src/column.ts index 25119f6ea9..f0f1bdc50f 100644 --- a/drizzle-valibot/src/column.ts +++ b/drizzle-valibot/src/column.ts @@ -19,7 +19,7 @@ export const jsonSchema: v.GenericSchema = v.union([ v.array(v.any()), v.record(v.string(), v.any()), ]); -export const bufferSchema: v.GenericSchema = v.custom((v) => v instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof +export const bufferSchema: v.GenericSchema = v.custom((v) => v instanceof Buffer); export function mapEnumValues(values: string[]) { return Object.fromEntries(values.map((value) => [value, value])); @@ -158,6 +158,11 @@ function numberColumnToSchema(column: Column, constraint: ColumnDataNumberConstr integer = true; break; } + case 'unsigned': { + min = 0; + max = Number.MAX_SAFE_INTEGER; + break; + } default: { min = Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; @@ -172,6 +177,28 @@ function numberColumnToSchema(column: Column, constraint: ColumnDataNumberConstr return v.pipe(v.number(), ...actions); } +/** @internal */ +export const bigintStringModeSchema = v.pipe( + v.string(), + v.regex(/^-?\d+$/), + // eslint-disable-next-line unicorn/prefer-native-coercion-functions + v.transform((v) => BigInt(v)), + v.minValue(CONSTANTS.INT64_MIN), + v.maxValue(CONSTANTS.INT64_MAX), + v.transform((v) => v.toString()), +); + +/** @internal */ +export const unsignedBigintStringModeSchema = v.pipe( + v.string(), + v.regex(/^\d+$/), + // eslint-disable-next-line unicorn/prefer-native-coercion-functions + v.transform((v) => BigInt(v)), + v.minValue(0n), + v.maxValue(CONSTANTS.INT64_MAX), + v.transform((v) => v.toString()), +); + function bigintColumnToSchema(column: Column, constraint: ColumnDataBigIntConstraint | undefined): v.GenericSchema { let min!: bigint | undefined; let max!: bigint | undefined; @@ -212,6 +239,15 @@ function arrayColumnToSchema(column: Column, constraint: ColumnDataArrayConstrai ? v.pipe(v.array(v.number()), v.length(length)) : v.array(v.number()); } + case 'int64vector': { + const length = column.length; + return length + ? v.pipe( + v.array(v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX))), + v.length(length), + ) + : v.array(v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX))); + } case 'basecolumn': { const { length } = column; const schema = (<{ baseColumn?: Column }> column).baseColumn @@ -274,6 +310,12 @@ function stringColumnToSchema(column: Column, constraint: ColumnDataStringConstr } return v.enum(mapEnumValues(enumValues)); } + if (constraint === 'int64') { + return bigintStringModeSchema; + } + if (constraint === 'uint64') { + return unsignedBigintStringModeSchema; + } const actions: any[] = []; if (regex) { diff --git a/drizzle-valibot/src/column.types.ts b/drizzle-valibot/src/column.types.ts index 7df510517e..2d251ee3c6 100644 --- a/drizzle-valibot/src/column.types.ts +++ b/drizzle-valibot/src/column.types.ts @@ -7,6 +7,7 @@ import type { ExtractColumnTypeData, } from 'drizzle-orm'; import type * as v from 'valibot'; +import type { bigintStringModeSchema, unsignedBigintStringModeSchema } from './column.ts'; import type { IsNever, Json, RemoveNeverElements } from './utils.ts'; export type HasBaseColumn = TColumn extends { _: { baseColumn: Column | undefined } } @@ -53,6 +54,7 @@ export type GetValibotType< [v.ArraySchema, undefined>, GetLengthAction] > : v.ArraySchema, undefined> + : TConstraint extends 'int64vector' ? v.ArraySchema, undefined> : TConstraint extends 'geometry' | 'point' ? v.TupleSchema<[v.NumberSchema, v.NumberSchema], undefined> : TConstraint extends 'line' @@ -116,6 +118,8 @@ export type GetValibotType< { readonly [K in Assume[number]]: K }, undefined > + : TConstraint extends 'int64' ? typeof bigintStringModeSchema + : TConstraint extends 'uint64' ? typeof unsignedBigintStringModeSchema : TConstraint extends 'binary' ? v.SchemaWithPipe< RemoveNeverElements<[ v.StringSchema, diff --git a/drizzle-valibot/src/schema.ts b/drizzle-valibot/src/schema.ts index 30a6f77ece..3ea10cea91 100644 --- a/drizzle-valibot/src/schema.ts +++ b/drizzle-valibot/src/schema.ts @@ -76,7 +76,9 @@ export const createInsertSchema: CreateInsertSchema = ( ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { - never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', + never: (column) => + column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always' + || ('identity' in (column ?? {}) && typeof (column as any)?.identity !== 'undefined'), optional: (column) => !column.notNull || (column.notNull && column.hasDefault), nullable: (column) => !column.notNull, }) as any; @@ -88,7 +90,9 @@ export const createUpdateSchema: CreateUpdateSchema = ( ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { - never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', + never: (column) => + column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always' + || ('identity' in (column ?? {}) && typeof (column as any)?.identity !== 'undefined'), optional: () => true, nullable: (column) => !column.notNull, }) as any; diff --git a/drizzle-valibot/src/schema.types.internal.ts b/drizzle-valibot/src/schema.types.internal.ts index 9382c69a17..7be7acbcc9 100644 --- a/drizzle-valibot/src/schema.types.internal.ts +++ b/drizzle-valibot/src/schema.types.internal.ts @@ -51,17 +51,20 @@ export type BuildSchema< > = v.ObjectSchema< Simplify< { - readonly [K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? never : K]: - TColumns[K] extends infer TColumn extends Column - ? IsRefinementDefined> extends true - ? Assume, v.GenericSchema> - : HandleColumn - : TColumns[K] extends infer TObject extends SelectedFieldsFlat | Table | View ? BuildSchema< - TType, - GetSelection, - TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined - > - : v.AnySchema; + readonly [ + K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? TType extends 'select' ? K + : never + : K + ]: TColumns[K] extends infer TColumn extends Column + ? IsRefinementDefined> extends true + ? Assume, v.GenericSchema> + : HandleColumn + : TColumns[K] extends infer TObject extends SelectedFieldsFlat | Table | View ? BuildSchema< + TType, + GetSelection, + TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined + > + : v.AnySchema; } >, undefined diff --git a/drizzle-valibot/src/schema.types.ts b/drizzle-valibot/src/schema.types.ts index dc6164593a..6d594ddef4 100644 --- a/drizzle-valibot/src/schema.types.ts +++ b/drizzle-valibot/src/schema.types.ts @@ -1,4 +1,5 @@ import type { InferInsertModel, InferSelectModel, Table, View } from 'drizzle-orm'; +import type { CockroachEnum } from 'drizzle-orm/cockroach-core'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type * as v from 'valibot'; import type { EnumValuesToEnum } from './column.types.ts'; @@ -23,7 +24,9 @@ export interface CreateSelectSchema { refine: NoUnknownKeys, ): BuildSchema<'select', TView['_']['selectedFields'], TRefine>; - >(enum_: TEnum): v.EnumSchema, undefined>; + | CockroachEnum>( + enum_: TEnum, + ): v.EnumSchema, undefined>; } export interface CreateInsertSchema { diff --git a/drizzle-valibot/tests/cockroach.test.ts b/drizzle-valibot/tests/cockroach.test.ts new file mode 100644 index 0000000000..92317c62fd --- /dev/null +++ b/drizzle-valibot/tests/cockroach.test.ts @@ -0,0 +1,556 @@ +import { type Equal, sql } from 'drizzle-orm'; +import { + cockroachEnum, + cockroachMaterializedView, + cockroachSchema, + cockroachTable, + cockroachView, + customType, + int4, + jsonb, + text, +} from 'drizzle-orm/cockroach-core'; +import type { TopLevelCondition } from 'json-rules-engine'; +import * as v from 'valibot'; +import { test } from 'vitest'; +import { jsonSchema } from '~/column.ts'; +import { CONSTANTS } from '~/constants.ts'; +import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; +import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; + +const int4Schema = v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()); +const int4NullableSchema = v.nullable(int4Schema); +const int4OptionalSchema = v.optional(int4Schema); +const int4NullableOptionalSchema = v.optional(v.nullable(int4Schema)); + +const textSchema = v.string(); +const textOptionalSchema = v.optional(textSchema); + +const anySchema = v.any(); + +const extendedSchema = v.pipe(int4Schema, v.maxValue(1000)); +const extendedNullableSchema = v.nullable(extendedSchema); +const extendedOptionalSchema = v.optional(extendedSchema); + +const customSchema = v.pipe(v.string(), v.transform(Number)); + +test('table - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + generated: int4().generatedAlwaysAsIdentity(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = v.object({ id: int4Schema, generated: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table in schema - select', (tc) => { + const schema = cockroachSchema('test'); + const table = schema.table('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = v.object({ id: int4Schema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - insert', (t) => { + const table = cockroachTable('test', { + id: int4().generatedAlwaysAsIdentity().primaryKey(), + name: text().notNull(), + age: int4(), + }); + + const result = createInsertSchema(table); + const expected = v.object({ name: textSchema, age: int4NullableOptionalSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table - update', (t) => { + const table = cockroachTable('test', { + id: int4().generatedAlwaysAsIdentity().primaryKey(), + name: text().notNull(), + age: int4(), + }); + + const result = createUpdateSchema(table); + const expected = v.object({ + name: textOptionalSchema, + age: int4NullableOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view qb - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); + + const result = createSelectSchema(view); + const expected = v.object({ id: int4Schema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view columns - select', (t) => { + const view = cockroachView('test', { + id: int4().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = v.object({ id: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('materialized view qb - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachMaterializedView('test').as((qb) => + qb.select({ id: table.id, age: sql``.as('age') }).from(table) + ); + + const result = createSelectSchema(view); + const expected = v.object({ id: int4Schema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('materialized view columns - select', (t) => { + const view = cockroachMaterializedView('test', { + id: int4().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = v.object({ id: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view with nested fields - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachView('test').as((qb) => + qb.select({ + id: table.id, + nested: { + name: table.name, + age: sql``.as('age'), + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view); + const expected = v.object({ + id: int4Schema, + nested: v.object({ name: textSchema, age: anySchema }), + table: v.object({ id: int4Schema, name: textSchema }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('enum - select', (t) => { + const enum_ = cockroachEnum('test', ['a', 'b', 'c']); + + const result = createSelectSchema(enum_); + const expected = v.enum({ a: 'a', b: 'b', c: 'c' }); + expectEnumValues(t, expected).from(result); + Expect>(); +}); + +test('nullability - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + }); + + const result = createSelectSchema(table); + const expected = v.object({ + c1: int4NullableSchema, + c2: int4Schema, + c3: int4NullableSchema, + c4: int4Schema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - insert', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + c5: int4().generatedAlwaysAs(1), + c6: int4().generatedAlwaysAsIdentity(), + c7: int4().generatedByDefaultAsIdentity(), + }); + + const result = createInsertSchema(table); + const expected = v.object({ + c1: int4NullableOptionalSchema, + c2: int4Schema, + c3: int4NullableOptionalSchema, + c4: int4OptionalSchema, + c7: int4OptionalSchema, + }); + expectSchemaShape(t, expected).from(result); +}); + +test('nullability - update', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + c5: int4().generatedAlwaysAs(1), + c6: int4().generatedAlwaysAsIdentity(), + c7: int4().generatedByDefaultAsIdentity(), + }); + + const result = createUpdateSchema(table); + const expected = v.object({ + c1: int4NullableOptionalSchema, + c2: int4OptionalSchema, + c3: int4NullableOptionalSchema, + c4: int4OptionalSchema, + c7: int4OptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + }); + + const result = createSelectSchema(table, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + }); + const expected = v.object({ + c1: int4NullableSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select with custom data type', (t) => { + const customText = customType({ dataType: () => 'text' }); + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: customText(), + }); + + const customTextSchema = v.pipe(v.string(), v.minLength(1), v.maxLength(100)); + const result = createSelectSchema(table, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + c4: customTextSchema, + }); + const expected = v.object({ + c1: int4NullableSchema, + c2: extendedSchema, + c3: customSchema, + c4: customTextSchema, + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - insert', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: int4().generatedAlwaysAs(1), + }); + + const result = createInsertSchema(table, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + }); + const expected = v.object({ + c1: int4NullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - update', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: int4().generatedAlwaysAs(1), + }); + + const result = createUpdateSchema(table, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + }); + const expected = v.object({ + c1: int4NullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine view - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4(), + c3: int4(), + c4: int4(), + c5: int4(), + c6: int4(), + }); + const view = cockroachView('test').as((qb) => + qb.select({ + c1: table.c1, + c2: table.c2, + c3: table.c3, + nested: { + c4: table.c4, + c5: table.c5, + c6: table.c6, + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + nested: { + c5: (schema) => v.pipe(schema, v.maxValue(1000)), + c6: v.pipe(v.string(), v.transform(Number)), + }, + table: { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + }, + }); + const expected = v.object({ + c1: int4NullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + nested: v.object({ + c4: int4NullableSchema, + c5: extendedNullableSchema, + c6: customSchema, + }), + table: v.object({ + c1: int4NullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: int4NullableSchema, + c5: int4NullableSchema, + c6: int4NullableSchema, + }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('all data types', (t) => { + const table = cockroachTable('test', ({ + bigint, + bit, + bool, + char, + date, + decimal, + float, + doublePrecision, + geometry, + inet, + int2, + int4, + int8, + interval, + jsonb, + numeric, + real, + smallint, + string, + text, + time, + timestamp, + uuid, + varchar, + vector, + }) => ({ + bigint1: bigint({ mode: 'number' }).notNull(), + bigint2: bigint({ mode: 'bigint' }).notNull(), + bit: bit({ length: 5 }).notNull(), + boolean: bool().notNull(), + char1: char({ length: 10 }).notNull(), + char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + date1: date({ mode: 'date' }).notNull(), + date2: date({ mode: 'string' }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'bigint' }).notNull(), + decimal3: decimal({ mode: 'string' }).notNull(), + float: float().notNull(), + doublePrecision: doublePrecision().notNull(), + geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(), + geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(), + inet: inet().notNull(), + int2: int2().notNull(), + int4: int4().notNull(), + int8_1: int8({ mode: 'number' }).notNull(), + int8_2: int8({ mode: 'bigint' }).notNull(), + interval: interval().notNull(), + jsonb: jsonb().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), + real: real().notNull(), + smallint: smallint().notNull(), + string1: string().notNull(), + string2: string({ enum: ['a', 'b', 'c'] }).notNull(), + text1: text().notNull(), + text2: text({ enum: ['a', 'b', 'c'] }).notNull(), + time: time().notNull(), + timestamp1: timestamp({ mode: 'date' }).notNull(), + timestamp2: timestamp({ mode: 'string' }).notNull(), + uuid: uuid().notNull(), + varchar1: varchar({ length: 10 }).notNull(), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + vector: vector({ dimensions: 3 }).notNull(), + array: int4().array().notNull(), + })); + + const result = createSelectSchema(table); + const expected = v.object({ + bigint1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), + bigint2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + bit: v.pipe(v.string(), v.regex(/^[01]*$/), v.length(5 as number)), + boolean: v.boolean(), + char1: v.pipe(v.string(), v.maxLength(10 as number)), + char2: v.enum({ a: 'a', b: 'b', c: 'c' }), + date1: v.date(), + date2: v.string(), + decimal1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER)), + decimal2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + decimal3: v.string(), + float: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), + doublePrecision: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), + geometry1: v.tuple([v.number(), v.number()]), + geometry2: v.object({ x: v.number(), y: v.number() }), + inet: v.string(), + int2: v.pipe(v.number(), v.minValue(CONSTANTS.INT16_MIN), v.maxValue(CONSTANTS.INT16_MAX), v.integer()), + int4: v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()), + int8_1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), + int8_2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + interval: v.string(), + jsonb: jsonSchema, + numeric1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER)), + numeric2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + numeric3: v.string(), + real: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX)), + smallint: v.pipe(v.number(), v.minValue(CONSTANTS.INT16_MIN), v.maxValue(CONSTANTS.INT16_MAX), v.integer()), + string1: v.string(), + string2: v.enum({ a: 'a', b: 'b', c: 'c' }), + text1: v.string(), + text2: v.enum({ a: 'a', b: 'b', c: 'c' }), + time: v.string(), + timestamp1: v.date(), + timestamp2: v.string(), + uuid: v.pipe(v.string(), v.uuid()), + varchar1: v.pipe(v.string(), v.maxLength(10 as number)), + varchar2: v.enum({ a: 'a', b: 'b', c: 'c' }), + vector: v.pipe(v.array(v.number()), v.length(3 as number)), + array: v.array(int4Schema), + }); + + // @ts-ignore - TODO: Remake type checks for new columns + expectSchemaShape(t, expected).from(result); + // @ts-ignore - TODO: Remake type checks for new columns + Expect>(); +}); + +/* Infinitely recursive type */ { + const TopLevelCondition: v.GenericSchema = v.custom(() => true); + const table = cockroachTable('test', { + jsonb: jsonb().$type(), + }); + const result = createSelectSchema(table); + const expected = v.object({ + jsonb: v.nullable(TopLevelCondition), + }); + Expect, v.InferOutput>>(); +} + +/* Disallow unknown keys in table refinement - select */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createSelectSchema(table, { unknown: v.string() }); +} + +/* Disallow unknown keys in table refinement - insert */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createInsertSchema(table, { unknown: v.string() }); +} + +/* Disallow unknown keys in table refinement - update */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createUpdateSchema(table, { unknown: v.string() }); +} + +/* Disallow unknown keys in view qb - select */ { + const table = cockroachTable('test', { id: int4() }); + const view = cockroachView('test').as((qb) => qb.select().from(table)); + const mView = cockroachMaterializedView('test').as((qb) => qb.select().from(table)); + const nestedSelect = cockroachView('test').as((qb) => qb.select({ table }).from(table)); + // @ts-expect-error + createSelectSchema(view, { unknown: v.string() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: v.string() }); + // @ts-expect-error + createSelectSchema(nestedSelect, { table: { unknown: v.string() } }); +} + +/* Disallow unknown keys in view columns - select */ { + const view = cockroachView('test', { id: int4() }).as(sql``); + const mView = cockroachView('test', { id: int4() }).as(sql``); + // @ts-expect-error + createSelectSchema(view, { unknown: v.string() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: v.string() }); +} diff --git a/drizzle-valibot/tests/mssql.test.ts b/drizzle-valibot/tests/mssql.test.ts new file mode 100644 index 0000000000..e4c7d4efd9 --- /dev/null +++ b/drizzle-valibot/tests/mssql.test.ts @@ -0,0 +1,495 @@ +import { type Equal, sql } from 'drizzle-orm'; +import { customType, int, mssqlSchema, mssqlTable, mssqlView, text } from 'drizzle-orm/mssql-core'; +import * as v from 'valibot'; +import { test } from 'vitest'; +import { bigintStringModeSchema, bufferSchema } from '~/column.ts'; +import { CONSTANTS } from '~/constants.ts'; +import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src/index.ts'; +import { Expect, expectSchemaShape } from './utils.ts'; + +const integerSchema = v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()); +const integerNullableSchema = v.nullable(integerSchema); +const integerOptionalSchema = v.optional(integerSchema); +const integerNullableOptionalSchema = v.optional(v.nullable(integerSchema)); + +const textSchema = v.string(); +const textOptionalSchema = v.optional(textSchema); + +const anySchema = v.any(); + +const extendedSchema = v.pipe(integerSchema, v.maxValue(1000)); +const extendedNullableSchema = v.nullable(extendedSchema); +const extendedOptionalSchema = v.optional(extendedSchema); + +const customSchema = v.pipe(v.string(), v.transform(Number)); + +test('table - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + generated: int().identity(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = v.object({ id: integerSchema, generated: integerSchema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table in schema - select', (tc) => { + const schema = mssqlSchema('test'); + const table = schema.table('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = v.object({ id: integerSchema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - insert', (t) => { + const table = mssqlTable('test', { + id: int().identity().primaryKey(), + name: text().notNull(), + age: int(), + }); + + const result = createInsertSchema(table); + const expected = v.object({ name: textSchema, age: integerNullableOptionalSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table - update', (t) => { + const table = mssqlTable('test', { + id: int().identity().primaryKey(), + name: text().notNull(), + age: int(), + }); + + const result = createUpdateSchema(table); + const expected = v.object({ + name: textOptionalSchema, + age: integerNullableOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view qb - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + const view = mssqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); + + const result = createSelectSchema(view); + const expected = v.object({ id: integerSchema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view columns - select', (t) => { + const view = mssqlView('test', { + id: int().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = v.object({ id: integerSchema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view with nested fields - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + const view = mssqlView('test').as((qb) => + qb.select({ + id: table.id, + nested: { + name: table.name, + age: sql``.as('age'), + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view); + const expected = v.object({ + id: integerSchema, + nested: v.object({ name: textSchema, age: anySchema }), + table: v.object({ id: integerSchema, name: textSchema }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + }); + + const result = createSelectSchema(table); + const expected = v.object({ + c1: integerNullableSchema, + c2: integerSchema, + c3: integerNullableSchema, + c4: integerSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - insert', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + c5: int().generatedAlwaysAs(1), + c6: int().identity(), + }); + + const result = createInsertSchema(table); + const expected = v.object({ + c1: integerNullableOptionalSchema, + c2: integerSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); +}); + +test('nullability - update', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + c5: int().generatedAlwaysAs(1), + c6: int().identity(), + }); + + const result = createUpdateSchema(table); + const expected = v.object({ + c1: integerNullableOptionalSchema, + c2: integerOptionalSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + }); + + const result = createSelectSchema(table, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + }); + const expected = v.object({ + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select with custom data type', (t) => { + const customText = customType({ dataType: () => 'text' }); + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: customText(), + }); + + const customTextSchema = v.pipe(v.string(), v.minLength(1), v.maxLength(100)); + const result = createSelectSchema(table, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + c4: customTextSchema, + }); + const expected = v.object({ + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, + c4: customTextSchema, + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - insert', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: int().generatedAlwaysAs(1), + }); + + const result = createInsertSchema(table, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + }); + const expected = v.object({ + c1: integerNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - update', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: int().generatedAlwaysAs(1), + }); + + const result = createUpdateSchema(table, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + }); + const expected = v.object({ + c1: integerNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine view - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int(), + c3: int(), + c4: int(), + c5: int(), + c6: int(), + }); + const view = mssqlView('test').as((qb) => + qb.select({ + c1: table.c1, + c2: table.c2, + c3: table.c3, + nested: { + c4: table.c4, + c5: table.c5, + c6: table.c6, + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view, { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + nested: { + c5: (schema) => v.pipe(schema, v.maxValue(1000)), + c6: v.pipe(v.string(), v.transform(Number)), + }, + table: { + c2: (schema) => v.pipe(schema, v.maxValue(1000)), + c3: v.pipe(v.string(), v.transform(Number)), + }, + }); + const expected = v.object({ + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + nested: v.object({ + c4: integerNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, + }), + table: v.object({ + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: integerNullableSchema, + c5: integerNullableSchema, + c6: integerNullableSchema, + }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('all data types', (t) => { + const table = mssqlTable('test', ({ + bigint, + binary, + bit, + char, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + int, + numeric, + real, + smallint, + text, + time, + tinyint, + varbinary, + varchar, + ntext, + nvarchar, + }) => ({ + bigint1: bigint({ mode: 'number' }).notNull(), + bigint2: bigint({ mode: 'bigint' }).notNull(), + bigint3: bigint({ mode: 'string' }).notNull(), + binary: binary({ length: 10 }).notNull(), + bit: bit().notNull(), + char1: char({ length: 10 }).notNull(), + char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + date1: date({ mode: 'date' }).notNull(), + date2: date({ mode: 'string' }).notNull(), + datetime1: datetime({ mode: 'date' }).notNull(), + datetime2: datetime({ mode: 'string' }).notNull(), + datetime2_1: datetime2({ mode: 'date' }).notNull(), + datetime2_2: datetime2({ mode: 'string' }).notNull(), + datetimeoffset1: datetimeoffset({ mode: 'date' }).notNull(), + datetimeoffset2: datetimeoffset({ mode: 'string' }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'bigint' }).notNull(), + decimal3: decimal({ mode: 'string' }).notNull(), + float: float().notNull(), + int: int().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), + real: real().notNull(), + smallint: smallint().notNull(), + text1: text().notNull(), + text2: text({ enum: ['a', 'b', 'c'] }).notNull(), + time1: time({ mode: 'date' }).notNull(), + time2: time({ mode: 'string' }).notNull(), + tinyint: tinyint().notNull(), + varbinary: varbinary({ length: 10 }).notNull(), + varchar1: varchar({ length: 10 }).notNull(), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + ntext1: ntext().notNull(), + ntext2: ntext({ enum: ['a', 'b', 'c'] }).notNull(), + nvarchar1: nvarchar({ length: 10 }).notNull(), + nvarchar2: nvarchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + })); + + const result = createSelectSchema(table); + const expected = v.object({ + bigint1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), + bigint2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + bigint3: bigintStringModeSchema, + binary: bufferSchema, + bit: v.boolean(), + char1: v.pipe(v.string(), v.maxLength(10 as number)), + char2: v.enum({ a: 'a', b: 'b', c: 'c' }), + date1: v.date(), + date2: v.string(), + datetime1: v.date(), + datetime2: v.string(), + datetime2_1: v.date(), + datetime2_2: v.string(), + datetimeoffset1: v.date(), + datetimeoffset2: v.string(), + decimal1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER)), + decimal2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + decimal3: v.string(), + float: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), + int: v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()), + numeric1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER)), + numeric2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + numeric3: v.string(), + real: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX)), + smallint: v.pipe(v.number(), v.minValue(CONSTANTS.INT16_MIN), v.maxValue(CONSTANTS.INT16_MAX), v.integer()), + text1: v.string(), + text2: v.enum({ a: 'a', b: 'b', c: 'c' }), + time1: v.date(), + time2: v.string(), + tinyint: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT8_UNSIGNED_MAX), v.integer()), + varbinary: bufferSchema, + varchar1: v.pipe(v.string(), v.maxLength(10 as number)), + varchar2: v.enum({ a: 'a', b: 'b', c: 'c' }), + ntext1: v.string(), + ntext2: v.enum({ a: 'a', b: 'b', c: 'c' }), + nvarchar1: v.pipe(v.string(), v.maxLength(10 as number)), + nvarchar2: v.enum({ a: 'a', b: 'b', c: 'c' }), + }); + + // @ts-ignore - TODO: Remake type checks for new columns + expectSchemaShape(t, expected).from(result); + // @ts-ignore - TODO: Remake type checks for new columns + Expect>(); +}); + +// MSSQL doesn't support JSON data type +// /* Infinitely recursive type */ { +// const TopLevelCondition: v.GenericSchema = v.custom(() => true); +// const table = mssqlTable('test', { +// json: json().$type(), +// }); +// const result = createSelectSchema(table); +// const expected = v.object({ +// json: v.nullable(TopLevelCondition), +// }); +// Expect, v.InferOutput>>(); +// } + +/* Disallow unknown keys in table refinement - select */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createSelectSchema(table, { unknown: v.string() }); +} + +/* Disallow unknown keys in table refinement - insert */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createInsertSchema(table, { unknown: v.string() }); +} + +/* Disallow unknown keys in table refinement - update */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createUpdateSchema(table, { unknown: v.string() }); +} + +/* Disallow unknown keys in view qb - select */ { + const table = mssqlTable('test', { id: int() }); + const view = mssqlView('test').as((qb) => qb.select().from(table)); + const nestedSelect = mssqlView('test').as((qb) => qb.select({ table }).from(table)); + // @ts-expect-error + createSelectSchema(view, { unknown: v.string() }); + // @ts-expect-error + createSelectSchema(nestedSelect, { table: { unknown: v.string() } }); +} + +/* Disallow unknown keys in view columns - select */ { + const view = mssqlView('test', { id: int() }).as(sql``); + const mView = mssqlView('test', { id: int() }).as(sql``); + // @ts-expect-error + createSelectSchema(view, { unknown: v.string() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: v.string() }); +} diff --git a/drizzle-valibot/tests/mysql.test.ts b/drizzle-valibot/tests/mysql.test.ts index df466320c5..3ecf0bb8cd 100644 --- a/drizzle-valibot/tests/mysql.test.ts +++ b/drizzle-valibot/tests/mysql.test.ts @@ -3,7 +3,7 @@ import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text import type { TopLevelCondition } from 'json-rules-engine'; import * as v from 'valibot'; import { test } from 'vitest'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema, unsignedBigintStringModeSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; @@ -14,13 +14,29 @@ const intSchema = v.pipe( v.maxValue(CONSTANTS.INT32_MAX as number), v.integer(), ); -const serialNumberModeSchema = v.pipe( +const intNullableSchema = v.nullable(intSchema); +const intOptionalSchema = v.optional(intSchema); +const intNullableOptionalSchema = v.optional(v.nullable(intSchema)); + +const serialSchema = v.pipe( v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER as number), v.integer(), ); + +const serialOptionalSchema = v.optional(serialSchema); + const textSchema = v.pipe(v.string(), v.maxLength(CONSTANTS.INT16_UNSIGNED_MAX as number)); +const textOptionalSchema = v.optional(textSchema); + +const anySchema = v.any(); + +const extendedSchema = v.pipe(intSchema, v.maxValue(1000)); +const extendedNullableSchema = v.nullable(extendedSchema); +const extendedOptionalSchema = v.optional(extendedSchema); + +const customSchema = v.pipe(v.string(), v.transform(Number)); test('table - select', (t) => { const table = mysqlTable('test', { @@ -29,7 +45,7 @@ test('table - select', (t) => { }); const result = createSelectSchema(table); - const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); + const expected = v.object({ id: serialSchema, name: textSchema }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); // @ts-ignore - TODO: Remake type checks for new columns @@ -44,7 +60,7 @@ test('table in schema - select', (tc) => { }); const result = createSelectSchema(table); - const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); + const expected = v.object({ id: serialSchema, name: textSchema }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(tc, expected).from(result); // @ts-ignore - TODO: Remake type checks for new columns @@ -60,9 +76,9 @@ test('table - insert', (t) => { const result = createInsertSchema(table); const expected = v.object({ - id: v.optional(serialNumberModeSchema), + id: serialOptionalSchema, name: textSchema, - age: v.optional(v.nullable(intSchema)), + age: intNullableOptionalSchema, }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); @@ -79,9 +95,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = v.object({ - id: v.optional(serialNumberModeSchema), - name: v.optional(textSchema), - age: v.optional(v.nullable(intSchema)), + id: serialOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); @@ -97,7 +113,7 @@ test('view qb - select', (t) => { const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = v.object({ id: serialNumberModeSchema, age: v.any() }); + const expected = v.object({ id: serialSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -109,7 +125,7 @@ test('view columns - select', (t) => { }).as(sql``); const result = createSelectSchema(view); - const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); + const expected = v.object({ id: serialSchema, name: textSchema }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); // @ts-ignore - TODO: Remake type checks for new columns @@ -134,9 +150,9 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = v.object({ - id: serialNumberModeSchema, - nested: v.object({ name: textSchema, age: v.any() }), - table: v.object({ id: serialNumberModeSchema, name: textSchema }), + id: serialSchema, + nested: v.object({ name: textSchema, age: anySchema }), + table: v.object({ id: serialSchema, name: textSchema }), }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); @@ -154,9 +170,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = v.object({ - c1: v.nullable(intSchema), + c1: intNullableSchema, c2: intSchema, - c3: v.nullable(intSchema), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(t, expected).from(result); @@ -174,10 +190,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), + c1: intNullableOptionalSchema, c2: intSchema, - c3: v.optional(v.nullable(intSchema)), - c4: v.optional(intSchema), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -194,10 +210,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.optional(intSchema), - c3: v.optional(v.nullable(intSchema)), - c4: v.optional(intSchema), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -215,9 +231,9 @@ test('refine table - select', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.nullable(intSchema), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -239,9 +255,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = v.object({ - c1: v.nullable(intSchema), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -262,9 +278,9 @@ test('refine table - insert', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -283,9 +299,9 @@ test('refine table - update', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.optional(v.pipe(intSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -327,21 +343,21 @@ test('refine view - select', (t) => { }, }); const expected = v.object({ - c1: v.nullable(intSchema), - c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: v.object({ - c4: v.nullable(intSchema), - c5: v.nullable(v.pipe(intSchema, v.maxValue(1000))), - c6: v.pipe(v.string(), v.transform(Number)), + c4: intNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: v.object({ - c1: v.nullable(intSchema), - c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), - c4: v.nullable(intSchema), - c5: v.nullable(intSchema), - c6: v.nullable(intSchema), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: intNullableSchema, + c5: intNullableSchema, + c6: intNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -391,8 +407,12 @@ test('all data types', (t) => { date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), - decimal1: decimal().notNull(), - decimal2: decimal({ unsigned: true }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'number', unsigned: true }).notNull(), + decimal3: decimal({ mode: 'bigint' }).notNull(), + decimal4: decimal({ mode: 'bigint', unsigned: true }).notNull(), + decimal5: decimal({ mode: 'string' }).notNull(), + decimal6: decimal({ mode: 'string', unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), @@ -432,18 +452,22 @@ test('all data types', (t) => { bigint2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), bigint3: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), bigint4: v.pipe(v.bigint(), v.minValue(0n as bigint), v.maxValue(CONSTANTS.INT64_UNSIGNED_MAX)), - bigint5: v.string(), - bigint6: v.string(), - binary: v.pipe(v.string(), v.regex(/^[01]*$/), v.length(10 as number)), + bigint5: bigintStringModeSchema, + bigint6: unsignedBigintStringModeSchema, + binary: v.pipe(v.string(), v.regex(/^[01]*$/), v.maxLength(10 as number)), boolean: v.boolean(), - char1: v.pipe(v.string(), v.length(10 as number)), + char1: v.pipe(v.string(), v.maxLength(10 as number)), char2: v.enum({ a: 'a', b: 'b', c: 'c' }), date1: v.date(), date2: v.string(), datetime1: v.date(), datetime2: v.string(), - decimal1: v.string(), - decimal2: v.string(), + decimal1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER)), + decimal2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER)), + decimal3: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + decimal4: v.pipe(v.bigint(), v.minValue(0n as bigint), v.maxValue(CONSTANTS.INT64_UNSIGNED_MAX)), + decimal5: v.string(), + decimal6: v.string(), double1: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), double2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT48_UNSIGNED_MAX)), float1: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX)), diff --git a/drizzle-valibot/tests/pg.test.ts b/drizzle-valibot/tests/pg.test.ts index c34ed70ca3..a8b0a8d523 100644 --- a/drizzle-valibot/tests/pg.test.ts +++ b/drizzle-valibot/tests/pg.test.ts @@ -15,13 +15,26 @@ import { import type { TopLevelCondition } from 'json-rules-engine'; import * as v from 'valibot'; import { test } from 'vitest'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()); +const integerNullableSchema = v.nullable(integerSchema); +const integerOptionalSchema = v.optional(integerSchema); +const integerNullableOptionalSchema = v.optional(v.nullable(integerSchema)); + const textSchema = v.string(); +const textOptionalSchema = v.optional(textSchema); + +const anySchema = v.any(); + +const extendedSchema = v.pipe(integerSchema, v.maxValue(1000)); +const extendedNullableSchema = v.nullable(extendedSchema); +const extendedOptionalSchema = v.optional(extendedSchema); + +const customSchema = v.pipe(v.string(), v.transform(Number)); test('table - select', (t) => { const table = pgTable('test', { @@ -56,7 +69,7 @@ test('table - insert', (t) => { }); const result = createInsertSchema(table); - const expected = v.object({ name: textSchema, age: v.optional(v.nullable(integerSchema)) }); + const expected = v.object({ name: textSchema, age: integerNullableOptionalSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -70,8 +83,8 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = v.object({ - name: v.optional(textSchema), - age: v.optional(v.nullable(integerSchema)), + name: textOptionalSchema, + age: integerNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -85,7 +98,7 @@ test('view qb - select', (t) => { const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = v.object({ id: integerSchema, age: v.any() }); + const expected = v.object({ id: integerSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -110,13 +123,13 @@ test('materialized view qb - select', (t) => { const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = v.object({ id: integerSchema, age: v.any() }); + const expected = v.object({ id: integerSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view columns - select', (t) => { - const view = pgView('test', { + const view = pgMaterializedView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); @@ -132,7 +145,7 @@ test('view with nested fields - select', (t) => { id: serial().primaryKey(), name: text().notNull(), }); - const view = pgMaterializedView('test').as((qb) => + const view = pgView('test').as((qb) => qb.select({ id: table.id, nested: { @@ -146,7 +159,7 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = v.object({ id: integerSchema, - nested: v.object({ name: textSchema, age: v.any() }), + nested: v.object({ name: textSchema, age: anySchema }), table: v.object({ id: integerSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); @@ -172,9 +185,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = v.object({ - c1: v.nullable(integerSchema), + c1: integerNullableSchema, c2: integerSchema, - c3: v.nullable(integerSchema), + c3: integerNullableSchema, c4: integerSchema, }); expectSchemaShape(t, expected).from(result); @@ -194,11 +207,11 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = v.object({ - c1: v.optional(v.nullable(integerSchema)), + c1: integerNullableOptionalSchema, c2: integerSchema, - c3: v.optional(v.nullable(integerSchema)), - c4: v.optional(integerSchema), - c7: v.optional(integerSchema), + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + c7: integerOptionalSchema, }); expectSchemaShape(t, expected).from(result); }); @@ -216,11 +229,11 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = v.object({ - c1: v.optional(v.nullable(integerSchema)), - c2: v.optional(integerSchema), - c3: v.optional(v.nullable(integerSchema)), - c4: v.optional(integerSchema), - c7: v.optional(integerSchema), + c1: integerNullableOptionalSchema, + c2: integerOptionalSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + c7: integerOptionalSchema, }); expectSchemaShape(t, expected).from(result); @@ -239,9 +252,9 @@ test('refine table - select', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.nullable(integerSchema), - c2: v.pipe(integerSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -263,9 +276,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = v.object({ - c1: v.nullable(integerSchema), - c2: v.pipe(integerSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -286,9 +299,9 @@ test('refine table - insert', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.optional(v.nullable(integerSchema)), - c2: v.pipe(integerSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: integerNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -307,9 +320,9 @@ test('refine table - update', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.optional(v.nullable(integerSchema)), - c2: v.optional(v.pipe(integerSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), + c1: integerNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -351,21 +364,21 @@ test('refine view - select', (t) => { }, }); const expected = v.object({ - c1: v.nullable(integerSchema), - c2: v.nullable(v.pipe(integerSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: v.object({ - c4: v.nullable(integerSchema), - c5: v.nullable(v.pipe(integerSchema, v.maxValue(1000))), - c6: v.pipe(v.string(), v.transform(Number)), + c4: integerNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: v.object({ - c1: v.nullable(integerSchema), - c2: v.nullable(v.pipe(integerSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), - c4: v.nullable(integerSchema), - c5: v.nullable(integerSchema), - c6: v.nullable(integerSchema), + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: integerNullableSchema, + c5: integerNullableSchema, + c6: integerNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -431,7 +444,9 @@ test('all data types', (t) => { line2: line({ mode: 'tuple' }).notNull(), macaddr: macaddr().notNull(), macaddr8: macaddr8().notNull(), - numeric: numeric().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), point1: point({ mode: 'xy' }).notNull(), point2: point({ mode: 'tuple' }).notNull(), real: real().notNull(), @@ -457,7 +472,7 @@ test('all data types', (t) => { const expected = v.object({ bigint1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), bigint2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), - bigint3: v.string(), + bigint3: bigintStringModeSchema, bigserial1: v.pipe( v.number(), v.minValue(Number.MIN_SAFE_INTEGER), @@ -469,7 +484,7 @@ test('all data types', (t) => { boolean: v.boolean(), date1: v.date(), date2: v.string(), - char1: v.pipe(v.string(), v.length(10 as number)), + char1: v.pipe(v.string(), v.maxLength(10 as number)), char2: v.enum({ a: 'a', b: 'b', c: 'c' }), cidr: v.string(), doublePrecision: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), @@ -485,7 +500,9 @@ test('all data types', (t) => { line2: v.tuple([v.number(), v.number(), v.number()]), macaddr: v.string(), macaddr8: v.string(), - numeric: v.string(), + numeric1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER)), + numeric2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + numeric3: v.string(), point1: v.object({ x: v.number(), y: v.number() }), point2: v.tuple([v.number(), v.number()]), real: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX)), diff --git a/drizzle-valibot/tests/singlestore.test.ts b/drizzle-valibot/tests/singlestore.test.ts index b4dc81d17a..c297c1f2b5 100644 --- a/drizzle-valibot/tests/singlestore.test.ts +++ b/drizzle-valibot/tests/singlestore.test.ts @@ -3,7 +3,7 @@ import { customType, int, json, serial, singlestoreSchema, singlestoreTable, tex import type { TopLevelCondition } from 'json-rules-engine'; import * as v from 'valibot'; import { test } from 'vitest'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema, unsignedBigintStringModeSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; @@ -14,13 +14,28 @@ const intSchema = v.pipe( v.maxValue(CONSTANTS.INT32_MAX as number), v.integer(), ); -const serialNumberModeSchema = v.pipe( +const intNullableSchema = v.nullable(intSchema); +const intOptionalSchema = v.optional(intSchema); +const intNullableOptionalSchema = v.optional(v.nullable(intSchema)); + +const serialSchema = v.pipe( v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER as number), v.integer(), ); +const serialOptionalSchema = v.optional(serialSchema); + const textSchema = v.pipe(v.string(), v.maxLength(CONSTANTS.INT16_UNSIGNED_MAX as number)); +const textOptionalSchema = v.optional(textSchema); + +// const anySchema = v.any(); + +const extendedSchema = v.pipe(intSchema, v.maxValue(1000)); +// const extendedNullableSchema = v.nullable(extendedSchema); +const extendedOptionalSchema = v.optional(extendedSchema); + +const customSchema = v.pipe(v.string(), v.transform(Number)); test('table - select', (t) => { const table = singlestoreTable('test', { @@ -29,7 +44,7 @@ test('table - select', (t) => { }); const result = createSelectSchema(table); - const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); + const expected = v.object({ id: serialSchema, name: textSchema }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); // @ts-ignore - TODO: Remake type checks for new columns @@ -44,7 +59,7 @@ test('table in schema - select', (tc) => { }); const result = createSelectSchema(table); - const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); + const expected = v.object({ id: serialSchema, name: textSchema }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(tc, expected).from(result); // @ts-ignore - TODO: Remake type checks for new columns @@ -60,9 +75,9 @@ test('table - insert', (t) => { const result = createInsertSchema(table); const expected = v.object({ - id: v.optional(serialNumberModeSchema), + id: serialOptionalSchema, name: textSchema, - age: v.optional(v.nullable(intSchema)), + age: intNullableOptionalSchema, }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); @@ -79,9 +94,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = v.object({ - id: v.optional(serialNumberModeSchema), - name: v.optional(textSchema), - age: v.optional(v.nullable(intSchema)), + id: serialOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); @@ -99,7 +114,7 @@ test('table - update', (t) => { // const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); // const result = createSelectSchema(view); -// const expected = v.object({ id: serialNumberModeSchema, age: v.any() }); +// const expected = v.object({ id: serialSchema, age: anySchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); @@ -111,7 +126,7 @@ test('table - update', (t) => { // }).as(sql``); // const result = createSelectSchema(view); -// const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); +// const expected = v.object({ id: serialSchema, name: textSchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); @@ -134,9 +149,9 @@ test('table - update', (t) => { // const result = createSelectSchema(view); // const expected = v.object({ -// id: serialNumberModeSchema, -// nested: v.object({ name: textSchema, age: v.any() }), -// table: v.object({ id: serialNumberModeSchema, name: textSchema }), +// id: serialSchema, +// nested: v.object({ name: textSchema, age: anySchema }), +// table: v.object({ id: serialSchema, name: textSchema }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); @@ -152,9 +167,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = v.object({ - c1: v.nullable(intSchema), + c1: intNullableSchema, c2: intSchema, - c3: v.nullable(intSchema), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(t, expected).from(result); @@ -172,10 +187,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), + c1: intNullableOptionalSchema, c2: intSchema, - c3: v.optional(v.nullable(intSchema)), - c4: v.optional(intSchema), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -192,10 +207,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.optional(intSchema), - c3: v.optional(v.nullable(intSchema)), - c4: v.optional(intSchema), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -213,9 +228,9 @@ test('refine table - select', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.nullable(intSchema), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -237,9 +252,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = v.object({ - c1: v.nullable(intSchema), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -260,9 +275,9 @@ test('refine table - insert', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -281,9 +296,9 @@ test('refine table - update', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.optional(v.pipe(intSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -325,21 +340,21 @@ test('refine table - update', (t) => { // }, // }); // const expected = v.object({ -// c1: v.nullable(intSchema), -// c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), -// c3: v.pipe(v.string(), v.transform(Number)), +// c1: intNullableSchema, +// c2: extendedNullableSchema, +// c3: customSchema, // nested: v.object({ -// c4: v.nullable(intSchema), -// c5: v.nullable(v.pipe(intSchema, v.maxValue(1000))), -// c6: v.pipe(v.string(), v.transform(Number)), +// c4: intNullableSchema, +// c5: extendedNullableSchema, +// c6: customSchema, // }), // table: v.object({ -// c1: v.nullable(intSchema), -// c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), -// c3: v.pipe(v.string(), v.transform(Number)), -// c4: v.nullable(intSchema), -// c5: v.nullable(intSchema), -// c6: v.nullable(intSchema), +// c1: intNullableSchema, +// c2: extendedNullableSchema, +// c3: customSchema, +// c4: intNullableSchema, +// c5: intNullableSchema, +// c6: intNullableSchema, // }), // }); // expectSchemaShape(t, expected).from(result); @@ -374,6 +389,7 @@ test('all data types', (t) => { longtext, mediumtext, tinytext, + vector, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), @@ -389,8 +405,12 @@ test('all data types', (t) => { date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), - decimal1: decimal().notNull(), - decimal2: decimal({ unsigned: true }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'number', unsigned: true }).notNull(), + decimal3: decimal({ mode: 'bigint' }).notNull(), + decimal4: decimal({ mode: 'bigint', unsigned: true }).notNull(), + decimal5: decimal({ mode: 'string' }).notNull(), + decimal6: decimal({ mode: 'string', unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), @@ -422,6 +442,14 @@ test('all data types', (t) => { mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), + vector: vector({ + dimensions: 3, + elementType: 'F32', + }).notNull(), + vector2: vector({ + dimensions: 2, + elementType: 'I64', + }).notNull(), })); const result = createSelectSchema(table); @@ -430,18 +458,22 @@ test('all data types', (t) => { bigint2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), bigint3: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), bigint4: v.pipe(v.bigint(), v.minValue(0n as bigint), v.maxValue(CONSTANTS.INT64_UNSIGNED_MAX)), - bigint5: v.string(), - bigint6: v.string(), - binary: v.pipe(v.string(), v.regex(/^[01]*$/), v.length(10 as number)), + bigint5: bigintStringModeSchema, + bigint6: unsignedBigintStringModeSchema, + binary: v.pipe(v.string(), v.regex(/^[01]*$/), v.maxLength(10 as number)), boolean: v.boolean(), - char1: v.pipe(v.string(), v.length(10 as number)), + char1: v.pipe(v.string(), v.maxLength(10 as number)), char2: v.enum({ a: 'a', b: 'b', c: 'c' }), date1: v.date(), date2: v.string(), datetime1: v.date(), datetime2: v.string(), - decimal1: v.string(), - decimal2: v.string(), + decimal1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER)), + decimal2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER)), + decimal3: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + decimal4: v.pipe(v.bigint(), v.minValue(0n as bigint), v.maxValue(CONSTANTS.INT64_UNSIGNED_MAX)), + decimal5: v.string(), + decimal6: v.string(), double1: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), double2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT48_UNSIGNED_MAX)), float1: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX)), @@ -473,6 +505,11 @@ test('all data types', (t) => { mediumtext2: v.enum({ a: 'a', b: 'b', c: 'c' }), tinytext1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT8_UNSIGNED_MAX)), tinytext2: v.enum({ a: 'a', b: 'b', c: 'c' }), + vector: v.pipe(v.array(v.number()), v.length(3 as number)), + vector2: v.pipe( + v.array(v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX))), + v.length(2), + ), }); // @ts-ignore - TODO: Remake type checks for new columns expectSchemaShape(t, expected).from(result); diff --git a/drizzle-valibot/tests/sqlite.test.ts b/drizzle-valibot/tests/sqlite.test.ts index 7351fb9fa5..d050948ab7 100644 --- a/drizzle-valibot/tests/sqlite.test.ts +++ b/drizzle-valibot/tests/sqlite.test.ts @@ -14,7 +14,20 @@ const intSchema = v.pipe( v.maxValue(Number.MAX_SAFE_INTEGER), v.integer(), ); +const intNullableSchema = v.nullable(intSchema); +const intOptionalSchema = v.optional(intSchema); +const intNullableOptionalSchema = v.optional(v.nullable(intSchema)); + const textSchema = v.string(); +const textOptionalSchema = v.optional(textSchema); + +const anySchema = v.any(); + +const extendedSchema = v.pipe(intSchema, v.maxValue(1000)); +const extendedNullableSchema = v.nullable(extendedSchema); +const extendedOptionalSchema = v.optional(extendedSchema); + +const customSchema = v.pipe(v.string(), v.transform(Number)); test('table - select', (t) => { const table = sqliteTable('test', { @@ -36,7 +49,7 @@ test('table - insert', (t) => { }); const result = createInsertSchema(table); - const expected = v.object({ id: v.optional(intSchema), name: textSchema, age: v.optional(v.nullable(intSchema)) }); + const expected = v.object({ id: intOptionalSchema, name: textSchema, age: intNullableOptionalSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -50,9 +63,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = v.object({ - id: v.optional(intSchema), - name: v.optional(textSchema), - age: v.optional(v.nullable(intSchema)), + id: intOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -66,7 +79,7 @@ test('view qb - select', (t) => { const view = sqliteView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = v.object({ id: intSchema, age: v.any() }); + const expected = v.object({ id: intSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -102,7 +115,7 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = v.object({ id: intSchema, - nested: v.object({ name: textSchema, age: v.any() }), + nested: v.object({ name: textSchema, age: anySchema }), table: v.object({ id: intSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); @@ -119,9 +132,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = v.object({ - c1: v.nullable(intSchema), + c1: intNullableSchema, c2: intSchema, - c3: v.nullable(intSchema), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(t, expected).from(result); @@ -139,10 +152,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), + c1: intNullableOptionalSchema, c2: intSchema, - c3: v.optional(v.nullable(intSchema)), - c4: v.optional(intSchema), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -159,10 +172,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.optional(intSchema), - c3: v.optional(v.nullable(intSchema)), - c4: v.optional(intSchema), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -180,9 +193,9 @@ test('refine table - select', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.nullable(intSchema), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -204,9 +217,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = v.object({ - c1: v.nullable(intSchema), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -227,9 +240,9 @@ test('refine table - insert', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.pipe(intSchema, v.maxValue(1000)), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -248,9 +261,9 @@ test('refine table - update', (t) => { c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ - c1: v.optional(v.nullable(intSchema)), - c2: v.optional(v.pipe(intSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -292,21 +305,21 @@ test('refine view - select', (t) => { }, }); const expected = v.object({ - c1: v.nullable(intSchema), - c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: v.object({ - c4: v.nullable(intSchema), - c5: v.nullable(v.pipe(intSchema, v.maxValue(1000))), - c6: v.pipe(v.string(), v.transform(Number)), + c4: intNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: v.object({ - c1: v.nullable(intSchema), - c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), - c3: v.pipe(v.string(), v.transform(Number)), - c4: v.nullable(intSchema), - c5: v.nullable(intSchema), - c6: v.nullable(intSchema), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: intNullableSchema, + c5: intNullableSchema, + c6: intNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -328,7 +341,9 @@ test('all data types', (t) => { integer2: integer({ mode: 'boolean' }).notNull(), integer3: integer({ mode: 'timestamp' }).notNull(), integer4: integer({ mode: 'timestamp_ms' }).notNull(), - numeric: numeric().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), real: real().notNull(), text1: text({ mode: 'text' }).notNull(), text2: text({ mode: 'text', length: 10 }).notNull(), @@ -345,7 +360,9 @@ test('all data types', (t) => { integer2: v.boolean(), integer3: v.date(), integer4: v.date(), - numeric: v.string(), + numeric1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER)), + numeric2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), + numeric3: v.string(), real: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), text1: v.string(), text2: v.pipe(v.string(), v.maxLength(10 as number)), diff --git a/drizzle-valibot/tests/utils.ts b/drizzle-valibot/tests/utils.ts index 7e1eae7570..c0793e3498 100644 --- a/drizzle-valibot/tests/utils.ts +++ b/drizzle-valibot/tests/utils.ts @@ -1,11 +1,11 @@ import type * as v from 'valibot'; -import { expect, type TaskContext } from 'vitest'; +import { expect, type TestContext } from 'vitest'; function onlySpecifiedKeys(obj: Record, keys: string[]) { return Object.fromEntries(Object.entries(obj).filter(([key]) => keys.includes(key))); } -export function expectSchemaShape>(t: TaskContext, expected: T) { +export function expectSchemaShape>(t: TestContext, expected: T) { return { from(actual: T) { expect(Object.keys(actual.entries)).toStrictEqual(Object.keys(expected.entries)); @@ -32,7 +32,7 @@ export function expectSchemaShape>(t: TaskContext, expected: T) { +export function expectEnumValues>(t: TestContext, expected: T) { return { from(actual: T) { expect(actual.enum).toStrictEqual(expected.enum); diff --git a/drizzle-valibot/tsconfig.json b/drizzle-valibot/tsconfig.json index c25379c37b..3f051aa73e 100644 --- a/drizzle-valibot/tsconfig.json +++ b/drizzle-valibot/tsconfig.json @@ -5,6 +5,7 @@ "baseUrl": ".", "declaration": true, "noEmit": true, + "allowImportingTsExtensions": true, "paths": { "~/*": ["src/*"] } diff --git a/drizzle-valibot/vitest.config.ts b/drizzle-valibot/vitest.config.ts index 1f0eb7ad9a..d767b570bd 100644 --- a/drizzle-valibot/vitest.config.ts +++ b/drizzle-valibot/vitest.config.ts @@ -1,4 +1,5 @@ import tsconfigPaths from 'vite-tsconfig-paths'; +// oxlint-disable-next-line extensions import { defineConfig } from 'vitest/config'; export default defineConfig({ diff --git a/drizzle-zod/package.json b/drizzle-zod/package.json index 4f4e4b57ab..48c0c5fd93 100644 --- a/drizzle-zod/package.json +++ b/drizzle-zod/package.json @@ -1,13 +1,15 @@ { "name": "drizzle-zod", - "version": "0.8.3", + "version": "1.0.0-beta.2", "description": "Generate Zod schemas from Drizzle ORM schemas", "type": "module", "scripts": { "build": "tsx scripts/build.ts", + "build:artifact": "pnpm run build", "b": "pnpm build", "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", + "pack:artifact": "pnpm run pack", "publish": "npm publish package.tgz", "test": "vitest run" }, @@ -64,7 +66,7 @@ "author": "Drizzle Team", "license": "Apache-2.0", "peerDependencies": { - "drizzle-orm": ">=1.0.0-beta.1", + "drizzle-orm": ">=1.0.0-beta.2", "zod": "^3.25.0 || ^4.0.0" }, "devDependencies": { @@ -75,8 +77,6 @@ "json-rules-engine": "^7.3.1", "rimraf": "^5.0.0", "rollup": "^3.29.5", - "vite-tsconfig-paths": "^4.3.2", - "vitest": "^3.1.3", "zod": "3.25.1", "zx": "^7.2.2" } diff --git a/drizzle-zod/src/column.ts b/drizzle-zod/src/column.ts index 94d693840e..2f96b0705e 100644 --- a/drizzle-zod/src/column.ts +++ b/drizzle-zod/src/column.ts @@ -20,7 +20,7 @@ export const jsonSchema: zod.ZodType = zod.union([ zod.record(zod.string(), zod.any()), zod.array(zod.any()), ]); -export const bufferSchema: zod.ZodType = zod.custom((v) => v instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof +export const bufferSchema: zod.ZodType = zod.custom((v) => v instanceof Buffer); export function columnToSchema( column: Column, @@ -167,6 +167,11 @@ function numberColumnToSchema( integer = true; break; } + case 'unsigned': { + min = 0; + max = Number.MAX_SAFE_INTEGER; + break; + } default: { min = Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; @@ -183,6 +188,16 @@ function numberColumnToSchema( return schema; } +/** @internal */ +export const bigintStringModeSchema = zod.string().regex(/^-?\d+$/).transform(BigInt).pipe( + zod.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), +).transform(String); + +/** @internal */ +export const unsignedBigintStringModeSchema = zod.string().regex(/^\d+$/).transform(BigInt).pipe( + zod.bigint().gte(0n).lte(CONSTANTS.INT64_MAX), +).transform(String); + function bigintColumnToSchema( column: Column, constraint: ColumnDataBigIntConstraint | undefined, @@ -233,6 +248,12 @@ function arrayColumnToSchema( ? z.array(z.number()).length(length) : z.array(z.number()); } + case 'int64vector': { + const length = column.length; + return length + ? z.array(z.bigint().min(CONSTANTS.INT64_MIN).max(CONSTANTS.INT64_MAX)).length(length) + : z.array(z.bigint().min(CONSTANTS.INT64_MIN).max(CONSTANTS.INT64_MAX)); + } case 'basecolumn': { const length = column.length; const schema = (<{ baseColumn?: Column }> column).baseColumn @@ -308,6 +329,12 @@ function stringColumnToSchema( } return z.enum(enumValues); } + if (constraint === 'int64') { + return bigintStringModeSchema; + } + if (constraint === 'uint64') { + return unsignedBigintStringModeSchema; + } let schema = coerce === true || coerce?.string ? z.coerce.string() : z.string(); schema = regex ? schema.regex(regex) : schema; diff --git a/drizzle-zod/src/column.types.ts b/drizzle-zod/src/column.types.ts index 4f16774aec..42464dd946 100644 --- a/drizzle-zod/src/column.types.ts +++ b/drizzle-zod/src/column.types.ts @@ -1,5 +1,6 @@ import type { Assume, Column, ColumnTypeData, ExtractColumnTypeData } from 'drizzle-orm'; import type { z } from 'zod/v4'; +import type { bigintStringModeSchema, unsignedBigintStringModeSchema } from './column.ts'; import type { CoerceOptions } from './schema.types.ts'; import type { Json } from './utils.ts'; @@ -12,6 +13,7 @@ export type GetZodType< ? TType['constraint'] extends 'geometry' | 'point' ? z.ZodTuple<[z.ZodNumber, z.ZodNumber], null> : TType['constraint'] extends 'line' ? z.ZodTuple<[z.ZodNumber, z.ZodNumber, z.ZodNumber], null> : TType['constraint'] extends 'vector' | 'halfvector' ? z.ZodArray + : TType['constraint'] extends 'int64vector' ? z.ZodArray : TType['constraint'] extends 'basecolumn' ? z.ZodArray }>['baseColumn'], TCoerce>> : z.ZodArray @@ -38,6 +40,8 @@ export type GetZodType< : TType['type'] extends 'string' ? TType['constraint'] extends 'uuid' ? z.ZodUUID : TCanCoerce extends true ? z.coerce.ZodCoercedString : TType['constraint'] extends 'enum' ? z.ZodEnum<{ [K in Assume[number]]: K }> + : TType['constraint'] extends 'int64' ? typeof bigintStringModeSchema + : TType['constraint'] extends 'uint64' ? typeof unsignedBigintStringModeSchema : z.ZodString : z.ZodType; diff --git a/drizzle-zod/src/schema.ts b/drizzle-zod/src/schema.ts index 372a351b60..69db9494db 100644 --- a/drizzle-zod/src/schema.ts +++ b/drizzle-zod/src/schema.ts @@ -78,13 +78,17 @@ const selectConditions: Conditions = { }; const insertConditions: Conditions = { - never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', + never: (column) => + column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always' + || ('identity' in (column ?? {}) && typeof (column as any)?.identity !== 'undefined'), optional: (column) => !column.notNull || (column.notNull && column.hasDefault), nullable: (column) => !column.notNull, }; const updateConditions: Conditions = { - never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', + never: (column) => + column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always' + || ('identity' in (column ?? {}) && typeof (column as any)?.identity !== 'undefined'), optional: () => true, nullable: (column) => !column.notNull, }; diff --git a/drizzle-zod/src/schema.types.ts b/drizzle-zod/src/schema.types.ts index 95a0d9b692..9869170e52 100644 --- a/drizzle-zod/src/schema.types.ts +++ b/drizzle-zod/src/schema.types.ts @@ -1,4 +1,5 @@ import type { InferInsertModel, InferSelectModel, Table, View } from 'drizzle-orm'; +import type { CockroachEnum } from 'drizzle-orm/cockroach-core'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { z } from 'zod/v4'; import type { BuildRefine, BuildSchema, NoUnknownKeys } from './schema.types.internal.ts'; @@ -24,7 +25,7 @@ export interface CreateSelectSchema< refine: NoUnknownKeys, ): BuildSchema<'select', TView['_']['selectedFields'], TRefine, TCoerce>; - >(enum_: TEnum): z.ZodEnum<{ [K in TEnum['enumValues'][number]]: K }>; + | CockroachEnum>(enum_: TEnum): z.ZodEnum<{ [K in TEnum['enumValues'][number]]: K }>; } export interface CreateInsertSchema< diff --git a/drizzle-zod/tests/cockroach.test.ts b/drizzle-zod/tests/cockroach.test.ts new file mode 100644 index 0000000000..cd8d17103c --- /dev/null +++ b/drizzle-zod/tests/cockroach.test.ts @@ -0,0 +1,607 @@ +import { type Equal, sql } from 'drizzle-orm'; +import { + cockroachEnum, + cockroachMaterializedView, + cockroachSchema, + cockroachTable, + cockroachView, + customType, + int4, + jsonb, + text, +} from 'drizzle-orm/cockroach-core'; +import type { TopLevelCondition } from 'json-rules-engine'; +import { test } from 'vitest'; +import { z } from 'zod/v4'; +import { jsonSchema } from '~/column.ts'; +import { CONSTANTS } from '~/constants.ts'; +import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; +import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; + +const int4Schema = z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX); +const int4NullableSchema = int4Schema.nullable(); +const int4OptionalSchema = int4Schema.optional(); +const int4NullableOptionalSchema = int4Schema.nullable().optional(); + +const textSchema = z.string(); +const textOptionalSchema = textSchema.optional(); + +const anySchema = z.any(); + +const extendedSchema = int4Schema.lte(1000); +const extendedNullableSchema = extendedSchema.nullable(); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = z.string().transform(Number); + +test('table - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + generated: int4().generatedAlwaysAsIdentity(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = z.object({ id: int4Schema, generated: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table in schema - select', (tc) => { + const schema = cockroachSchema('test'); + const table = schema.table('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = z.object({ id: int4Schema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - insert', (t) => { + const table = cockroachTable('test', { + id: int4().generatedAlwaysAsIdentity().primaryKey(), + name: text().notNull(), + age: int4(), + }); + + const result = createInsertSchema(table); + const expected = z.object({ name: textSchema, age: int4NullableOptionalSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table - update', (t) => { + const table = cockroachTable('test', { + id: int4().generatedAlwaysAsIdentity().primaryKey(), + name: text().notNull(), + age: int4(), + }); + + const result = createUpdateSchema(table); + const expected = z.object({ + name: textOptionalSchema, + age: int4NullableOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view qb - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); + + const result = createSelectSchema(view); + const expected = z.object({ id: int4Schema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view columns - select', (t) => { + const view = cockroachView('test', { + id: int4().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = z.object({ id: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('materialized view qb - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachMaterializedView('test').as((qb) => + qb.select({ id: table.id, age: sql``.as('age') }).from(table) + ); + + const result = createSelectSchema(view); + const expected = z.object({ id: int4Schema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('materialized view columns - select', (t) => { + const view = cockroachMaterializedView('test', { + id: int4().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = z.object({ id: int4Schema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view with nested fields - select', (t) => { + const table = cockroachTable('test', { + id: int4().primaryKey(), + name: text().notNull(), + }); + const view = cockroachView('test').as((qb) => + qb.select({ + id: table.id, + nested: { + name: table.name, + age: sql``.as('age'), + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view); + const expected = z.object({ + id: int4Schema, + nested: z.object({ name: textSchema, age: anySchema }), + table: z.object({ id: int4Schema, name: textSchema }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('enum - select', (t) => { + const enum_ = cockroachEnum('test', ['a', 'b', 'c']); + + const result = createSelectSchema(enum_); + const expected = z.enum(['a', 'b', 'c']); + expectEnumValues(t, expected).from(result); + Expect>(); +}); + +test('nullability - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + }); + + const result = createSelectSchema(table); + const expected = z.object({ + c1: int4NullableSchema, + c2: int4Schema, + c3: int4NullableSchema, + c4: int4Schema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - insert', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + c5: int4().generatedAlwaysAs(1), + c6: int4().generatedAlwaysAsIdentity(), + c7: int4().generatedByDefaultAsIdentity(), + }); + + const result = createInsertSchema(table); + const expected = z.object({ + c1: int4NullableOptionalSchema, + c2: int4Schema, + c3: int4NullableOptionalSchema, + c4: int4OptionalSchema, + c7: int4OptionalSchema, + }); + expectSchemaShape(t, expected).from(result); +}); + +test('nullability - update', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().default(1), + c4: int4().notNull().default(1), + c5: int4().generatedAlwaysAs(1), + c6: int4().generatedAlwaysAsIdentity(), + c7: int4().generatedByDefaultAsIdentity(), + }); + + const result = createUpdateSchema(table); + const expected = z.object({ + c1: int4NullableOptionalSchema, + c2: int4OptionalSchema, + c3: int4NullableOptionalSchema, + c4: int4OptionalSchema, + c7: int4OptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + }); + + const result = createSelectSchema(table, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + }); + const expected = z.object({ + c1: int4NullableSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select with custom data type', (t) => { + const customText = customType({ dataType: () => 'text' }); + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: customText(), + }); + + const customTextSchema = z.string().min(1).max(100); + const result = createSelectSchema(table, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + c4: customTextSchema, + }); + const expected = z.object({ + c1: int4NullableSchema, + c2: extendedSchema, + c3: customSchema, + c4: customTextSchema, + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - insert', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: int4().generatedAlwaysAs(1), + }); + + const result = createInsertSchema(table, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + }); + const expected = z.object({ + c1: int4NullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - update', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4().notNull(), + c3: int4().notNull(), + c4: int4().generatedAlwaysAs(1), + }); + + const result = createUpdateSchema(table, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + }); + const expected = z.object({ + c1: int4NullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine view - select', (t) => { + const table = cockroachTable('test', { + c1: int4(), + c2: int4(), + c3: int4(), + c4: int4(), + c5: int4(), + c6: int4(), + }); + const view = cockroachView('test').as((qb) => + qb.select({ + c1: table.c1, + c2: table.c2, + c3: table.c3, + nested: { + c4: table.c4, + c5: table.c5, + c6: table.c6, + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + nested: { + c5: (schema) => schema.lte(1000), + c6: z.string().transform(Number), + }, + table: { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + }, + }); + const expected = z.object({ + c1: int4NullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + nested: z.object({ + c4: int4NullableSchema, + c5: extendedNullableSchema, + c6: customSchema, + }), + table: z.object({ + c1: int4NullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: int4NullableSchema, + c5: int4NullableSchema, + c6: int4NullableSchema, + }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('all data types', (t) => { + const table = cockroachTable('test', ({ + bigint, + bit, + bool, + char, + date, + decimal, + float, + doublePrecision, + geometry, + inet, + int2, + int4, + int8, + interval, + jsonb, + numeric, + real, + smallint, + string, + text, + time, + timestamp, + uuid, + varchar, + vector, + }) => ({ + bigint1: bigint({ mode: 'number' }).notNull(), + bigint2: bigint({ mode: 'bigint' }).notNull(), + bit: bit({ length: 5 }).notNull(), + bool: bool().notNull(), + char1: char({ length: 10 }).notNull(), + char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + date1: date({ mode: 'date' }).notNull(), + date2: date({ mode: 'string' }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'bigint' }).notNull(), + decimal3: decimal({ mode: 'string' }).notNull(), + float: float().notNull(), + doublePrecision: doublePrecision().notNull(), + geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(), + geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(), + inet: inet().notNull(), + int2: int2().notNull(), + int4: int4().notNull(), + int8_1: int8({ mode: 'number' }).notNull(), + int8_2: int8({ mode: 'bigint' }).notNull(), + interval: interval().notNull(), + jsonb: jsonb().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), + real: real().notNull(), + smallint: smallint().notNull(), + string1: string().notNull(), + string2: string({ enum: ['a', 'b', 'c'] }).notNull(), + text1: text().notNull(), + text2: text({ enum: ['a', 'b', 'c'] }).notNull(), + time: time().notNull(), + timestamp1: timestamp({ mode: 'date' }).notNull(), + timestamp2: timestamp({ mode: 'string' }).notNull(), + uuid: uuid().notNull(), + varchar1: varchar({ length: 10 }).notNull(), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + vector: vector({ dimensions: 3 }).notNull(), + array: int4().array().notNull(), + })); + + const result = createSelectSchema(table); + const expected = z.object({ + bigint1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + bigint2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + bit: z.string().regex(/^[01]*$/).length(5), + bool: z.boolean(), + char1: z.string().max(10), + char2: z.enum(['a', 'b', 'c']), + date1: z.date(), + date2: z.string(), + decimal1: z.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + decimal2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + decimal3: z.string(), + float: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), + doublePrecision: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), + geometry1: z.tuple([z.number(), z.number()]), + geometry2: z.object({ x: z.number(), y: z.number() }), + inet: z.string(), + int2: z.int().gte(CONSTANTS.INT16_MIN).lte(CONSTANTS.INT16_MAX), + int4: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), + int8_1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + int8_2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + interval: z.string(), + jsonb: jsonSchema, + numeric1: z.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + numeric2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + numeric3: z.string(), + real: z.number().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), + smallint: z.int().gte(CONSTANTS.INT16_MIN).lte(CONSTANTS.INT16_MAX), + string1: z.string(), + string2: z.enum(['a', 'b', 'c']), + text1: z.string(), + text2: z.enum(['a', 'b', 'c']), + time: z.string(), + timestamp1: z.date(), + timestamp2: z.string(), + uuid: z.uuid(), + varchar1: z.string().max(10), + varchar2: z.enum(['a', 'b', 'c']), + vector: z.array(z.number()).length(3), + array: z.array(int4Schema), + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('type coercion - all', (t) => { + const table = cockroachTable('test', ({ + bigint, + bool, + timestamp, + int4, + text, + }) => ({ + bigint: bigint({ mode: 'bigint' }).notNull(), + bool: bool().notNull(), + timestamp: timestamp().notNull(), + int4: int4().notNull(), + text: text().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: true, + }); + const result = createSelectSchema(table); + const expected = z.object({ + bigint: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + bool: z.coerce.boolean(), + timestamp: z.coerce.date(), + int4: z.coerce.number().int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), + text: z.coerce.string(), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('type coercion - mixed', (t) => { + const table = cockroachTable('test', ({ + timestamp, + int4, + }) => ({ + timestamp: timestamp().notNull(), + int4: int4().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: { + date: true, + }, + }); + const result = createSelectSchema(table); + const expected = z.object({ + timestamp: z.coerce.date(), + int4: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +/* Infinitely recursive type */ { + const TopLevelCondition: z.ZodType = z.custom().superRefine(() => {}); + const table = cockroachTable('test', { + jsonb: jsonb().$type(), + }); + const result = createSelectSchema(table); + const expected = z.object({ + jsonb: z.nullable(TopLevelCondition), + }); + Expect, z.infer>>(); +} + +/* Disallow unknown keys in table refinement - select */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createSelectSchema(table, { unknown: z.string() }); +} + +/* Disallow unknown keys in table refinement - insert */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createInsertSchema(table, { unknown: z.string() }); +} + +/* Disallow unknown keys in table refinement - update */ { + const table = cockroachTable('test', { id: int4() }); + // @ts-expect-error + createUpdateSchema(table, { unknown: z.string() }); +} + +/* Disallow unknown keys in view qb - select */ { + const table = cockroachTable('test', { id: int4() }); + const view = cockroachView('test').as((qb) => qb.select().from(table)); + const mView = cockroachMaterializedView('test').as((qb) => qb.select().from(table)); + const nestedSelect = cockroachView('test').as((qb) => qb.select({ table }).from(table)); + // @ts-expect-error + createSelectSchema(view, { unknown: z.string() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: z.string() }); + // @ts-expect-error + createSelectSchema(nestedSelect, { table: { unknown: z.string() } }); +} + +/* Disallow unknown keys in view columns - select */ { + const view = cockroachView('test', { id: int4() }).as(sql``); + const mView = cockroachView('test', { id: int4() }).as(sql``); + // @ts-expect-error + createSelectSchema(view, { unknown: z.string() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: z.string() }); +} diff --git a/drizzle-zod/tests/mssql.test.ts b/drizzle-zod/tests/mssql.test.ts new file mode 100644 index 0000000000..44f3921b31 --- /dev/null +++ b/drizzle-zod/tests/mssql.test.ts @@ -0,0 +1,546 @@ +import { type Equal, sql } from 'drizzle-orm'; +import { customType, int, mssqlSchema, mssqlTable, mssqlView, text } from 'drizzle-orm/mssql-core'; +import { test } from 'vitest'; +import { z } from 'zod/v4'; +import { bigintStringModeSchema, bufferSchema } from '~/column.ts'; +import { CONSTANTS } from '~/constants.ts'; +import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src/index.ts'; +import { Expect, expectSchemaShape } from './utils.ts'; + +const integerSchema = z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX); +const integerNullableSchema = integerSchema.nullable(); +const integerOptionalSchema = integerSchema.optional(); +const integerNullableOptionalSchema = integerSchema.nullable().optional(); + +const textSchema = z.string(); +const textOptionalSchema = textSchema.optional(); + +const anySchema = z.any(); + +const extendedSchema = integerSchema.lte(1000); +const extendedNullableSchema = extendedSchema.nullable(); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = z.string().transform(Number); + +test('table - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + generated: int().identity(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = z.object({ id: integerSchema, generated: integerSchema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table in schema - select', (tc) => { + const schema = mssqlSchema('test'); + const table = schema.table('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + + const result = createSelectSchema(table); + const expected = z.object({ id: integerSchema, name: textSchema }); + expectSchemaShape(tc, expected).from(result); + Expect>(); +}); + +test('table - insert', (t) => { + const table = mssqlTable('test', { + id: int().identity().primaryKey(), + name: text().notNull(), + age: int(), + }); + + const result = createInsertSchema(table); + const expected = z.object({ name: textSchema, age: integerNullableOptionalSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('table - update', (t) => { + const table = mssqlTable('test', { + id: int().identity().primaryKey(), + name: text().notNull(), + age: int(), + }); + + const result = createUpdateSchema(table); + const expected = z.object({ + name: textOptionalSchema, + age: integerNullableOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view qb - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + const view = mssqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); + + const result = createSelectSchema(view); + const expected = z.object({ id: integerSchema, age: anySchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view columns - select', (t) => { + const view = mssqlView('test', { + id: int().primaryKey(), + name: text().notNull(), + }).as(sql``); + + const result = createSelectSchema(view); + const expected = z.object({ id: integerSchema, name: textSchema }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('view with nested fields - select', (t) => { + const table = mssqlTable('test', { + id: int().primaryKey(), + name: text().notNull(), + }); + const view = mssqlView('test').as((qb) => + qb.select({ + id: table.id, + nested: { + name: table.name, + age: sql``.as('age'), + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view); + const expected = z.object({ + id: integerSchema, + nested: z.object({ name: textSchema, age: anySchema }), + table: z.object({ id: integerSchema, name: textSchema }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + }); + + const result = createSelectSchema(table); + const expected = z.object({ + c1: integerNullableSchema, + c2: integerSchema, + c3: integerNullableSchema, + c4: integerSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('nullability - insert', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + c5: int().generatedAlwaysAs(1), + c6: int().identity(), + }); + + const result = createInsertSchema(table); + const expected = z.object({ + c1: integerNullableOptionalSchema, + c2: integerSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); +}); + +test('nullability - update', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().default(1), + c4: int().notNull().default(1), + c5: int().generatedAlwaysAs(1), + c6: int().identity(), + }); + + const result = createUpdateSchema(table); + const expected = z.object({ + c1: integerNullableOptionalSchema, + c2: integerOptionalSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + }); + + const result = createSelectSchema(table, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + }); + const expected = z.object({ + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - select with custom data type', (t) => { + const customText = customType({ dataType: () => 'text' }); + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: customText(), + }); + + const customTextSchema = z.string().min(1).max(100); + const result = createSelectSchema(table, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + c4: customTextSchema, + }); + const expected = z.object({ + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, + c4: customTextSchema, + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - insert', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: int().generatedAlwaysAs(1), + }); + + const result = createInsertSchema(table, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + }); + const expected = z.object({ + c1: integerNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine table - update', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int().notNull(), + c3: int().notNull(), + c4: int().generatedAlwaysAs(1), + }); + + const result = createUpdateSchema(table, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + }); + const expected = z.object({ + c1: integerNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('refine view - select', (t) => { + const table = mssqlTable('test', { + c1: int(), + c2: int(), + c3: int(), + c4: int(), + c5: int(), + c6: int(), + }); + const view = mssqlView('test').as((qb) => + qb.select({ + c1: table.c1, + c2: table.c2, + c3: table.c3, + nested: { + c4: table.c4, + c5: table.c5, + c6: table.c6, + }, + table, + }).from(table) + ); + + const result = createSelectSchema(view, { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + nested: { + c5: (schema) => schema.lte(1000), + c6: z.string().transform(Number), + }, + table: { + c2: (schema) => schema.lte(1000), + c3: z.string().transform(Number), + }, + }); + const expected = z.object({ + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + nested: z.object({ + c4: integerNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, + }), + table: z.object({ + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: integerNullableSchema, + c5: integerNullableSchema, + c6: integerNullableSchema, + }), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('all data types', (t) => { + const table = mssqlTable('test', ({ + bigint, + binary, + bit, + char, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + int, + numeric, + real, + smallint, + text, + time, + tinyint, + varbinary, + varchar, + ntext, + nvarchar, + }) => ({ + bigint1: bigint({ mode: 'number' }).notNull(), + bigint2: bigint({ mode: 'bigint' }).notNull(), + bigint3: bigint({ mode: 'string' }).notNull(), + binary: binary({ length: 10 }).notNull(), + bit: bit().notNull(), + char1: char({ length: 10 }).notNull(), + char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + date1: date({ mode: 'date' }).notNull(), + date2: date({ mode: 'string' }).notNull(), + datetime1: datetime({ mode: 'date' }).notNull(), + datetime2: datetime({ mode: 'string' }).notNull(), + datetime2_1: datetime2({ mode: 'date' }).notNull(), + datetime2_2: datetime2({ mode: 'string' }).notNull(), + datetimeoffset1: datetimeoffset({ mode: 'date' }).notNull(), + datetimeoffset2: datetimeoffset({ mode: 'string' }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'bigint' }).notNull(), + decimal3: decimal({ mode: 'string' }).notNull(), + float: float().notNull(), + int: int().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), + real: real().notNull(), + smallint: smallint().notNull(), + text1: text().notNull(), + text2: text({ enum: ['a', 'b', 'c'] }).notNull(), + time1: time({ mode: 'date' }).notNull(), + time2: time({ mode: 'string' }).notNull(), + tinyint: tinyint().notNull(), + varbinary: varbinary({ length: 10 }).notNull(), + varchar1: varchar({ length: 10 }).notNull(), + varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + ntext1: ntext().notNull(), + ntext2: ntext({ enum: ['a', 'b', 'c'] }).notNull(), + nvarchar1: nvarchar({ length: 10 }).notNull(), + nvarchar2: nvarchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), + })); + + const result = createSelectSchema(table); + const expected = z.object({ + bigint1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + bigint2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + bigint3: bigintStringModeSchema, + binary: bufferSchema, + bit: z.boolean(), + char1: z.string().max(10), + char2: z.enum(['a', 'b', 'c']), + date1: z.date(), + date2: z.string(), + datetime1: z.date(), + datetime2: z.string(), + datetime2_1: z.date(), + datetime2_2: z.string(), + datetimeoffset1: z.date(), + datetimeoffset2: z.string(), + decimal1: z.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + decimal2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + decimal3: z.string(), + float: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), + int: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), + numeric1: z.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + numeric2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + numeric3: z.string(), + real: z.number().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), + smallint: z.int().gte(CONSTANTS.INT16_MIN).lte(CONSTANTS.INT16_MAX), + text1: z.string(), + text2: z.enum(['a', 'b', 'c']), + time1: z.date(), + time2: z.string(), + tinyint: z.int().gte(0).lte(CONSTANTS.INT8_UNSIGNED_MAX), + varbinary: bufferSchema, + varchar1: z.string().max(10), + varchar2: z.enum(['a', 'b', 'c']), + ntext1: z.string(), + ntext2: z.enum(['a', 'b', 'c']), + nvarchar1: z.string().max(10), + nvarchar2: z.enum(['a', 'b', 'c']), + }); + + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('type coercion - all', (t) => { + const table = mssqlTable('test', ({ + bigint, + bit, + datetime, + int, + text, + }) => ({ + bigint: bigint({ mode: 'bigint' }).notNull(), + bit: bit().notNull(), + datetime: datetime().notNull(), + int: int().notNull(), + text: text().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: true, + }); + const result = createSelectSchema(table); + const expected = z.object({ + bigint: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + bit: z.coerce.boolean(), + datetime: z.coerce.date(), + int: z.coerce.number().int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), + text: z.coerce.string(), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('type coercion - mixed', (t) => { + const table = mssqlTable('test', ({ + datetime, + int, + }) => ({ + datetime: datetime().notNull(), + int: int().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: { + date: true, + }, + }); + const result = createSelectSchema(table); + const expected = z.object({ + datetime: z.coerce.date(), + int: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +// MSSQL doesn't support JSON data type +// /* Infinitely recursive type */ { +// const TopLevelCondition: z.ZodType = z.custom().superRefine(() => {}); +// const table = mssqlTable('test', { +// json: json().$type(), +// }); +// const result = createSelectSchema(table); +// const expected = z.object({ +// json: z.nullable(TopLevelCondition), +// }); +// Expect, z.infer>>(); +// } + +/* Disallow unknown keys in table refinement - select */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createSelectSchema(table, { unknown: z.string() }); +} + +/* Disallow unknown keys in table refinement - insert */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createInsertSchema(table, { unknown: z.string() }); +} + +/* Disallow unknown keys in table refinement - update */ { + const table = mssqlTable('test', { id: int() }); + // @ts-expect-error + createUpdateSchema(table, { unknown: z.string() }); +} + +/* Disallow unknown keys in view qb - select */ { + const table = mssqlTable('test', { id: int() }); + const view = mssqlView('test').as((qb) => qb.select().from(table)); + const nestedSelect = mssqlView('test').as((qb) => qb.select({ table }).from(table)); + // @ts-expect-error + createSelectSchema(view, { unknown: z.string() }); + // @ts-expect-error + createSelectSchema(nestedSelect, { table: { unknown: z.string() } }); +} + +/* Disallow unknown keys in view columns - select */ { + const view = mssqlView('test', { id: int() }).as(sql``); + const mView = mssqlView('test', { id: int() }).as(sql``); + // @ts-expect-error + createSelectSchema(view, { unknown: z.string() }); + // @ts-expect-error + createSelectSchema(mView, { unknown: z.string() }); +} diff --git a/drizzle-zod/tests/mysql.test.ts b/drizzle-zod/tests/mysql.test.ts index 1e638f2b2c..3dcd180db0 100644 --- a/drizzle-zod/tests/mysql.test.ts +++ b/drizzle-zod/tests/mysql.test.ts @@ -3,14 +3,29 @@ import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { z } from 'zod/v4'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema, unsignedBigintStringModeSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX); -const serialNumberModeSchema = z.int().gte(0).lte(Number.MAX_SAFE_INTEGER); +const intNullableSchema = intSchema.nullable(); +const intOptionalSchema = intSchema.optional(); +const intNullableOptionalSchema = intSchema.nullable().optional(); + +const serialSchema = z.int().gte(0).lte(Number.MAX_SAFE_INTEGER); +const serialOptionalSchema = serialSchema.optional(); + const textSchema = z.string().max(CONSTANTS.INT16_UNSIGNED_MAX); +const textOptionalSchema = textSchema.optional(); + +const anySchema = z.any(); + +const extendedSchema = intSchema.lte(1000); +const extendedNullableSchema = extendedSchema.nullable(); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = z.string().transform(Number); test('table - select', (t) => { const table = mysqlTable('test', { @@ -20,7 +35,7 @@ test('table - select', (t) => { }); const result = createSelectSchema(table); - const expected = z.object({ id: serialNumberModeSchema, generated: intSchema, name: textSchema }); + const expected = z.object({ id: serialSchema, generated: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -33,7 +48,7 @@ test('table in schema - select', (tc) => { }); const result = createSelectSchema(table); - const expected = z.object({ id: serialNumberModeSchema, name: textSchema }); + const expected = z.object({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -47,9 +62,9 @@ test('table - insert', (t) => { const result = createInsertSchema(table); const expected = z.object({ - id: serialNumberModeSchema.optional(), + id: serialOptionalSchema, name: textSchema, - age: intSchema.nullable().optional(), + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -64,9 +79,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = z.object({ - id: serialNumberModeSchema.optional(), - name: textSchema.optional(), - age: intSchema.nullable().optional(), + id: serialOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -80,7 +95,7 @@ test('view qb - select', (t) => { const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = z.object({ id: serialNumberModeSchema, age: z.any() }); + const expected = z.object({ id: serialSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -92,7 +107,7 @@ test('view columns - select', (t) => { }).as(sql``); const result = createSelectSchema(view); - const expected = z.object({ id: serialNumberModeSchema, name: textSchema }); + const expected = z.object({ id: serialSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -115,9 +130,9 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = z.object({ - id: serialNumberModeSchema, - nested: z.object({ name: textSchema, age: z.any() }), - table: z.object({ id: serialNumberModeSchema, name: textSchema }), + id: serialSchema, + nested: z.object({ name: textSchema, age: anySchema }), + table: z.object({ id: serialSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -153,10 +168,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = z.object({ - c1: intSchema.nullable().optional(), + c1: intNullableOptionalSchema, c2: intSchema, - c3: intSchema.nullable().optional(), - c4: intSchema.optional(), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -173,10 +188,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.optional(), - c3: intSchema.nullable().optional(), - c4: intSchema.optional(), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -194,9 +209,9 @@ test('refine table - select', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); @@ -219,9 +234,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -242,9 +257,9 @@ test('refine table - insert', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -263,9 +278,9 @@ test('refine table - update', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.lte(1000).optional(), - c3: z.string().transform(Number), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -307,21 +322,21 @@ test('refine view - select', (t) => { }, }); const expected = z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000).nullable(), - c3: z.string().transform(Number), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: z.object({ - c4: intSchema.nullable(), - c5: intSchema.lte(1000).nullable(), - c6: z.string().transform(Number), + c4: intNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000).nullable(), - c3: z.string().transform(Number), - c4: intSchema.nullable(), - c5: intSchema.nullable(), - c6: intSchema.nullable(), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: intNullableSchema, + c5: intNullableSchema, + c6: intNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -371,8 +386,12 @@ test('all data types', (t) => { date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), - decimal1: decimal().notNull(), - decimal2: decimal({ unsigned: true }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'number', unsigned: true }).notNull(), + decimal3: decimal({ mode: 'bigint' }).notNull(), + decimal4: decimal({ mode: 'bigint', unsigned: true }).notNull(), + decimal5: decimal({ mode: 'string' }).notNull(), + decimal6: decimal({ mode: 'string', unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), @@ -412,18 +431,22 @@ test('all data types', (t) => { bigint2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), bigint3: z.int().gte(0).lte(Number.MAX_SAFE_INTEGER), bigint4: z.bigint().gte(0n).lte(CONSTANTS.INT64_UNSIGNED_MAX), - bigint5: z.string(), - bigint6: z.string(), - binary: z.string().regex(/^[01]*$/).length(10), + bigint5: bigintStringModeSchema, + bigint6: unsignedBigintStringModeSchema, + binary: z.string().regex(/^[01]*$/).max(10), boolean: z.boolean(), - char1: z.string().length(10), + char1: z.string().max(10), char2: z.enum(['a', 'b', 'c']), date1: z.date(), date2: z.string(), datetime1: z.date(), datetime2: z.string(), - decimal1: z.string(), - decimal2: z.string(), + decimal1: z.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + decimal2: z.number().gte(0).lte(Number.MAX_SAFE_INTEGER), + decimal3: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + decimal4: z.bigint().gte(0n).lte(CONSTANTS.INT64_UNSIGNED_MAX), + decimal5: z.string(), + decimal6: z.string(), double1: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), double2: z.number().gte(0).lte(CONSTANTS.INT48_UNSIGNED_MAX), float1: z.number().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), @@ -483,7 +506,7 @@ test('type coercion - all', (t) => { bigint: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), boolean: z.coerce.boolean(), timestamp: z.coerce.date(), - int: z.coerce.number().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX).int(), + int: z.coerce.number().int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), text: z.coerce.string().max(CONSTANTS.INT16_UNSIGNED_MAX), }); expectSchemaShape(t, expected).from(result); diff --git a/drizzle-zod/tests/pg.test.ts b/drizzle-zod/tests/pg.test.ts index 2063c8f19b..6a9730b928 100644 --- a/drizzle-zod/tests/pg.test.ts +++ b/drizzle-zod/tests/pg.test.ts @@ -15,13 +15,26 @@ import { import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { z } from 'zod/v4'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX); +const integerNullableSchema = integerSchema.nullable(); +const integerOptionalSchema = integerSchema.optional(); +const integerNullableOptionalSchema = integerSchema.nullable().optional(); + const textSchema = z.string(); +const textOptionalSchema = textSchema.optional(); + +const anySchema = z.any(); + +const extendedSchema = integerSchema.lte(1000); +const extendedNullableSchema = extendedSchema.nullable(); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = z.string().transform(Number); test('table - select', (t) => { const table = pgTable('test', { @@ -57,7 +70,7 @@ test('table - insert', (t) => { }); const result = createInsertSchema(table); - const expected = z.object({ name: textSchema, age: integerSchema.nullable().optional() }); + const expected = z.object({ name: textSchema, age: integerNullableOptionalSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -71,8 +84,8 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = z.object({ - name: textSchema.optional(), - age: integerSchema.nullable().optional(), + name: textOptionalSchema, + age: integerNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -86,7 +99,7 @@ test('view qb - select', (t) => { const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = z.object({ id: integerSchema, age: z.any() }); + const expected = z.object({ id: integerSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -111,13 +124,13 @@ test('materialized view qb - select', (t) => { const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = z.object({ id: integerSchema, age: z.any() }); + const expected = z.object({ id: integerSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view columns - select', (t) => { - const view = pgView('test', { + const view = pgMaterializedView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); @@ -133,7 +146,7 @@ test('view with nested fields - select', (t) => { id: serial().primaryKey(), name: text().notNull(), }); - const view = pgMaterializedView('test').as((qb) => + const view = pgView('test').as((qb) => qb.select({ id: table.id, nested: { @@ -147,7 +160,7 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = z.object({ id: integerSchema, - nested: z.object({ name: textSchema, age: z.any() }), + nested: z.object({ name: textSchema, age: anySchema }), table: z.object({ id: integerSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); @@ -173,9 +186,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = z.object({ - c1: integerSchema.nullable(), + c1: integerNullableSchema, c2: integerSchema, - c3: integerSchema.nullable(), + c3: integerNullableSchema, c4: integerSchema, }); expectSchemaShape(t, expected).from(result); @@ -195,11 +208,11 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = z.object({ - c1: integerSchema.nullable().optional(), + c1: integerNullableOptionalSchema, c2: integerSchema, - c3: integerSchema.nullable().optional(), - c4: integerSchema.optional(), - c7: integerSchema.optional(), + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + c7: integerOptionalSchema, }); expectSchemaShape(t, expected).from(result); }); @@ -217,11 +230,11 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = z.object({ - c1: integerSchema.nullable().optional(), - c2: integerSchema.optional(), - c3: integerSchema.nullable().optional(), - c4: integerSchema.optional(), - c7: integerSchema.optional(), + c1: integerNullableOptionalSchema, + c2: integerOptionalSchema, + c3: integerNullableOptionalSchema, + c4: integerOptionalSchema, + c7: integerOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -239,9 +252,9 @@ test('refine table - select', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: integerSchema.nullable(), - c2: integerSchema.lte(1000), - c3: z.string().transform(Number), + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -263,9 +276,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = z.object({ - c1: integerSchema.nullable(), - c2: integerSchema.lte(1000), - c3: z.string().transform(Number), + c1: integerNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -286,9 +299,9 @@ test('refine table - insert', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: integerSchema.nullable().optional(), - c2: integerSchema.lte(1000), - c3: z.string().transform(Number), + c1: integerNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -307,9 +320,9 @@ test('refine table - update', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: integerSchema.nullable().optional(), - c2: integerSchema.lte(1000).optional(), - c3: z.string().transform(Number), + c1: integerNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -351,21 +364,21 @@ test('refine view - select', (t) => { }, }); const expected = z.object({ - c1: integerSchema.nullable(), - c2: integerSchema.lte(1000).nullable(), - c3: z.string().transform(Number), + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: z.object({ - c4: integerSchema.nullable(), - c5: integerSchema.lte(1000).nullable(), - c6: z.string().transform(Number), + c4: integerNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: z.object({ - c1: integerSchema.nullable(), - c2: integerSchema.lte(1000).nullable(), - c3: z.string().transform(Number), - c4: integerSchema.nullable(), - c5: integerSchema.nullable(), - c6: integerSchema.nullable(), + c1: integerNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: integerNullableSchema, + c5: integerNullableSchema, + c6: integerNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -431,7 +444,9 @@ test('all data types', (t) => { line2: line({ mode: 'tuple' }).notNull(), macaddr: macaddr().notNull(), macaddr8: macaddr8().notNull(), - numeric: numeric().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), point1: point({ mode: 'xy' }).notNull(), point2: point({ mode: 'tuple' }).notNull(), real: real().notNull(), @@ -457,14 +472,14 @@ test('all data types', (t) => { const expected = z.object({ bigint1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), bigint2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), - bigint3: z.string(), + bigint3: bigintStringModeSchema, bigserial1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), bigserial2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), bit: z.string().regex(/^[01]*$/).length(5), boolean: z.boolean(), date1: z.date(), date2: z.string(), - char1: z.string().length(10), + char1: z.string().max(10), char2: z.enum(['a', 'b', 'c']), cidr: z.string(), doublePrecision: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), @@ -480,7 +495,9 @@ test('all data types', (t) => { line2: z.tuple([z.number(), z.number(), z.number()]), macaddr: z.string(), macaddr8: z.string(), - numeric: z.string(), + numeric1: z.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + numeric2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + numeric3: z.string(), point1: z.object({ x: z.number(), y: z.number() }), point2: z.tuple([z.number(), z.number()]), real: z.number().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), @@ -498,8 +515,8 @@ test('all data types', (t) => { varchar2: z.enum(['a', 'b', 'c']), vector: z.array(z.number()).length(3), array1: z.array(integerSchema), - array2: z.array(z.array(integerSchema).length(2)), - array3: z.array(z.array(z.string().max(10)).length(2)), + array2: z.array(z.array(integerSchema)).length(2), + array3: z.array(z.array(z.string().max(10))).length(2), }); expectSchemaShape(t, expected).from(result); @@ -529,7 +546,7 @@ test('type coercion - all', (t) => { bigint: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), boolean: z.coerce.boolean(), timestamp: z.coerce.date(), - integer: z.coerce.number().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX).int(), + integer: z.coerce.number().int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), text: z.coerce.string(), }); expectSchemaShape(t, expected).from(result); diff --git a/drizzle-zod/tests/singlestore.test.ts b/drizzle-zod/tests/singlestore.test.ts index c921240d4f..e3a8087e14 100644 --- a/drizzle-zod/tests/singlestore.test.ts +++ b/drizzle-zod/tests/singlestore.test.ts @@ -3,14 +3,29 @@ import { customType, int, json, serial, singlestoreSchema, singlestoreTable, tex import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { z } from 'zod/v4'; -import { jsonSchema } from '~/column.ts'; +import { bigintStringModeSchema, jsonSchema, unsignedBigintStringModeSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX); -const serialNumberModeSchema = z.int().gte(0).lte(Number.MAX_SAFE_INTEGER); +const intNullableSchema = intSchema.nullable(); +const intOptionalSchema = intSchema.optional(); +const intNullableOptionalSchema = intNullableSchema.optional(); + +const serialSchema = z.int().gte(0).lte(Number.MAX_SAFE_INTEGER); +const serialOptionalSchema = serialSchema.optional(); + const textSchema = z.string().max(CONSTANTS.INT16_UNSIGNED_MAX); +const textOptionalSchema = textSchema.optional(); + +// const anySchema = z.any(); + +const extendedSchema = intSchema.lte(1000); +// const extendedNullableSchema = extendedSchema.nullable(); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = z.string().transform(Number); test('table - select', (t) => { const table = singlestoreTable('test', { @@ -20,7 +35,7 @@ test('table - select', (t) => { }); const result = createSelectSchema(table); - const expected = z.object({ id: serialNumberModeSchema, generated: intSchema, name: textSchema }); + const expected = z.object({ id: serialSchema, generated: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -33,7 +48,7 @@ test('table in schema - select', (tc) => { }); const result = createSelectSchema(table); - const expected = z.object({ id: serialNumberModeSchema, name: textSchema }); + const expected = z.object({ id: serialSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); @@ -47,9 +62,9 @@ test('table - insert', (t) => { const result = createInsertSchema(table); const expected = z.object({ - id: serialNumberModeSchema.optional(), + id: serialOptionalSchema, name: textSchema, - age: intSchema.nullable().optional(), + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -64,9 +79,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = z.object({ - id: serialNumberModeSchema.optional(), - name: textSchema.optional(), - age: intSchema.nullable().optional(), + id: serialOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -82,7 +97,7 @@ test('table - update', (t) => { // const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); // const result = createSelectSchema(view); -// const expected = z.object({ id: serialNumberModeSchema, age: z.any() }); +// const expected = z.object({ id: serialSchema, age: anySchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); @@ -94,7 +109,7 @@ test('table - update', (t) => { // }).as(sql``); // const result = createSelectSchema(view); -// const expected = z.object({ id: serialNumberModeSchema, name: textSchema }); +// const expected = z.object({ id: serialSchema, name: textSchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); @@ -118,8 +133,8 @@ test('table - update', (t) => { // const result = createSelectSchema(view); // const expected = z.object({ // id: serialNumberModeSchema, -// nested: z.object({ name: textSchema, age: z.any() }), -// table: z.object({ id: serialNumberModeSchema, name: textSchema }), +// nested: z.object({ name: textSchema, age: anySchema }), +// table: z.object({ id: serialSchema, name: textSchema }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); @@ -135,9 +150,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = z.object({ - c1: intSchema.nullable(), + c1: intNullableSchema, c2: intSchema, - c3: intSchema.nullable(), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(t, expected).from(result); @@ -155,10 +170,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = z.object({ - c1: intSchema.nullable().optional(), + c1: intNullableOptionalSchema, c2: intSchema, - c3: intSchema.nullable().optional(), - c4: intSchema.optional(), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -175,10 +190,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.optional(), - c3: intSchema.nullable().optional(), - c4: intSchema.optional(), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -193,12 +208,12 @@ test('refine table - select', (t) => { const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), - c3: z.string().transform(Number), + c3: customSchema, }); const expected = z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); @@ -221,9 +236,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -244,9 +259,9 @@ test('refine table - insert', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -265,9 +280,9 @@ test('refine table - update', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.lte(1000).optional(), - c3: z.string().transform(Number), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -309,21 +324,21 @@ test('refine table - update', (t) => { // }, // }); // const expected = z.object({ -// c1: intSchema.nullable(), -// c2: intSchema.lte(1000).nullable(), -// c3: z.string().transform(Number), +// c1: intNullableSchema, +// c2: extendedNullableSchema, +// c3: customSchema, // nested: z.object({ -// c4: intSchema.nullable(), -// c5: intSchema.lte(1000).nullable(), -// c6: z.string().transform(Number), +// c4: intNullableSchema, +// c5: extendedNullableSchema,, +// c6: customSchema, // }), // table: z.object({ -// c1: intSchema.nullable(), -// c2: intSchema.lte(1000).nullable(), -// c3: z.string().transform(Number), -// c4: intSchema.nullable(), -// c5: intSchema.nullable(), -// c6: intSchema.nullable(), +// c1: intNullableSchema, +// c2: extendedNullableSchema,, +// c3: customSchema, +// c4: intNullableSchema, +// c5: intNullableSchema, +// c6: intNullableSchema, // }), // }); // expectSchemaShape(t, expected).from(result); @@ -358,6 +373,7 @@ test('all data types', (t) => { longtext, mediumtext, tinytext, + vector, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), @@ -373,8 +389,12 @@ test('all data types', (t) => { date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), - decimal1: decimal().notNull(), - decimal2: decimal({ unsigned: true }).notNull(), + decimal1: decimal({ mode: 'number' }).notNull(), + decimal2: decimal({ mode: 'number', unsigned: true }).notNull(), + decimal3: decimal({ mode: 'bigint' }).notNull(), + decimal4: decimal({ mode: 'bigint', unsigned: true }).notNull(), + decimal5: decimal({ mode: 'string' }).notNull(), + decimal6: decimal({ mode: 'string', unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), @@ -406,6 +426,14 @@ test('all data types', (t) => { mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), + vector: vector({ + dimensions: 3, + elementType: 'F32', + }).notNull(), + vector2: vector({ + dimensions: 2, + elementType: 'I64', + }).notNull(), })); const result = createSelectSchema(table); @@ -414,18 +442,22 @@ test('all data types', (t) => { bigint2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), bigint3: z.int().gte(0).lte(Number.MAX_SAFE_INTEGER), bigint4: z.bigint().gte(0n).lte(CONSTANTS.INT64_UNSIGNED_MAX), - bigint5: z.string(), - bigint6: z.string(), - binary: z.string().regex(/^[01]*$/).length(10), + bigint5: bigintStringModeSchema, + bigint6: unsignedBigintStringModeSchema, + binary: z.string().regex(/^[01]*$/).max(10), boolean: z.boolean(), - char1: z.string().length(10), + char1: z.string().max(10), char2: z.enum(['a', 'b', 'c']), date1: z.date(), date2: z.string(), datetime1: z.date(), datetime2: z.string(), - decimal1: z.string(), - decimal2: z.string(), + decimal1: z.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + decimal2: z.number().gte(0).lte(Number.MAX_SAFE_INTEGER), + decimal3: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + decimal4: z.bigint().gte(0n).lte(CONSTANTS.INT64_UNSIGNED_MAX), + decimal5: z.string(), + decimal6: z.string(), double1: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), double2: z.number().gte(0).lte(CONSTANTS.INT48_UNSIGNED_MAX), float1: z.number().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), @@ -457,6 +489,8 @@ test('all data types', (t) => { mediumtext2: z.enum(['a', 'b', 'c']), tinytext1: z.string().max(CONSTANTS.INT8_UNSIGNED_MAX), tinytext2: z.enum(['a', 'b', 'c']), + vector: z.array(z.number()).length(3), + vector2: z.array(z.bigint().min(CONSTANTS.INT64_MIN).max(CONSTANTS.INT64_MAX)).length(2), }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -485,7 +519,7 @@ test('type coercion - all', (t) => { bigint: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), boolean: z.coerce.boolean(), timestamp: z.coerce.date(), - int: z.coerce.number().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX).int(), + int: z.coerce.number().int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), text: z.coerce.string().max(CONSTANTS.INT16_UNSIGNED_MAX), }); expectSchemaShape(t, expected).from(result); diff --git a/drizzle-zod/tests/sqlite.test.ts b/drizzle-zod/tests/sqlite.test.ts index eff9536686..84e03f9e47 100644 --- a/drizzle-zod/tests/sqlite.test.ts +++ b/drizzle-zod/tests/sqlite.test.ts @@ -9,7 +9,20 @@ import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpda import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER); +const intNullableSchema = intSchema.nullable(); +const intOptionalSchema = intSchema.optional(); +const intNullableOptionalSchema = intSchema.nullable().optional(); + const textSchema = z.string(); +const textOptionalSchema = textSchema.optional(); + +const anySchema = z.any(); + +const extendedSchema = intSchema.lte(1000); +const extendedNullableSchema = extendedSchema.nullable(); +const extendedOptionalSchema = extendedSchema.optional(); + +const customSchema = z.string().transform(Number); test('table - select', (t) => { const table = sqliteTable('test', { @@ -32,7 +45,7 @@ test('table - insert', (t) => { }); const result = createInsertSchema(table); - const expected = z.object({ id: intSchema.optional(), name: textSchema, age: intSchema.nullable().optional() }); + const expected = z.object({ id: intOptionalSchema, name: textSchema, age: intNullableOptionalSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -46,9 +59,9 @@ test('table - update', (t) => { const result = createUpdateSchema(table); const expected = z.object({ - id: intSchema.optional(), - name: textSchema.optional(), - age: intSchema.nullable().optional(), + id: intOptionalSchema, + name: textOptionalSchema, + age: intNullableOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -62,7 +75,7 @@ test('view qb - select', (t) => { const view = sqliteView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); - const expected = z.object({ id: intSchema, age: z.any() }); + const expected = z.object({ id: intSchema, age: anySchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); @@ -98,7 +111,7 @@ test('view with nested fields - select', (t) => { const result = createSelectSchema(view); const expected = z.object({ id: intSchema, - nested: z.object({ name: textSchema, age: z.any() }), + nested: z.object({ name: textSchema, age: anySchema }), table: z.object({ id: intSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); @@ -115,9 +128,9 @@ test('nullability - select', (t) => { const result = createSelectSchema(table); const expected = z.object({ - c1: intSchema.nullable(), + c1: intNullableSchema, c2: intSchema, - c3: intSchema.nullable(), + c3: intNullableSchema, c4: intSchema, }); expectSchemaShape(t, expected).from(result); @@ -135,10 +148,10 @@ test('nullability - insert', (t) => { const result = createInsertSchema(table); const expected = z.object({ - c1: intSchema.nullable().optional(), + c1: intNullableOptionalSchema, c2: intSchema, - c3: intSchema.nullable().optional(), - c4: intSchema.optional(), + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -155,10 +168,10 @@ test('nullability - update', (t) => { const result = createUpdateSchema(table); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.optional(), - c3: intSchema.nullable().optional(), - c4: intSchema.optional(), + c1: intNullableOptionalSchema, + c2: intOptionalSchema, + c3: intNullableOptionalSchema, + c4: intOptionalSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -176,9 +189,9 @@ test('refine table - select', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -200,9 +213,9 @@ test('refine table - select with custom data type', (t) => { c4: customTextSchema, }); const expected = z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableSchema, + c2: extendedSchema, + c3: customSchema, c4: customTextSchema, }); @@ -223,9 +236,9 @@ test('refine table - insert', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.lte(1000), - c3: z.string().transform(Number), + c1: intNullableOptionalSchema, + c2: extendedSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -244,9 +257,9 @@ test('refine table - update', (t) => { c3: z.string().transform(Number), }); const expected = z.object({ - c1: intSchema.nullable().optional(), - c2: intSchema.lte(1000).optional(), - c3: z.string().transform(Number), + c1: intNullableOptionalSchema, + c2: extendedOptionalSchema, + c3: customSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); @@ -288,21 +301,21 @@ test('refine view - select', (t) => { }, }); const expected = z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000).nullable(), - c3: z.string().transform(Number), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, nested: z.object({ - c4: intSchema.nullable(), - c5: intSchema.lte(1000).nullable(), - c6: z.string().transform(Number), + c4: intNullableSchema, + c5: extendedNullableSchema, + c6: customSchema, }), table: z.object({ - c1: intSchema.nullable(), - c2: intSchema.lte(1000).nullable(), - c3: z.string().transform(Number), - c4: intSchema.nullable(), - c5: intSchema.nullable(), - c6: intSchema.nullable(), + c1: intNullableSchema, + c2: extendedNullableSchema, + c3: customSchema, + c4: intNullableSchema, + c5: intNullableSchema, + c6: intNullableSchema, }), }); expectSchemaShape(t, expected).from(result); @@ -324,7 +337,9 @@ test('all data types', (t) => { integer2: integer({ mode: 'boolean' }).notNull(), integer3: integer({ mode: 'timestamp' }).notNull(), integer4: integer({ mode: 'timestamp_ms' }).notNull(), - numeric: numeric().notNull(), + numeric1: numeric({ mode: 'number' }).notNull(), + numeric2: numeric({ mode: 'bigint' }).notNull(), + numeric3: numeric({ mode: 'string' }).notNull(), real: real().notNull(), text1: text({ mode: 'text' }).notNull(), text2: text({ mode: 'text', length: 10 }).notNull(), @@ -341,7 +356,9 @@ test('all data types', (t) => { integer2: z.boolean(), integer3: z.date(), integer4: z.date(), - numeric: z.string(), + numeric1: z.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), + numeric2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), + numeric3: z.string(), real: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), text1: z.string(), text2: z.string().max(10), @@ -373,7 +390,7 @@ test('type coercion - all', (t) => { blob: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), integer1: z.coerce.boolean(), integer2: z.coerce.date(), - integer3: z.coerce.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER).int(), + integer3: z.coerce.number().int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), text: z.coerce.string(), }); expectSchemaShape(t, expected).from(result); diff --git a/drizzle-zod/tests/utils.ts b/drizzle-zod/tests/utils.ts index 90ae9ff88a..ce3ab61830 100644 --- a/drizzle-zod/tests/utils.ts +++ b/drizzle-zod/tests/utils.ts @@ -1,19 +1,31 @@ -import { expect, type TaskContext } from 'vitest'; +import { expect, type TestContext } from 'vitest'; import type { z } from 'zod/v4'; import type { $ZodEnumDef } from 'zod/v4/core'; -export function expectSchemaShape>(t: TaskContext, expected: T) { +export function expectSchemaShape>(t: TestContext, expected: T) { return { from(actual: T) { expect(Object.keys(actual.shape)).toStrictEqual(Object.keys(expected.shape)); - for (const key in Object.keys(actual.shape)) { - expect(actual.shape[key]?._zod.def).toStrictEqual(expected.shape[key]?._zod.def); + + for (const key of Object.keys(actual.shape)) { + const actualDef = actual.shape[key]?._zod.def; + const expectedDef = expected.shape[key]?._zod.def; + + expect({ + key, + type: actualDef?.type, + checks: actualDef?.checks?.map((check) => check._zod.def), + }).toStrictEqual({ + key, + type: expectedDef?.type, + checks: expectedDef?.checks?.map((check) => check._zod.def), + }); } }, }; } -export function expectEnumValues>(t: TaskContext, expected: T) { +export function expectEnumValues>(t: TestContext, expected: T) { return { from(actual: T) { expect(actual.def).toStrictEqual(expected.def as $ZodEnumDef); diff --git a/drizzle-zod/tsconfig.json b/drizzle-zod/tsconfig.json index c25379c37b..3f051aa73e 100644 --- a/drizzle-zod/tsconfig.json +++ b/drizzle-zod/tsconfig.json @@ -5,6 +5,7 @@ "baseUrl": ".", "declaration": true, "noEmit": true, + "allowImportingTsExtensions": true, "paths": { "~/*": ["src/*"] } diff --git a/drizzle-zod/vitest.config.ts b/drizzle-zod/vitest.config.ts index 1f0eb7ad9a..d767b570bd 100644 --- a/drizzle-zod/vitest.config.ts +++ b/drizzle-zod/vitest.config.ts @@ -1,4 +1,5 @@ import tsconfigPaths from 'vite-tsconfig-paths'; +// oxlint-disable-next-line extensions import { defineConfig } from 'vitest/config'; export default defineConfig({ diff --git a/eslint-plugin-drizzle/package.json b/eslint-plugin-drizzle/package.json index 8fea612f6d..530095e9ea 100644 --- a/eslint-plugin-drizzle/package.json +++ b/eslint-plugin-drizzle/package.json @@ -1,12 +1,14 @@ { "name": "eslint-plugin-drizzle", - "version": "0.2.3", + "version": "1.0.0-beta.2", "description": "Eslint plugin for drizzle users to avoid common pitfalls", "main": "src/index.js", "scripts": { "test": "vitest run", + "build:artifact": "pnpm run build", "build": "tsc -b && pnpm cpy readme.md dist/", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", + "pack:artifact": "pnpm run pack", "publish": "npm publish package.tgz" }, "keywords": [ @@ -28,8 +30,7 @@ "@typescript-eslint/utils": "^6.10.0", "cpy-cli": "^5.0.0", "eslint": "^8.53.0", - "typescript": "^5.9.2", - "vitest": "^3.1.3" + "typescript": "^5.9.2" }, "peerDependencies": { "eslint": ">=8.0.0" diff --git a/eslint/eslint-plugin-drizzle-internal/index.js b/eslint/eslint-plugin-drizzle-internal/index.js deleted file mode 100644 index 754fc0e8f0..0000000000 --- a/eslint/eslint-plugin-drizzle-internal/index.js +++ /dev/null @@ -1,81 +0,0 @@ -// @ts-nocheck -const { ESLintUtils } = require('@typescript-eslint/experimental-utils'); -const ts = require('typescript'); - -module.exports = { - rules: { - 'require-entity-kind': ESLintUtils.RuleCreator((name) => name)({ - meta: { - type: 'problem', - docs: { - description: 'Enforce the usage of a static readonly [entityKind] property on Drizzle classes', - recommended: 'error', - }, - messages: { - missingEntityKind: - "Class '{{name}}' doesn't have a static readonly [entityKind] property defined with a string value.", - }, - schema: [], - fixable: 'code', - }, - defaultOptions: [], - create(context) { - const parserServices = ESLintUtils.getParserServices(context); - const checker = parserServices.program.getTypeChecker(); - - return { - ClassDeclaration(node) { - const tsNode = parserServices.esTreeNodeToTSNodeMap.get(node); - const className = tsNode.name - ? tsNode.name.text - : undefined; - - ts.SyntaxKind.PropertyDeclaration; - - for (const prop of tsNode.members) { - if ( - prop.kind - === ts.SyntaxKind.PropertyDeclaration - && prop.modifiers?.some( - (m) => m.kind === ts.SyntaxKind.StaticKeyword, - ) - && prop.modifiers?.some( - (m) => - m.kind - === ts.SyntaxKind.ReadonlyKeyword, - ) - && ts.isComputedPropertyName(prop.name) - && ts.isIdentifier(prop.name.expression) - && prop.name.expression.escapedText - === 'entityKind' - && checker - .getTypeAtLocation(prop.initializer) - .isStringLiteral() - ) { - return; - } - } - - context.report({ - node, - messageId: 'missingEntityKind', - data: { - name: className, - }, - fix(fixer) { - const classBodyOpeningCurlyToken = context - .getSourceCode() - .getFirstToken(node.body); - const insertionPoint = classBodyOpeningCurlyToken.range[1]; - return fixer.insertTextAfterRange( - [insertionPoint, insertionPoint], - `\n\tstatic readonly [entityKind]: string = '${className}';\n`, - ); - }, - }); - }, - }; - }, - }), - }, -}; diff --git a/eslint/eslint-plugin-drizzle-internal/index.mjs b/eslint/eslint-plugin-drizzle-internal/index.mjs new file mode 100644 index 0000000000..89be4fe840 --- /dev/null +++ b/eslint/eslint-plugin-drizzle-internal/index.mjs @@ -0,0 +1,51 @@ +// @ts-nocheck +import { definePlugin, defineRule } from 'oxlint'; + +export default definePlugin({ + meta: { name: 'drizzle-internal' }, + rules: { + 'no-instanceof': defineRule({ + meta: { + messages: { + noInstanceof: 'Use of "instanceof" operator is forbidden', + }, + fixable: 'code', + }, + create: (context) => ({ + BinaryExpression: (node) => { + if (node.type === 'BinaryExpression' && node.operator === 'instanceof') { + context.report({ + node: node, + message: 'Use of "instanceof" operator is forbidden', + }); + } + }, + }), + }), + 'require-entity-kind': defineRule({ + meta: { + messages: { + missingEntityKind: + "Class '{{name}}' doesn't have a static readonly [entityKind] property defined with a string value.", + }, + fixable: 'code', + }, + create: (context) => ({ + ClassDeclaration: (node) => { + const sourceCode = context.sourceCode.getText(node); + + if ( + !(sourceCode.includes('static override readonly [entityKind]: string') + || sourceCode.includes('static readonly [entityKind]: string')) + ) { + context.report({ + node: node, + message: + `Class '${node.id.name}' doesn't have a static readonly [entityKind] property defined with a string value.`, + }); + } + }, + }), + }), + }, +}); diff --git a/integration-tests/.env.example b/integration-tests/.env.example index 861c9cbc08..839eec08e2 100644 --- a/integration-tests/.env.example +++ b/integration-tests/.env.example @@ -4,7 +4,7 @@ SINGLESTORE_CONNECTION_STRING="singlestore://root:singlestore@localhost:3306/dri PLANETSCALE_CONNECTION_STRING= TIDB_CONNECTION_STRING= NEON_HTTP_CONNECTION_STRING=postgres://postgres:postgres@db.localtest.me:5432/postgres -NEON_SERVERLESS_CONNECTION_STRING=postgres://postgres:postgres@localhost:5445/postgres +NEON_CONNECTION_STRING=postgres://postgres:postgres@localhost:5445/postgres LIBSQL_URL="file:local.db" LIBSQL_AUTH_TOKEN="ey..." # For Turso only LIBSQL_REMOTE_URL="libsql://..." diff --git a/integration-tests/.gitignore b/integration-tests/.gitignore index f543b6b5bc..e48c194a29 100644 --- a/integration-tests/.gitignore +++ b/integration-tests/.gitignore @@ -4,3 +4,4 @@ trace tests/imports/imports.cjs tests/imports/imports.mjs .sst +test.ts \ No newline at end of file diff --git a/integration-tests/bunfig.toml b/integration-tests/bunfig.toml new file mode 100644 index 0000000000..b3697f8d4e --- /dev/null +++ b/integration-tests/bunfig.toml @@ -0,0 +1 @@ +coverage = false \ No newline at end of file diff --git a/integration-tests/docker-neon.yml b/integration-tests/docker-neon.yml deleted file mode 100644 index 72deb13be1..0000000000 --- a/integration-tests/docker-neon.yml +++ /dev/null @@ -1,33 +0,0 @@ -services: - postgres: - image: 'postgres:latest' - environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - ports: - - '5432:5441' - healthcheck: - test: ['CMD-SHELL', 'pg_isready -U postgres'] - interval: 10s - timeout: 5s - retries: 5 - neon-proxy: - image: ghcr.io/timowilhelm/local-neon-http-proxy:main - environment: - - PG_CONNECTION_STRING=postgres://postgres:postgres@postgres:5432/postgres - ports: - - '4444:4444' - depends_on: - postgres: - condition: service_healthy - pg_proxy: - image: ghcr.io/neondatabase/wsproxy:latest - environment: - APPEND_PORT: 'postgres:5432' - ALLOW_ADDR_REGEX: '.*' - LOG_TRAFFIC: 'true' - ports: - - '5446:80' - depends_on: - - postgres diff --git a/integration-tests/drizzle2/cockroach-init/20251120194306_misty_albert_cleary/migration.sql b/integration-tests/drizzle2/cockroach-init/20251120194306_misty_albert_cleary/migration.sql new file mode 100644 index 0000000000..20f0c1ec37 --- /dev/null +++ b/integration-tests/drizzle2/cockroach-init/20251120194306_misty_albert_cleary/migration.sql @@ -0,0 +1,11 @@ +CREATE TABLE "users12" ( + "id" int4 PRIMARY KEY GENERATED ALWAYS AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1), + "name" string NOT NULL, + "email" string NOT NULL +); +--> statement-breakpoint +CREATE TABLE "users13" ( + "id" int4 PRIMARY KEY GENERATED ALWAYS AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1), + "name" string NOT NULL, + "email" string NOT NULL +); diff --git a/integration-tests/drizzle2/cockroach-init/20251120194306_misty_albert_cleary/snapshot.json b/integration-tests/drizzle2/cockroach-init/20251120194306_misty_albert_cleary/snapshot.json new file mode 100644 index 0000000000..73070f4230 --- /dev/null +++ b/integration-tests/drizzle2/cockroach-init/20251120194306_misty_albert_cleary/snapshot.json @@ -0,0 +1,135 @@ +{ + "version": "1", + "dialect": "cockroach", + "id": "a6989e29-6373-4ce5-b3d1-2a5ada54dcfc", + "prevIds": [ + "00000000-0000-0000-0000-000000000000" + ], + "ddl": [ + { + "isRlsEnabled": false, + "name": "users12", + "entityType": "tables", + "schema": "public" + }, + { + "isRlsEnabled": false, + "name": "users13", + "entityType": "tables", + "schema": "public" + }, + { + "type": "int4", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": { + "type": "always", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": 1 + }, + "name": "id", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "string", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "name", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "string", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "email", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "int4", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": { + "type": "always", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": 1 + }, + "name": "id", + "entityType": "columns", + "schema": "public", + "table": "users13" + }, + { + "type": "string", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "name", + "entityType": "columns", + "schema": "public", + "table": "users13" + }, + { + "type": "string", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "email", + "entityType": "columns", + "schema": "public", + "table": "users13" + }, + { + "columns": [ + "id" + ], + "nameExplicit": false, + "name": "users12_pkey", + "schema": "public", + "table": "users12", + "entityType": "pks" + }, + { + "columns": [ + "id" + ], + "nameExplicit": false, + "name": "users13_pkey", + "schema": "public", + "table": "users13", + "entityType": "pks" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/cockroach-init/20251120194310_flashy_nebula/migration.sql b/integration-tests/drizzle2/cockroach-init/20251120194310_flashy_nebula/migration.sql new file mode 100644 index 0000000000..64f9507ee2 --- /dev/null +++ b/integration-tests/drizzle2/cockroach-init/20251120194310_flashy_nebula/migration.sql @@ -0,0 +1 @@ +DROP TABLE "users13"; \ No newline at end of file diff --git a/integration-tests/drizzle2/cockroach-init/20251120194310_flashy_nebula/snapshot.json b/integration-tests/drizzle2/cockroach-init/20251120194310_flashy_nebula/snapshot.json new file mode 100644 index 0000000000..e8c13cff08 --- /dev/null +++ b/integration-tests/drizzle2/cockroach-init/20251120194310_flashy_nebula/snapshot.json @@ -0,0 +1,73 @@ +{ + "version": "1", + "dialect": "cockroach", + "id": "603be25f-26a8-425f-9248-47230b99bd2e", + "prevIds": [ + "a6989e29-6373-4ce5-b3d1-2a5ada54dcfc" + ], + "ddl": [ + { + "isRlsEnabled": false, + "name": "users12", + "entityType": "tables", + "schema": "public" + }, + { + "type": "int4", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": { + "type": "always", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": 1 + }, + "name": "id", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "string", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "name", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "string", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "email", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "columns": [ + "id" + ], + "nameExplicit": false, + "name": "users12_pkey", + "schema": "public", + "table": "users12", + "entityType": "pks" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/cockroach/0000_workable_captain_britain.sql b/integration-tests/drizzle2/cockroach/0000_workable_captain_britain.sql new file mode 100644 index 0000000000..d7f96c1771 --- /dev/null +++ b/integration-tests/drizzle2/cockroach/0000_workable_captain_britain.sql @@ -0,0 +1,5 @@ +CREATE TABLE "users12" ( + "id" int4 GENERATED ALWAYS AS IDENTITY (INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1), + "name" string, + "email" string NOT NULL +); diff --git a/integration-tests/drizzle2/cockroach/meta/0000_snapshot.json b/integration-tests/drizzle2/cockroach/meta/0000_snapshot.json new file mode 100644 index 0000000000..b167127efd --- /dev/null +++ b/integration-tests/drizzle2/cockroach/meta/0000_snapshot.json @@ -0,0 +1,61 @@ +{ + "version": "1", + "dialect": "cockroach", + "id": "cd7bca85-fda1-4129-a13c-4c08c82c0ec4", + "prevId": "00000000-0000-0000-0000-000000000000", + "ddl": [ + { + "isRlsEnabled": false, + "name": "users12", + "entityType": "tables", + "schema": "public" + }, + { + "type": "int4", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": { + "type": "always", + "increment": "1", + "startWith": "1", + "minValue": "1", + "maxValue": "2147483647", + "cache": 1 + }, + "name": "id", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "string", + "typeSchema": null, + "notNull": false, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "name", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "string", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "email", + "entityType": "columns", + "schema": "public", + "table": "users12" + } + ], + "renames": [] +} diff --git a/integration-tests/drizzle2/cockroach/meta/_journal.json b/integration-tests/drizzle2/cockroach/meta/_journal.json new file mode 100644 index 0000000000..c6ad64478e --- /dev/null +++ b/integration-tests/drizzle2/cockroach/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "7", + "dialect": "cockroach", + "entries": [ + { + "idx": 0, + "version": "1", + "when": 1757581488674, + "tag": "0000_workable_captain_britain", + "breakpoints": true + } + ] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/mssql-init/20251120231500_parched_boomerang/migration.sql b/integration-tests/drizzle2/mssql-init/20251120231500_parched_boomerang/migration.sql new file mode 100644 index 0000000000..68b1bf2e38 --- /dev/null +++ b/integration-tests/drizzle2/mssql-init/20251120231500_parched_boomerang/migration.sql @@ -0,0 +1,13 @@ +CREATE TABLE [users12] ( + [id] int IDENTITY(1, 1), + [name] text NOT NULL, + [email] text NOT NULL, + CONSTRAINT [users12_pkey] PRIMARY KEY([id]) +); +--> statement-breakpoint +CREATE TABLE [users13] ( + [id] int IDENTITY(1, 1), + [name] text NOT NULL, + [email] text NOT NULL, + CONSTRAINT [users13_pkey] PRIMARY KEY([id]) +); diff --git a/integration-tests/drizzle2/mssql-init/20251120231500_parched_boomerang/snapshot.json b/integration-tests/drizzle2/mssql-init/20251120231500_parched_boomerang/snapshot.json new file mode 100644 index 0000000000..2a3dd8ccd1 --- /dev/null +++ b/integration-tests/drizzle2/mssql-init/20251120231500_parched_boomerang/snapshot.json @@ -0,0 +1,107 @@ +{ + "version": "1", + "dialect": "mssql", + "id": "4a793769-8ce6-4179-bbf5-cea1ba3de5ef", + "prevIds": [ + "00000000-0000-0000-0000-000000000000" + ], + "ddl": [ + { + "name": "users12", + "entityType": "tables", + "schema": "dbo" + }, + { + "name": "users13", + "entityType": "tables", + "schema": "dbo" + }, + { + "type": "int", + "notNull": true, + "generated": null, + "identity": { + "seed": 1, + "increment": 1 + }, + "name": "id", + "schema": "dbo", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "generated": null, + "identity": null, + "name": "name", + "schema": "dbo", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "generated": null, + "identity": null, + "name": "email", + "schema": "dbo", + "entityType": "columns", + "table": "users12" + }, + { + "type": "int", + "notNull": true, + "generated": null, + "identity": { + "seed": 1, + "increment": 1 + }, + "name": "id", + "schema": "dbo", + "entityType": "columns", + "table": "users13" + }, + { + "type": "text", + "notNull": true, + "generated": null, + "identity": null, + "name": "name", + "schema": "dbo", + "entityType": "columns", + "table": "users13" + }, + { + "type": "text", + "notNull": true, + "generated": null, + "identity": null, + "name": "email", + "schema": "dbo", + "entityType": "columns", + "table": "users13" + }, + { + "nameExplicit": false, + "columns": [ + "id" + ], + "name": "users12_pkey", + "table": "users12", + "schema": "dbo", + "entityType": "pks" + }, + { + "nameExplicit": false, + "columns": [ + "id" + ], + "name": "users13_pkey", + "table": "users13", + "schema": "dbo", + "entityType": "pks" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/mssql-init/20251120231510_sharp_loners/migration.sql b/integration-tests/drizzle2/mssql-init/20251120231510_sharp_loners/migration.sql new file mode 100644 index 0000000000..a5520ba63b --- /dev/null +++ b/integration-tests/drizzle2/mssql-init/20251120231510_sharp_loners/migration.sql @@ -0,0 +1 @@ +DROP TABLE [users13]; \ No newline at end of file diff --git a/integration-tests/drizzle2/mssql-init/20251120231510_sharp_loners/snapshot.json b/integration-tests/drizzle2/mssql-init/20251120231510_sharp_loners/snapshot.json new file mode 100644 index 0000000000..7387f5dded --- /dev/null +++ b/integration-tests/drizzle2/mssql-init/20251120231510_sharp_loners/snapshot.json @@ -0,0 +1,59 @@ +{ + "version": "1", + "dialect": "mssql", + "id": "0c15c859-9a9b-491f-b3e1-4bb0ee502afb", + "prevIds": [ + "4a793769-8ce6-4179-bbf5-cea1ba3de5ef" + ], + "ddl": [ + { + "name": "users12", + "entityType": "tables", + "schema": "dbo" + }, + { + "type": "int", + "notNull": true, + "generated": null, + "identity": { + "seed": 1, + "increment": 1 + }, + "name": "id", + "schema": "dbo", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "generated": null, + "identity": null, + "name": "name", + "schema": "dbo", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "generated": null, + "identity": null, + "name": "email", + "schema": "dbo", + "entityType": "columns", + "table": "users12" + }, + { + "nameExplicit": false, + "columns": [ + "id" + ], + "name": "users12_pkey", + "table": "users12", + "schema": "dbo", + "entityType": "pks" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/mssql/0000_nostalgic_carnage.sql b/integration-tests/drizzle2/mssql/0000_nostalgic_carnage.sql new file mode 100644 index 0000000000..840f726be7 --- /dev/null +++ b/integration-tests/drizzle2/mssql/0000_nostalgic_carnage.sql @@ -0,0 +1,20 @@ +CREATE TABLE [cities_migration] ( + [id] int, + [fullname_name] text, + [state] text +); +--> statement-breakpoint +CREATE TABLE [users_migration] ( + [id] int PRIMARY KEY NOT NULL, + [full_name] text, + [phone] int, + [invited_by] int, + [city_id] int, + [date] timestamp +); +--> statement-breakpoint +CREATE TABLE [users12] ( + [id] int identity PRIMARY KEY NOT NULL, + [name] text NOT NULL, + [email] text NOT NULL +); diff --git a/integration-tests/drizzle2/mssql/meta/0000_snapshot.json b/integration-tests/drizzle2/mssql/meta/0000_snapshot.json new file mode 100644 index 0000000000..f25ed0c02d --- /dev/null +++ b/integration-tests/drizzle2/mssql/meta/0000_snapshot.json @@ -0,0 +1,132 @@ +{ + "version": "5", + "dialect": "mysql", + "id": "8e8c8378-0496-40f6-88e3-98aab8282b1f", + "prevId": "00000000-0000-0000-0000-000000000000", + "tables": { + "cities_migration": { + "name": "cities_migration", + "columns": { + "id": { + "name": "id", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "fullname_name": { + "name": "fullname_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "state": { + "name": "state", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {} + }, + "users_migration": { + "name": "users_migration", + "columns": { + "id": { + "name": "id", + "type": "int", + "primaryKey": true, + "notNull": true, + "autoincrement": false + }, + "full_name": { + "name": "full_name", + "type": "text", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "phone": { + "name": "phone", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "invited_by": { + "name": "invited_by", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "city_id": { + "name": "city_id", + "type": "int", + "primaryKey": false, + "notNull": false, + "autoincrement": false + }, + "date": { + "name": "date", + "type": "timestamp", + "primaryKey": false, + "notNull": false, + "autoincrement": false, + "default": "(now())" + } + }, + "indexes": {}, + "foreignKeys": {}, + "compositePrimaryKeys": {} + }, + "users12": { + "name": "users12", + "columns": { + "id": { + "name": "id", + "type": "serial", + "primaryKey": true, + "notNull": true, + "autoincrement": true + }, + "name": { + "name": "name", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + }, + "email": { + "name": "email", + "type": "text", + "primaryKey": false, + "notNull": true, + "autoincrement": false + } + }, + "indexes": { + "my_unique_index": { + "name": "my_unique_index", + "columns": [ + "name" + ], + "isUnique": true, + "using": "btree" + } + }, + "foreignKeys": {}, + "compositePrimaryKeys": {} + } + }, + "schemas": {}, + "_meta": { + "schemas": {}, + "tables": {}, + "columns": {} + } +} \ No newline at end of file diff --git a/integration-tests/drizzle2/mssql/meta/_journal.json b/integration-tests/drizzle2/mssql/meta/_journal.json new file mode 100644 index 0000000000..708471cf51 --- /dev/null +++ b/integration-tests/drizzle2/mssql/meta/_journal.json @@ -0,0 +1,13 @@ +{ + "version": "5", + "dialect": "mysql", + "entries": [ + { + "idx": 0, + "version": "5", + "when": 1680270921944, + "tag": "0000_nostalgic_carnage", + "breakpoints": true + } + ] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/mysql-init/20251120204726_swift_wendell_rand/migration.sql b/integration-tests/drizzle2/mysql-init/20251120204726_swift_wendell_rand/migration.sql new file mode 100644 index 0000000000..5ea6f38b8f --- /dev/null +++ b/integration-tests/drizzle2/mysql-init/20251120204726_swift_wendell_rand/migration.sql @@ -0,0 +1,11 @@ +CREATE TABLE `users12` ( + `id` serial PRIMARY KEY, + `name` text NOT NULL, + `email` text NOT NULL +); +--> statement-breakpoint +CREATE TABLE `users13` ( + `id` serial PRIMARY KEY, + `name` text NOT NULL, + `email` text NOT NULL +); diff --git a/integration-tests/drizzle2/mysql-init/20251120204726_swift_wendell_rand/snapshot.json b/integration-tests/drizzle2/mysql-init/20251120204726_swift_wendell_rand/snapshot.json new file mode 100644 index 0000000000..d7b0f12bfd --- /dev/null +++ b/integration-tests/drizzle2/mysql-init/20251120204726_swift_wendell_rand/snapshot.json @@ -0,0 +1,119 @@ +{ + "version": "6", + "dialect": "mysql", + "id": "39891942-6c9a-43fd-84df-f53a9b256e21", + "prevIds": [ + "00000000-0000-0000-0000-000000000000" + ], + "ddl": [ + { + "name": "users12", + "entityType": "tables" + }, + { + "name": "users13", + "entityType": "tables" + }, + { + "type": "serial", + "notNull": true, + "autoIncrement": true, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "users12" + }, + { + "type": "serial", + "notNull": true, + "autoIncrement": true, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "users13" + }, + { + "type": "text", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "users13" + }, + { + "type": "text", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "users13" + }, + { + "columns": [ + "id" + ], + "name": "PRIMARY", + "table": "users12", + "entityType": "pks" + }, + { + "columns": [ + "id" + ], + "name": "PRIMARY", + "table": "users13", + "entityType": "pks" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/mysql-init/20251120204734_slimy_phil_sheldon/migration.sql b/integration-tests/drizzle2/mysql-init/20251120204734_slimy_phil_sheldon/migration.sql new file mode 100644 index 0000000000..842cd9005e --- /dev/null +++ b/integration-tests/drizzle2/mysql-init/20251120204734_slimy_phil_sheldon/migration.sql @@ -0,0 +1 @@ +DROP TABLE `users13`; \ No newline at end of file diff --git a/integration-tests/drizzle2/mysql-init/20251120204734_slimy_phil_sheldon/snapshot.json b/integration-tests/drizzle2/mysql-init/20251120204734_slimy_phil_sheldon/snapshot.json new file mode 100644 index 0000000000..b1feed1d9d --- /dev/null +++ b/integration-tests/drizzle2/mysql-init/20251120204734_slimy_phil_sheldon/snapshot.json @@ -0,0 +1,65 @@ +{ + "version": "6", + "dialect": "mysql", + "id": "c9e80830-642e-4b79-8410-190d8984f1d7", + "prevIds": [ + "39891942-6c9a-43fd-84df-f53a9b256e21" + ], + "ddl": [ + { + "name": "users12", + "entityType": "tables" + }, + { + "type": "serial", + "notNull": true, + "autoIncrement": true, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "autoIncrement": false, + "default": null, + "onUpdateNow": false, + "onUpdateNowFsp": null, + "charSet": null, + "collation": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "users12" + }, + { + "columns": [ + "id" + ], + "name": "PRIMARY", + "table": "users12", + "entityType": "pks" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/pg-init/20251120153629_exotic_eternals/migration.sql b/integration-tests/drizzle2/pg-init/20251120153629_exotic_eternals/migration.sql new file mode 100644 index 0000000000..de1ab7c10c --- /dev/null +++ b/integration-tests/drizzle2/pg-init/20251120153629_exotic_eternals/migration.sql @@ -0,0 +1,5 @@ +CREATE TABLE "users12" ( + "id" serial PRIMARY KEY, + "name" text NOT NULL, + "email" text NOT NULL +); diff --git a/integration-tests/drizzle2/pg-init/20251120153629_exotic_eternals/snapshot.json b/integration-tests/drizzle2/pg-init/20251120153629_exotic_eternals/snapshot.json new file mode 100644 index 0000000000..1765ae3a92 --- /dev/null +++ b/integration-tests/drizzle2/pg-init/20251120153629_exotic_eternals/snapshot.json @@ -0,0 +1,66 @@ +{ + "dialect": "postgres", + "id": "d8fbcab5-bce3-40b6-9ee0-92e30bc4cc4a", + "prevIds": [ + "00000000-0000-0000-0000-000000000000" + ], + "version": "8", + "ddl": [ + { + "isRlsEnabled": false, + "name": "users12", + "entityType": "tables", + "schema": "public" + }, + { + "type": "serial", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "id", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "text", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "name", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "type": "text", + "typeSchema": null, + "notNull": true, + "dimensions": 0, + "default": null, + "generated": null, + "identity": null, + "name": "email", + "entityType": "columns", + "schema": "public", + "table": "users12" + }, + { + "columns": [ + "id" + ], + "nameExplicit": true, + "name": "users12_pkey", + "entityType": "pks", + "schema": "public", + "table": "users12" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/sqlite-init/20251121133136_even_frog_thor/migration.sql b/integration-tests/drizzle2/sqlite-init/20251121133136_even_frog_thor/migration.sql new file mode 100644 index 0000000000..690fee9d13 --- /dev/null +++ b/integration-tests/drizzle2/sqlite-init/20251121133136_even_frog_thor/migration.sql @@ -0,0 +1,11 @@ +CREATE TABLE `users12` ( + `id` integer PRIMARY KEY, + `name` text NOT NULL, + `email` text NOT NULL +); +--> statement-breakpoint +CREATE TABLE `users13` ( + `id` integer PRIMARY KEY, + `name` text NOT NULL, + `email` text NOT NULL +); diff --git a/integration-tests/drizzle2/sqlite-init/20251121133136_even_frog_thor/snapshot.json b/integration-tests/drizzle2/sqlite-init/20251121133136_even_frog_thor/snapshot.json new file mode 100644 index 0000000000..94aaa4732b --- /dev/null +++ b/integration-tests/drizzle2/sqlite-init/20251121133136_even_frog_thor/snapshot.json @@ -0,0 +1,97 @@ +{ + "version": "7", + "dialect": "sqlite", + "id": "3536f101-e83a-4dd1-a7d5-149121fd2abf", + "prevIds": [ + "00000000-0000-0000-0000-000000000000" + ], + "ddl": [ + { + "name": "users12", + "entityType": "tables" + }, + { + "name": "users13", + "entityType": "tables" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "users12" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "users13" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "users13" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "users13" + }, + { + "columns": [ + "id" + ], + "nameExplicit": false, + "name": "users12_pk", + "table": "users12", + "entityType": "pks" + }, + { + "columns": [ + "id" + ], + "nameExplicit": false, + "name": "users13_pk", + "table": "users13", + "entityType": "pks" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/drizzle2/sqlite-init/20251121133143_perpetual_whizzer/migration.sql b/integration-tests/drizzle2/sqlite-init/20251121133143_perpetual_whizzer/migration.sql new file mode 100644 index 0000000000..842cd9005e --- /dev/null +++ b/integration-tests/drizzle2/sqlite-init/20251121133143_perpetual_whizzer/migration.sql @@ -0,0 +1 @@ +DROP TABLE `users13`; \ No newline at end of file diff --git a/integration-tests/drizzle2/sqlite-init/20251121133143_perpetual_whizzer/snapshot.json b/integration-tests/drizzle2/sqlite-init/20251121133143_perpetual_whizzer/snapshot.json new file mode 100644 index 0000000000..eb4264d956 --- /dev/null +++ b/integration-tests/drizzle2/sqlite-init/20251121133143_perpetual_whizzer/snapshot.json @@ -0,0 +1,54 @@ +{ + "version": "7", + "dialect": "sqlite", + "id": "b2c3faaa-48ee-49b8-a938-a133e9b40269", + "prevIds": [ + "3536f101-e83a-4dd1-a7d5-149121fd2abf" + ], + "ddl": [ + { + "name": "users12", + "entityType": "tables" + }, + { + "type": "integer", + "notNull": false, + "autoincrement": false, + "default": null, + "generated": null, + "name": "id", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "name", + "entityType": "columns", + "table": "users12" + }, + { + "type": "text", + "notNull": true, + "autoincrement": false, + "default": null, + "generated": null, + "name": "email", + "entityType": "columns", + "table": "users12" + }, + { + "columns": [ + "id" + ], + "nameExplicit": false, + "name": "users12_pk", + "table": "users12", + "entityType": "pks" + } + ], + "renames": [] +} \ No newline at end of file diff --git a/integration-tests/js-tests/driver-init/commonjs/better-sqlite3.test.cjs b/integration-tests/js-tests/driver-init/commonjs/better-sqlite3.test.cjs index 4327527fb7..feb368b477 100644 --- a/integration-tests/js-tests/driver-init/commonjs/better-sqlite3.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/better-sqlite3.test.cjs @@ -86,28 +86,15 @@ describe('better-sqlite3', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Database(':memory:'); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); - it('drizzle(client, config)', async () => { - const client = new Database(':memory:'); - const db = drizzle(client, { - schema, - }); - - await db.$client.exec('SELECT 1;'); - - await db.$client.close(); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = new Database(':memory:'); const db = drizzle({ diff --git a/integration-tests/js-tests/driver-init/commonjs/libsql.test.cjs b/integration-tests/js-tests/driver-init/commonjs/libsql.test.cjs index 3ebb64cea6..333bfdccd0 100644 --- a/integration-tests/js-tests/driver-init/commonjs/libsql.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/libsql.test.cjs @@ -53,32 +53,17 @@ describe('libsql', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = createClient({ url: ':memory:', }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); await db.$client.close(); }); - it('drizzle(client, config)', async () => { - const client = createClient({ - url: ':memory:', - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.execute('SELECT 1;'); - - await db.$client.close(); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = createClient({ url: ':memory:', diff --git a/integration-tests/js-tests/driver-init/commonjs/mysql2.test.cjs b/integration-tests/js-tests/driver-init/commonjs/mysql2.test.cjs index 5c295e2052..6f60314469 100644 --- a/integration-tests/js-tests/driver-init/commonjs/mysql2.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/mysql2.test.cjs @@ -61,33 +61,18 @@ describe('mysql2', async (it) => { expect(db.$client.getConnection).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); - it('drizzle(client, config)', async () => { - const client = createPool({ - uri: process.env['MYSQL_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - mode: 'default', - }); - - await db.$client.execute('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - expect(db.$client.getConnection).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], @@ -107,33 +92,18 @@ describe('mysql2', async (it) => { }); describe('mysql2:connection', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); expect(db.$client.getConnection).toStrictEqual(undefined); }); - it('drizzle(client, config)', async () => { - const client = createConnection({ - uri: process.env['MYSQL_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - mode: 'default', - }); - - await db.$client.execute('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - expect(db.$client.getConnection).toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/commonjs/neon-http.test.cjs b/integration-tests/js-tests/driver-init/commonjs/neon-http.test.cjs index cc23ac6f87..b5aaff0954 100644 --- a/integration-tests/js-tests/driver-init/commonjs/neon-http.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/neon-http.test.cjs @@ -54,28 +54,15 @@ describe('neon-http', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], ); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client('SELECT 1;'); }); - it('drizzle(client, config)', async () => { - const client = pg( - process.env['NEON_CONNECTION_STRING'], - ); - const db = drizzle(client, { - schema, - }); - - await db.$client('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/commonjs/neon-ws.test.cjs b/integration-tests/js-tests/driver-init/commonjs/neon-ws.test.cjs index f5bb8366fb..5f2f692d77 100644 --- a/integration-tests/js-tests/driver-init/commonjs/neon-ws.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/neon-ws.test.cjs @@ -60,31 +60,17 @@ describe('neon-ws', async (it) => { expect(db.$client).toBeInstanceOf(Pool); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); - it('drizzle(client, config)', async () => { - const client = new Pool({ - connectionString: process.env['NEON_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.query('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - expect(db.$client).toBeInstanceOf(Pool); - }); - it('drizzle({client, ...config})', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], @@ -102,34 +88,17 @@ describe('neon-ws', async (it) => { }); describe('neon-ws:Client', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Client({ connectionString: process.env['NEON_CONNECTION_STRING'], }); await client.connect(); - const db = drizzle(client); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).toBeInstanceOf(Client); - expect(db.$client).not.toBeInstanceOf(Pool); - }); - - it('drizzle(client, config)', async () => { - const client = new Client({ - connectionString: process.env['NEON_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await client.connect(); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); - expect(db._query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); @@ -154,37 +123,18 @@ describe('neon-ws:Client', async (it) => { }); describe('neon-ws:PoolClient', async (it) => { - it('drizzle(client)', async () => { - const pool = new Pool({ - connectionString: process.env['NEON_CONNECTION_STRING'], - }); - const client = await pool.connect(); - - const db = drizzle(client); - - await db.$client.query('SELECT 1;'); - - client.release(); - - expect(db.$client).toBeInstanceOf(Client); - expect(db.$client).not.toBeInstanceOf(Pool); - }); - - it('drizzle(client, config)', async () => { + it('drizzle({ client })', async () => { const pool = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const client = await pool.connect(); - const db = drizzle(client, { - schema, - }); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); client.release(); - expect(db._query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); diff --git a/integration-tests/js-tests/driver-init/commonjs/node-mssql.test.cjs b/integration-tests/js-tests/driver-init/commonjs/node-mssql.test.cjs new file mode 100644 index 0000000000..f14ec95652 --- /dev/null +++ b/integration-tests/js-tests/driver-init/commonjs/node-mssql.test.cjs @@ -0,0 +1,91 @@ +require('dotenv/config'); +const { drizzle } = require('drizzle-orm/node-mssql'); +const mssql = require('mssql'); +const { mssql: schema } = require('./schema.cjs'); +import { afterAll, beforeAll, describe, expect } from 'vitest'; +import { createDockerDB } from '../../../tests/mssql/mssql-common.ts'; + +const Pool = mssql.ConnectionPool; +let container; +let connectionString; + +describe('node-mssql', async (it) => { + beforeAll(async () => { + if (process.env['MSSQL_CONNECTION_STRING']) { + connectionString = process.env['MSSQL_CONNECTION_STRING']; + } else { + const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); + container = contrainerObj; + connectionString = conStr; + } + + while (true) { + try { + await mssql.connect(connectionString); + break; + } catch (e) { + await new Promise((resolve) => setTimeout(resolve, 1000)); + } + } + }); + + afterAll(async () => { + await container?.stop(); + }); + + it('drizzle(string)', async () => { + const db = drizzle(connectionString); + + const awaitedPool = await db.$client; + + await awaitedPool.query('SELECT 1;'); + + expect(awaitedPool).toBeInstanceOf(Pool); + }); + + it('drizzle(string, config)', async () => { + const db = drizzle(connectionString, { + schema, + }); + + const awaitedPool = await db.$client; + + await awaitedPool.query('SELECT 1;'); + + expect(awaitedPool).toBeInstanceOf(Pool); + // expect(db.query.User).not.toStrictEqual(undefined); + }); + + it('drizzle({connection: string, ...config})', async () => { + const db = drizzle({ + connection: connectionString, + schema, + }); + + const awaitedPool = await db.$client; + + await awaitedPool.query('SELECT 1;'); + + expect(awaitedPool).toBeInstanceOf(Pool); + // expect(db.query.User).not.toStrictEqual(undefined); + }); + + it('drizzle({ client })', async () => { + const client = await mssql.connect(connectionString); + const db = drizzle({ client }); + + await db.$client.query('SELECT 1;'); + + expect(db.$client).toBeInstanceOf(Pool); + }); + + it('drizzle({ client, ...config })', async () => { + const client = await mssql.connect(connectionString); + const db = drizzle({ client, schema }); + + await db.$client.query('SELECT 1;'); + + expect(db.$client).toBeInstanceOf(Pool); + // expect(db.query.User).not.toStrictEqual(undefined); + }); +}); diff --git a/integration-tests/js-tests/driver-init/commonjs/node-pg.test.cjs b/integration-tests/js-tests/driver-init/commonjs/node-pg.test.cjs index be4b9fa540..9960a11cd3 100644 --- a/integration-tests/js-tests/driver-init/commonjs/node-pg.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/node-pg.test.cjs @@ -57,31 +57,17 @@ describe('node-pg', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); - it('drizzle(client, config)', async () => { - const client = new Pool({ - connectionString: process.env['PG_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).toBeInstanceOf(Pool); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], @@ -99,27 +85,11 @@ describe('node-pg', async (it) => { }); describe('node-pg:Client', async (it) => { - it('drizzle(client)', async () => { - const client = new Client({ - connectionString: process.env['PG_CONNECTION_STRING'], - }); - const db = drizzle(client); - - await client.connect(); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).not.toBeInstanceOf(Pool); - expect(db.$client).toBeInstanceOf(Client); - }); - - it('drizzle(client, config)', async () => { + it('drizzle({ client })', async () => { const client = new Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); - const db = drizzle(client, { - schema, - }); + const db = drizzle({ client }); await client.connect(); @@ -127,7 +97,6 @@ describe('node-pg:Client', async (it) => { expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { @@ -150,12 +119,12 @@ describe('node-pg:Client', async (it) => { }); describe('node-pg:PoolClient', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const client = await pool.connect(); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); client.release(); @@ -164,23 +133,6 @@ describe('node-pg:PoolClient', async (it) => { expect(db.$client).toBeInstanceOf(Client); }); - it('drizzle(client, config)', async () => { - const pool = new Pool({ - connectionString: process.env['PG_CONNECTION_STRING'], - }); - const client = await pool.connect(); - const db = drizzle(client, { - schema, - }); - - await db.$client.query('SELECT 1;'); - client.release(); - - expect(db.$client).not.toBeInstanceOf(Pool); - expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/commonjs/pglite.test.cjs b/integration-tests/js-tests/driver-init/commonjs/pglite.test.cjs index ab3b46562b..4ddfd9e7b6 100644 --- a/integration-tests/js-tests/driver-init/commonjs/pglite.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/pglite.test.cjs @@ -53,26 +53,14 @@ describe('pglite', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Database('memory://'); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); - it('drizzle(client, config)', async () => { - const client = new Database('memory://'); - const db = drizzle(client, { - schema, - }); - - await db.$client.exec('SELECT 1;'); - await db.$client.close(); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = new Database('memory://'); const db = drizzle({ diff --git a/integration-tests/js-tests/driver-init/commonjs/planetscale.test.cjs b/integration-tests/js-tests/driver-init/commonjs/planetscale.test.cjs index f30f1d8347..ecc28aef6b 100644 --- a/integration-tests/js-tests/driver-init/commonjs/planetscale.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/planetscale.test.cjs @@ -59,31 +59,17 @@ describe('planetscale', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); }); - it('drizzle(client, config)', async () => { - const client = new Client({ - url: process.env['PLANETSCALE_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.execute('SELECT 1;'); - - expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/commonjs/postgres-js.test.cjs b/integration-tests/js-tests/driver-init/commonjs/postgres-js.test.cjs index cdb864b2ec..3c5319f826 100644 --- a/integration-tests/js-tests/driver-init/commonjs/postgres-js.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/postgres-js.test.cjs @@ -49,24 +49,13 @@ describe('postgres-js', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.unsafe('SELECT 1;'); }); - it('drizzle(client, config)', async () => { - const client = pg(process.env['PG_CONNECTION_STRING']); - const db = drizzle(client, { - schema, - }); - - await db.$client.unsafe('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); const db = drizzle({ diff --git a/integration-tests/js-tests/driver-init/commonjs/schema.cjs b/integration-tests/js-tests/driver-init/commonjs/schema.cjs index 7015a068de..6be9f0563e 100644 --- a/integration-tests/js-tests/driver-init/commonjs/schema.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/schema.cjs @@ -1,6 +1,7 @@ const { int: mysqlInt, mysqlTable } = require('drizzle-orm/mysql-core'); const { integer: pgInt, pgTable } = require('drizzle-orm/pg-core'); const { integer: sqliteInt, sqliteTable } = require('drizzle-orm/sqlite-core'); +const { int: mssqlInt, mssqlTable } = require('drizzle-orm/mssql-core'); module.exports.sqlite = { User: sqliteTable('test', { @@ -19,3 +20,9 @@ module.exports.mysql = { id: mysqlInt('id').primaryKey().notNull(), }), }; + +module.exports.pg = { + User: mssqlTable('test', { + id: pgInt('id').primaryKey(), + }), +}; diff --git a/integration-tests/js-tests/driver-init/commonjs/tidb.test.cjs b/integration-tests/js-tests/driver-init/commonjs/tidb.test.cjs index f2ffc9dbc5..c1dce14c6f 100644 --- a/integration-tests/js-tests/driver-init/commonjs/tidb.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/tidb.test.cjs @@ -51,28 +51,16 @@ describe('tidb', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); }); - it('drizzle(client, config)', async () => { - const client = connect({ - url: process.env['TIDB_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.execute('SELECT 1;'); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/commonjs/vercel.test.cjs b/integration-tests/js-tests/driver-init/commonjs/vercel.test.cjs index 2a5c43dcec..69fb0305ef 100644 --- a/integration-tests/js-tests/driver-init/commonjs/vercel.test.cjs +++ b/integration-tests/js-tests/driver-init/commonjs/vercel.test.cjs @@ -29,7 +29,7 @@ describe('vercel:sql', async (it) => { expect(db.$client).toBeTypeOf('function'); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const db = drizzle(sql); await db.$client.query('SELECT 1;'); @@ -37,17 +37,6 @@ describe('vercel:sql', async (it) => { expect(db.$client).toBeTypeOf('function'); }); - it('drizzle(client, config)', async () => { - const db = drizzle(sql, { - schema, - }); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).toBeTypeOf('function'); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const db = drizzle({ client: sql, @@ -73,31 +62,16 @@ describe('vercel:sql', async (it) => { }); describe('vercel:Pool', async (it) => { - it('drizzle(client)', async () => { - const client = createPool({ - connectionString: process.env['VERCEL_CONNECTION_STRING'], - }); - const db = drizzle(client); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).not.toBeTypeOf('function'); - expect(db.$client).toBeInstanceOf(Pool); - }); - - it('drizzle(client, config)', async () => { + it('drizzle({ client })', async () => { const client = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); - const db = drizzle(client, { - schema, - }); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).toBeInstanceOf(Pool); - expect(db._query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { @@ -118,11 +92,11 @@ describe('vercel:Pool', async (it) => { }); describe('vercel:Client', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await client.connect(); @@ -133,24 +107,6 @@ describe('vercel:Client', async (it) => { expect(db.$client).toBeInstanceOf(Client); }); - it('drizzle(client, config)', async () => { - const client = createClient({ - connectionString: process.env['NEON_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await client.connect(); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).not.toBeTypeOf('function'); - expect(db.$client).not.toBeInstanceOf(Pool); - expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], @@ -172,13 +128,13 @@ describe('vercel:Client', async (it) => { }); describe('vercel:PoolClient', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const client = await pool.connect(); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); client.release(); @@ -188,25 +144,6 @@ describe('vercel:PoolClient', async (it) => { expect(db.$client).toBeInstanceOf(Client); }); - it('drizzle(client, config)', async () => { - const pool = createPool({ - connectionString: process.env['VERCEL_CONNECTION_STRING'], - }); - const client = await pool.connect(); - - const db = drizzle(client, { - schema, - }); - - await db.$client.query('SELECT 1;'); - client.release(); - - expect(db.$client).not.toBeTypeOf('function'); - expect(db.$client).not.toBeInstanceOf(Pool); - expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/module/better-sqlite3.test.mjs b/integration-tests/js-tests/driver-init/module/better-sqlite3.test.mjs index faecae3587..8e95d0d3e2 100644 --- a/integration-tests/js-tests/driver-init/module/better-sqlite3.test.mjs +++ b/integration-tests/js-tests/driver-init/module/better-sqlite3.test.mjs @@ -86,28 +86,15 @@ describe('better-sqlite3', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Database(':memory:'); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); - it('drizzle(client, config)', async () => { - const client = new Database(':memory:'); - const db = drizzle(client, { - schema, - }); - - await db.$client.exec('SELECT 1;'); - - await db.$client.close(); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = new Database(':memory:'); const db = drizzle({ diff --git a/integration-tests/js-tests/driver-init/module/libsql.test.mjs b/integration-tests/js-tests/driver-init/module/libsql.test.mjs index 0a9a3fc789..e0b5a8b5a1 100644 --- a/integration-tests/js-tests/driver-init/module/libsql.test.mjs +++ b/integration-tests/js-tests/driver-init/module/libsql.test.mjs @@ -53,32 +53,17 @@ describe('libsql', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = createClient({ url: ':memory:', }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); await db.$client.close(); }); - it('drizzle(client, config)', async () => { - const client = createClient({ - url: ':memory:', - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.execute('SELECT 1;'); - - await db.$client.close(); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = createClient({ url: ':memory:', diff --git a/integration-tests/js-tests/driver-init/module/mysql2.test.mjs b/integration-tests/js-tests/driver-init/module/mysql2.test.mjs index a10e80b274..e94076a2c5 100644 --- a/integration-tests/js-tests/driver-init/module/mysql2.test.mjs +++ b/integration-tests/js-tests/driver-init/module/mysql2.test.mjs @@ -61,33 +61,18 @@ describe('mysql2', async (it) => { expect(db.$client.getConnection).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); - it('drizzle(client, config)', async () => { - const client = createPool({ - uri: process.env['MYSQL_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - mode: 'default', - }); - - await db.$client.execute('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - expect(db.$client.getConnection).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], @@ -107,33 +92,18 @@ describe('mysql2', async (it) => { }); describe('mysql2:connection', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); expect(db.$client.getConnection).toStrictEqual(undefined); }); - it('drizzle(client, config)', async () => { - const client = createConnection({ - uri: process.env['MYSQL_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - mode: 'default', - }); - - await db.$client.execute('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - expect(db.$client.getConnection).toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/module/neon-http.test.mjs b/integration-tests/js-tests/driver-init/module/neon-http.test.mjs index 512edc28e3..36c899a673 100644 --- a/integration-tests/js-tests/driver-init/module/neon-http.test.mjs +++ b/integration-tests/js-tests/driver-init/module/neon-http.test.mjs @@ -54,28 +54,15 @@ describe('neon-http', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], ); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client('SELECT 1;'); }); - it('drizzle(client, config)', async () => { - const client = pg( - process.env['NEON_CONNECTION_STRING'], - ); - const db = drizzle(client, { - schema, - }); - - await db.$client('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/module/neon-ws.test.mjs b/integration-tests/js-tests/driver-init/module/neon-ws.test.mjs index 3a21b754e3..d2faeb9593 100644 --- a/integration-tests/js-tests/driver-init/module/neon-ws.test.mjs +++ b/integration-tests/js-tests/driver-init/module/neon-ws.test.mjs @@ -60,31 +60,17 @@ describe('neon-ws', async (it) => { expect(db.$client).toBeInstanceOf(Pool); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); - it('drizzle(client, config)', async () => { - const client = new Pool({ - connectionString: process.env['NEON_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.query('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - expect(db.$client).toBeInstanceOf(Pool); - }); - it('drizzle({client, ...config})', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], @@ -102,34 +88,17 @@ describe('neon-ws', async (it) => { }); describe('neon-ws:Client', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Client({ connectionString: process.env['NEON_CONNECTION_STRING'], }); await client.connect(); - const db = drizzle(client); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).toBeInstanceOf(Client); - expect(db.$client).not.toBeInstanceOf(Pool); - }); - - it('drizzle(client, config)', async () => { - const client = new Client({ - connectionString: process.env['NEON_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await client.connect(); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); - expect(db._query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); @@ -154,37 +123,18 @@ describe('neon-ws:Client', async (it) => { }); describe('neon-ws:PoolClient', async (it) => { - it('drizzle(client)', async () => { - const pool = new Pool({ - connectionString: process.env['NEON_CONNECTION_STRING'], - }); - const client = await pool.connect(); - - const db = drizzle(client); - - await db.$client.query('SELECT 1;'); - - client.release(); - - expect(db.$client).toBeInstanceOf(Client); - expect(db.$client).not.toBeInstanceOf(Pool); - }); - - it('drizzle(client, config)', async () => { + it('drizzle({ client })', async () => { const pool = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const client = await pool.connect(); - const db = drizzle(client, { - schema, - }); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); client.release(); - expect(db._query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); diff --git a/integration-tests/js-tests/driver-init/module/node-mssql.test.mjs b/integration-tests/js-tests/driver-init/module/node-mssql.test.mjs new file mode 100644 index 0000000000..5dbaccd150 --- /dev/null +++ b/integration-tests/js-tests/driver-init/module/node-mssql.test.mjs @@ -0,0 +1,62 @@ +import 'dotenv/config'; +import { drizzle } from 'drizzle-orm/node-mssql'; +import { ConnectionPool as Pool } from 'mssql'; +import { expect } from 'vitest'; +import { test } from '../../../tests/mssql/instrumentation'; +import * as schema from './schema.mjs'; + +test('mssql:drizzle(string)', async ({ url2 }) => { + const db = drizzle(url2); + + const awaitedPool = await db.$client; + + await awaitedPool.query('SELECT 1;'); + + expect(awaitedPool).toBeInstanceOf(Pool); +}); + +test('mssql:drizzle(string, config)', async ({ url2 }) => { + const db = drizzle(url2, { + schema, + }); + + const awaitedPool = await db.$client; + + await awaitedPool.query('SELECT 1;'); + + expect(awaitedPool).toBeInstanceOf(Pool); + // expect(db.query.User).not.toStrictEqual(undefined); +}); + +test('mssql:drizzle({connection: string, ...config})', async ({ url2 }) => { + const db = drizzle({ + connection: url2, + schema, + }); + + const awaitedPool = await db.$client; + + await awaitedPool.query('SELECT 1;'); + + expect(awaitedPool).toBeInstanceOf(Pool); + // expect(db.query.User).not.toStrictEqual(undefined); +}); + +test('mssql:drizzle(client)', async ({ url, client }) => { + const db = drizzle(client); + + await db.$client.query('SELECT 1;'); + + expect(db.$client).toBeInstanceOf(Pool); +}); + +test('mssql:drizzle(client, config)', async ({ url, client }) => { + const db = drizzle(client, { + schema, + }); + + await db.$client.query('SELECT 1;'); + + expect(db.$client).toBeInstanceOf(Pool); + // expect(db.query.User).not.toStrictEqual(undefined); +}); diff --git a/integration-tests/js-tests/driver-init/module/node-pg.test.mjs b/integration-tests/js-tests/driver-init/module/node-pg.test.mjs index 25bf4da5e1..54b27739c7 100644 --- a/integration-tests/js-tests/driver-init/module/node-pg.test.mjs +++ b/integration-tests/js-tests/driver-init/module/node-pg.test.mjs @@ -57,31 +57,17 @@ describe('node-pg', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); - it('drizzle(client, config)', async () => { - const client = new Pool({ - connectionString: process.env['PG_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).toBeInstanceOf(Pool); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], @@ -99,27 +85,11 @@ describe('node-pg', async (it) => { }); describe('node-pg:Client', async (it) => { - it('drizzle(client)', async () => { - const client = new Client({ - connectionString: process.env['PG_CONNECTION_STRING'], - }); - const db = drizzle(client); - - await client.connect(); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).not.toBeInstanceOf(Pool); - expect(db.$client).toBeInstanceOf(Client); - }); - - it('drizzle(client, config)', async () => { + it('drizzle({ client })', async () => { const client = new Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); - const db = drizzle(client, { - schema, - }); + const db = drizzle({ client }); await client.connect(); @@ -127,7 +97,6 @@ describe('node-pg:Client', async (it) => { expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { @@ -150,12 +119,12 @@ describe('node-pg:Client', async (it) => { }); describe('node-pg:PoolClient', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const client = await pool.connect(); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); client.release(); @@ -164,23 +133,6 @@ describe('node-pg:PoolClient', async (it) => { expect(db.$client).toBeInstanceOf(Client); }); - it('drizzle(client, config)', async () => { - const pool = new Pool({ - connectionString: process.env['PG_CONNECTION_STRING'], - }); - const client = await pool.connect(); - const db = drizzle(client, { - schema, - }); - - await db.$client.query('SELECT 1;'); - client.release(); - - expect(db.$client).not.toBeInstanceOf(Pool); - expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/module/pglite.test.mjs b/integration-tests/js-tests/driver-init/module/pglite.test.mjs index 67d8cc5bd2..4e4b12deab 100644 --- a/integration-tests/js-tests/driver-init/module/pglite.test.mjs +++ b/integration-tests/js-tests/driver-init/module/pglite.test.mjs @@ -53,26 +53,14 @@ describe('pglite', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Database('memory://'); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); - it('drizzle(client, config)', async () => { - const client = new Database('memory://'); - const db = drizzle(client, { - schema, - }); - - await db.$client.exec('SELECT 1;'); - await db.$client.close(); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = new Database('memory://'); const db = drizzle({ diff --git a/integration-tests/js-tests/driver-init/module/planetscale.test.mjs b/integration-tests/js-tests/driver-init/module/planetscale.test.mjs index a9b3d98503..142b8aadd4 100644 --- a/integration-tests/js-tests/driver-init/module/planetscale.test.mjs +++ b/integration-tests/js-tests/driver-init/module/planetscale.test.mjs @@ -59,31 +59,17 @@ describe('planetscale', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); }); - it('drizzle(client, config)', async () => { - const client = new Client({ - url: process.env['PLANETSCALE_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.execute('SELECT 1;'); - - expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/module/postgres-js.test.mjs b/integration-tests/js-tests/driver-init/module/postgres-js.test.mjs index 7e4c36f26b..305f9069f5 100644 --- a/integration-tests/js-tests/driver-init/module/postgres-js.test.mjs +++ b/integration-tests/js-tests/driver-init/module/postgres-js.test.mjs @@ -49,24 +49,13 @@ describe('postgres-js', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.unsafe('SELECT 1;'); }); - it('drizzle(client, config)', async () => { - const client = pg(process.env['PG_CONNECTION_STRING']); - const db = drizzle(client, { - schema, - }); - - await db.$client.unsafe('SELECT 1;'); - - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); const db = drizzle({ diff --git a/integration-tests/js-tests/driver-init/module/tidb.test.mjs b/integration-tests/js-tests/driver-init/module/tidb.test.mjs index df73e3bbec..9ea64b210a 100644 --- a/integration-tests/js-tests/driver-init/module/tidb.test.mjs +++ b/integration-tests/js-tests/driver-init/module/tidb.test.mjs @@ -51,28 +51,16 @@ describe('tidb', async (it) => { expect(db._query.User).not.toStrictEqual(undefined); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.execute('SELECT 1;'); }); - it('drizzle(client, config)', async () => { - const client = connect({ - url: process.env['TIDB_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await db.$client.execute('SELECT 1;'); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], diff --git a/integration-tests/js-tests/driver-init/module/vercel.test.mjs b/integration-tests/js-tests/driver-init/module/vercel.test.mjs index ebc3f14c7c..cc7fe45761 100644 --- a/integration-tests/js-tests/driver-init/module/vercel.test.mjs +++ b/integration-tests/js-tests/driver-init/module/vercel.test.mjs @@ -29,7 +29,7 @@ describe('vercel:sql', async (it) => { expect(db.$client).toBeTypeOf('function'); }); - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const db = drizzle(sql); await db.$client.query('SELECT 1;'); @@ -37,17 +37,6 @@ describe('vercel:sql', async (it) => { expect(db.$client).toBeTypeOf('function'); }); - it('drizzle(client, config)', async () => { - const db = drizzle(sql, { - schema, - }); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).toBeTypeOf('function'); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const db = drizzle({ client: sql, @@ -73,31 +62,16 @@ describe('vercel:sql', async (it) => { }); describe('vercel:Pool', async (it) => { - it('drizzle(client)', async () => { - const client = createPool({ - connectionString: process.env['VERCEL_CONNECTION_STRING'], - }); - const db = drizzle(client); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).not.toBeTypeOf('function'); - expect(db.$client).toBeInstanceOf(Pool); - }); - - it('drizzle(client, config)', async () => { + it('drizzle({ client })', async () => { const client = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); - const db = drizzle(client, { - schema, - }); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).toBeInstanceOf(Pool); - expect(db._query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { @@ -118,11 +92,11 @@ describe('vercel:Pool', async (it) => { }); describe('vercel:Client', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], }); - const db = drizzle(client); + const db = drizzle({ client }); await client.connect(); @@ -133,24 +107,6 @@ describe('vercel:Client', async (it) => { expect(db.$client).toBeInstanceOf(Client); }); - it('drizzle(client, config)', async () => { - const client = createClient({ - connectionString: process.env['NEON_CONNECTION_STRING'], - }); - const db = drizzle(client, { - schema, - }); - - await client.connect(); - - await db.$client.query('SELECT 1;'); - - expect(db.$client).not.toBeTypeOf('function'); - expect(db.$client).not.toBeInstanceOf(Pool); - expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], @@ -172,13 +128,13 @@ describe('vercel:Client', async (it) => { }); describe('vercel:PoolClient', async (it) => { - it('drizzle(client)', async () => { + it('drizzle({ client })', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const client = await pool.connect(); - const db = drizzle(client); + const db = drizzle({ client }); await db.$client.query('SELECT 1;'); client.release(); @@ -188,25 +144,6 @@ describe('vercel:PoolClient', async (it) => { expect(db.$client).toBeInstanceOf(Client); }); - it('drizzle(client, config)', async () => { - const pool = createPool({ - connectionString: process.env['VERCEL_CONNECTION_STRING'], - }); - const client = await pool.connect(); - - const db = drizzle(client, { - schema, - }); - - await db.$client.query('SELECT 1;'); - client.release(); - - expect(db.$client).not.toBeTypeOf('function'); - expect(db.$client).not.toBeInstanceOf(Pool); - expect(db.$client).toBeInstanceOf(Client); - expect(db._query.User).not.toStrictEqual(undefined); - }); - it('drizzle({client, ...config})', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], diff --git a/integration-tests/package.json b/integration-tests/package.json index cdb08ca21d..06bfdad79f 100644 --- a/integration-tests/package.json +++ b/integration-tests/package.json @@ -6,9 +6,11 @@ "scripts": { "test:types": "tsc && cd type-tests/join-nodenext && tsc", "test": "pnpm test:vitest", - "test:vitest": "vitest run --pass-with-no-tests", + "test:vitest": "vitest run --printConsoleTrace=true --silent=false --pass-with-no-tests", "test:esm": "node tests/imports.test.mjs && node tests/imports.test.cjs", - "test:data-api": "sst shell vitest run tests/pg/awsdatapi.test.ts" + "test:data-api": "sst shell vitest run tests/pg/awsdatapi.test.ts", + "test:mysql": "vitest run --reporter=verbose --silent=false run tests/mysql/default tests/relational/mysql.test.ts tests/relational/mysql-v1.test.ts", + "test:planetscale": "vitest run --reporter=verbose --silent=false run tests/mysql/planetscale.test.ts tests/relational/mysql.planetscale" }, "keywords": [], "author": "Drizzle Team", @@ -23,13 +25,13 @@ "@types/async-retry": "^1.4.8", "@types/better-sqlite3": "^7.6.4", "@types/dockerode": "^3.3.18", + "@types/mssql": "^9.1.4", "@types/node": "^20.2.5", "@types/pg": "^8.10.1", "@types/sql.js": "^1.4.4", "@types/uuid": "^9.0.1", "@types/ws": "^8.5.10", "@upstash/redis": "^1.34.3", - "@vitest/ui": "^1.6.0", "ava": "^5.3.0", "bun-types": "^1.2.23", "cross-env": "^7.0.3", @@ -44,7 +46,6 @@ "@aws-sdk/client-rds-data": "^3.549.0", "@aws-sdk/credential-providers": "^3.549.0", "@electric-sql/pglite": "0.2.12", - "@libsql/client": "^0.10.0", "@miniflare/d1": "^2.14.4", "@miniflare/shared": "^2.14.4", "@planetscale/database": "^1.16.0", @@ -69,6 +70,7 @@ "drizzle-zod": "workspace:../drizzle-zod/dist", "gel": "^2.0.0", "get-port": "^7.0.0", + "mssql": "^11.0.1", "mysql2": "^3.14.1", "pg": "^8.11.0", "postgres": "^3.3.5", @@ -79,7 +81,6 @@ "sst": "^3.14.24", "uuid": "^9.0.0", "uvu": "^0.5.6", - "vitest": "^3.2.4", "ws": "^8.18.2", "zod": "^3.20.2" } diff --git a/integration-tests/tests/bun/bun-mysql.test.ts b/integration-tests/tests/bun/bun-mysql.test.ts index f7897dd631..f85248cca2 100644 --- a/integration-tests/tests/bun/bun-mysql.test.ts +++ b/integration-tests/tests/bun/bun-mysql.test.ts @@ -79,7 +79,6 @@ import { unionAll, unique, uniqueIndex, - uniqueKeyName, varbinary, varchar, year, @@ -310,9 +309,9 @@ beforeAll(async () => { client?.end(); }, }); - db = drizzle.mysql(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle.mysql(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle.mysql(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); + db = drizzle.mysql({ client, logger: ENABLE_LOGGING, relations }); + cachedDb = drizzle.mysql({ client, logger: ENABLE_LOGGING, cache: new TestCache() }); + dbGlobalCached = drizzle.mysql({ client, logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { @@ -580,21 +579,6 @@ describe('common', () => { expect(tableConfig.foreignKeys[0]!.getName()).toStrictEqual('custom_fk'); }); - test('table config: primary keys name', async () => { - const table = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); - - const tableConfig = getTableConfig(table); - - expect(tableConfig.primaryKeys).toHaveLength(1); - expect(tableConfig.primaryKeys[0]!.getName()).toStrictEqual('custom_pk'); - }); - test('table configs: unique third param', async () => { const cities1Table = mysqlTable('cities1', { id: serial('id').primaryKey(), @@ -627,7 +611,7 @@ describe('common', () => { const tableConfig = getTableConfig(cities1Table); const columnName = tableConfig.columns.find((it) => it.name === 'name'); - expect(columnName?.uniqueName).toStrictEqual(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.uniqueName).toStrictEqual(undefined); expect(columnName?.isUnique).toBeTruthy(); const columnState = tableConfig.columns.find((it) => it.name === 'state'); @@ -2207,8 +2191,7 @@ describe('common', () => { await db.execute(sql`drop table ${products}`); }); - // test.skipIf doesn't work - (Date.now() > new Date('2025.10.17').getTime() ? test : test.skip)( + test.skip( 'transaction with options (set isolationLevel)', async () => { const users = mysqlTable('users_transactions', { @@ -3869,7 +3852,7 @@ describe('common', () => { id: int(), }, (t) => [ index('name').on(t.id), - primaryKey({ columns: [t.id], name: 'custom' }), + primaryKey({ columns: [t.id] }), ]); const { indexes, primaryKeys } = getTableConfig(table); @@ -3882,7 +3865,7 @@ describe('common', () => { const table = mysqlTable('name', { id: int(), }, (t) => [ - [index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' })], + [index('name').on(t.id), primaryKey({ columns: [t.id] })], ]); const { indexes, primaryKeys } = getTableConfig(table); diff --git a/integration-tests/tests/bun/bun-sql.test.ts b/integration-tests/tests/bun/bun-sql.test.ts index 51ae9b45f4..1b6e051822 100644 --- a/integration-tests/tests/bun/bun-sql.test.ts +++ b/integration-tests/tests/bun/bun-sql.test.ts @@ -1,4 +1,3 @@ -import retry from 'async-retry'; import { SQL as BunSQL } from 'bun'; import { afterAll, afterEach, beforeAll, beforeEach, expect, test } from 'bun:test'; import type Docker from 'dockerode'; @@ -92,9 +91,9 @@ import { uuid as pgUuid, varchar, } from 'drizzle-orm/pg-core'; -import relations from '~/pg/relations'; import { clear, init, rqbPost, rqbUser } from '~/pg/schema'; import { Expect } from '~/utils'; +import { relations } from '../pg/relations'; export const usersTable = pgTable('users', { id: serial('id' as string).primaryKey(), @@ -350,37 +349,20 @@ const allTypesTable = pgTable('all_types', { }); // oxlint-disable-next-line no-unassigned-vars -let pgContainer: Docker.Container | undefined; +let pgContainer: Docker.Container | undefined; // oxlint-disable-line no-unassigned-vars afterAll(async () => { await pgContainer?.stop().catch(console.error); }); let db: BunSQLDatabase; -let client: BunSQL; beforeAll(async () => { - const connectionString = process.env['PG_CONNECTION_STRING']; - client = await retry(async () => { - // @ts-expect-error - const connClient = new BunSQL(connectionString, { max: 1 }); - await connClient.unsafe(`select 1`); - return connClient; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - db = drizzle(client, { logger: false, relations }); -}); + const connectionString = process.env['PG_CONNECTION_STRING']!; + const connClient = new BunSQL(connectionString, { max: 1 }); + await connClient.unsafe(`select 1`); -afterAll(async () => { - await client?.end(); + db = drizzle({ client: connClient, logger: false, relations }); }); beforeEach(async () => { @@ -592,10 +574,7 @@ test('table configs: unique third param', async () => { id: serial('id').primaryKey(), name: text('name').notNull(), state: char('state', { length: 2 }), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), - f1: unique('custom_name1').on(t.name, t.state), - })); + }, (t) => [unique('custom_name').on(t.name, t.state).nullsNotDistinct(), unique('custom_name1').on(t.name, t.state)]); const tableConfig = getTableConfig(cities1Table); @@ -622,7 +601,7 @@ test('table configs: unique in column', async () => { const columnName = tableConfig.columns.find((it) => it.name === 'name'); - expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.uniqueName).toBe(undefined); expect(columnName?.isUnique).toBe(true); const columnState = tableConfig.columns.find((it) => it.name === 'state'); @@ -640,9 +619,7 @@ test('table config: foreign keys name', async () => { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), - }, (t) => ({ - f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), - })); + }, (t) => [foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' })]); const tableConfig = getTableConfig(table); @@ -655,9 +632,9 @@ test('table config: primary keys name', async () => { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); + }, (t) => [ + primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), + ]); const tableConfig = getTableConfig(table); @@ -4441,7 +4418,7 @@ test.skip('proper json and jsonb handling', async () => { ]); }); -test.todo('set json/jsonb fields with objects and retrieve with the ->> operator', async () => { +test.skip('set json/jsonb fields with objects and retrieve with the ->> operator', async () => { const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; @@ -4465,7 +4442,7 @@ test.todo('set json/jsonb fields with objects and retrieve with the ->> operator }]); }); -test.todo('set json/jsonb fields with strings and retrieve with the ->> operator', async () => { +test.skip('set json/jsonb fields with strings and retrieve with the ->> operator', async () => { const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; @@ -4489,7 +4466,7 @@ test.todo('set json/jsonb fields with strings and retrieve with the ->> operator }]); }); -test.todo('set json/jsonb fields with objects and retrieve with the -> operator', async () => { +test.skip('set json/jsonb fields with objects and retrieve with the -> operator', async () => { const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; @@ -4513,7 +4490,7 @@ test.todo('set json/jsonb fields with objects and retrieve with the -> operator' }]); }); -test.todo('set json/jsonb fields with strings and retrieve with the -> operator', async () => { +test.skip('set json/jsonb fields with strings and retrieve with the -> operator', async () => { const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; @@ -4866,10 +4843,10 @@ test('policy', () => { const table = pgTable('table_with_policy', { id: serial('id').primaryKey(), name: text('name').notNull(), - }, () => ({ + }, () => [ p1, p2, - })); + ]); const config = getTableConfig(table); expect(config.policies).toHaveLength(2); expect(config.policies[0]).toBe(p1); @@ -4888,10 +4865,13 @@ test('neon: policy', () => { for (const it of Object.values(policy)) { expect(is(it, PgPolicy)).toBe(true); expect(it?.to).toStrictEqual(authenticatedRole); - // oxlint-disable-next-line no-unused-expressions - it?.using ? expect(it.using).toStrictEqual(sql`true`) : ''; - // oxlint-disable-next-line no-unused-expressions - it?.withCheck ? expect(it.withCheck).toStrictEqual(sql`true`) : ''; + + if (it?.using) { + expect(it.using).toStrictEqual(sql`true`); + } + if (it?.withCheck) { + expect(it.withCheck).toStrictEqual(sql`true`); + } } } @@ -4930,9 +4910,9 @@ test('neon: neon_auth', () => { }); test('Enable RLS function', () => { - const usersWithRLS = pgTable('users', { + const usersWithRLS = pgTable.withRLS('users', { id: integer(), - }).enableRLS(); + }); const config1 = getTableConfig(usersWithRLS); diff --git a/integration-tests/tests/bun/bun-sqlite.test.ts b/integration-tests/tests/bun/bun-sqlite.test.ts index f307d6a958..714d309f12 100644 --- a/integration-tests/tests/bun/bun-sqlite.test.ts +++ b/integration-tests/tests/bun/bun-sqlite.test.ts @@ -236,9 +236,9 @@ let client: SQL; beforeAll(async () => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; client = new SQL(dbPath); - db = drizzle.sqlite(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle.sqlite(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle.sqlite(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); + db = drizzle.sqlite({ client, logger: ENABLE_LOGGING, relations }); + cachedDb = drizzle.sqlite({ client, logger: ENABLE_LOGGING, cache: new TestCache() }); + dbGlobalCached = drizzle.sqlite({ client, logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { @@ -3866,7 +3866,7 @@ test('table configs: unique in column', () => { const columnName = tableConfig.columns.find((it) => it.name === 'name'); expect(columnName?.isUnique).toBeTruthy(); - expect(columnName?.uniqueName).toStrictEqual(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.uniqueName).toStrictEqual(undefined); const columnState = tableConfig.columns.find((it) => it.name === 'state'); expect(columnState?.isUnique).toBeTruthy(); @@ -3874,7 +3874,7 @@ test('table configs: unique in column', () => { const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.isUnique).toBeTruthy(); - expect(columnField?.uniqueName).toStrictEqual(uniqueKeyName(cities1Table, [columnField!.name])); + expect(columnField?.uniqueName).toStrictEqual(undefined); }); test('update ... from', async () => { diff --git a/integration-tests/tests/bun/sqlite-nw.test.ts b/integration-tests/tests/bun/sqlite-nw.test.ts index ce35410e8b..7ed6676642 100644 --- a/integration-tests/tests/bun/sqlite-nw.test.ts +++ b/integration-tests/tests/bun/sqlite-nw.test.ts @@ -34,7 +34,7 @@ test.before((ctx) => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; const client = new Database(dbPath); - ctx.db = drizzle(client, { logger: new DefaultLogger() }); + ctx.db = drizzle({ client, logger: new DefaultLogger() }); } catch (e) { console.error(e); } diff --git a/integration-tests/tests/bun/sqlite.test.ts b/integration-tests/tests/bun/sqlite.test.ts index fafd3995e9..e4699ccc78 100644 --- a/integration-tests/tests/bun/sqlite.test.ts +++ b/integration-tests/tests/bun/sqlite.test.ts @@ -21,7 +21,7 @@ beforeAll(async () => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; const client = new Database(dbPath); - db = drizzle(client); + db = drizzle({ client }); } catch (e) { console.error(e); } diff --git a/integration-tests/tests/pg/vercel-pg.test.ts b/integration-tests/tests/cockroach/cockroach.test.ts similarity index 63% rename from integration-tests/tests/pg/vercel-pg.test.ts rename to integration-tests/tests/cockroach/cockroach.test.ts index 99367d9b93..f9a1edca4b 100644 --- a/integration-tests/tests/pg/vercel-pg.test.ts +++ b/integration-tests/tests/cockroach/cockroach.test.ts @@ -1,57 +1,43 @@ -import { createClient, type VercelClient } from '@vercel/postgres'; +import retry from 'async-retry'; import { sql } from 'drizzle-orm'; -import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; -import { drizzle, type VercelPgDatabase } from 'drizzle-orm/vercel-postgres'; -import { migrate } from 'drizzle-orm/vercel-postgres/migrator'; +import type { NodeCockroachDatabase } from 'drizzle-orm/cockroach'; +import { drizzle } from 'drizzle-orm/cockroach'; +import { cockroachTable, getTableConfig, int4, timestamp } from 'drizzle-orm/cockroach-core'; +import { migrate } from 'drizzle-orm/cockroach/migrator'; +import { Client } from 'pg'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; -import { createDockerDB, tests, tests as cacheTests, usersMigratorTable, usersTable } from './pg-common'; -import { TestCache, TestGlobalCache } from './pg-common-cache'; -import relations from './relations'; +import { createDockerDB, tests, usersMigratorTable, usersTable } from './common'; const ENABLE_LOGGING = false; -let db: VercelPgDatabase; -let dbGlobalCached: VercelPgDatabase; -let cachedDb: VercelPgDatabase; -let client: VercelClient; +let db: NodeCockroachDatabase; +let client: Client; beforeAll(async () => { let connectionString; - if (process.env['PG_CONNECTION_STRING']) { - connectionString = process.env['PG_CONNECTION_STRING']; + if (process.env['COCKROACH_CONNECTION_STRING']) { + connectionString = process.env['COCKROACH_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = createClient({ connectionString }); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.log(connectionString); - console.error('Cannot connect to Postgres'); - await client?.end().catch(console.error); - // await pgContainer?.stop().catch(console.error); - throw lastError; - } - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); + client = await retry(async () => { + client = new Client(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle({ client, logger: ENABLE_LOGGING }); }); afterAll(async () => { @@ -59,21 +45,16 @@ afterAll(async () => { }); beforeEach((ctx) => { - ctx.pg = { + ctx.cockroach = { db, }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; }); test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - await migrate(db, { migrationsFolder: './drizzle2/pg' }); + await migrate(db, { migrationsFolder: './drizzle2/cockroach' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); @@ -81,20 +62,19 @@ test('migrator : default migration strategy', async () => { expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom schema', async () => { - await db.execute(sql`drop table if exists all_columns`); + const customSchema = randomString(); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); + await migrate(db, { migrationsFolder: './drizzle2/cockroach', migrationsSchema: customSchema }); // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); + const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected @@ -102,18 +82,16 @@ test('migrator : migrate with custom schema', async () => { const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); - await db.execute(sql`drop table custom_migrations."__drizzle_migrations"`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + await migrate(db, { migrationsFolder: './drizzle2/cockroach', migrationsTable: customTable }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); @@ -124,26 +102,25 @@ test('migrator : migrate with custom table', async () => { const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); + const customSchema = randomString(); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { - migrationsFolder: './drizzle2/pg', + migrationsFolder: './drizzle2/cockroach', migrationsTable: customTable, - migrationsSchema: 'custom_migrations', + migrationsSchema: customSchema, }); // test if the custom migrations table was created const { rowCount } = await db.execute( - sql`select * from custom_migrations.${sql.identifier(customTable)};`, + sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, ); expect(rowCount && rowCount > 0).toBeTruthy(); @@ -152,14 +129,118 @@ test('migrator : migrate with custom table and custom schema', async () => { const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); - await db.execute(sql`drop table custom_migrations.${sql.identifier(customTable)}`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); +}); + +test('migrator : --init', async () => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/cockroach', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_schema = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND table_name = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async () => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/cockroach-init', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_schema = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND table_name = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(res.rows[0]?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async () => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + + await migrate(db, { + migrationsFolder: './drizzle2/cockroach-init', + migrationsSchema, + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/cockroach', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_schema = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND table_name = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(2); + expect(res.rows[0]?.tableExists).toStrictEqual(true); }); test('all date and time columns without timezone first case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); @@ -167,14 +248,14 @@ test('all date and time columns without timezone first case mode string', async await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format without timezone in it await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, + { id: 1, timestamp: '2022-01-01 02:00:00.123456' }, ]); // 2, Select in string format and check that values are the same @@ -194,8 +275,8 @@ test('all date and time columns without timezone first case mode string', async }); test('all date and time columns without timezone second case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); @@ -203,7 +284,7 @@ test('all date and time columns without timezone second case mode string', async await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated by default as identity, timestamp_string timestamp(6) not null ) `); @@ -225,8 +306,8 @@ test('all date and time columns without timezone second case mode string', async }); test('all date and time columns without timezone third case mode date', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), }); @@ -234,7 +315,7 @@ test('all date and time columns without timezone third case mode date', async () await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated always as identity, timestamp_string timestamp(3) not null ) `); @@ -248,7 +329,7 @@ test('all date and time columns without timezone third case mode date', async () // 2, Select as raw query as string const result = await db.execute<{ - id: number; + id: string; timestamp_string: string; }>(sql`select * from ${table}`); @@ -259,8 +340,8 @@ test('all date and time columns without timezone third case mode date', async () }); test('test mode string for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); @@ -268,7 +349,7 @@ test('test mode string for timestamp with timezone', async () => { await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated by default as identity, timestamp_string timestamp(6) with time zone not null ) `); @@ -283,24 +364,22 @@ test('test mode string for timestamp with timezone', async () => { // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ - id: number; + id: string; timestamp_string: string; }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode date for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); @@ -308,31 +387,26 @@ test('test mode date for timestamp with timezone', async () => { await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated by default as identity, timestamp_string timestamp(3) with time zone not null ) `); const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); - // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: timestampString }]); - // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ - id: number; + id: string; timestamp_string: string; }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); await db.execute(sql`drop table if exists ${table}`); @@ -345,8 +419,8 @@ test('test mode string for timestamp with timezone in UTC timezone', async () => // set timezone to UTC await db.execute(sql`set time zone 'UTC'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); @@ -354,31 +428,26 @@ test('test mode string for timestamp with timezone in UTC timezone', async () => await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated by default as identity, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); - // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ - id: number; + id: string; timestamp_string: string; }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); @@ -393,8 +462,8 @@ test('test mode string for timestamp with timezone in different timezone', async // set timezone to HST (UTC - 10) await db.execute(sql`set time zone 'HST'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); @@ -402,7 +471,7 @@ test('test mode string for timestamp with timezone in different timezone', async await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated by default as identity, timestamp_string timestamp(6) with time zone not null ) `); @@ -421,7 +490,7 @@ test('test mode string for timestamp with timezone in different timezone', async // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ - id: number; + id: string; timestamp_string: string; }>(sql`select * from ${table}`); @@ -446,19 +515,16 @@ skipTests([ 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'test mode string for timestamp with timezone in different timezone', - 'build query insert with onConflict do nothing + target', // - 'select from tables with same name from different schema using alias', // ]); tests(); -cacheTests(); beforeEach(async () => { - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`create schema public`); + await db.execute(sql`drop database defaultdb;`); + await db.execute(sql`create database defaultdb;`); await db.execute( sql` create table users ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, verified boolean not null default false, jsonb jsonb, @@ -473,14 +539,14 @@ test('insert via db.execute + select via db.execute', async () => { sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, ); - const result = await db.execute<{ id: number; name: string }>( + const result = await db.execute<{ id: string; name: string }>( sql`select id, name from "users"`, ); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { - const inserted = await db.execute<{ id: number; name: string }>( + const inserted = await db.execute<{ id: string; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, diff --git a/integration-tests/tests/pg/pg-common.ts b/integration-tests/tests/cockroach/common.ts similarity index 77% rename from integration-tests/tests/pg/pg-common.ts rename to integration-tests/tests/cockroach/common.ts index 46751b44e1..18c106ca40 100644 --- a/integration-tests/tests/pg/pg-common.ts +++ b/integration-tests/tests/cockroach/common.ts @@ -33,108 +33,81 @@ import { sumDistinct, TransactionRollbackError, } from 'drizzle-orm'; -import { authenticatedRole, crudPolicy, usersSync } from 'drizzle-orm/neon'; -import type { NeonHttpDatabase } from 'drizzle-orm/neon-http'; -import type { PgColumn, PgDatabase, PgQueryResultHKT } from 'drizzle-orm/pg-core'; +import type { CockroachColumn, CockroachDatabase, CockroachQueryResultHKT } from 'drizzle-orm/cockroach-core'; import { alias, bigint, - bigserial, + bit, + bool, boolean, - bytea, char, - cidr, + CockroachDialect, + cockroachEnum, + cockroachMaterializedView, + CockroachPolicy, + cockroachPolicy, + cockroachSchema, + cockroachTable, + cockroachTableCreator, + cockroachView, date, doublePrecision, except, exceptAll, + float, foreignKey, getMaterializedViewConfig, getTableConfig, getViewConfig, - index, inet, - integer, + int4, intersect, intersectAll, interval, - json, jsonb, - line, - macaddr, - macaddr8, numeric, - PgDialect, - pgEnum, - pgMaterializedView, - PgPolicy, - pgPolicy, - pgSchema, - pgTable, - pgTableCreator, - pgView, - point, primaryKey, real, - serial, smallint, - smallserial, + string, text, time, timestamp, union, unionAll, unique, - uniqueKeyName, uuid, - uuid as pgUuid, + uuid as cockroachUuid, + varbit, varchar, -} from 'drizzle-orm/pg-core'; +} from 'drizzle-orm/cockroach-core'; import getPort from 'get-port'; import { v4 as uuidV4 } from 'uuid'; import { afterAll, afterEach, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; import { Expect } from '~/utils'; -import type { neonRelations, schema } from './neon-http-batch.test'; -import type relations from './relations'; -import { clear, init, rqbPost, rqbUser } from './schema'; -// eslint-disable-next-line @typescript-eslint/no-import-type-side-effects -// import { type NodePgDatabase } from 'drizzle-orm/node-postgres'; declare module 'vitest' { interface TestContext { - pg: { - db: PgDatabase; - }; - neonPg: { - db: NeonHttpDatabase; + cockroach: { + db: CockroachDatabase; }; } } -const en = pgEnum('en', ['enVal1', 'enVal2']); +const en = cockroachEnum('en', ['enVal1', 'enVal2']); -export const allTypesTable = pgTable('all_types', { - serial: serial('serial'), - bigserial53: bigserial('bigserial53', { - mode: 'number', - }), - bigserial64: bigserial('bigserial64', { - mode: 'bigint', - }), - int: integer('int'), +const allTypesTable = cockroachTable('all_types', { + int4: int4('int4'), bigint53: bigint('bigint53', { mode: 'number', }), bigint64: bigint('bigint64', { mode: 'bigint', }), - bigintString: bigint('bigint_string', { - mode: 'string', - }), - bool: boolean('bool'), - bytea: bytea('bytea'), + bool: bool('bool'), + boolean: bool('boolean'), char: char('char'), - cidr: cidr('cidr'), + string: string('string'), date: date('date', { mode: 'date', }), @@ -145,16 +118,7 @@ export const allTypesTable = pgTable('all_types', { enum: en('enum'), inet: inet('inet'), interval: interval('interval'), - json: json('json'), jsonb: jsonb('jsonb'), - line: line('line', { - mode: 'abc', - }), - lineTuple: line('line_tuple', { - mode: 'tuple', - }), - macaddr: macaddr('macaddr'), - macaddr8: macaddr8('macaddr8'), numeric: numeric('numeric'), numericNum: numeric('numeric_num', { mode: 'number', @@ -162,15 +126,9 @@ export const allTypesTable = pgTable('all_types', { numericBig: numeric('numeric_big', { mode: 'bigint', }), - point: point('point', { - mode: 'xy', - }), - pointTuple: point('point_tuple', { - mode: 'tuple', - }), real: real('real'), + float: float('float'), smallint: smallint('smallint'), - smallserial: smallserial('smallserial'), text: text('text'), time: time('time'), timestamp: timestamp('timestamp', { @@ -189,20 +147,17 @@ export const allTypesTable = pgTable('all_types', { }), uuid: uuid('uuid'), varchar: varchar('varchar'), - arrint: integer('arrint').array(), + arrint: int4('arrint').array(), arrbigint53: bigint('arrbigint53', { mode: 'number', }).array(), arrbigint64: bigint('arrbigint64', { mode: 'bigint', }).array(), - arrbigintString: bigint('arrbigint_string', { - mode: 'string', - }).array(), - arrbool: boolean('arrbool').array(), - arrbytea: bytea('arrbytea').array(), + arrbool: bool('arrbool').array(), + arrboolean: boolean('arrboolean').array(), arrchar: char('arrchar').array(), - arrcidr: cidr('arrcidr').array(), + arrstring: string('arrstring').array(), arrdate: date('arrdate', { mode: 'date', }).array(), @@ -210,19 +165,10 @@ export const allTypesTable = pgTable('all_types', { mode: 'string', }).array(), arrdouble: doublePrecision('arrdouble').array(), + arrfloat: float('arrfloat').array(), arrenum: en('arrenum').array(), arrinet: inet('arrinet').array(), arrinterval: interval('arrinterval').array(), - arrjson: json('arrjson').array(), - arrjsonb: jsonb('arrjsonb').array(), - arrline: line('arrline', { - mode: 'abc', - }).array(), - arrlineTuple: line('arrline_tuple', { - mode: 'tuple', - }).array(), - arrmacaddr: macaddr('arrmacaddr').array(), - arrmacaddr8: macaddr8('arrmacaddr8').array(), arrnumeric: numeric('arrnumeric').array(), arrnumericNum: numeric('arrnumeric_num', { mode: 'number', @@ -230,12 +176,6 @@ export const allTypesTable = pgTable('all_types', { arrnumericBig: numeric('arrnumeric_big', { mode: 'bigint', }).array(), - arrpoint: point('arrpoint', { - mode: 'xy', - }).array(), - arrpointTuple: point('arrpoint_tuple', { - mode: 'tuple', - }).array(), arrreal: real('arrreal').array(), arrsmallint: smallint('arrsmallint').array(), arrtext: text('arrtext').array(), @@ -256,172 +196,169 @@ export const allTypesTable = pgTable('all_types', { }).array(), arruuid: uuid('arruuid').array(), arrvarchar: varchar('arrvarchar').array(), + bit: bit('bit'), + varbit: varbit('varbit'), + arrbit: bit('arrbit').array(), + arrvarbit: varbit('arrvarbit').array(), }); -export const usersTable = pgTable('users', { - id: serial('id' as string).primaryKey(), +export const usersTable = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), + verified: bool('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); -const usersOnUpdate = pgTable('users_on_update', { - id: serial('id').primaryKey(), +const usersOnUpdate = cockroachTable('users_on_update', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updateCounter: int4('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), alwaysNull: text('always_null').$type().$onUpdate(() => null), - // uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), looks like this is not supported in pg + // uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper("name")`), }); -const citiesTable = pgTable('cities', { - id: serial('id').primaryKey(), +const citiesTable = cockroachTable('cities', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), state: char('state', { length: 2 }), }); -const cities2Table = pgTable('cities', { - id: serial('id').primaryKey(), +const cities2Table = cockroachTable('cities', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), }); -const users2Table = pgTable('users2', { - id: serial('id').primaryKey(), +const users2Table = cockroachTable('users2', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), + cityId: int4('city_id').references(() => citiesTable.id), }); -const coursesTable = pgTable('courses', { - id: serial('id').primaryKey(), +const coursesTable = cockroachTable('courses', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - categoryId: integer('category_id').references(() => courseCategoriesTable.id), + categoryId: int4('category_id').references(() => courseCategoriesTable.id), }); -const courseCategoriesTable = pgTable('course_categories', { - id: serial('id').primaryKey(), +const courseCategoriesTable = cockroachTable('course_categories', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), }); -const orders = pgTable('orders', { - id: serial('id').primaryKey(), +const orders = cockroachTable('orders', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), region: text('region').notNull(), product: text('product').notNull().$default(() => 'random_string'), - amount: integer('amount').notNull(), - quantity: integer('quantity').notNull(), + amount: int4('amount').notNull(), + quantity: int4('quantity').notNull(), }); -const network = pgTable('network_table', { +const network = cockroachTable('network_table', { inet: inet('inet').notNull(), - cidr: cidr('cidr').notNull(), - macaddr: macaddr('macaddr').notNull(), - macaddr8: macaddr8('macaddr8').notNull(), }); -const salEmp = pgTable('sal_emp', { +const salEmp = cockroachTable('sal_emp', { name: text('name'), - payByQuarter: integer('pay_by_quarter').array(), - schedule: text('schedule').array().array(), -}); - -const _tictactoe = pgTable('tictactoe', { - squares: integer('squares').array(3).array(3), + payByQuarter: int4('pay_by_quarter').array(), }); -export const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), +export const usersMigratorTable = cockroachTable('users12', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), name: text('name').notNull(), email: text('email').notNull(), }); // To test aggregate functions -const aggregateTable = pgTable('aggregate_table', { - id: serial('id').notNull(), +const aggregateTable = cockroachTable('aggregate_table', { + id: int4('id').notNull().generatedByDefaultAsIdentity(), name: text('name').notNull(), - a: integer('a'), - b: integer('b'), - c: integer('c'), - nullOnly: integer('null_only'), + a: int4('a'), + b: int4('b'), + c: int4('c'), + nullOnly: int4('null_only'), }); // To test another schema and multischema -export const mySchema = pgSchema('mySchema'); +export const mySchema = cockroachSchema('mySchema'); export const usersMySchemaTable = mySchema.table('users', { - id: serial('id').primaryKey(), + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), + verified: bool('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); const citiesMySchemaTable = mySchema.table('cities', { - id: serial('id').primaryKey(), + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), state: char('state', { length: 2 }), }); const users2MySchemaTable = mySchema.table('users2', { - id: serial('id').primaryKey(), + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - cityId: integer('city_id').references(() => citiesTable.id), + cityId: int4('city_id').references(() => citiesTable.id), }); -const jsonTestTable = pgTable('jsontest', { - id: serial('id').primaryKey(), - json: json('json').$type<{ string: string; number: number }>(), +const jsonTestTable = cockroachTable('jsontest', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), jsonb: jsonb('jsonb').$type<{ string: string; number: number }>(), }); -let pgContainer: Docker.Container; +let cockroachContainer: Docker.Container; export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { const docker = new Docker(); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; + const port = await getPort({ port: 26257 }); + const image = 'cockroachdb/cockroach:v24.1.0'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); - pgContainer = await docker.createContainer({ + cockroachContainer = await docker.createContainer({ Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], + Cmd: ['start-single-node', '--insecure'], name: `drizzle-integration-tests-${uuidV4()}`, HostConfig: { AutoRemove: true, PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], + '26257/tcp': [{ HostPort: `${port}` }], }, }, }); - await pgContainer.start(); + await cockroachContainer.start(); - return { connectionString: `postgres://postgres:postgres@localhost:${port}/postgres`, container: pgContainer }; + return { + connectionString: `postgresql://root@127.0.0.1:${port}/defaultdb?sslmode=disable`, + container: cockroachContainer, + }; } afterAll(async () => { - await pgContainer?.stop().catch(console.error); + await cockroachContainer?.stop().catch(console.error); }); export function tests() { describe('common', () => { beforeEach(async (ctx) => { - const { db } = ctx.pg; - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`drop schema if exists ${mySchema} cascade`); - await db.execute(sql`create schema public`); + const { db } = ctx.cockroach; + await db.execute(sql`drop database defaultdb;`); + await db.execute(sql`create database defaultdb;`); await db.execute(sql`create schema if not exists custom_migrations`); await db.execute(sql`create schema ${mySchema}`); // public users await db.execute( sql` create table users ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, - verified boolean not null default false, + verified bool not null default false, jsonb jsonb, created_at timestamptz not null default now() ) @@ -431,7 +368,7 @@ export function tests() { await db.execute( sql` create table cities ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, state char(2) ) @@ -441,16 +378,16 @@ export function tests() { await db.execute( sql` create table users2 ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, - city_id integer references cities(id) + city_id int4 references cities(id) ) `, ); await db.execute( sql` create table course_categories ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null ) `, @@ -458,30 +395,27 @@ export function tests() { await db.execute( sql` create table courses ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, - category_id integer references course_categories(id) + category_id int4 references course_categories(id) ) `, ); await db.execute( sql` create table orders ( - id serial primary key, + id int4 primary key generated by default as identity, region text not null, product text not null, - amount integer not null, - quantity integer not null + amount int4 not null, + quantity int4 not null ) `, ); await db.execute( sql` create table network_table ( - inet inet not null, - cidr cidr not null, - macaddr macaddr not null, - macaddr8 macaddr8 not null + inet inet not null ) `, ); @@ -489,15 +423,7 @@ export function tests() { sql` create table sal_emp ( name text not null, - pay_by_quarter integer[] not null, - schedule text[][] not null - ) - `, - ); - await db.execute( - sql` - create table tictactoe ( - squares integer[3][3] not null + pay_by_quarter int4[] not null ) `, ); @@ -505,9 +431,9 @@ export function tests() { await db.execute( sql` create table ${usersMySchemaTable} ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, - verified boolean not null default false, + verified bool not null default false, jsonb jsonb, created_at timestamptz not null default now() ) @@ -517,7 +443,7 @@ export function tests() { await db.execute( sql` create table ${citiesMySchemaTable} ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, state char(2) ) @@ -527,9 +453,9 @@ export function tests() { await db.execute( sql` create table ${users2MySchemaTable} ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, - city_id integer references "mySchema".cities(id) + city_id int4 references "mySchema".cities(id) ) `, ); @@ -537,7 +463,7 @@ export function tests() { await db.execute( sql` create table jsontest ( - id serial primary key, + id int4 primary key generated by default as identity, json json, jsonb jsonb ) @@ -546,19 +472,17 @@ export function tests() { }); afterEach(async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.execute(sql`drop schema if exists custom_migrations cascade`); }); - async function setupSetOperationTest( - db: PgDatabase, - ) { + async function setupSetOperationTest(db: CockroachDatabase) { await db.execute(sql`drop table if exists users2`); await db.execute(sql`drop table if exists cities`); await db.execute( sql` create table cities ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null ) `, @@ -566,9 +490,9 @@ export function tests() { await db.execute( sql` create table users2 ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, - city_id integer references cities(id) + city_id int4 references cities(id) ) `, ); @@ -591,19 +515,17 @@ export function tests() { ]); } - async function setupAggregateFunctionsTest( - db: PgDatabase, - ) { + async function setupAggregateFunctionsTest(db: CockroachDatabase) { await db.execute(sql`drop table if exists "aggregate_table"`); await db.execute( sql` create table "aggregate_table" ( - "id" serial not null, + "id" int4 not null generated by default as identity, "name" text not null, - "a" integer, - "b" integer, - "c" integer, - "null_only" integer + "a" int4, + "b" int4, + "c" int4, + "null_only" int4 ); `, ); @@ -619,41 +541,42 @@ export function tests() { } test('table configs: unique third param', async () => { - const cities1Table = pgTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: char('state', { length: 2 }), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), - f1: unique('custom_name1').on(t.name, t.state), - })); + const cities1Table = cockroachTable( + 'cities1', + { + id: int4('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }, + ( + t, + ) => [unique('custom_name').on(t.name, t.state), unique('custom_name1').on(t.name, t.state)], + ); const tableConfig = getTableConfig(cities1Table); expect(tableConfig.uniqueConstraints).toHaveLength(2); expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); - expect(tableConfig.uniqueConstraints[0]?.nullsNotDistinct).toBe(true); expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); - expect(tableConfig.uniqueConstraints[1]?.nullsNotDistinct).toBe(false); expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); }); test('table configs: unique in column', async () => { - const cities1Table = pgTable('cities1', { - id: serial('id').primaryKey(), + const cities1Table = cockroachTable('cities1', { + id: int4('id').primaryKey(), name: text('name').notNull().unique(), state: char('state', { length: 2 }).unique('custom'), - field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), + field: char('field', { length: 2 }).unique('custom_field'), }); const tableConfig = getTableConfig(cities1Table); const columnName = tableConfig.columns.find((it) => it.name === 'name'); - expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.uniqueName).toBe(undefined); expect(columnName?.isUnique).toBe(true); const columnState = tableConfig.columns.find((it) => it.name === 'state'); @@ -663,17 +586,15 @@ export function tests() { const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.uniqueName).toBe('custom_field'); expect(columnField?.isUnique).toBe(true); - expect(columnField?.uniqueType).toBe('not distinct'); + expect(columnField?.uniqueType).toBe(undefined); }); test('table config: foreign keys name', async () => { - const table = pgTable('cities', { - id: serial('id').primaryKey(), + const table = cockroachTable('cities', { + id: int4('id'), name: text('name').notNull(), state: text('state'), - }, (t) => ({ - f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), - })); + }, (t) => [foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' })]); const tableConfig = getTableConfig(table); @@ -682,13 +603,11 @@ export function tests() { }); test('table config: primary keys name', async () => { - const table = pgTable('cities', { - id: serial('id').primaryKey(), + const table = cockroachTable('cities', { + id: int4('id'), name: text('name').notNull(), state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); + }, (t) => [primaryKey({ columns: [t.id, t.name], name: 'custom_pk' })]); const tableConfig = getTableConfig(table); @@ -697,7 +616,7 @@ export function tests() { }); test('select all fields', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const now = Date.now(); @@ -710,7 +629,7 @@ export function tests() { }); test('select sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const users = await db @@ -723,7 +642,7 @@ export function tests() { }); test('select typed sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); @@ -735,7 +654,7 @@ export function tests() { }); test('select with empty array in inArray', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db @@ -749,7 +668,7 @@ export function tests() { }); test('select with empty array in notInArray', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db @@ -763,7 +682,7 @@ export function tests() { }); test('$default function', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) .returning(); @@ -787,16 +706,16 @@ export function tests() { }); test('select distinct', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), + const usersDistinctTable = cockroachTable('users_distinct', { + id: int4('id').notNull(), name: text('name').notNull(), - age: integer('age').notNull(), + age: int4('age').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text, age integer)`); + await db.execute(sql`create table ${usersDistinctTable} (id int4, name text, age int4)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John', age: 24 }, @@ -844,7 +763,7 @@ export function tests() { }); test('insert returning sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const users = await db .insert(usersTable) @@ -857,7 +776,7 @@ export function tests() { }); test('delete returning sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const users = await db @@ -871,7 +790,7 @@ export function tests() { }); test('update returning sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const users = await db @@ -886,7 +805,7 @@ export function tests() { }); test('update with returning all fields', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const now = Date.now(); @@ -905,7 +824,7 @@ export function tests() { }); test('update with returning partial', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const users = await db @@ -921,7 +840,7 @@ export function tests() { }); test('delete with returning all fields', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const now = Date.now(); @@ -936,7 +855,7 @@ export function tests() { }); test('delete with returning partial', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ @@ -948,7 +867,7 @@ export function tests() { }); test('insert + select', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); @@ -965,7 +884,7 @@ export function tests() { }); test('json insert', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db @@ -980,7 +899,7 @@ export function tests() { }); test('char insert', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); const result = await db @@ -991,7 +910,7 @@ export function tests() { }); test('char update', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); @@ -1003,7 +922,7 @@ export function tests() { }); test('char delete', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); @@ -1015,7 +934,7 @@ export function tests() { }); test('insert with overridden default values', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); @@ -1026,7 +945,7 @@ export function tests() { }); test('insert many', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db .insert(usersTable) @@ -1054,7 +973,7 @@ export function tests() { }); test('insert many with returning', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const result = await db .insert(usersTable) @@ -1080,7 +999,7 @@ export function tests() { }); test('select with group by as field', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); @@ -1089,11 +1008,11 @@ export function tests() { .from(usersTable) .groupBy(usersTable.name); - expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with exists', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); @@ -1108,7 +1027,7 @@ export function tests() { }); test('select with group by as sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); @@ -1117,11 +1036,11 @@ export function tests() { .from(usersTable) .groupBy(sql`${usersTable.name}`); - expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql + column', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); @@ -1130,11 +1049,11 @@ export function tests() { .from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); - expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); @@ -1143,11 +1062,11 @@ export function tests() { .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); - expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); @@ -1162,7 +1081,7 @@ export function tests() { }); test('build query', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db .select({ id: usersTable.id, name: usersTable.name }) @@ -1177,7 +1096,7 @@ export function tests() { }); test('insert sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); @@ -1185,7 +1104,7 @@ export function tests() { }); test('partial join with alias', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); @@ -1213,17 +1132,17 @@ export function tests() { }); test('full join with alias', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const pgTable = pgTableCreator((name) => `prefixed_${name}`); + const cockroachTable = cockroachTableCreator((name) => `prefixed_${name}`); - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + await db.execute(sql`create table ${users} (id int4 primary key, name text not null)`); const customers = alias(users, 'customer'); @@ -1249,17 +1168,17 @@ export function tests() { }); test('select from alias', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const pgTable = pgTableCreator((name) => `prefixed_${name}`); + const cockroachTable = cockroachTableCreator((name) => `prefixed_${name}`); - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + await db.execute(sql`create table ${users} (id int4 primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); @@ -1286,7 +1205,7 @@ export function tests() { }); test('insert with spaces', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); @@ -1295,7 +1214,7 @@ export function tests() { }); test('prepared statement', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const statement = db @@ -1311,7 +1230,7 @@ export function tests() { }); test('insert: placeholders on columns with encoder', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const statement = db.insert(usersTable).values({ name: 'John', @@ -1333,7 +1252,7 @@ export function tests() { }); test('prepared statement reuse', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const stmt = db .insert(usersTable) @@ -1370,7 +1289,7 @@ export function tests() { }); test('prepared statement with placeholder in .where', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const stmt = db @@ -1387,7 +1306,7 @@ export function tests() { }); test('prepared statement with placeholder in .limit', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const stmt = db @@ -1407,7 +1326,7 @@ export function tests() { }); test('prepared statement with placeholder in .offset', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db @@ -1425,7 +1344,7 @@ export function tests() { }); test('prepared statement built using $dynamic', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; function withLimitOffset(qb: any) { return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); @@ -1447,12 +1366,11 @@ export function tests() { expect(result).toHaveLength(1); }); - // TODO change tests to new structure test('Query check: Insert all defaults in 1 row', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').default('Dan'), state: text('state'), }); @@ -1469,10 +1387,10 @@ export function tests() { }); test('Query check: Insert all defaults in multiple rows', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').default('Dan'), state: text('state').default('UA'), }); @@ -1490,10 +1408,10 @@ export function tests() { }); test('Insert all defaults in 1 row', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('empty_insert_single', { - id: serial('id').primaryKey(), + const users = cockroachTable('empty_insert_single', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').default('Dan'), state: text('state'), }); @@ -1501,7 +1419,7 @@ export function tests() { await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + sql`create table ${users} (id int4 primary key generated by default as identity, name text default 'Dan', state text)`, ); await db.insert(users).values({}); @@ -1512,10 +1430,10 @@ export function tests() { }); test('Insert all defaults in multiple rows', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('empty_insert_multiple', { - id: serial('id').primaryKey(), + const users = cockroachTable('empty_insert_multiple', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').default('Dan'), state: text('state'), }); @@ -1523,7 +1441,7 @@ export function tests() { await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + sql`create table ${users} (id int4 primary key generated by default as identity, name text default 'Dan', state text)`, ); await db.insert(users).values([{}, {}]); @@ -1534,7 +1452,7 @@ export function tests() { }); test('build query insert with onConflict do update', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db .insert(usersTable) @@ -1550,7 +1468,7 @@ export function tests() { }); test('build query insert with onConflict do update / multiple columns', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db .insert(usersTable) @@ -1566,7 +1484,7 @@ export function tests() { }); test('build query insert with onConflict do nothing', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db .insert(usersTable) @@ -1582,7 +1500,7 @@ export function tests() { }); test('build query insert with onConflict do nothing + target', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db .insert(usersTable) @@ -1598,7 +1516,7 @@ export function tests() { }); test('insert with onConflict do update', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); @@ -1616,7 +1534,7 @@ export function tests() { }); test('insert with onConflict do nothing', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); @@ -1631,7 +1549,7 @@ export function tests() { }); test('insert with onConflict do nothing + target', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); @@ -1649,7 +1567,7 @@ export function tests() { }); test('left join (flat object fields)', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const { id: cityId } = await db .insert(citiesTable) @@ -1676,7 +1594,7 @@ export function tests() { }); test('left join (grouped fields)', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const { id: cityId } = await db .insert(citiesTable) @@ -1717,7 +1635,7 @@ export function tests() { }); test('left join (all fields)', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const { id: cityId } = await db .insert(citiesTable) @@ -1756,80 +1674,8 @@ export function tests() { ]); }); - test('select from a many subquery', async (ctx) => { - const { db } = ctx.pg; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 2 }, - ]); - - const res = await db.select({ - population: db.select({ count: count().as('count') }).from(users2Table).where( - eq(users2Table.cityId, citiesTable.id), - ).as( - 'population', - ), - name: citiesTable.name, - }).from(citiesTable); - - expectTypeOf(res).toEqualTypeOf<{ - population: number; - name: string; - }[]>(); - - expect(res).toStrictEqual([{ - population: 1, - name: 'Paris', - }, { - population: 2, - name: 'London', - }]); - }); - - test('select from a one subquery', async (ctx) => { - const { db } = ctx.pg; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 2 }, - ]); - - const res = await db.select({ - cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) - .as( - 'cityName', - ), - name: users2Table.name, - }).from(users2Table); - - expectTypeOf(res).toEqualTypeOf<{ - cityName: string; - name: string; - }[]>(); - - expect(res).toStrictEqual([{ - cityName: 'Paris', - name: 'John', - }, { - cityName: 'London', - name: 'Jane', - }, { - cityName: 'London', - name: 'Jack', - }]); - }); - test('join subquery', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db .insert(courseCategoriesTable) @@ -1877,7 +1723,7 @@ export function tests() { }); test('with ... select', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, @@ -1896,7 +1742,7 @@ export function tests() { db .select({ region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), + totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), @@ -1923,8 +1769,8 @@ export function tests() { .select({ region: orders.region, product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, + productUnits: sql`sum(${orders.quantity})::int4`, + productSales: sql`sum(${orders.amount})::int4`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) @@ -1935,8 +1781,8 @@ export function tests() { .selectDistinct({ region: orders.region, product: orders.product, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, + productUnits: sql`sum(${orders.quantity})::int4`, + productSales: sql`sum(${orders.amount})::int4`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) @@ -1946,8 +1792,8 @@ export function tests() { .with(regionalSales, topRegions) .selectDistinctOn([orders.region], { region: orders.region, - productUnits: sql`sum(${orders.quantity})::int`, - productSales: sql`sum(${orders.amount})::int`, + productUnits: sql`sum(${orders.quantity})::int4`, + productSales: sql`sum(${orders.amount})::int4`.mapWith(Number), }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) @@ -1996,20 +1842,20 @@ export function tests() { }); test('with ... update', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const products = pgTable('products', { - id: serial('id').primaryKey(), + const products = cockroachTable('products', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), price: numeric('price').notNull(), - cheap: boolean('cheap').notNull().default(false), + cheap: bool('cheap').notNull().default(false), }); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql` create table ${products} ( - id serial primary key, + id int4 primary key generated by default as identity, price numeric not null, - cheap boolean not null default false + cheap bool not null default false ) `); @@ -2050,15 +1896,15 @@ export function tests() { }); test('with ... insert', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { + const users = cockroachTable('users', { username: text('username').notNull(), - admin: boolean('admin').notNull(), + admin: bool('admin').notNull(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (username text not null, admin boolean not null default false)`); + await db.execute(sql`create table ${users} (username text not null, admin bool not null default false)`); const userCount = db .$with('user_count') @@ -2084,7 +1930,7 @@ export function tests() { }); test('with ... delete', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, @@ -2123,7 +1969,7 @@ export function tests() { }); test('select from subquery sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); @@ -2138,13 +1984,13 @@ export function tests() { }); test('select a field without joining its table', (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')).toThrowError(); }); test('select all fields from subquery without alias', (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); @@ -2152,7 +1998,7 @@ export function tests() { }); test('select count()', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); @@ -2162,11 +2008,11 @@ export function tests() { }); test('select count w/ custom mapper', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - function count(value: PgColumn | SQLWrapper): SQL; - function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; - function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { + function count(value: CockroachColumn | SQLWrapper): SQL; + function count(value: CockroachColumn | SQLWrapper, alias: string): SQL.Aliased; + function count(value: CockroachColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { const result = sql`count(${value})`.mapWith(Number); if (!alias) { return result; @@ -2182,13 +2028,10 @@ export function tests() { }); test('network types', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const value: typeof network.$inferSelect = { inet: '127.0.0.1', - cidr: '192.168.100.128/25', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', }; await db.insert(network).values(value); @@ -2199,18 +2042,16 @@ export function tests() { }); test('array types', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const values: typeof salEmp.$inferSelect[] = [ { name: 'John', payByQuarter: [10000, 10000, 10000, 10000], - schedule: [['meeting', 'lunch'], ['training', 'presentation']], }, { name: 'Carol', payByQuarter: [20000, 25000, 25000, 25000], - schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], }, ]; @@ -2222,7 +2063,7 @@ export function tests() { }); test('select for ...', (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; { const query = db @@ -2276,7 +2117,7 @@ export function tests() { }); test('having', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); @@ -2289,7 +2130,7 @@ export function tests() { .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})::int`.as('users_count'), + usersCount: sql`count(${users2Table.id})::int4`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) @@ -2313,21 +2154,21 @@ export function tests() { }); test('view', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const newYorkers1 = pgView('new_yorkers') + const newYorkers1 = cockroachView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - const newYorkers2 = pgView('new_yorkers', { - id: serial('id').primaryKey(), + const newYorkers2 = cockroachView('new_yorkers', { + id: int4('id').primaryKey(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - const newYorkers3 = pgView('new_yorkers', { - id: serial('id').primaryKey(), + const newYorkers3 = cockroachView('new_yorkers', { + id: int4('id').primaryKey(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }).existing(); await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); @@ -2377,21 +2218,21 @@ export function tests() { // NEXT test('materialized view', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const newYorkers1 = pgMaterializedView('new_yorkers') + const newYorkers1 = cockroachMaterializedView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - const newYorkers2 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), + const newYorkers2 = cockroachMaterializedView('new_yorkers', { + id: int4('id').primaryKey(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - const newYorkers3 = pgMaterializedView('new_yorkers', { - id: serial('id').primaryKey(), + const newYorkers3 = cockroachMaterializedView('new_yorkers', { + id: int4('id').primaryKey(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }).existing(); await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); @@ -2447,12 +2288,12 @@ export function tests() { }); test('select from existing view', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const schema = pgSchema('test_schema'); + const schema = cockroachSchema('test_schema'); const newYorkers = schema.view('new_yorkers', { - id: integer('id').notNull(), + id: int4('id').notNull(), }).existing(); await db.execute(sql`drop schema if exists ${schema} cascade`); @@ -2468,27 +2309,26 @@ export function tests() { expect(result).toEqual([{ id: 100 }]); }); - // TODO: copy to SQLite and MySQL, add to docs test('select from raw sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const result = await db.select({ - id: sql`id`, + id: sql`id`, name: sql`name`, }).from(sql`(select 1 as id, 'John' as name) as users`); - Expect>; + Expect>; expect(result).toEqual([ - { id: 1, name: 'John' }, + { id: '1', name: 'John' }, ]); }); test('select from raw sql with joins', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const result = await db .select({ - id: sql`users.id`, + id: sql`users.id`, name: sql`users.name`, userCity: sql`users.city`, cityName: sql`cities.name`, @@ -2496,19 +2336,19 @@ export function tests() { .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - Expect>; + Expect>; expect(result).toEqual([ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + { id: '1', name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); test('join on aliased sql from select', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const result = await db .select({ - userId: sql`users.id`.as('userId'), + userId: sql`users.id`.as('userId'), name: sql`users.name`, userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), @@ -2518,20 +2358,20 @@ export function tests() { .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); Expect< - Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + Equal<{ userId: string; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + { userId: '1', name: 'John', userCity: 'New York', cityId: '1', cityName: 'Paris' }, ]); }); test('join on aliased sql from with clause', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const users = db.$with('users').as( db.select({ - id: sql`id`.as('userId'), + id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }).from( @@ -2541,7 +2381,7 @@ export function tests() { const cities = db.$with('cities').as( db.select({ - id: sql`id`.as('cityId'), + id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }).from( sql`(select 1 as id, 'Paris' as name) as cities`, @@ -2561,28 +2401,28 @@ export function tests() { .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); Expect< - Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + Equal<{ userId: string; name: string; userCity: string; cityId: string; cityName: string }[], typeof result> >; expect(result).toEqual([ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + { userId: '1', name: 'John', userCity: 'New York', cityId: '1', cityName: 'Paris' }, ]); }); test('prefixed table', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const pgTable = pgTableCreator((name) => `myprefix_${name}`); + const cockroachTable = cockroachTableCreator((name) => `myprefix_${name}`); - const users = pgTable('test_prefixed_table_with_unique_name', { - id: integer('id').primaryKey(), + const users = cockroachTable('test_prefixed_table_with_unique_name', { + id: int4('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, + sql`create table myprefix_test_prefixed_table_with_unique_name (id int4 not null primary key, name text not null)`, ); await db.insert(users).values({ id: 1, name: 'John' }); @@ -2595,7 +2435,7 @@ export function tests() { }); test('select from enum as ts enum', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; enum Muscle { abdominals = 'abdominals', @@ -2649,20 +2489,20 @@ export function tests() { full_body = 'full_body', } - const muscleEnum = pgEnum('muscle', Muscle); + const muscleEnum = cockroachEnum('muscle', Muscle); - const forceEnum = pgEnum('force', Force); + const forceEnum = cockroachEnum('force', Force); - const levelEnum = pgEnum('level', Level); + const levelEnum = cockroachEnum('level', Level); - const mechanicEnum = pgEnum('mechanic', Mechanic); + const mechanicEnum = cockroachEnum('mechanic', Mechanic); - const equipmentEnum = pgEnum('equipment', Equipment); + const equipmentEnum = cockroachEnum('equipment', Equipment); - const categoryEnum = pgEnum('category', Category); + const categoryEnum = cockroachEnum('category', Category); - const exercises = pgTable('exercises', { - id: serial('id').primaryKey(), + const exercises = cockroachTable('exercises', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: varchar('name').notNull(), force: forceEnum('force'), level: levelEnum('level'), @@ -2706,7 +2546,7 @@ export function tests() { ); await db.execute(sql` create table ${exercises} ( - id serial primary key, + id int4 primary key generated by default as identity, name varchar not null, force force, level level, @@ -2764,9 +2604,9 @@ export function tests() { }); test('select from enum', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const muscleEnum = pgEnum('muscle', [ + const muscleEnum = cockroachEnum('muscle', [ 'abdominals', 'hamstrings', 'adductors', @@ -2786,13 +2626,13 @@ export function tests() { 'abductors', ]); - const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); + const forceEnum = cockroachEnum('force', ['isometric', 'isotonic', 'isokinetic']); - const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); + const levelEnum = cockroachEnum('level', ['beginner', 'intermediate', 'advanced']); - const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); + const mechanicEnum = cockroachEnum('mechanic', ['compound', 'isolation']); - const equipmentEnum = pgEnum('equipment', [ + const equipmentEnum = cockroachEnum('equipment', [ 'barbell', 'dumbbell', 'bodyweight', @@ -2801,10 +2641,10 @@ export function tests() { 'kettlebell', ]); - const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); + const categoryEnum = cockroachEnum('category', ['upper_body', 'lower_body', 'full_body']); - const exercises = pgTable('exercises', { - id: serial('id').primaryKey(), + const exercises = cockroachTable('exercises', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: varchar('name').notNull(), force: forceEnum('force'), level: levelEnum('level'), @@ -2848,7 +2688,7 @@ export function tests() { ); await db.execute(sql` create table ${exercises} ( - id serial primary key, + id int4 primary key generated by default as identity, name varchar not null, force force, level level, @@ -2906,10 +2746,10 @@ export function tests() { }); test('all date and time columns', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), dateString: date('date_string', { mode: 'string' }).notNull(), time: time('time', { precision: 3 }).notNull(), datetime: timestamp('datetime').notNull(), @@ -2924,7 +2764,7 @@ export function tests() { await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated by default as identity, date_string date not null, time time(3) not null, datetime timestamp not null, @@ -2999,10 +2839,10 @@ export function tests() { }); test('all date and time columns with timezone second case mode date', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); @@ -3010,7 +2850,7 @@ export function tests() { await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated by default as identity, timestamp_string timestamp(3) with time zone not null ) `); @@ -3035,10 +2875,10 @@ export function tests() { }); test('all date and time columns with timezone third case mode date', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), + const table = cockroachTable('all_columns', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); @@ -3046,7 +2886,7 @@ export function tests() { await db.execute(sql` create table ${table} ( - id serial primary key, + id int4 primary key generated by default as identity, timestamp_string timestamp(3) with time zone not null ) `); @@ -3069,7 +2909,7 @@ export function tests() { }); test('orderBy with aliased column', (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db.select({ test: sql`something`.as('test'), @@ -3079,10 +2919,10 @@ export function tests() { }); test('select from sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const metricEntry = pgTable('metric_entry', { - id: pgUuid('id').notNull(), + const metricEntry = cockroachTable('metric_entry', { + id: cockroachUuid('id').notNull(), createdAt: timestamp('created_at').notNull(), }); @@ -3126,10 +2966,10 @@ export function tests() { }); test('timestamp timezone', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { - id: serial('id').primaryKey(), + const usersTableWithAndWithoutTimezone = cockroachTable('users_test_with_and_without_timezone', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), name: text('name').notNull(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), @@ -3140,7 +2980,7 @@ export function tests() { await db.execute( sql` create table users_test_with_and_without_timezone ( - id serial not null primary key, + id int4 not null primary key generated by default as identity, name text not null, created_at timestamptz not null default now(), updated_at timestamp not null default now() @@ -3168,24 +3008,26 @@ export function tests() { }); test('transaction', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), + const users = cockroachTable('users_transactions', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), + balance: int4('balance').notNull(), }); - const products = pgTable('products_transactions', { - id: serial('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), + const products = cockroachTable('products_transactions', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), + price: int4('price').notNull(), + stock: int4('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); - await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); await db.execute( - sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, + sql`create table users_transactions (id int4 not null primary key generated by default as identity, balance int4 not null)`, + ); + await db.execute( + sql`create table products_transactions (id int4 not null primary key generated by default as identity, price int4 not null, stock int4 not null)`, ); const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); @@ -3205,17 +3047,17 @@ export function tests() { }); test('transaction rollback', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), + const users = cockroachTable('users_transactions_rollback', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + balance: int4('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, + sql`create table users_transactions_rollback (id int4 not null primary key generated by default as identity, balance int4 not null)`, ); await expect((async () => { @@ -3233,17 +3075,17 @@ export function tests() { }); test('nested transaction', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), + const users = cockroachTable('users_nested_transactions', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), + balance: int4('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, + sql`create table users_nested_transactions (id int4 not null primary key generated by default as identity, balance int4 not null)`, ); await db.transaction(async (tx) => { @@ -3262,17 +3104,17 @@ export function tests() { }); test('nested transaction rollback', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), + const users = cockroachTable('users_nested_transactions_rollback', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), + balance: int4('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, + sql`create table users_nested_transactions_rollback (id int4 not null primary key generated by default as identity, balance int4 not null)`, ); await db.transaction(async (tx) => { @@ -3294,27 +3136,27 @@ export function tests() { }); test('join subquery with join', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const internalStaff = pgTable('internal_staff', { - userId: integer('user_id').notNull(), + const internalStaff = cockroachTable('internal_staff', { + userId: int4('user_id').notNull(), }); - const customUser = pgTable('custom_user', { - id: integer('id').notNull(), + const customUser = cockroachTable('custom_user', { + id: int4('id').notNull(), }); - const ticket = pgTable('ticket', { - staffId: integer('staff_id').notNull(), + const ticket = cockroachTable('ticket', { + staffId: int4('staff_id').notNull(), }); await db.execute(sql`drop table if exists ${internalStaff}`); await db.execute(sql`drop table if exists ${customUser}`); await db.execute(sql`drop table if exists ${ticket}`); - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); + await db.execute(sql`create table internal_staff (user_id int4 not null)`); + await db.execute(sql`create table custom_user (id int4 not null)`); + await db.execute(sql`create table ticket (staff_id int4 not null)`); await db.insert(internalStaff).values({ userId: 1 }); await db.insert(customUser).values({ id: 1 }); @@ -3345,21 +3187,21 @@ export function tests() { }); test('subquery with view', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users_subquery_view', { - id: serial('id').primaryKey(), + const users = cockroachTable('users_subquery_view', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }); - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + const newYorkers = cockroachView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text not null, city_id int4 not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); @@ -3383,21 +3225,21 @@ export function tests() { }); test('join view as subquery', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users_join_view', { - id: serial('id').primaryKey(), + const users = cockroachTable('users_join_view', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }); - const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + const newYorkers = cockroachView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text not null, city_id int4 not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); @@ -3436,18 +3278,18 @@ export function tests() { }); test('table selection with single table', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text not null, city_id int4 not null)`, ); await db.insert(users).values({ name: 'John', cityId: 1 }); @@ -3460,17 +3302,17 @@ export function tests() { }); test('set null to jsonb field', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), jsonb: jsonb('jsonb'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, + sql`create table ${users} (id int4 not null primary key generated by default as identity, jsonb jsonb)`, ); const result = await db.insert(users).values({ jsonb: null }).returning(); @@ -3481,17 +3323,17 @@ export function tests() { }); test('insert undefined', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text)`, ); await expect((async () => { @@ -3502,17 +3344,17 @@ export function tests() { }); test('update undefined', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text)`, ); await expect((async () => { @@ -3526,17 +3368,17 @@ export function tests() { }); test('array operators', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const posts = pgTable('posts', { - id: serial('id').primaryKey(), + const posts = cockroachTable('posts', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), tags: text('tags').array(), }); await db.execute(sql`drop table if exists ${posts}`); await db.execute( - sql`create table ${posts} (id serial primary key, tags text[])`, + sql`create table ${posts} (id int4 primary key generated by default as identity, tags text[])`, ); await db.insert(posts).values([{ @@ -3572,7 +3414,7 @@ export function tests() { }); test('set operations (union) from query builder with subquery', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3606,7 +3448,7 @@ export function tests() { }); test('set operations (union) as function', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3644,7 +3486,7 @@ export function tests() { }); test('set operations (union all) from query builder', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3677,7 +3519,7 @@ export function tests() { }); test('set operations (union all) as function', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3717,7 +3559,7 @@ export function tests() { }); test('set operations (intersect) from query builder', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3749,7 +3591,7 @@ export function tests() { }); test('set operations (intersect) as function', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3785,7 +3627,7 @@ export function tests() { }); test('set operations (intersect all) from query builder', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3816,7 +3658,7 @@ export function tests() { }); test('set operations (intersect all) as function', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3854,7 +3696,7 @@ export function tests() { }); test('set operations (except) from query builder', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3884,7 +3726,7 @@ export function tests() { }); test('set operations (except) as function', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3923,7 +3765,7 @@ export function tests() { }); test('set operations (except all) from query builder', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3954,7 +3796,7 @@ export function tests() { }); test('set operations (except all) as function', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -3995,7 +3837,7 @@ export function tests() { }); test('set operations (mixed) from query builder with subquery', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); const sq = db @@ -4034,7 +3876,7 @@ export function tests() { }); test('set operations (mixed all) as function', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await setupSetOperationTest(db); @@ -4085,7 +3927,7 @@ export function tests() { }); test('aggregate function: count', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -4099,7 +3941,7 @@ export function tests() { }); test('aggregate function: avg', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -4107,13 +3949,13 @@ export function tests() { const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); - expect(result1[0]?.value).toBe('33.3333333333333333'); + expect(result1[0]?.value).toBe('33.333333333333333333'); expect(result2[0]?.value).toBeNull(); - expect(result3[0]?.value).toBe('42.5000000000000000'); + expect(result3[0]?.value).toBe('42.500000000000000000'); }); test('aggregate function: sum', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -4127,7 +3969,7 @@ export function tests() { }); test('aggregate function: max', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -4139,7 +3981,7 @@ export function tests() { }); test('aggregate function: min', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -4151,28 +3993,25 @@ export function tests() { }); test('array mapping and parsing', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const arrays = pgTable('arrays_tests', { - id: serial('id').primaryKey(), + const arrays = cockroachTable('arrays_tests', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), tags: text('tags').array(), - nested: text('nested').array().array(), - numbers: integer('numbers').notNull().array(), + numbers: int4('numbers').notNull().array(), }); await db.execute(sql`drop table if exists ${arrays}`); await db.execute(sql` create table ${arrays} ( - id serial primary key, + id int4 primary key generated by default as identity, tags text[], - nested text[][], - numbers integer[] + numbers int4[] ) `); await db.insert(arrays).values({ tags: ['', 'b', 'c'], - nested: [['1', ''], ['3', '\\a']], numbers: [1, 2, 3], }); @@ -4181,7 +4020,6 @@ export function tests() { expect(result).toEqual([{ id: 1, tags: ['', 'b', 'c'], - nested: [['1', ''], ['3', '\\a']], numbers: [1, 2, 3], }]); @@ -4189,16 +4027,16 @@ export function tests() { }); test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, - update_counter integer default 1 not null, + update_counter int4 default 1 not null, updated_at timestamp(3), always_null text ) @@ -4232,16 +4070,16 @@ export function tests() { }); test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null, - update_counter integer default 1, + update_counter int4 default 1, updated_at timestamp(3), always_null text ) @@ -4281,12 +4119,12 @@ export function tests() { }); test('test if method with sql operators', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey(), name: text('name').notNull(), - age: integer('age').notNull(), + age: int4('age').notNull(), city: text('city').notNull(), }); @@ -4294,9 +4132,9 @@ export function tests() { await db.execute(sql` create table ${users} ( - id serial primary key, + id int4 primary key, name text not null, - age integer not null, + age int4 not null, city text not null ) `); @@ -4483,7 +4321,7 @@ export function tests() { // MySchema tests test('mySchema :: select all fields', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const now = Date.now(); @@ -4496,7 +4334,7 @@ export function tests() { }); test('mySchema :: select sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ @@ -4507,7 +4345,7 @@ export function tests() { }); test('mySchema :: select typed sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ @@ -4518,15 +4356,15 @@ export function tests() { }); test('mySchema :: select distinct', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), + const usersDistinctTable = cockroachTable('users_distinct', { + id: int4('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); + await db.execute(sql`create table ${usersDistinctTable} (id int4, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, @@ -4559,7 +4397,7 @@ export function tests() { }); test('mySchema :: insert returning sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const users = await db.insert(usersMySchemaTable).values({ name: 'John' }).returning({ name: sql`upper(${usersMySchemaTable.name})`, @@ -4569,7 +4407,7 @@ export function tests() { }); test('mySchema :: delete returning sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning({ @@ -4580,7 +4418,7 @@ export function tests() { }); test('mySchema :: update with returning partial', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where(eq(usersMySchemaTable.name, 'John')) @@ -4593,7 +4431,7 @@ export function tests() { }); test('mySchema :: delete with returning all fields', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const now = Date.now(); @@ -4606,7 +4444,7 @@ export function tests() { }); test('mySchema :: insert + select', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ name: 'John' }); const result = await db.select().from(usersMySchemaTable); @@ -4621,7 +4459,7 @@ export function tests() { }); test('mySchema :: insert with overridden default values', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersMySchemaTable); @@ -4630,7 +4468,7 @@ export function tests() { }); test('mySchema :: insert many', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values([ { name: 'John' }, @@ -4654,29 +4492,29 @@ export function tests() { }); test('mySchema :: select with group by as field', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.name); - expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('mySchema :: select with group by as column + sql', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); - expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('mySchema :: build query', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) @@ -4689,7 +4527,7 @@ export function tests() { }); test('mySchema :: partial join with alias', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const customerAlias = alias(usersMySchemaTable, 'customer'); await db.insert(usersMySchemaTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); @@ -4714,7 +4552,7 @@ export function tests() { }); test('mySchema :: insert with spaces', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( @@ -4725,7 +4563,7 @@ export function tests() { }); test('mySchema :: prepared statement with placeholder in .limit', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ name: 'John' }); const stmt = db @@ -4745,7 +4583,7 @@ export function tests() { }); test('mySchema :: build query insert with onConflict do update / multiple columns', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db.insert(usersMySchemaTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) @@ -4760,7 +4598,7 @@ export function tests() { }); test('mySchema :: build query insert with onConflict do nothing + target', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const query = db.insert(usersMySchemaTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) @@ -4775,7 +4613,7 @@ export function tests() { }); test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); await db.insert(usersTable).values({ id: 11, name: 'Hans' }); @@ -4806,21 +4644,21 @@ export function tests() { }); test('mySchema :: view', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const newYorkers1 = mySchema.view('new_yorkers') .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); const newYorkers2 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), + id: int4('id').primaryKey(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); const newYorkers3 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), + id: int4('id').primaryKey(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }).existing(); await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); @@ -4869,21 +4707,21 @@ export function tests() { }); test('mySchema :: materialized view', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const newYorkers1 = mySchema.materializedView('new_yorkers') .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); const newYorkers2 = mySchema.materializedView('new_yorkers', { - id: serial('id').primaryKey(), + id: int4('id').primaryKey(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); const newYorkers3 = mySchema.materializedView('new_yorkers', { - id: serial('id').primaryKey(), + id: int4('id').primaryKey(), name: text('name').notNull(), - cityId: integer('city_id').notNull(), + cityId: int4('city_id').notNull(), }).existing(); await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); @@ -4939,7 +4777,7 @@ export function tests() { }); test('limit 0', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const users = await db @@ -4951,7 +4789,7 @@ export function tests() { }); test('limit -1', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(usersTable).values({ name: 'John' }); const users = await db @@ -4963,28 +4801,28 @@ export function tests() { }); test('Object keys as column names', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; // Tests the following: // Column with required config // Column with optional config without providing a value // Column with optional config providing a value // Column without config - const users = pgTable('users', { - id: bigserial({ mode: 'number' }).primaryKey(), + const users = cockroachTable('users', { + id: bigint({ mode: 'number' }).primaryKey().generatedByDefaultAsIdentity(), firstName: varchar(), lastName: varchar({ length: 50 }), - admin: boolean(), + admin: bool(), }); await db.execute(sql`drop table if exists users`); await db.execute( sql` create table users ( - "id" bigserial primary key, + "id" bigint primary key generated by default as identity, "firstName" varchar, "lastName" varchar(50), - "admin" boolean + "admin" bool ) `, ); @@ -5006,10 +4844,9 @@ export function tests() { }); test('proper json and jsonb handling', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const jsonTable = pgTable('json_table', { - json: json('json').$type<{ name: string; age: number }>(), + const jsonTable = cockroachTable('json_table', { jsonb: jsonb('jsonb').$type<{ name: string; age: number }>(), }); @@ -5017,136 +4854,113 @@ export function tests() { await db.execute(sql`create table ${jsonTable} (json json, jsonb jsonb)`); - await db.insert(jsonTable).values({ json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 } }); + await db.insert(jsonTable).values({ jsonb: { name: 'Pete', age: 23 } }); const result = await db.select().from(jsonTable); const justNames = await db.select({ - name1: sql`${jsonTable.json}->>'name'`.as('name1'), name2: sql`${jsonTable.jsonb}->>'name'`.as('name2'), }).from(jsonTable); expect(result).toStrictEqual([ { - json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 }, }, ]); expect(justNames).toStrictEqual([ { - name1: 'Tom', name2: 'Pete', }, ]); }); test('set json/jsonb fields with objects and retrieve with the ->> operator', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ - json: obj, jsonb: obj, }); const result = await db.select({ - jsonStringField: sql`${jsonTestTable.json}->>'string'`, - jsonNumberField: sql`${jsonTestTable.json}->>'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ - jsonStringField: testString, - jsonNumberField: String(testNumber), jsonbStringField: testString, jsonbNumberField: String(testNumber), }]); }); test('set json/jsonb fields with strings and retrieve with the ->> operator', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ - json: sql`${JSON.stringify(obj)}`, jsonb: sql`${JSON.stringify(obj)}`, }); const result = await db.select({ - jsonStringField: sql`${jsonTestTable.json}->>'string'`, - jsonNumberField: sql`${jsonTestTable.json}->>'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ - jsonStringField: testString, - jsonNumberField: String(testNumber), jsonbStringField: testString, jsonbNumberField: String(testNumber), }]); }); test('set json/jsonb fields with objects and retrieve with the -> operator', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ - json: obj, jsonb: obj, }); const result = await db.select({ - jsonStringField: sql`${jsonTestTable.json}->'string'`, - jsonNumberField: sql`${jsonTestTable.json}->'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ - jsonStringField: testString, - jsonNumberField: testNumber, jsonbStringField: testString, jsonbNumberField: testNumber, }]); }); test('set json/jsonb fields with strings and retrieve with the -> operator', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ - json: sql`${JSON.stringify(obj)}`, jsonb: sql`${JSON.stringify(obj)}`, }); const result = await db.select({ - jsonStringField: sql`${jsonTestTable.json}->'string'`, - jsonNumberField: sql`${jsonTestTable.json}->'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ - jsonStringField: testString, - jsonNumberField: testNumber, jsonbStringField: testString, jsonbNumberField: testNumber, }]); }); test('update ... from', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(cities2Table).values([ { name: 'New York City' }, @@ -5178,7 +4992,7 @@ export function tests() { }); test('update ... from with alias', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.insert(cities2Table).values([ { name: 'New York City' }, @@ -5212,21 +5026,21 @@ export function tests() { }); test('update ... from with join', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const states = pgTable('states', { - id: serial('id').primaryKey(), + const states = cockroachTable('states', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), }); - const cities = pgTable('cities', { - id: serial('id').primaryKey(), + const cities = cockroachTable('cities', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - stateId: integer('state_id').references(() => states.id), + stateId: int4('state_id').references(() => states.id), }); - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), - cityId: integer('city_id').notNull().references(() => cities.id), + cityId: int4('city_id').notNull().references(() => cities.id), }); await db.execute(sql`drop table if exists "states" cascade`); @@ -5234,22 +5048,22 @@ export function tests() { await db.execute(sql`drop table if exists "users" cascade`); await db.execute(sql` create table "states" ( - "id" serial primary key, + "id" int4 primary key generated by default as identity, "name" text not null ) `); await db.execute(sql` create table "cities" ( - "id" serial primary key, + "id" int4 primary key generated by default as identity, "name" text not null, - "state_id" integer references "states"("id") + "state_id" int4 references "states"("id") ) `); await db.execute(sql` create table "users" ( - "id" serial primary key, + "id" int4 primary key generated by default as identity, "name" text not null, - "city_id" integer not null references "cities"("id") + "city_id" int4 not null references "cities"("id") ) `); @@ -5315,39 +5129,37 @@ export function tests() { }); test('insert into ... select', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const notifications = pgTable('notifications', { - id: serial('id').primaryKey(), + const notifications = cockroachTable('notifications', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), sentAt: timestamp('sent_at').notNull().defaultNow(), message: text('message').notNull(), }); - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), name: text('name').notNull(), }); - const userNotications = pgTable('user_notifications', { - userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), - notificationId: integer('notification_id').notNull().references(() => notifications.id, { + const userNotications = cockroachTable('user_notifications', { + userId: int4('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), + notificationId: int4('notification_id').notNull().references(() => notifications.id, { onDelete: 'cascade', }), - }, (t) => ({ - pk: primaryKey({ columns: [t.userId, t.notificationId] }), - })); + }, (t) => [primaryKey({ columns: [t.userId, t.notificationId] })]); await db.execute(sql`drop table if exists notifications`); await db.execute(sql`drop table if exists users`); await db.execute(sql`drop table if exists user_notifications`); await db.execute(sql` create table notifications ( - id serial primary key, + id int4 primary key generated by default as identity, sent_at timestamp not null default now(), message text not null ) `); await db.execute(sql` create table users ( - id serial primary key, + id int4 primary key generated by default as identity, name text not null ) `); @@ -5394,14 +5206,14 @@ export function tests() { }); test('insert into ... select with keys in different order', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users1 = pgTable('users1', { - id: serial('id').primaryKey(), + const users1 = cockroachTable('users1', { + id: int4('id').primaryKey(), name: text('name').notNull(), }); - const users2 = pgTable('users2', { - id: serial('id').primaryKey(), + const users2 = cockroachTable('users2', { + id: int4('id').primaryKey(), name: text('name').notNull(), }); @@ -5409,13 +5221,13 @@ export function tests() { await db.execute(sql`drop table if exists users2`); await db.execute(sql` create table users1 ( - id serial primary key, + id int4 primary key, name text not null ) `); await db.execute(sql` create table users2 ( - id serial primary key, + id int4 primary key, name text not null ) `); @@ -5437,14 +5249,14 @@ export function tests() { test('policy', () => { { - const policy = pgPolicy('test policy'); + const policy = cockroachPolicy('test policy'); - expect(is(policy, PgPolicy)).toBe(true); + expect(is(policy, CockroachPolicy)).toBe(true); expect(policy.name).toBe('test policy'); } { - const policy = pgPolicy('test policy', { + const policy = cockroachPolicy('test policy', { as: 'permissive', for: 'all', to: 'public', @@ -5452,12 +5264,12 @@ export function tests() { withCheck: sql`1=1`, }); - expect(is(policy, PgPolicy)).toBe(true); + expect(is(policy, CockroachPolicy)).toBe(true); expect(policy.name).toBe('test policy'); expect(policy.as).toBe('permissive'); expect(policy.for).toBe('all'); expect(policy.to).toBe('public'); - const dialect = new PgDialect(); + const dialect = new CockroachDialect(); expect(is(policy.using, SQL)).toBe(true); expect(dialect.sqlToQuery(policy.using!).sql).toBe('1=1'); expect(is(policy.withCheck, SQL)).toBe(true); @@ -5465,7 +5277,7 @@ export function tests() { } { - const policy = pgPolicy('test policy', { + const policy = cockroachPolicy('test policy', { to: 'custom value', }); @@ -5473,21 +5285,21 @@ export function tests() { } { - const p1 = pgPolicy('test policy'); - const p2 = pgPolicy('test policy 2', { + const p1 = cockroachPolicy('test policy'); + const p2 = cockroachPolicy('test policy 2', { as: 'permissive', for: 'all', to: 'public', using: sql`1=1`, withCheck: sql`1=1`, }); - const table = pgTable('table_with_policy', { - id: serial('id').primaryKey(), + const table = cockroachTable('table_with_policy', { + id: int4('id').primaryKey(), name: text('name').notNull(), - }, () => ({ + }, () => [ p1, p2, - })); + ]); const config = getTableConfig(table); expect(config.policies).toHaveLength(2); expect(config.policies[0]).toBe(p1); @@ -5495,65 +5307,15 @@ export function tests() { } }); - test('neon: policy', () => { - { - const policy = crudPolicy({ - read: true, - modify: true, - role: authenticatedRole, - }); - - for (const it of Object.values(policy)) { - expect(is(it, PgPolicy)).toBe(true); - expect(it?.to).toStrictEqual(authenticatedRole); - it?.using ? expect(it.using).toStrictEqual(sql`true`) : ''; - it?.withCheck ? expect(it.withCheck).toStrictEqual(sql`true`) : ''; - } - } - - { - const table = pgTable('name', { - id: integer('id'), - }, (t) => [ - index('name').on(t.id), - crudPolicy({ - read: true, - modify: true, - role: authenticatedRole, - }), - primaryKey({ columns: [t.id], name: 'custom' }), - ]); - - const { policies, indexes, primaryKeys } = getTableConfig(table); - - expect(policies.length).toBe(4); - expect(indexes.length).toBe(1); - expect(primaryKeys.length).toBe(1); - - expect(policies[0]?.name === 'crud-custom-policy-modify'); - expect(policies[1]?.name === 'crud-custom-policy-read'); - } - }); - - test('neon: neon_auth', () => { - const usersSyncTable = usersSync; - - const { columns, schema, name } = getTableConfig(usersSyncTable); - - expect(name).toBe('users_sync'); - expect(schema).toBe('neon_auth'); - expect(columns).toHaveLength(7); - }); - test('Enable RLS function', () => { - const usersWithRLS = pgTable('users', { - id: integer(), + const usersWithRLS = cockroachTable('users', { + id: int4(), }).enableRLS(); const config1 = getTableConfig(usersWithRLS); - const usersNoRLS = pgTable('users', { - id: integer(), + const usersNoRLS = cockroachTable('users', { + id: int4(), }); const config2 = getTableConfig(usersNoRLS); @@ -5562,49 +5324,11 @@ export function tests() { expect(config2.enableRLS).toBeFalsy(); }); - test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { - const { db } = ctx.pg; - - const users = pgTable('users_on_update', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - updatedAt: timestamp('updated_at', { mode: 'date' }).notNull().$onUpdate(() => sql`now()`), - }); - - await db.execute( - sql` - create table ${users} ( - "id" serial primary key, - "name" text not null, - "updated_at" timestamp(3) - ) - `, - ); - - const insertResp = await db.insert(users).values({ - name: 'John', - }).returning({ - updatedAt: users.updatedAt, - }); - await new Promise((resolve) => setTimeout(resolve, 1000)); - - const now = Date.now(); - await new Promise((resolve) => setTimeout(resolve, 1000)); - const updateResp = await db.update(users).set({ - name: 'John', - }).returning({ - updatedAt: users.updatedAt, - }); - - expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); - expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); - }); - test('$count separate', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const countTestTable = pgTable('count_test', { - id: integer('id').notNull(), + const countTestTable = cockroachTable('count_test', { + id: int4('id').notNull(), name: text('name').notNull(), }); @@ -5626,10 +5350,10 @@ export function tests() { }); test('$count embedded', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const countTestTable = pgTable('count_test', { - id: integer('id').notNull(), + const countTestTable = cockroachTable('count_test', { + id: int4('id').notNull(), name: text('name').notNull(), }); @@ -5658,10 +5382,10 @@ export function tests() { }); test('$count separate reuse', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const countTestTable = pgTable('count_test', { - id: integer('id').notNull(), + const countTestTable = cockroachTable('count_test', { + id: int4('id').notNull(), name: text('name').notNull(), }); @@ -5695,10 +5419,10 @@ export function tests() { }); test('$count embedded reuse', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const countTestTable = pgTable('count_test', { - id: integer('id').notNull(), + const countTestTable = cockroachTable('count_test', { + id: int4('id').notNull(), name: text('name').notNull(), }); @@ -5752,10 +5476,10 @@ export function tests() { }); test('$count separate with filters', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const countTestTable = pgTable('count_test', { - id: integer('id').notNull(), + const countTestTable = cockroachTable('count_test', { + id: int4('id').notNull(), name: text('name').notNull(), }); @@ -5777,10 +5501,10 @@ export function tests() { }); test('$count embedded with filters', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const countTestTable = pgTable('count_test', { - id: integer('id').notNull(), + const countTestTable = cockroachTable('count_test', { + id: int4('id').notNull(), name: text('name').notNull(), }); @@ -5809,18 +5533,18 @@ export function tests() { }); test('insert multiple rows into table with generated identity column', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const identityColumnsTable = pgTable('identity_columns_table', { - id: integer('id').generatedAlwaysAsIdentity(), - id1: integer('id1').generatedByDefaultAsIdentity(), + const identityColumnsTable = cockroachTable('identity_columns_table', { + id: int4('id').generatedAlwaysAsIdentity(), + id1: int4('id1').generatedByDefaultAsIdentity(), name: text('name').notNull(), }); // not passing identity columns await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( - sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, + sql`create table ${identityColumnsTable} ("id" int4 generated always as identity, "id1" int4 generated by default as identity, "name" text)`, ); let result = await db.insert(identityColumnsTable).values([ @@ -5838,7 +5562,7 @@ export function tests() { // passing generated by default as identity column await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( - sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, + sql`create table ${identityColumnsTable} ("id" int4 generated always as identity, "id1" int4 generated by default as identity, "name" text)`, ); result = await db.insert(identityColumnsTable).values([ @@ -5852,36 +5576,20 @@ export function tests() { { id: 2, id1: 5, name: 'Jane' }, { id: 3, id1: 5, name: 'Bob' }, ]); - - // passing all identity columns - await db.execute(sql`drop table if exists ${identityColumnsTable}`); - await db.execute( - sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, - ); - - result = await db.insert(identityColumnsTable).overridingSystemValue().values([ - { name: 'John', id: 2, id1: 3 }, - { name: 'Jane', id: 4, id1: 5 }, - { name: 'Bob', id: 4, id1: 5 }, - ]).returning(); - - expect(result).toEqual([ - { id: 2, id1: 3, name: 'John' }, - { id: 4, id1: 5, name: 'Jane' }, - { id: 4, id1: 5, name: 'Bob' }, - ]); }); test('insert as cte', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + await db.execute( + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text not null)`, + ); const sq1 = db.$with('sq').as( db.insert(users).values({ name: 'John' }).returning(), @@ -5902,17 +5610,17 @@ export function tests() { }); test('update as cte', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), name: text('name').notNull(), - age: integer('age').notNull(), + age: int4('age').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, age integer not null)`, + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text not null, age int4 not null)`, ); await db.insert(users).values([ @@ -5941,15 +5649,17 @@ export function tests() { }); test('delete as cte', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + await db.execute( + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text not null)`, + ); await db.insert(users).values([ { name: 'John' }, @@ -5977,15 +5687,17 @@ export function tests() { }); test('sql operator as cte', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; - const users = pgTable('users', { - id: serial('id').primaryKey(), + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + await db.execute( + sql`create table ${users} (id int4 not null primary key generated by default as identity, name text not null)`, + ); await db.insert(users).values([ { name: 'John' }, { name: 'Jane' }, @@ -6012,7 +5724,7 @@ export function tests() { }); test('cross join', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db .insert(usersTable) @@ -6046,7 +5758,7 @@ export function tests() { }); test('left join (lateral)', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db .insert(citiesTable) @@ -6081,7 +5793,7 @@ export function tests() { }); test('inner join (lateral)', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db .insert(citiesTable) @@ -6115,7 +5827,7 @@ export function tests() { }); test('cross join (lateral)', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db .insert(citiesTable) @@ -6187,43 +5899,289 @@ export function tests() { ]); }); + test('column.as', async (ctx) => { + const { db } = ctx.cockroach; + + const users = cockroachTable('users_column_as', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), + name: text('name').notNull(), + cityId: int4('city_id').references(() => cities.id), + }); + + const cities = cockroachTable('cities_column_as', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), + name: text('name').notNull(), + }); + + const ucView = cockroachView('cities_users_column_as_view').as((qb) => + qb.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)) + ); + + await db.execute(sql`CREATE TABLE ${cities} ( + "id" INT4 PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + "name" TEXT NOT NULL + );`); + + await db.execute(sql`CREATE TABLE ${users} ( + "id" INT4 GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + "name" TEXT NOT NULL, + "city_id" INT4 REFERENCES ${cities}("id") + );`); + + await db.execute( + sql`CREATE VIEW ${ucView} AS SELECT ${users.id} as "user_id", ${cities.id} as "city_id", ${users.name} as "user_name", ${cities.name} as "city_name" FROM ${users} LEFT JOIN ${cities} ON ${ + eq(cities.id, users.cityId) + };`, + ); + + const citiesInsRet = await db.insert(cities).values([{ + id: 1, + name: 'Firstistan', + }, { + id: 2, + name: 'Secondaria', + }]).returning({ + cityId: cities.id.as('city_id'), + cityName: cities.name.as('city_name'), + }); + + expect(citiesInsRet).toStrictEqual(expect.arrayContaining([{ + cityId: 1, + cityName: 'Firstistan', + }, { + cityId: 2, + cityName: 'Secondaria', + }])); + + const usersInsRet = await db.insert(users).values([{ id: 1, name: 'First', cityId: 1 }, { + id: 2, + name: 'Second', + cityId: 2, + }, { + id: 3, + name: 'Third', + }]).returning({ + userId: users.id.as('user_id'), + userName: users.name.as('users_name'), + userCityId: users.cityId, + }); + + expect(usersInsRet).toStrictEqual(expect.arrayContaining([{ userId: 1, userName: 'First', userCityId: 1 }, { + userId: 2, + userName: 'Second', + userCityId: 2, + }, { + userId: 3, + userName: 'Third', + userCityId: null, + }])); + + const joinSelectReturn = await db.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)); + + expect(joinSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewSelectReturn = await db.select().from(ucView); + + expect(viewSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewJoinReturn = await db.select({ + userId: ucView.userId.as('user_id_ucv'), + cityId: cities.id.as('city_id'), + userName: ucView.userName.as('user_name_ucv'), + cityName: cities.name.as('city_name'), + }).from(ucView).leftJoin(cities, eq(cities.id, ucView.cityId)); + + expect(viewJoinReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + }); + + test('select from a many subquery', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 2 }, + ]); + + const res = await db.select({ + population: db.select({ count: count().as('count') }).from(users2Table).where( + eq(users2Table.cityId, citiesTable.id), + ).as( + 'population', + ), + name: citiesTable.name, + }).from(citiesTable); + + expectTypeOf(res).toEqualTypeOf<{ + population: number; + name: string; + }[]>(); + + expect(res).toStrictEqual([{ + population: 1, + name: 'Paris', + }, { + population: 2, + name: 'London', + }]); + }); + + test('select from a one subquery', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 2 }, + ]); + + const res = await db.select({ + cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) + .as( + 'cityName', + ), + name: users2Table.name, + }).from(users2Table); + + expectTypeOf(res).toEqualTypeOf<{ + cityName: string; + name: string; + }[]>(); + + expect(res).toStrictEqual([{ + cityName: 'Paris', + name: 'John', + }, { + cityName: 'London', + name: 'Jane', + }, { + cityName: 'London', + name: 'Jack', + }]); + }); + + test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { + const { db } = ctx.cockroach; + + const users = cockroachTable('users_on_update', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), + name: text('name').notNull(), + updatedAt: timestamp('updated_at', { mode: 'date' }).notNull().$onUpdate(() => sql`now()`), + }); + + await db.execute(sql`DROP TABLE IF EXISTS ${users}`); + await db.execute(sql`CREATE TABLE ${users} ( + id INT4 PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, + name TEXT NOT NULL, + updated_at TIMESTAMPTZ NOT NULL + );`); + + const insertResp = await db.insert(users).values({ + name: 'John', + }).returning({ + updatedAt: users.updatedAt, + }); + await new Promise((resolve) => setTimeout(resolve, 1000)); + + const now = Date.now(); + await new Promise((resolve) => setTimeout(resolve, 1000)); + const updateResp = await db.update(users).set({ + name: 'John', + }).returning({ + updatedAt: users.updatedAt, + }); + + expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); + expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); + }); + test('all types', async (ctx) => { - const { db } = ctx.pg; + const { db } = ctx.cockroach; await db.execute(sql`CREATE TYPE "public"."en" AS ENUM('enVal1', 'enVal2');`); await db.execute(sql` CREATE TABLE "all_types" ( - "serial" serial NOT NULL, - "bigserial53" bigserial NOT NULL, - "bigserial64" bigserial, - "int" integer, - "bigint53" bigint, + "int4" int4 NOT NULL, + "bigint53" bigint NOT NULL, "bigint64" bigint, - "bigint_string" bigint, - "bool" boolean, - "bytea" bytea, + "bool" bool, + "boolean" bool, "char" char, - "cidr" "cidr", "date" date, "date_str" date, + "string" string, "double" double precision, + "float" float, "enum" "en", "inet" "inet", "interval" interval, - "json" json, "jsonb" jsonb, - "line" "line", - "line_tuple" "line", - "macaddr" "macaddr", - "macaddr8" "macaddr8", "numeric" numeric, "numeric_num" numeric, "numeric_big" numeric, - "point" "point", - "point_tuple" "point", "real" real, "smallint" smallint, - "smallserial" "smallserial" NOT NULL, "text" text, "time" time, "timestamp" timestamp, @@ -6232,31 +6190,20 @@ export function tests() { "timestamp_tz_str" timestamp with time zone, "uuid" uuid, "varchar" varchar, - "arrint" integer[], + "arrint" int4[], "arrbigint53" bigint[], "arrbigint64" bigint[], - "arrbigint_string" bigint[], - "arrbool" boolean[], - "arrbytea" bytea[], + "arrbool" bool[], "arrchar" char[], - "arrcidr" "cidr"[], "arrdate" date[], "arrdate_str" date[], "arrdouble" double precision[], "arrenum" "en"[], "arrinet" "inet"[], "arrinterval" interval[], - "arrjson" json[], - "arrjsonb" jsonb[], - "arrline" "line"[], - "arrline_tuple" "line"[], - "arrmacaddr" "macaddr"[], - "arrmacaddr8" "macaddr8"[], "arrnumeric" numeric[], "arrnumeric_num" numeric[], "arrnumeric_big" numeric[], - "arrpoint" "point"[], - "arrpoint_tuple" "point"[], "arrreal" real[], "arrsmallint" smallint[], "arrtext" text[], @@ -6266,55 +6213,38 @@ export function tests() { "arrtimestamp_str" timestamp[], "arrtimestamp_tz_str" timestamp with time zone[], "arruuid" uuid[], - "arrvarchar" varchar[] + "arrstring" string[], + "arrfloat" float[], + "arrvarchar" varchar[], + "bit" bit, + "varbit" varbit, + "arrbit" bit[], + "arrvarbit" varbit[], + "arrboolean" bool[] ); `); await db.insert(allTypesTable).values({ - serial: 1, - smallserial: 15, + int4: 1, bigint53: 9007199254740991, bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', - bigserial53: 9007199254740991, - bigserial64: 5044565289845416380n, bool: true, - bytea: Buffer.from('BYTES'), char: 'c', - cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', - inet: '192.168.0.1/24', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', date: new Date(1741743161623), dateStr: new Date(1741743161623).toISOString(), double: 15.35325689124218, enum: 'enVal1', - int: 621, - interval: '2 months ago', - json: { - str: 'strval', - arr: ['str', 10], - }, + inet: '192.168.0.1/24', + interval: '-2 months', jsonb: { str: 'strvalb', arr: ['strb', 11], }, - line: { - a: 1, - b: 2, - c: 3, - }, - lineTuple: [1, 2, 3], numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, - point: { - x: 24.5, - y: 49.6, - }, - pointTuple: [57.2, 94.3], real: 1.048596, - smallint: 10, + smallint: 15, text: 'TEXT STRING', time: '13:59:28', timestamp: new Date(1741743161623), @@ -6325,42 +6255,18 @@ export function tests() { varchar: 'C4-', arrbigint53: [9007199254740991], arrbigint64: [5044565289845416380n], - arrbigintString: ['5044565289845416380'], arrbool: [true], - arrbytea: [Buffer.from('BYTES')], arrchar: ['c'], - arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], arrinet: ['192.168.0.1/24'], - arrmacaddr: ['08:00:2b:01:02:03'], - arrmacaddr8: ['08:00:2b:01:02:03:04:05'], arrdate: [new Date(1741743161623)], arrdateStr: [new Date(1741743161623).toISOString()], arrdouble: [15.35325689124218], arrenum: ['enVal1'], arrint: [621], - arrinterval: ['2 months ago'], - arrjson: [{ - str: 'strval', - arr: ['str', 10], - }], - arrjsonb: [{ - str: 'strvalb', - arr: ['strb', 11], - }], - arrline: [{ - a: 1, - b: 2, - c: 3, - }], - arrlineTuple: [[1, 2, 3]], + arrinterval: ['-2 months'], arrnumeric: ['475452353476'], arrnumericNum: [9007199254740991], arrnumericBig: [5044565289845416380n], - arrpoint: [{ - x: 24.5, - y: 49.6, - }], - arrpointTuple: [[57.2, 94.3]], arrreal: [1.048596], arrsmallint: [10], arrtext: ['TEXT STRING'], @@ -6371,49 +6277,38 @@ export function tests() { arrtimestampTzStr: [new Date(1741743161623).toISOString()], arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], arrvarchar: ['C4-'], + string: 'TEXT STRING', + arrfloat: [1.12, 1.13], + arrstring: ['TEXT STRING', 'TEXT STRING1'], + float: 1.12, + arrbit: ['1'], + arrvarbit: ['1'], + arrboolean: [true, false], + boolean: true, + varbit: '1', + bit: '1', }); const rawRes = await db.select().from(allTypesTable); type ExpectedType = { - serial: number; - bigserial53: number; - bigserial64: bigint; - int: number | null; + int4: number | null; bigint53: number | null; bigint64: bigint | null; - bigintString: string | null; bool: boolean | null; - bytea: Buffer | null; char: string | null; - cidr: string | null; date: Date | null; dateStr: string | null; double: number | null; enum: 'enVal1' | 'enVal2' | null; inet: string | null; interval: string | null; - json: unknown; jsonb: unknown; - line: { - a: number; - b: number; - c: number; - } | null; - lineTuple: [number, number, number] | null; - macaddr: string | null; - macaddr8: string | null; numeric: string | null; numericNum: number | null; numericBig: bigint | null; - point: { - x: number; - y: number; - } | null; - pointTuple: [number, number] | null; real: number | null; smallint: number | null; - smallserial: number; text: string | null; time: string | null; timestamp: Date | null; @@ -6425,32 +6320,17 @@ export function tests() { arrint: number[] | null; arrbigint53: number[] | null; arrbigint64: bigint[] | null; - arrbigintString: string[] | null; arrbool: boolean[] | null; - arrbytea: Buffer[] | null; arrchar: string[] | null; - arrcidr: string[] | null; arrdate: Date[] | null; arrdateStr: string[] | null; arrdouble: number[] | null; arrenum: ('enVal1' | 'enVal2')[] | null; arrinet: string[] | null; arrinterval: string[] | null; - arrjson: unknown[] | null; - arrjsonb: unknown[] | null; - arrline: { - a: number; - b: number; - c: number; - }[] | null; - arrlineTuple: [number, number, number][] | null; - arrmacaddr: string[] | null; - arrmacaddr8: string[] | null; arrnumeric: string[] | null; arrnumericNum: number[] | null; arrnumericBig: bigint[] | null; - arrpoint: { x: number; y: number }[] | null; - arrpointTuple: [number, number][] | null; arrreal: number[] | null; arrsmallint: number[] | null; arrtext: string[] | null; @@ -6461,41 +6341,37 @@ export function tests() { arrtimestampTzStr: string[] | null; arruuid: string[] | null; arrvarchar: string[] | null; + string: string | null; + arrfloat: number[] | null; + arrstring: string[] | null; + float: number | null; + arrbit: string[] | null; + arrvarbit: string[] | null; + arrboolean: boolean[] | null; + boolean: boolean | null; + varbit: string | null; + bit: string | null; }[]; const expectedRes: ExpectedType = [ { - serial: 1, - bigserial53: 9007199254740991, - bigserial64: 5044565289845416380n, - int: 621, + int4: 1, bigint53: 9007199254740991, bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', bool: true, - bytea: Buffer.from('BYTES'), char: 'c', - cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', date: new Date('2025-03-12T00:00:00.000Z'), dateStr: '2025-03-12', double: 15.35325689124218, enum: 'enVal1', inet: '192.168.0.1/24', interval: '-2 mons', - json: { str: 'strval', arr: ['str', 10] }, jsonb: { arr: ['strb', 11], str: 'strvalb' }, - line: { a: 1, b: 2, c: 3 }, - lineTuple: [1, 2, 3], - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, - point: { x: 24.5, y: 49.6 }, - pointTuple: [57.2, 94.3], real: 1.048596, - smallint: 10, - smallserial: 15, + smallint: 15, text: 'TEXT STRING', time: '13:59:28', timestamp: new Date('2025-03-12T01:32:41.623Z'), @@ -6507,28 +6383,17 @@ export function tests() { arrint: [621], arrbigint53: [9007199254740991], arrbigint64: [5044565289845416380n], - arrbigintString: ['5044565289845416380'], arrbool: [true], - arrbytea: [Buffer.from('BYTES')], arrchar: ['c'], - arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], arrdate: [new Date('2025-03-12T00:00:00.000Z')], arrdateStr: ['2025-03-12'], arrdouble: [15.35325689124218], arrenum: ['enVal1'], arrinet: ['192.168.0.1/24'], arrinterval: ['-2 mons'], - arrjson: [{ str: 'strval', arr: ['str', 10] }], - arrjsonb: [{ arr: ['strb', 11], str: 'strvalb' }], - arrline: [{ a: 1, b: 2, c: 3 }], - arrlineTuple: [[1, 2, 3]], - arrmacaddr: ['08:00:2b:01:02:03'], - arrmacaddr8: ['08:00:2b:01:02:03:04:05'], arrnumeric: ['475452353476'], arrnumericNum: [9007199254740991], arrnumericBig: [5044565289845416380n], - arrpoint: [{ x: 24.5, y: 49.6 }], - arrpointTuple: [[57.2, 94.3]], arrreal: [1.048596], arrsmallint: [10], arrtext: ['TEXT STRING'], @@ -6539,6 +6404,16 @@ export function tests() { arrtimestampTzStr: ['2025-03-12 01:32:41.623+00'], arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], arrvarchar: ['C4-'], + arrfloat: [1.12, 1.13], + arrstring: ['TEXT STRING', 'TEXT STRING1'], + float: 1.12, + string: 'TEXT STRING', + arrbit: ['1'], + arrboolean: [true, false], + arrvarbit: ['1'], + bit: '1', + boolean: true, + varbit: '1', }, ]; @@ -6546,634 +6421,22 @@ export function tests() { expect(rawRes).toStrictEqual(expectedRes); }); - test('RQB v2 simple find first - no rows', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find first - multiple rows', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find first - with relation', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - id: 1, - createdAt: date, - name: 'First', - posts: [{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find first - placeholders', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_first_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find many - no rows', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find many - multiple rows', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); + test('generated always columns', async (ctx) => { + const { db } = ctx.cockroach; - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }, { - id: 1, - createdAt: date, - name: 'First', - }]); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find many - with relation', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }]); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find many - placeholders', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_many_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }]); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find first - no rows', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find first - multiple rows', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find first - with relation', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - id: 1, - createdAt: date, - name: 'First', - posts: [{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find first - placeholders', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_first_tx_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find many - no rows', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find many - multiple rows', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }, { - id: 1, - createdAt: date, - name: 'First', - }]); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find many - with relation', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }]); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find many - placeholders', async (ctx) => { - const { db } = ctx.pg; - try { - await init(db); - - const date = new Date(120000); + await db.execute(sql` + CREATE TABLE "gen_columns" ( + id int4, + gen1 int4 generated always as (1) stored + ); + `); - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_many_placeholders'); - - const result = await query.execute({ - filter: 2, - }); + const genColumns = cockroachTable('gen_columns', { + id: int4(), + gen1: int4().generatedAlwaysAs(1), + }); - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }]); - }); - } finally { - await clear(db); - } + expect(db.insert(genColumns).values({ id: 1 })).resolves; }); }); } diff --git a/integration-tests/tests/cockroach/custom.test.ts b/integration-tests/tests/cockroach/custom.test.ts new file mode 100644 index 0000000000..50e6060c40 --- /dev/null +++ b/integration-tests/tests/cockroach/custom.test.ts @@ -0,0 +1,826 @@ +import retry from 'async-retry'; +import type Docker from 'dockerode'; +import { asc, eq, sql } from 'drizzle-orm'; +import type { NodeCockroachDatabase } from 'drizzle-orm/cockroach'; +import { drizzle } from 'drizzle-orm/cockroach'; +import { alias, cockroachTable, cockroachTableCreator, customType, int4, text } from 'drizzle-orm/cockroach-core'; +import { migrate } from 'drizzle-orm/cockroach/migrator'; +import { Client } from 'pg'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { randomString } from '~/utils'; +import { createDockerDB } from './common'; + +const ENABLE_LOGGING = false; + +let db: NodeCockroachDatabase; +let client: Client; +let container: Docker.Container | undefined; + +beforeAll(async () => { + let connectionString; + if (process.env['COCKROACH_CONNECTION_STRING']) { + connectionString = process.env['COCKROACH_CONNECTION_STRING']; + } else { + const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); + connectionString = conStr; + container = contrainerObj; + } + client = await retry(async () => { + client = new Client(connectionString); + await client.connect(); + return client; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = drizzle({ client, logger: ENABLE_LOGGING }); +}); + +afterAll(async () => { + await client?.end(); + await container?.stop().catch(console.error); +}); + +beforeEach((ctx) => { + ctx.cockroach = { + db, + }; +}); + +const customInt = customType<{ data: number; notNull: true; default: true; driverData: number | string }>({ + dataType() { + return 'integer'; + }, + fromDriver(value: number | string): number { + if (typeof value === 'string') { + return Number.parseInt(value); + } + + return value; + }, +}); + +const customText = customType<{ data: string }>({ + dataType() { + return 'text'; + }, +}); + +const customBoolean = customType<{ data: boolean }>({ + dataType() { + return 'boolean'; + }, +}); + +const customJsonb = (name: string) => + customType<{ data: TData; driverData: string }>({ + dataType() { + return 'jsonb'; + }, + toDriver(value: TData): string { + return JSON.stringify(value); + }, + })(name); + +const customTimestamp = customType< + { data: Date; driverData: string; config: { withTimezone: boolean; precision?: number } } +>({ + dataType(config) { + const precision = config?.precision === undefined ? '' : ` (${config.precision})`; + return `timestamp${precision}${config?.withTimezone ? ' with time zone' : ''}`; + }, + fromDriver(value: string): Date { + return new Date(value); + }, +}); + +const usersTable = cockroachTable('users', { + id: customInt('id').primaryKey(), // generated + name: customText('name').notNull(), + verified: customBoolean('verified').notNull().default(false), + jsonb: customJsonb('jsonb'), + createdAt: customTimestamp('created_at', { withTimezone: true }).notNull().default(sql`now()`), +}); + +const usersMigratorTable = cockroachTable('users12', { + id: int4('id').primaryKey().generatedAlwaysAsIdentity(), + name: text('name').notNull(), + email: text('email').notNull(), +}); + +beforeEach(async (ctx) => { + const { db } = ctx.cockroach; + await db.execute(sql`drop database defaultdb;`); + await db.execute(sql`create database defaultdb;`); + await db.execute( + sql` + create table users ( + id integer primary key generated by default as identity, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); +}); + +test('select all fields', async (ctx) => { + const { db } = ctx.cockroach; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); + + expect(result).toEqual([{ + id: 1, + name: 'John', + verified: false, + jsonb: null, + createdAt: result[0]!.createdAt, + }]); +}); + +test('select sql', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select typed sql', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('insert returning sql', async (ctx) => { + const { db } = ctx.cockroach; + + const users = await db.insert(usersTable).values({ name: 'John' }).returning({ + name: sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('delete returning sql', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ + name: sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('update returning sql', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ + name: sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ name: 'JANE' }]); +}); + +test('update with returning all fields', async (ctx) => { + const { db } = ctx.cockroach; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + + expect(users).toEqual([{ + id: 1, + name: 'Jane', + verified: false, + jsonb: null, + createdAt: users[0]!.createdAt, + }]); +}); + +test('update with returning partial', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ + name: usersTable.name, + }); + + expect(users).toEqual([{ name: 'Jane' }]); +}); + +test('delete with returning all fields', async (ctx) => { + const { db } = ctx.cockroach; + + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + + expect(users).toEqual([{ + id: 1, + name: 'John', + verified: false, + jsonb: null, + createdAt: users[0]!.createdAt, + }]); +}); + +test('delete with returning partial', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ + name: usersTable.name, + }); + + expect(users).toEqual([{ name: 'John' }]); +}); + +test('insert + select', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ + id: 1, + name: 'John', + verified: false, + jsonb: null, + createdAt: result[0]!.createdAt, + }]); + + await db.insert(usersTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('json insert', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('insert with overridden default values', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ + id: 1, + name: 'John', + verified: true, + jsonb: null, + createdAt: result[0]!.createdAt, + }]); +}); + +test('insert many', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable); + + expect(result).toEqual([ + { name: 'John', jsonb: null, verified: false }, + { name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { name: 'Jane', jsonb: null, verified: false }, + { name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('insert many with returning', async (ctx) => { + const { db } = ctx.cockroach; + + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]) + .returning({ + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }); + + expect(result).toEqual([ + { name: 'John', jsonb: null, verified: false }, + { name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { name: 'Jane', jsonb: null, verified: false }, + { name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('select with group by as field', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test('select with group by as sql + column', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by as column + sql', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by complex query', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('build query', async (ctx) => { + const { db } = ctx.cockroach; + + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', + params: [], + }); +}); + +test('insert sql', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('partial join with alias', async (ctx) => { + const { db } = ctx.cockroach; + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); +}); + +test('full join with alias', async (ctx) => { + const { db } = ctx.cockroach; + + const cockroachTable = cockroachTableCreator((name) => `prefixed_${name}`); + + const users = cockroachTable('users', { + id: int4('id').primaryKey().generatedByDefaultAsIdentity(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute( + sql`create table ${users} (id int4 generated by default as identity primary key, name text not null)`, + ); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('insert with spaces', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ name: 'Jo h n' }]); +}); + +test('prepared statement', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ name: 'John' }); + const statement = db.select({ + name: usersTable.name, + }).from(usersTable) + .prepare('statement1'); + const result = await statement.execute(); + + expect(result).toEqual([{ name: 'John' }]); +}); + +test('prepared statement reuse', async (ctx) => { + const { db } = ctx.cockroach; + + const stmt = db.insert(usersTable).values({ + verified: true, + name: sql.placeholder('name'), + }).prepare('stmt2'); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db.select({ + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable); + + expect(result).toEqual([ + { name: 'John 0', verified: true }, + { name: 'John 1', verified: true }, + { name: 'John 2', verified: true }, + { name: 'John 3', verified: true }, + { name: 'John 4', verified: true }, + { name: 'John 5', verified: true }, + { name: 'John 6', verified: true }, + { name: 'John 7', verified: true }, + { name: 'John 8', verified: true }, + { name: 'John 9', verified: true }, + ]); +}); + +test('prepared statement with placeholder in .where', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare('stmt3'); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('prepared statement with placeholder in .limit', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'John2' }]); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) + .prepare('stmt_limit'); + + const result = await stmt.execute({ id: 1, limit: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + expect(result).toHaveLength(1); +}); + +test('prepared statement with placeholder in .offset', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'John1' }]); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .offset(sql.placeholder('offset')) + .prepare('stmt_offset'); + + const result = await stmt.execute({ offset: 1 }); + + expect(result).toEqual([{ id: 2, name: 'John1' }]); +}); + +test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/cockroach' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom schema', async () => { + const customSchema = randomString(); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/cockroach', migrationsSchema: customSchema }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); + expect(rowCount! > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); +}); + +test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { migrationsFolder: './drizzle2/cockroach', migrationsTable: customTable }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(rowCount! > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +}); + +test('migrator : migrate with custom table and custom schema', async () => { + const customTable = randomString(); + const customSchema = randomString(); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { + migrationsFolder: './drizzle2/cockroach', + migrationsTable: customTable, + migrationsSchema: customSchema, + }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute( + sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, + ); + expect(rowCount! > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute(sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); + expect(result.rows).toEqual([{ id: '1', name: 'John' }]); +}); + +test('insert via db.execute + returning', async () => { + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier(usersTable.name.name) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted.rows).toEqual([{ id: '1', name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute>( + db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.rows).toEqual([{ id: '1', name: 'John' }]); +}); + +test('build query insert with onConflict do update', async (ctx) => { + const { db } = ctx.cockroach; + + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('build query insert with onConflict do update / multiple columns', async (ctx) => { + const { db } = ctx.cockroach; + + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('build query insert with onConflict do nothing', async (ctx) => { + const { db } = ctx.cockroach; + + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing() + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', + params: ['John', '["foo","bar"]'], + }); +}); + +test('build query insert with onConflict do nothing + target', async (ctx) => { + const { db } = ctx.cockroach; + + const query = db.insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing({ target: usersTable.id }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', + params: ['John', '["foo","bar"]'], + }); +}); + +test('insert with onConflict do update', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable) + .values({ name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); +}); + +test('insert with onConflict do nothing', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable) + .values({ name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoNothing(); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert with onConflict do nothing + target', async (ctx) => { + const { db } = ctx.cockroach; + + await db.insert(usersTable) + .values({ name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoNothing({ target: usersTable.id }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); diff --git a/integration-tests/tests/extensions/postgis/pg.test.ts b/integration-tests/tests/extensions/postgis/pg.test.ts index 7daec5c760..a0a61447c0 100644 --- a/integration-tests/tests/extensions/postgis/pg.test.ts +++ b/integration-tests/tests/extensions/postgis/pg.test.ts @@ -1,19 +1,14 @@ -import Docker from 'dockerode'; import { defineRelations, sql } from 'drizzle-orm'; import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; import { drizzle } from 'drizzle-orm/node-postgres'; import { bigserial, geometry, line, pgTable, point } from 'drizzle-orm/pg-core'; -import getPort from 'get-port'; import pg from 'pg'; -import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; const { Client } = pg; const ENABLE_LOGGING = false; -let pgContainer: Docker.Container; -let docker: Docker; let client: pg.Client; let db: NodePgDatabase; @@ -37,66 +32,19 @@ const relations = defineRelations({ items }, (r) => ({ }, })); -async function createDockerDB(): Promise { - const inDocker = (docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgis/postgis:16-3.4'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - inDocker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - beforeAll(async () => { - const connectionString = process.env['PG_POSTGIS_CONNECTION_STRING'] ?? (await createDockerDB()); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = new Client(connectionString); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); - throw lastError; - } - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); + const connectionString = process.env['PG_POSTGIS_CONNECTION_STRING']; + if (!connectionString) throw new Error('PG_POSTGIS_CONNECTION_STRING is not set in env variables'); + + client = new Client(connectionString); + await client.connect(); + db = drizzle({ client, logger: ENABLE_LOGGING, relations }); await db.execute(sql`CREATE EXTENSION IF NOT EXISTS postgis;`); }); afterAll(async () => { await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); }); beforeEach(async () => { diff --git a/integration-tests/tests/extensions/postgis/postgres.test.ts b/integration-tests/tests/extensions/postgis/postgres.test.ts index 6dfd206045..462f5939cd 100644 --- a/integration-tests/tests/extensions/postgis/postgres.test.ts +++ b/integration-tests/tests/extensions/postgis/postgres.test.ts @@ -1,16 +1,11 @@ -import Docker from 'dockerode'; import { defineRelations, sql } from 'drizzle-orm'; import { bigserial, geometry, line, pgTable, point } from 'drizzle-orm/pg-core'; import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js'; -import getPort from 'get-port'; import postgres, { type Sql } from 'postgres'; -import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; const ENABLE_LOGGING = false; -let pgContainer: Docker.Container; -let docker: Docker; let client: Sql; let db: PostgresJsDatabase; @@ -34,71 +29,24 @@ const relations = defineRelations({ items }, (r) => ({ }, })); -async function createDockerDB(): Promise { - const inDocker = (docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgis/postgis:16-3.4'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - inDocker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, +beforeAll(async () => { + const connectionString = process.env['PG_POSTGIS_CONNECTION_STRING']; + if (!connectionString) throw new Error('PG_POSTGIS_CONNECTION_STRING is not set in env variables'); + + client = postgres(connectionString, { + max: 1, + onnotice: () => { + // disable notices }, }); - - await pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -beforeAll(async () => { - const connectionString = process.env['PG_POSTGIS_CONNECTION_STRING'] ?? (await createDockerDB()); - - const sleep = 1000; - let timeLeft = 20000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = postgres(connectionString, { - max: 1, - onnotice: () => { - // disable notices - }, - }); - await client`select 1`; - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); - throw lastError; - } - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); + await client`select 1`; + db = drizzle({ client, logger: ENABLE_LOGGING, relations }); await db.execute(sql`CREATE EXTENSION IF NOT EXISTS postgis;`); }); afterAll(async () => { await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); }); beforeEach(async () => { diff --git a/integration-tests/tests/extensions/vectors/pg.test.ts b/integration-tests/tests/extensions/vectors/pg.test.ts index bd4782d977..52dcb9f147 100644 --- a/integration-tests/tests/extensions/vectors/pg.test.ts +++ b/integration-tests/tests/extensions/vectors/pg.test.ts @@ -1,19 +1,14 @@ -import Docker from 'dockerode'; import { defineRelations, eq, hammingDistance, jaccardDistance, l2Distance, not, sql } from 'drizzle-orm'; import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; import { drizzle } from 'drizzle-orm/node-postgres'; import { bigserial, bit, halfvec, pgTable, sparsevec, vector } from 'drizzle-orm/pg-core'; -import getPort from 'get-port'; import pg from 'pg'; -import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; const { Client } = pg; const ENABLE_LOGGING = false; -let pgContainer: Docker.Container; -let docker: Docker; let client: pg.Client; let db: NodePgDatabase; @@ -34,66 +29,19 @@ const relations = defineRelations({ items }, (r) => ({ }, })); -async function createDockerDB(): Promise { - const inDocker = (docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'pgvector/pgvector:pg16'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - inDocker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - beforeAll(async () => { - const connectionString = process.env['PG_VECTOR_CONNECTION_STRING'] ?? (await createDockerDB()); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = new Client(connectionString); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); - throw lastError; - } - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); + const connectionString = process.env['PG_VECTOR_CONNECTION_STRING']; + if (!connectionString) throw new Error('PG_VECTOR_CONNECTION_STRING is not set in env variables'); + + client = new Client(connectionString); + await client.connect(); + db = drizzle({ client, logger: ENABLE_LOGGING, relations }); await db.execute(sql`CREATE EXTENSION IF NOT EXISTS vector;`); }); afterAll(async () => { await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); }); beforeEach(async () => { diff --git a/integration-tests/tests/extensions/vectors/postgres.test.ts b/integration-tests/tests/extensions/vectors/postgres.test.ts index 41483f149c..77d3d8f64c 100644 --- a/integration-tests/tests/extensions/vectors/postgres.test.ts +++ b/integration-tests/tests/extensions/vectors/postgres.test.ts @@ -1,16 +1,11 @@ -import Docker from 'dockerode'; import { defineRelations, eq, hammingDistance, jaccardDistance, l2Distance, not, sql } from 'drizzle-orm'; import { bigserial, bit, halfvec, pgTable, sparsevec, vector } from 'drizzle-orm/pg-core'; import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js'; -import getPort from 'get-port'; import postgres, { type Sql } from 'postgres'; -import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; const ENABLE_LOGGING = false; -let pgContainer: Docker.Container; -let docker: Docker; let client: Sql; let db: PostgresJsDatabase; @@ -31,71 +26,24 @@ const relations = defineRelations({ items }, (r) => ({ }, })); -async function createDockerDB(): Promise { - const inDocker = (docker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'pgvector/pgvector:pg16'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - inDocker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, +beforeAll(async () => { + const connectionString = process.env['PG_VECTOR_CONNECTION_STRING']; + if (!connectionString) throw new Error('PG_VECTOR_CONNECTION_STRING is not set in env variables'); + + client = postgres(connectionString, { + max: 1, + onnotice: () => { + // disable notices }, }); - - await pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -beforeAll(async () => { - const connectionString = process.env['PG_VECTOR_CONNECTION_STRING'] ?? (await createDockerDB()); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = postgres(connectionString, { - max: 1, - onnotice: () => { - // disable notices - }, - }); - await client`select 1`; - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); - throw lastError; - } - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); + await client`select 1`; + db = drizzle({ client, logger: ENABLE_LOGGING, relations }); await db.execute(sql`CREATE EXTENSION IF NOT EXISTS vector;`); }); afterAll(async () => { await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); }); beforeEach(async () => { diff --git a/integration-tests/tests/gel/createInstance.ts b/integration-tests/tests/gel/createInstance.ts deleted file mode 100644 index 6cc03e9476..0000000000 --- a/integration-tests/tests/gel/createInstance.ts +++ /dev/null @@ -1,36 +0,0 @@ -import Docker from 'dockerode'; -import getPort from 'get-port'; -import { v4 as uuidV4 } from 'uuid'; -import 'zx/globals'; - -export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { - const docker = new Docker(); - const port = await getPort({ port: 5656 }); - const image = 'geldata/gel:6'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - const gelContainer = await docker.createContainer({ - Image: image, - Env: [ - 'GEL_CLIENT_SECURITY=insecure_dev_mode', - 'GEL_SERVER_SECURITY=insecure_dev_mode', - 'GEL_CLIENT_TLS_SECURITY=no_host_verification', - 'GEL_SERVER_PASSWORD=password', - ], - name: `drizzle-integration-tests-${uuidV4()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5656/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await gelContainer.start(); - - return { connectionString: `gel://admin:password@localhost:${port}/main`, container: gelContainer }; -} diff --git a/integration-tests/tests/gel/gel-custom.test.ts b/integration-tests/tests/gel/gel-custom.test.ts index a7b9476605..ab46f40bc3 100644 --- a/integration-tests/tests/gel/gel-custom.test.ts +++ b/integration-tests/tests/gel/gel-custom.test.ts @@ -1,56 +1,27 @@ -import retry from 'async-retry'; -import type Docker from 'dockerode'; import { asc, eq, sql } from 'drizzle-orm'; import { drizzle, type GelJsDatabase } from 'drizzle-orm/gel'; import { alias, customType, gelTable, gelTableCreator } from 'drizzle-orm/gel-core'; -import * as gel from 'gel'; +import createClient, { type Client } from 'gel'; import { afterAll, afterEach, beforeAll, beforeEach, expect, test } from 'vitest'; -import { createDockerDB } from './createInstance'; import 'zx/globals'; import relations from './relations'; -$.quiet = true; - -const ENABLE_LOGGING = false; - let db: GelJsDatabase; -let client: gel.Client; -let container: Docker.Container | undefined; +let client: Client; let dsn: string; -const tlsSecurity = '--tls-security=insecure'; - -function sleep(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} +const tlsSecurity = 'insecure'; beforeAll(async () => { - let connectionString; - if (process.env['GEL_CONNECTION_STRING']) { - connectionString = process.env['GEL_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } - await sleep(15 * 1000); - client = await retry(async () => { - client = gel.createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); + const connectionString = process.env['GEL_CONNECTION_STRING']; + if (!connectionString) throw new Error('gel GEL_CONNECTION_STRING is not set. '); + + client = createClient({ dsn: connectionString, tlsSecurity }); + db = drizzle({ client, relations }); dsn = connectionString; - await $`gel query "CREATE TYPE default::users_custom { + await $`gel query "reset schema to initial ; + CREATE TYPE default::users_custom { create property id1: int16 { create constraint exclusive; }; @@ -59,22 +30,18 @@ beforeAll(async () => { SET default := false; }; create property json: json; - };" ${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::prefixed_users_custom { + }; + CREATE TYPE default::prefixed_users_custom { create property id1: int16 { create constraint exclusive; }; create required property name: str; -};" ${tlsSecurity} --dsn=${dsn}`; + }; + " --tls-security=${tlsSecurity} --dsn=${dsn}`; }); afterAll(async () => { - await $`gel query "DROP TYPE default::users_custom;" ${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::prefixed_users_custom;" ${tlsSecurity} --dsn=${dsn}`; - await client?.close(); - await container?.stop().catch(console.error); }); beforeEach((ctx) => { @@ -84,8 +51,10 @@ beforeEach((ctx) => { }); afterEach(async () => { - await $`gel query "DELETE default::users_custom;" ${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::prefixed_users_custom;" ${tlsSecurity} --dsn=${dsn}`; + await Promise.all([ + client.querySQL(`DELETE FROM "users_custom";`), + client.querySQL(`DELETE FROM "prefixed_users_custom";`), + ]); }); const customInteger = customType<{ data: number; notNull: false; default: false }>({ diff --git a/integration-tests/tests/gel/gel-ext.test.ts b/integration-tests/tests/gel/gel-ext.test.ts index c1829c77cd..fef6af6583 100644 --- a/integration-tests/tests/gel/gel-ext.test.ts +++ b/integration-tests/tests/gel/gel-ext.test.ts @@ -1,5 +1,3 @@ -import retry from 'async-retry'; -import type Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import { relations } from 'drizzle-orm/_relations'; import { drizzle, type GelJsDatabase } from 'drizzle-orm/gel'; @@ -7,11 +5,6 @@ import { foreignKey, gelSchema, gelTable, text, timestamptz, uniqueIndex, uuid } import createClient, { type Client } from 'gel'; import { afterAll, afterEach, beforeAll, describe, expect, test } from 'vitest'; import 'zx/globals'; -import { createDockerDB } from './createInstance'; - -$.quiet = true; - -const ENABLE_LOGGING = false; export const extauth = gelSchema('ext::auth'); @@ -55,48 +48,25 @@ let client: Client; let db: GelJsDatabase; const tlsSecurity: string = 'insecure'; let dsn: string; -let container: Docker.Container | undefined; - -function sleep(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} beforeAll(async () => { - let connectionString; - if (process.env['GEL_CONNECTION_STRING']) { - connectionString = process.env['GEL_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } - await sleep(15 * 1000); - client = await retry(() => { - client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, schema: { user, identityInExtauth, userRelations } }); + const connectionString = process.env['GEL_CONNECTION_STRING']; + if (!connectionString) throw new Error('gel GEL_CONNECTION_STRING is not set. '); + + client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); + db = drizzle({ client, schema: { user, identityInExtauth, userRelations } }); dsn = connectionString; }); afterAll(async () => { await client?.close().catch(console.error); - await container?.stop().catch(console.error); }); describe('extensions tests group', async () => { beforeAll(async () => { - await $`gel query 'CREATE EXTENSION pgcrypto VERSION "1.3"; + await $`gel query 'reset schema to initial ; + CREATE EXTENSION pgcrypto VERSION "1.3"; CREATE EXTENSION auth VERSION "1.0"; CREATE TYPE default::User { CREATE REQUIRED LINK identity: ext::auth::Identity; @@ -115,7 +85,7 @@ describe('extensions tests group', async () => { }); afterEach(async () => { - await $`gel query "DELETE default::User;" --tls-security=${tlsSecurity} --dsn=${dsn}`; + await client.querySQL(`DELETE FROM "User";`); }); test('check that you can query from ext::auth schema in gel', async () => { diff --git a/integration-tests/tests/relational/gel.relations.ts b/integration-tests/tests/gel/gel.relations.ts similarity index 98% rename from integration-tests/tests/relational/gel.relations.ts rename to integration-tests/tests/gel/gel.relations.ts index 85cc4e0861..c9e613b32d 100644 --- a/integration-tests/tests/relational/gel.relations.ts +++ b/integration-tests/tests/gel/gel.relations.ts @@ -1,5 +1,5 @@ import { defineRelations } from 'drizzle-orm'; -import * as schema from './gel.schema.ts'; +import * as schema from './gel.schema'; export default defineRelations(schema, (r) => ({ usersView: { diff --git a/integration-tests/tests/relational/gel.test.ts b/integration-tests/tests/gel/gel.rels.test.ts similarity index 96% rename from integration-tests/tests/relational/gel.test.ts rename to integration-tests/tests/gel/gel.rels.test.ts index 57cade01be..c8de68c7c0 100644 --- a/integration-tests/tests/relational/gel.test.ts +++ b/integration-tests/tests/gel/gel.rels.test.ts @@ -5,14 +5,9 @@ import { drizzle, type GelJsDatabase } from 'drizzle-orm/gel'; import { alias } from 'drizzle-orm/gel-core'; import createClient, { type Client } from 'gel'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './gel.relations.ts'; -import * as schema from './gel.schema.ts'; -import 'zx'; -import retry from 'async-retry'; -import { createDockerDB } from '~/gel/createInstance.ts'; - -$.quiet = true; -const ENABLE_LOGGING = false; +import relations from './gel.relations'; +import * as schema from './gel.schema'; +import 'zx/globals'; const { usersTable, @@ -35,42 +30,24 @@ declare module 'vitest' { } } -let globalDocker: Docker | undefined; -let gelContainer: Docker.Container; +let globalDocker: Docker | undefined; // oxlint-disable-line no-unassigned-vars let client: Client; let db: GelJsDatabase; const tlsSecurity: string = 'insecure'; let dsn: string; beforeAll(async () => { - let connectionString; - if (process.env['GEL_CONNECTION_STRING']) { - connectionString = process.env['GEL_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - gelContainer = contrainerObj; - } - await sleep(15 * 1000); - client = await retry(() => { - client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations, casing: 'snake_case' }); + const connectionString = process.env['GEL_CONNECTION_STRING']; + if (!connectionString) throw new Error('gel GEL_CONNECTION_STRING is not set. '); + + client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); + db = drizzle({ client, relations, casing: 'snake_case' }); dsn = connectionString; - await $`gel query "CREATE SCALAR TYPE default::users_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::users { + await $`gel query 'reset schema to initial ; + CREATE SCALAR TYPE default::users_id EXTENDING sequence; + CREATE TYPE default::users { create property custom_id: default::users_id { create constraint exclusive; }; @@ -79,28 +56,25 @@ beforeAll(async () => { SET default := false; }; create PROPERTY invited_by: int64; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE default::groups_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::groups { + }; + CREATE SCALAR TYPE default::groups_id EXTENDING sequence; + CREATE TYPE default::groups { create property custom_id: default::groups_id { create constraint exclusive; }; create required property name: str; create property description: str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE default::users_to_groups_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::users_to_groups { + }; + CREATE SCALAR TYPE default::users_to_groups_id EXTENDING sequence; + CREATE TYPE default::users_to_groups { create property custom_id: default::users_to_groups_id { create constraint exclusive; }; create required property user_id: int32; create required property group_id: int32; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE default::posts_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::posts { + }; + CREATE SCALAR TYPE default::posts_id EXTENDING sequence; + CREATE TYPE default::posts { create property custom_id: default::posts_id { create constraint exclusive; }; @@ -109,10 +83,9 @@ beforeAll(async () => { create required property created_at: datetime { SET default := datetime_of_statement(); }; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE default::comments_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::comments { + }; + CREATE SCALAR TYPE default::comments_id EXTENDING sequence; + CREATE TYPE default::comments { create property custom_id: default::comments_id { create constraint exclusive; }; @@ -122,10 +95,9 @@ beforeAll(async () => { create required property created_at: datetime { SET default := datetime_of_statement(); }; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE default::comment_likes_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::comment_likes { + }; + CREATE SCALAR TYPE default::comment_likes_id EXTENDING sequence; + CREATE TYPE default::comment_likes { create property custom_id: default::comment_likes_id { create constraint exclusive; }; @@ -134,12 +106,10 @@ beforeAll(async () => { create required property created_at: datetime { SET default := datetime_of_statement(); }; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE MODULE rqb_test_schema;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE rqb_test_schema::users_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE rqb_test_schema::users { + }; + CREATE MODULE rqb_test_schema; + CREATE SCALAR TYPE rqb_test_schema::users_id EXTENDING sequence; + CREATE TYPE rqb_test_schema::users { create property custom_id: rqb_test_schema::users_id { create constraint exclusive; }; @@ -148,28 +118,25 @@ beforeAll(async () => { SET default := false; }; create PROPERTY invited_by: int64; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE rqb_test_schema::groups_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE rqb_test_schema::groups { + }; + CREATE SCALAR TYPE rqb_test_schema::groups_id EXTENDING sequence; + CREATE TYPE rqb_test_schema::groups { create property custom_id: rqb_test_schema::groups_id { create constraint exclusive; }; create required property name: str; create property description: str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE rqb_test_schema::users_to_groups_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE rqb_test_schema::users_to_groups { + }; + CREATE SCALAR TYPE rqb_test_schema::users_to_groups_id EXTENDING sequence; + CREATE TYPE rqb_test_schema::users_to_groups { create property custom_id: rqb_test_schema::users_to_groups_id { create constraint exclusive; }; create required property user_id: int32; create required property group_id: int32; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE SCALAR TYPE rqb_test_schema::posts_id EXTENDING sequence;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE rqb_test_schema::posts { + }; + CREATE SCALAR TYPE rqb_test_schema::posts_id EXTENDING sequence; + CREATE TYPE rqb_test_schema::posts { create property custom_id: rqb_test_schema::posts_id { create constraint exclusive; }; @@ -178,65 +145,40 @@ beforeAll(async () => { create required property created_at: datetime { SET default := datetime_of_statement(); }; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; + }; + ' --tls-security=${tlsSecurity} --dsn=${dsn}`; }); afterAll(async () => { - await $`gel query "DROP TYPE default::users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_to_groups;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::groups;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::posts;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::comments;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::comment_likes;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE rqb_test_schema::users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE rqb_test_schema::users_to_groups;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE rqb_test_schema::groups;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE rqb_test_schema::posts;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "DROP SCALAR TYPE default::users_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE default::groups_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE default::users_to_groups_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE default::posts_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE default::comments_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE default::comment_likes_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE rqb_test_schema::users_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE rqb_test_schema::groups_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE rqb_test_schema::users_to_groups_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP SCALAR TYPE rqb_test_schema::posts_id;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "DROP MODULE rqb_test_schema;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await client?.close().catch(console.error); - await gelContainer?.stop().catch(console.error); }); beforeEach(async (ctx) => { ctx.geljsDb = db; ctx.gelClient = client; ctx.docker = globalDocker!; - ctx.gelContainer = gelContainer; - - await $`gel query "SELECT sequence_reset(introspect default::users_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect default::groups_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect default::users_to_groups_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect default::posts_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect default::comments_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect default::comment_likes_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect rqb_test_schema::users_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect rqb_test_schema::groups_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect rqb_test_schema::users_to_groups_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "SELECT sequence_reset(introspect rqb_test_schema::posts_id);" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "DELETE default::users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::users_to_groups;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::groups;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::posts;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::comments;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::comment_likes;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE rqb_test_schema::users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE rqb_test_schema::users_to_groups;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE rqb_test_schema::groups;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE rqb_test_schema::posts;" --tls-security=${tlsSecurity} --dsn=${dsn}`; + + await $`gel query "SELECT sequence_reset(introspect default::users_id); + SELECT sequence_reset(introspect default::groups_id); + SELECT sequence_reset(introspect default::users_to_groups_id); + SELECT sequence_reset(introspect default::posts_id); + SELECT sequence_reset(introspect default::comments_id); + SELECT sequence_reset(introspect default::comment_likes_id); + SELECT sequence_reset(introspect rqb_test_schema::users_id); + SELECT sequence_reset(introspect rqb_test_schema::groups_id); + SELECT sequence_reset(introspect rqb_test_schema::users_to_groups_id); + SELECT sequence_reset(introspect rqb_test_schema::posts_id); + DELETE default::users; + DELETE default::users_to_groups; + DELETE default::groups; + DELETE default::posts; + DELETE default::comments; + DELETE default::comment_likes; + DELETE rqb_test_schema::users; + DELETE rqb_test_schema::users_to_groups; + DELETE rqb_test_schema::groups; + DELETE rqb_test_schema::posts; + " --tls-security=${tlsSecurity} --dsn=${dsn}`; }); test('[Find Many] Get users with posts', async (t) => { diff --git a/integration-tests/tests/relational/gel.schema.ts b/integration-tests/tests/gel/gel.schema.ts similarity index 100% rename from integration-tests/tests/relational/gel.schema.ts rename to integration-tests/tests/gel/gel.schema.ts diff --git a/integration-tests/tests/gel/gel.test.ts b/integration-tests/tests/gel/gel.test.ts index a8e18abdb5..1d008b5737 100644 --- a/integration-tests/tests/gel/gel.test.ts +++ b/integration-tests/tests/gel/gel.test.ts @@ -1,5 +1,3 @@ -import retry from 'async-retry'; -import type Docker from 'dockerode'; import { and, arrayContained, @@ -64,7 +62,6 @@ import { union, unionAll, unique, - uniqueKeyName, uuid as gelUuid, } from 'drizzle-orm/gel-core'; import createClient, { @@ -77,29 +74,25 @@ import createClient, { RelativeDuration, } from 'gel'; import { v4 as uuidV4 } from 'uuid'; -import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, expectTypeOf, test, vi } from 'vitest'; +import { afterEach, beforeAll, beforeEach, describe, expect, expectTypeOf, test, vi } from 'vitest'; import { Expect } from '~/utils'; import 'zx/globals'; import { TestCache, TestGlobalCache } from './cache'; -import { createDockerDB } from './createInstance'; import relations from './relations'; -import { clear, init, rqbPost, rqbUser } from './schema'; +import { rqbPost, rqbUser } from './schema'; $.quiet = true; -const ENABLE_LOGGING = false; - let client: Client; let db: GelJsDatabase; let dbGlobalCached: GelJsDatabase; let cachedDb: GelJsDatabase; const tlsSecurity: string = 'insecure'; let dsn: string; -let container: Docker.Container | undefined; -function sleep(ms: number) { - return new Promise((resolve) => setTimeout(resolve, ms)); -} +// function sleep(ms: number) { +// return new Promise((resolve) => setTimeout(resolve, ms)); +// } declare module 'vitest' { interface TestContext { @@ -214,44 +207,15 @@ const usersMySchemaTable = mySchema.table('users', { }); beforeAll(async () => { - let connectionString; - if (process.env['GEL_CONNECTION_STRING']) { - connectionString = process.env['GEL_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } - await sleep(15 * 1000); - client = await retry(() => { - client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestCache(), - }); - dbGlobalCached = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestGlobalCache(), - }); - - dsn = connectionString; -}); + const url = process.env['GEL_CONNECTION_STRING']; + if (!url) throw new Error('GEL_CONNECTION_STRING is not set'); -afterAll(async () => { - await client?.close().catch(console.error); - await container?.stop().catch(console.error); + client = createClient({ dsn: url, tlsSecurity: 'insecure' }); + db = drizzle({ client, relations }); + cachedDb = drizzle({ client, cache: new TestCache() }); + dbGlobalCached = drizzle({ client, cache: new TestGlobalCache() }); + + dsn = url; }); beforeEach((ctx) => { @@ -270,344 +234,279 @@ describe('some', async () => { await ctx.cachedGel.dbGlobalCached.$cache?.invalidate({ tables: 'users' }); }); beforeAll(async () => { + await $`gel database wipe --tls-security=${tlsSecurity} --dsn=${dsn} --non-interactive`; + await $`gel query "CREATE TYPE default::users { - create property id1: int16 { - create constraint exclusive; - }; - create required property name: str; - create required property verified: bool { - SET default := false; - }; - create PROPERTY json: json; - create required property created_at: datetime { - SET default := datetime_of_statement(); - }; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::users_with_cities { - create property id1: int16 { - create constraint exclusive; - }; - create required property name: str; - create required property cityId: int32; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::users_with_undefined { - create property id1: int16 { - create constraint exclusive; - }; - create property name: str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_insert_select { - create property id1: int16 { - create constraint exclusive; - }; - create property name: str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE MODULE mySchema;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE mySchema::users { - create property id1: int16; - create required property name: str; - create required property verified: bool { - SET default := false; - }; - create PROPERTY json: json; - create required property created_at: datetime { - SET default := datetime_of_statement(); - }; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::orders { - CREATE PROPERTY id1 -> int16; - CREATE REQUIRED PROPERTY region -> str; - CREATE REQUIRED PROPERTY product -> str; - CREATE REQUIRED PROPERTY amount -> int64; - CREATE REQUIRED PROPERTY quantity -> int64; - }; - " --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_distinct { - create required property id1 -> int16; - create required property name -> str; - create required property age -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users3 { - create property id1 -> int16; - create required property name -> str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::cities { - create required property id1 -> int16; - create required property name -> str; - create property state -> str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::courses { - create required property id1 -> int16; - create required property name -> str; - create property categoryId -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::course_categories { - create required property id1 -> int16; - create required property name -> str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::jsontest { - create property id1 -> int16; - create required property json -> json; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::sal_emp { - create property name -> str; - create property pay_by_quarter -> array; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::some_new_users { - create required property id1 -> int16; - create required property name -> str; - create property cityId -> int32; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::aggregate_table { - create property id1: int16; - create required property name: str; - create property a: int16; - create property b: int16; - create property c: int16; - create PROPERTY nullOnly: int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::prefixed_users { - CREATE PROPERTY id1 -> int16; - CREATE REQUIRED PROPERTY name -> str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::empty_insert_single { - CREATE PROPERTY id1 -> int16; - CREATE REQUIRED PROPERTY name -> str { - SET default := 'Dan'; - }; - CREATE PROPERTY state -> str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::empty_insert_multiple { - CREATE PROPERTY id1 -> int16; - CREATE REQUIRED PROPERTY name -> str { - SET default := 'Dan'; - }; - CREATE PROPERTY state -> str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::products { - CREATE PROPERTY id1 -> int16; - CREATE REQUIRED PROPERTY price -> decimal; - CREATE REQUIRED PROPERTY cheap -> bool { - SET default := false - }; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::myprefix_test_prefixed_table_with_unique_name { - create property id1 -> int16; - create required property name -> str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::metric_entry { - create required property id1 -> uuid; - create required property createdAt -> datetime; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_transactions { - create required property id1 -> int16; - create required property balance -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::products_transactions { - create required property id1 -> int16; - create required property price -> int16; - create required property stock -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_transactions_rollback { - create required property id1 -> int16; - create required property balance -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_nested_transactions { - create required property id1 -> int16; - create required property balance -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::internal_staff { - create required property userId -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::custom_user { - create required property id1 -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::ticket { - create required property staffId -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::posts { - create required property id1 -> int16; - create property tags -> array; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE dates_column { - create property datetimeColumn -> datetime; - create property local_datetimeColumn -> cal::local_datetime; - create property local_dateColumn -> cal::local_date; - create property local_timeColumn -> cal::local_time; - - create property durationColumn -> duration; - create property relative_durationColumn -> cal::relative_duration; - create property dateDurationColumn -> cal::date_duration; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE users_with_insert { - create required property username -> str; - create required property admin -> bool; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE users_test_with_and_without_timezone { - create required property username -> str; - create required property admin -> bool; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::arrays_tests { - create property id1: int16 { - create constraint exclusive; - }; - create property tags: array; - create required property numbers: array; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_on_update { - create required property id1 -> int16; - create required property name -> str; - create property update_counter -> int16 { - SET default := 1 - }; - create property always_null -> str; - create property updated_at -> datetime; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::json_table { - create PROPERTY json: json; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::notifications { - create required property id1 -> int16; - create required property sentAt: datetime { - SET default := datetime_of_statement(); - }; - create property message -> str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::user_notifications { - create required property userId -> int16; - create required property notificationId -> int16; - create property categoryId -> int16; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users1 { - create required property id1: int16; - create required property name: str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::users2 { - create required property id1: int16; - create required property name: str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::count_test { - create required property id1: int16; - create required property name: str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_with_names { - create required property id1: int16; - create required property firstName: str; - create required property lastName: str; - create required property admin: bool; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_with_age { - create required property id1: int16; - create required property name: str; - create required property age: int32; - create required property city: str; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - - await $`gel query "CREATE TYPE default::users_on_update_sql { + create property id1: int16 { + create constraint exclusive; + }; + create required property name: str; + create required property verified: bool { + SET default := false; + }; + create PROPERTY json: json; + create required property created_at: datetime { + SET default := datetime_of_statement(); + }; + }; + CREATE TYPE default::users_with_cities { + create property id1: int16 { + create constraint exclusive; + }; + create required property name: str; + create property cityId: int32; + }; + CREATE TYPE default::users_with_undefined { + create property id1: int16 { + create constraint exclusive; + }; + create property name: str; + }; + CREATE TYPE default::users_insert_select { + create property id1: int16 { + create constraint exclusive; + }; + create property name: str; + }; + CREATE MODULE mySchema; + CREATE TYPE mySchema::users { + create property id1: int16; + create required property name: str; + create required property verified: bool { + SET default := false; + }; + create PROPERTY json: json; + create required property created_at: datetime { + SET default := datetime_of_statement(); + }; + }; + CREATE TYPE default::orders { + CREATE PROPERTY id1 -> int16; + CREATE REQUIRED PROPERTY region -> str; + CREATE REQUIRED PROPERTY product -> str; + CREATE REQUIRED PROPERTY amount -> int64; + CREATE REQUIRED PROPERTY quantity -> int64; + }; + CREATE TYPE default::users_distinct { + create required property id1 -> int16; + create required property name -> str; + create required property age -> int16; + }; + CREATE TYPE default::users3 { + create property id1 -> int16; + create required property name -> str; + }; + CREATE TYPE default::cities { + create required property id1 -> int16; + create required property name -> str; + create property state -> str; + }; + CREATE TYPE default::courses { + create required property id1 -> int16; + create required property name -> str; + create property categoryId -> int16; + }; + CREATE TYPE default::course_categories { + create required property id1 -> int16; + create required property name -> str; + }; + CREATE TYPE default::jsontest { + create property id1 -> int16; + create required property json -> json; + }; + CREATE TYPE default::sal_emp { + create property name -> str; + create property pay_by_quarter -> array; + }; + CREATE TYPE default::some_new_users { + create required property id1 -> int16; + create required property name -> str; + create property cityId -> int32; + }; + CREATE TYPE default::aggregate_table { + create property id1: int16; + create required property name: str; + create property a: int16; + create property b: int16; + create property c: int16; + create PROPERTY nullOnly: int16; + }; + CREATE TYPE default::prefixed_users { + CREATE PROPERTY id1 -> int16; + CREATE REQUIRED PROPERTY name -> str; + }; + CREATE TYPE default::empty_insert_single { + CREATE PROPERTY id1 -> int16; + CREATE REQUIRED PROPERTY name -> str { + SET default := 'Dan'; + }; + CREATE PROPERTY state -> str; + }; + CREATE TYPE default::empty_insert_multiple { + CREATE PROPERTY id1 -> int16; + CREATE REQUIRED PROPERTY name -> str { + SET default := 'Dan'; + }; + CREATE PROPERTY state -> str; + }; + CREATE TYPE default::products { + CREATE PROPERTY id1 -> int16; + CREATE REQUIRED PROPERTY price -> decimal; + CREATE REQUIRED PROPERTY cheap -> bool { + SET default := false + }; + }; + CREATE TYPE default::myprefix_test_prefixed_table_with_unique_name { + create property id1 -> int16; + create required property name -> str; + }; + CREATE TYPE default::metric_entry { + create required property id1 -> uuid; + create required property createdAt -> datetime; + }; + CREATE TYPE default::users_transactions { + create required property id1 -> int16; + create required property balance -> int16; + }; + CREATE TYPE default::products_transactions { + create required property id1 -> int16; + create required property price -> int16; + create required property stock -> int16; + }; + CREATE TYPE default::users_transactions_rollback { + create required property id1 -> int16; + create required property balance -> int16; + }; + CREATE TYPE default::users_nested_transactions { + create required property id1 -> int16; + create required property balance -> int16; + }; + CREATE TYPE default::internal_staff { + create required property userId -> int16; + }; + CREATE TYPE default::custom_user { + create required property id1 -> int16; + }; + CREATE TYPE default::ticket { + create required property staffId -> int16; + }; + CREATE TYPE default::posts { + create required property id1 -> int16; + create property tags -> array; + }; + CREATE TYPE dates_column { + create property datetimeColumn -> datetime; + create property local_datetimeColumn -> cal::local_datetime; + create property local_dateColumn -> cal::local_date; + create property local_timeColumn -> cal::local_time; + create property durationColumn -> duration; + create property relative_durationColumn -> cal::relative_duration; + create property dateDurationColumn -> cal::date_duration; + }; + CREATE TYPE users_with_insert { + create required property username -> str; + create required property admin -> bool; + }; + CREATE TYPE users_test_with_and_without_timezone { + create required property username -> str; + create required property admin -> bool; + }; + CREATE TYPE default::arrays_tests { + create property id1: int16 { + create constraint exclusive; + }; + create property tags: array; + create required property numbers: array; + }; + CREATE TYPE default::users_on_update { + create required property id1 -> int16; + create required property name -> str; + create property update_counter -> int16 { + SET default := 1 + }; + create property always_null -> str; + create property updated_at -> datetime; + }; + CREATE TYPE default::json_table { + create PROPERTY json: json; + }; + CREATE TYPE default::notifications { + create required property id1 -> int16; + create required property sentAt: datetime { + SET default := datetime_of_statement(); + }; + create property message -> str; + }; + CREATE TYPE default::user_notifications { + create required property userId -> int16; + create required property notificationId -> int16; + create property categoryId -> int16; + }; + CREATE TYPE default::users1 { + create required property id1: int16; + create required property name: str; + }; + CREATE TYPE default::users2 { + create required property id1: int16; + create required property name: str; + }; + CREATE TYPE default::count_test { + create required property id1: int16; + create required property name: str; + }; + CREATE TYPE default::users_with_names { + create required property id1: int16; + create required property firstName: str; + create required property lastName: str; + create required property admin: bool; + }; + CREATE TYPE default::users_with_age { + create required property id1: int16; + create required property name: str; + create required property age: int32; + create required property city: str; + }; + CREATE TYPE default::user_rqb_test { + create property custom_id: int32 { + create constraint exclusive; + }; + create property name: str; + create required property created_at -> datetime; + }; + CREATE TYPE default::post_rqb_test { + create property custom_id: int32 { + create constraint exclusive; + }; + create required property user_id: int32; + create property content: str; + create required property created_at -> datetime; + }; + CREATE TYPE default::users_on_update_sql { create required property id1: int16; create required property name: str; create required property updated_at: datetime; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; + }; + " --tls-security=${tlsSecurity} --dsn=${dsn}`; }); afterEach(async () => { - await $`gel query "DELETE default::users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::prefixed_users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::some_new_users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::orders;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::cities;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::users_on_update;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::aggregate_table;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE mySchema::users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::count_test;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::users1;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::users2;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::jsontest;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::users_on_update_sql;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - }); - - afterAll(async () => { - await $`gel query "DROP TYPE default::users" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_with_cities" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_with_undefined " --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_insert_select" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE mySchema::users" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::orders" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_distinct" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users3" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::cities" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::courses" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::course_categories" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::jsontest" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::sal_emp" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::some_new_users" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::aggregate_table" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::prefixed_users" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::empty_insert_single" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::empty_insert_multiple" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::products" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::myprefix_test_prefixed_table_with_unique_name" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::metric_entry" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_transactions" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::products_transactions" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_transactions_rollback" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_nested_transactions" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::internal_staff" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::custom_user" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::ticket" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::posts" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE dates_column" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE users_with_insert" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE users_test_with_and_without_timezone" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::arrays_tests" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_on_update" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::json_table" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::notifications" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::user_notifications" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users1" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users2" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::count_test" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_with_names" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP MODULE mySchema;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE users_with_age;" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::users_on_update_sql;" --tls-security=${tlsSecurity} --dsn=${dsn}`; + await Promise.all([ + client.querySQL(`DELETE FROM "users";`), + client.querySQL(`DELETE FROM "prefixed_users";`), + client.querySQL(`DELETE FROM "some_new_users";`), + client.querySQL(`DELETE FROM "orders";`), + client.querySQL(`DELETE FROM "cities";`), + client.querySQL(`DELETE FROM "users_on_update";`), + client.querySQL(`DELETE FROM "aggregate_table";`), + client.querySQL(`DELETE FROM "count_test"`), + client.querySQL(`DELETE FROM "users1"`), + client.querySQL(`DELETE FROM "users2"`), + client.querySQL(`DELETE FROM "jsontest"`), + client.querySQL(`DELETE FROM "user_rqb_test"`), + client.querySQL(`DELETE FROM "post_rqb_test"`), + client.querySQL(`DELETE FROM "mySchema"."users";`), + client.querySQL(`DELETE FROM "users_on_update_sql";`), + ]); }); async function setupSetOperationTest(db: GelJsDatabase) { @@ -649,10 +548,7 @@ describe('some', async () => { name: text('name').notNull(), state: text('state'), }, - (t) => ({ - f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), - f1: unique('custom_name1').on(t.name, t.state), - }), + (t) => [unique('custom_name').on(t.name, t.state).nullsNotDistinct(), unique('custom_name1').on(t.name, t.state)], ); const tableConfig = getTableConfig(cities1Table); @@ -680,7 +576,7 @@ describe('some', async () => { const columnName = tableConfig.columns.find((it) => it.name === 'name'); - expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.uniqueName).toBe(undefined); expect(columnName?.isUnique).toBe(true); const columnState = tableConfig.columns.find((it) => it.name === 'state'); @@ -701,9 +597,7 @@ describe('some', async () => { name: text('name').notNull(), state: text('state'), }, - (t) => ({ - f: foreignKey({ foreignColumns: [t.id1], columns: [t.id1], name: 'custom_fk' }), - }), + (t) => [foreignKey({ foreignColumns: [t.id1], columns: [t.id1], name: 'custom_fk' })], ); const tableConfig = getTableConfig(table); @@ -720,9 +614,7 @@ describe('some', async () => { name: text('name').notNull(), state: text('state'), }, - (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - }), + (t) => [primaryKey({ columns: [t.id, t.name], name: 'custom_pk' })], ); const tableConfig = getTableConfig(table); @@ -4779,10 +4671,10 @@ describe('some', async () => { id: integer('id').primaryKey(), name: text('name').notNull(), }, - () => ({ + () => [ p1, p2, - }), + ], ); const config = getTableConfig(table); expect(config.policies).toHaveLength(2); @@ -5116,34 +5008,309 @@ describe('some', async () => { test('RQB v2 simple find first - no rows', async (ctx) => { const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - const result = await db.query.rqbUser.findFirst(); + const result = await db.query.rqbUser.findFirst(); - expect(result).toStrictEqual(undefined); - } finally { - await clear(tlsSecurity, dsn); - } + expect(result).toStrictEqual(undefined); }); test('RQB v2 simple find first - multiple rows', async (ctx) => { const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - const date = new Date(12000); + const date = new Date(12000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const result = await db.query.rqbUser.findFirst({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual({ + _id: expect.stringMatching(/(.*)/), + id: 2, + createdAt: date, + name: 'Second', + }); + }); + + test('RQB v2 simple find first - with relation', async (ctx) => { + const { db } = ctx.gel; + + const date = new Date(12000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + const result = await db.query.rqbUser.findFirst({ + with: { + posts: { + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); - await db.insert(rqbUser).values([{ + expect(result).toStrictEqual({ + _id: expect.stringMatching(/(.*)/), + id: 1, + createdAt: date, + name: 'First', + posts: [{ + _id: expect.stringMatching(/(.*)/), id: 1, + userId: 1, createdAt: date, - name: 'First', + content: null, }, { + _id: expect.stringMatching(/(.*)/), id: 2, + userId: 1, createdAt: date, - name: 'Second', - }]); + content: 'Has message this time', + }], + }); + }); + + test('RQB v2 simple find first - placeholders', async (ctx) => { + const { db } = ctx.gel; + + const date = new Date(12000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const query = db.query.rqbUser.findFirst({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare('rqb_v2_find_first_placeholders'); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual({ + _id: expect.stringMatching(/(.*)/), + id: 2, + createdAt: date, + name: 'Second', + }); + }); + + test('RQB v2 simple find many - no rows', async (ctx) => { + const { db } = ctx.gel; + + const result = await db.query.rqbUser.findMany(); + + expect(result).toStrictEqual([]); + }); + + test('RQB v2 simple find many - multiple rows', async (ctx) => { + const { db } = ctx.gel; + + const date = new Date(12000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const result = await db.query.rqbUser.findMany({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual([{ + _id: expect.stringMatching(/(.*)/), + id: 2, + createdAt: date, + name: 'Second', + }, { + _id: expect.stringMatching(/(.*)/), + id: 1, + createdAt: date, + name: 'First', + }]); + }); + + test('RQB v2 simple find many - with relation', async (ctx) => { + const { db } = ctx.gel; + + const date = new Date(12000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + const result = await db.query.rqbPost.findMany({ + with: { + author: true, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual([{ + _id: expect.stringMatching(/(.*)/), + id: 1, + userId: 1, + createdAt: date, + content: null, + author: { + _id: expect.stringMatching(/(.*)/), + id: 1, + createdAt: date, + name: 'First', + }, + }, { + _id: expect.stringMatching(/(.*)/), + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + author: { + _id: expect.stringMatching(/(.*)/), + id: 1, + createdAt: date, + name: 'First', + }, + }]); + }); + + test('RQB v2 simple find many - placeholders', async (ctx) => { + const { db } = ctx.gel; + + const date = new Date(12000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const query = db.query.rqbUser.findMany({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare('rqb_v2_find_many_placeholders'); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual([{ + _id: expect.stringMatching(/(.*)/), + id: 2, + createdAt: date, + name: 'Second', + }]); + }); + + test('RQB v2 transaction find first - no rows', async (ctx) => { + const { db } = ctx.gel; + + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findFirst(); + expect(result).toStrictEqual(undefined); + }); + }); + + test('RQB v2 transaction find first - multiple rows', async (ctx) => { + const { db } = ctx.gel; + + const date = new Date(12000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { const result = await db.query.rqbUser.findFirst({ orderBy: { id: 'desc', @@ -5156,40 +5323,37 @@ describe('some', async () => { createdAt: date, name: 'Second', }); - } finally { - await clear(tlsSecurity, dsn); - } + }); }); - test('RQB v2 simple find first - with relation', async (ctx) => { + test('RQB v2 transaction find first - with relation', async (ctx) => { const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - const date = new Date(12000); + const date = new Date(12000); - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + await db.transaction(async (db) => { const result = await db.query.rqbUser.findFirst({ with: { posts: { @@ -5222,28 +5386,25 @@ describe('some', async () => { content: 'Has message this time', }], }); - } finally { - await clear(tlsSecurity, dsn); - } + }); }); - test('RQB v2 simple find first - placeholders', async (ctx) => { + test('RQB v2 transaction find first - placeholders', async (ctx) => { const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - const date = new Date(12000); + const date = new Date(12000); - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + await db.transaction(async (db) => { const query = db.query.rqbUser.findFirst({ where: { id: { @@ -5253,7 +5414,7 @@ describe('some', async () => { orderBy: { id: 'asc', }, - }).prepare('rqb_v2_find_first_placeholders'); + }).prepare('rqb_v2_find_first_tx_placeholders'); const result = await query.execute({ filter: 2, @@ -5265,41 +5426,35 @@ describe('some', async () => { createdAt: date, name: 'Second', }); - } finally { - await clear(tlsSecurity, dsn); - } + }); }); - test('RQB v2 simple find many - no rows', async (ctx) => { + test('RQB v2 transaction find many - no rows', async (ctx) => { const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); + await db.transaction(async (db) => { const result = await db.query.rqbUser.findMany(); expect(result).toStrictEqual([]); - } finally { - await clear(tlsSecurity, dsn); - } + }); }); - test('RQB v2 simple find many - multiple rows', async (ctx) => { + test('RQB v2 transaction find many - multiple rows', async (ctx) => { const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - const date = new Date(12000); + const date = new Date(12000); - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + await db.transaction(async (db) => { const result = await db.query.rqbUser.findMany({ orderBy: { id: 'desc', @@ -5317,40 +5472,37 @@ describe('some', async () => { createdAt: date, name: 'First', }]); - } finally { - await clear(tlsSecurity, dsn); - } + }); }); - test('RQB v2 simple find many - with relation', async (ctx) => { + test('RQB v2 transaction find many - with relation', async (ctx) => { const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - const date = new Date(12000); + const date = new Date(12000); - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + await db.transaction(async (db) => { const result = await db.query.rqbPost.findMany({ with: { author: true, @@ -5385,28 +5537,25 @@ describe('some', async () => { name: 'First', }, }]); - } finally { - await clear(tlsSecurity, dsn); - } + }); }); - test('RQB v2 simple find many - placeholders', async (ctx) => { + test('RQB v2 transaction find many - placeholders', async (ctx) => { const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - const date = new Date(12000); + const date = new Date(12000); - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + await db.transaction(async (db) => { const query = db.query.rqbUser.findMany({ where: { id: { @@ -5428,350 +5577,13 @@ describe('some', async () => { createdAt: date, name: 'Second', }]); - } finally { - await clear(tlsSecurity, dsn); - } - }); - - test('RQB v2 transaction find first - no rows', async (ctx) => { - const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - }); - } finally { - await clear(tlsSecurity, dsn); - } - }); - - test('RQB v2 transaction find first - multiple rows', async (ctx) => { - const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - - const date = new Date(12000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - _id: expect.stringMatching(/(.*)/), - id: 2, - createdAt: date, - name: 'Second', - }); - }); - } finally { - await clear(tlsSecurity, dsn); - } - }); - - test('RQB v2 transaction find first - with relation', async (ctx) => { - const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - - const date = new Date(12000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - _id: expect.stringMatching(/(.*)/), - id: 1, - createdAt: date, - name: 'First', - posts: [{ - _id: expect.stringMatching(/(.*)/), - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - _id: expect.stringMatching(/(.*)/), - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - }); - } finally { - await clear(tlsSecurity, dsn); - } - }); - - test('RQB v2 transaction find first - placeholders', async (ctx) => { - const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - - const date = new Date(12000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_first_tx_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - _id: expect.stringMatching(/(.*)/), - id: 2, - createdAt: date, - name: 'Second', - }); - }); - } finally { - await clear(tlsSecurity, dsn); - } - }); - - test('RQB v2 transaction find many - no rows', async (ctx) => { - const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - }); - } finally { - await clear(tlsSecurity, dsn); - } - }); - - test('RQB v2 transaction find many - multiple rows', async (ctx) => { - const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - - const date = new Date(12000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - _id: expect.stringMatching(/(.*)/), - id: 2, - createdAt: date, - name: 'Second', - }, { - _id: expect.stringMatching(/(.*)/), - id: 1, - createdAt: date, - name: 'First', - }]); - }); - } finally { - await clear(tlsSecurity, dsn); - } - }); - - test('RQB v2 transaction find many - with relation', async (ctx) => { - const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - - const date = new Date(12000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - _id: expect.stringMatching(/(.*)/), - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - _id: expect.stringMatching(/(.*)/), - id: 1, - createdAt: date, - name: 'First', - }, - }, { - _id: expect.stringMatching(/(.*)/), - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - _id: expect.stringMatching(/(.*)/), - id: 1, - createdAt: date, - name: 'First', - }, - }]); - }); - } finally { - await clear(tlsSecurity, dsn); - } - }); - - test('RQB v2 transaction find many - placeholders', async (ctx) => { - const { db } = ctx.gel; - try { - await init(tlsSecurity, dsn); - - const date = new Date(12000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_many_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual([{ - _id: expect.stringMatching(/(.*)/), - id: 2, - createdAt: date, - name: 'Second', - }]); - }); - } finally { - await clear(tlsSecurity, dsn); - } + }); }); test('test force invalidate', async (ctx) => { const { db } = ctx.cachedGel; - const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); + using spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); await db.$cache?.invalidate({ tables: 'users' }); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); @@ -5780,11 +5592,11 @@ describe('some', async () => { const { db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); @@ -5797,11 +5609,11 @@ describe('some', async () => { const { db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(); @@ -5814,11 +5626,11 @@ describe('some', async () => { const { db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); @@ -5841,11 +5653,11 @@ describe('some', async () => { const { db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); @@ -5863,11 +5675,11 @@ describe('some', async () => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); @@ -5880,11 +5692,11 @@ describe('some', async () => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); @@ -5897,11 +5709,11 @@ describe('some', async () => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); @@ -5914,11 +5726,11 @@ describe('some', async () => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ autoInvalidate: false }); @@ -5941,11 +5753,11 @@ describe('some', async () => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); @@ -6013,4 +5825,91 @@ describe('some', async () => { // @ts-expect-error expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); }); + + test('column.as', async (ctx) => { + const { db } = ctx.gel; + + const users = gelTable('users_with_cities', { + id: integer('id1').primaryKey(), + name: text('name').notNull(), + cityId: integer('cityId').references(() => cities.id), + }); + + const cities = gelTable('cities', { + id: integer('id1').primaryKey(), + name: text('name').notNull(), + }); + + await db.delete(users); + await db.delete(cities); + + const citiesInsRet = await db.insert(cities).values([{ + id: 1, + name: 'Firstistan', + }, { + id: 2, + name: 'Secondaria', + }]).returning({ + cityId: cities.id.as('city_id'), + cityName: cities.name.as('city_name'), + }); + + expect(citiesInsRet).toStrictEqual(expect.arrayContaining([{ + cityId: 1, + cityName: 'Firstistan', + }, { + cityId: 2, + cityName: 'Secondaria', + }])); + + const usersInsRet = await db.insert(users).values([{ id: 1, name: 'First', cityId: 1 }, { + id: 2, + name: 'Second', + cityId: 2, + }, { + id: 3, + name: 'Third', + }]).returning({ + userId: users.id.as('user_id'), + userName: users.name.as('users_name'), + userCityId: users.cityId, + }); + + expect(usersInsRet).toStrictEqual(expect.arrayContaining([{ userId: 1, userName: 'First', userCityId: 1 }, { + userId: 2, + userName: 'Second', + userCityId: 2, + }, { + userId: 3, + userName: 'Third', + userCityId: null, + }])); + + const joinSelectReturn = await db.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)); + + expect(joinSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + await db.delete(users); + await db.delete(cities); + }); }); diff --git a/integration-tests/tests/gel/schema.ts b/integration-tests/tests/gel/schema.ts index 33d119be5f..59d3994e63 100644 --- a/integration-tests/tests/gel/schema.ts +++ b/integration-tests/tests/gel/schema.ts @@ -1,8 +1,5 @@ import { sql } from 'drizzle-orm'; import { gelTable, integer, text, timestamptz, uuid } from 'drizzle-orm/gel-core'; -import 'zx'; - -$.quiet = true; export const rqbUser = gelTable('user_rqb_test', { _id: uuid('id').primaryKey().default(sql`uuid_generate_v4()`), @@ -18,28 +15,3 @@ export const rqbPost = gelTable('post_rqb_test', { content: text(), createdAt: timestamptz('created_at').notNull(), }); - -export const init = async (tlsSecurity: string, dsn: string) => { - await $`gel query "CREATE TYPE default::user_rqb_test { - create property custom_id: int32 { - create constraint exclusive; - }; - create property name: str; - create required property created_at -> datetime; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "CREATE TYPE default::post_rqb_test { - create property custom_id: int32 { - create constraint exclusive; - }; - create required property user_id: int32; - create property content: str; - create required property created_at -> datetime; - };" --tls-security=${tlsSecurity} --dsn=${dsn}`; -}; - -export const clear = async (tlsSecurity: string, dsn: string) => { - await $`gel query "DELETE default::user_rqb_test" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DELETE default::post_rqb_test" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::user_rqb_test" --tls-security=${tlsSecurity} --dsn=${dsn}`; - await $`gel query "DROP TYPE default::post_rqb_test" --tls-security=${tlsSecurity} --dsn=${dsn}`; -}; diff --git a/integration-tests/tests/imports/index.test.ts b/integration-tests/tests/imports/index.test.ts index c96b109422..f200aa8374 100644 --- a/integration-tests/tests/imports/index.test.ts +++ b/integration-tests/tests/imports/index.test.ts @@ -2,7 +2,6 @@ import { afterAll, expect, it } from 'vitest'; import 'zx/globals'; import * as fs from 'fs'; import path from 'path'; - $.verbose = false; const IMPORTS_FOLDER = 'tests/imports/files'; @@ -10,64 +9,81 @@ const IMPORTS_FOLDER = 'tests/imports/files'; const folderPath = '../drizzle-orm/dist/package.json'; const pj = JSON.parse(fs.readFileSync(folderPath, 'utf8')); -if (!fs.existsSync(IMPORTS_FOLDER)) { - fs.mkdirSync(IMPORTS_FOLDER); -} - -it('dynamic imports check for CommonJS', async () => { - const promises: ProcessPromise[] = []; - for (const [i, key] of Object.keys(pj['exports']).entries()) { - const o1 = path.join('drizzle-orm', key); - if ( - o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/pglite') - || o1.startsWith('drizzle-orm/expo-sqlite') || o1.startsWith('drizzle-orm/libsql/wasm') - || o1.startsWith('drizzle-orm/bun-sql') || o1.startsWith('drizzle-orm/tursodatabase/wasm') - ) { - continue; - } - fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.cjs`, 'requ'); - fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.cjs`, 'ire("' + o1 + '");\n', {}); +fs.mkdirSync(IMPORTS_FOLDER, { recursive: true }); - // fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'imp'); - // fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'ort "' + o1 + '"\n', {}); +afterAll(() => { + fs.rmdirSync(IMPORTS_FOLDER, { recursive: true }); +}); - promises.push( - $`node ${IMPORTS_FOLDER}/imports_${i}.cjs`.nothrow(), - // $`node ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), - ); +function chunk(arr: T[], size: number): T[][] { + const chunks: T[][] = []; + for (let i = 0; i < arr.length; i += size) { + chunks.push(arr.slice(i, i + size)); } - const results = await Promise.all(promises); + return chunks; +} - for (const result of results) { - expect(result.exitCode, result.message).toBe(0); +const promisesCJS: ProcessPromise[] = []; +for (const [i, key] of Object.keys(pj['exports']).entries()) { + const o1 = path.join('drizzle-orm', key); + if ( + o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/pglite') + || o1.startsWith('drizzle-orm/expo-sqlite') || o1.startsWith('drizzle-orm/libsql/wasm') + || o1.startsWith('drizzle-orm/bun-sql') || o1.startsWith('drizzle-orm/tursodatabase/wasm') + || o1.startsWith('drizzle-orm/prisma') + ) { + continue; } -}); + fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.cjs`, 'requ'); + fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.cjs`, 'ire("' + o1 + '");\n', {}); + + // fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'imp'); + // fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'ort "' + o1 + '"\n', {}); + + promisesCJS.push( + $`node ${IMPORTS_FOLDER}/imports_${i}.cjs`.nothrow(), + // $`node ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), + ); +} -it('dynamic imports check for ESM', async () => { - const promises: ProcessPromise[] = []; - for (const [i, key] of Object.keys(pj['exports']).entries()) { - const o1 = path.join('drizzle-orm', key); - if ( - o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/expo-sqlite') - || o1.startsWith('drizzle-orm/bun-sql') || o1.startsWith('drizzle-orm/tursodatabase/wasm') - ) { - continue; +const chunksCJS = chunk(promisesCJS, 20); + +for (const c of chunksCJS) { + it.concurrent('dynamic imports check for CommonJS chunk', async () => { + const results = await Promise.all(c); + + for (const result of results) { + expect(result.exitCode, result.message).toBe(0); } - fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'imp'); - fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'ort "' + o1 + '"\n', {}); - promises.push( - $`node ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), - $`node --import import-in-the-middle/hook.mjs ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), - ); + }); +} + +const promises: ProcessPromise[] = []; +for (const [i, key] of Object.keys(pj['exports']).entries()) { + const o1 = path.join('drizzle-orm', key); + if ( + o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/expo-sqlite') + || o1.startsWith('drizzle-orm/bun-sql') || o1.startsWith('drizzle-orm/tursodatabase/wasm') + || o1.startsWith('drizzle-orm/prisma') + ) { + continue; } + fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'imp'); + fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'ort "' + o1 + '"\n', {}); + promises.push( + $`node ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), + $`node --import import-in-the-middle/hook.mjs ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), + ); +} - const results = await Promise.all(promises); +const chunksESM = chunk(promises, 20); - for (const result of results) { - expect(result.exitCode, result.message).toBe(0); - } -}); +for (const c of chunksESM) { + it.concurrent('dynamic imports check for ESM chunk', async () => { + const results = await Promise.all(c); -afterAll(() => { - fs.rmdirSync(IMPORTS_FOLDER, { recursive: true }); -}); + for (const result of results) { + expect(result.exitCode, result.message).toBe(0); + } + }); +} diff --git a/integration-tests/tests/mssql/instrumentation.ts b/integration-tests/tests/mssql/instrumentation.ts new file mode 100644 index 0000000000..5c62796af0 --- /dev/null +++ b/integration-tests/tests/mssql/instrumentation.ts @@ -0,0 +1,121 @@ +import { randomUUID } from 'crypto'; +import Docker from 'dockerode'; +import { defineRelations } from 'drizzle-orm'; +import type { NodeMsSqlDatabase } from 'drizzle-orm/node-mssql'; +import { drizzle } from 'drizzle-orm/node-mssql'; +import getPort from 'get-port'; +import mssql from 'mssql'; +import { test as base } from 'vitest'; +import * as schema from './mssql.schema'; + +export async function createDockerDB(): Promise<{ close: () => Promise; url: string }> { + const docker = new Docker(); + const port = await getPort({ port: 1433 }); + const image = 'mcr.microsoft.com/azure-sql-edge'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + const mssqlContainer = await docker.createContainer({ + Image: image, + Env: ['ACCEPT_EULA=1', 'MSSQL_SA_PASSWORD=drizzle123PASSWORD!'], + name: `drizzle-integration-tests-${randomUUID()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '1433/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await mssqlContainer.start(); + + const close = async () => { + await mssqlContainer.remove(); + }; + + return { + url: `mssql://SA:drizzle123PASSWORD!@localhost:${port}?encrypt=true&trustServerCertificate=true`, + close, + }; +} + +export function parseMssqlUrl(urlString: string) { + const url = new URL(urlString); + return { + user: url.username, + password: url.password, + server: url.hostname, + port: Number.parseInt(url.port, 10), + database: url.pathname.replace(/^\//, ''), + options: { + encrypt: url.searchParams.get('encrypt') === 'true', + trustServerCertificate: url.searchParams.get('trustServerCertificate') === 'true', + }, + }; +} + +export const createClient = async () => { + const envurl = process.env['MSSQL_CONNECTION_STRING']; + const { url, close } = envurl ? { url: envurl, close: () => Promise.resolve() } : await createDockerDB(); + const params = parseMssqlUrl(url); + + const url2 = `Server=localhost,${params.port};User Id=SA;Password=drizzle123PASSWORD!;TrustServerCertificate=True;`; + + const client = await mssql.connect(params); + const id = `db${randomUUID().split('-')[0]}`; + await client.query('select 1'); + await client.query(`create database ${id}`); + await client.query(`use ${id}`); + const db = drizzle({ client, schema, relations: defineRelations(schema) }); + return { client, close, url, url2, db }; +}; + +export const test = base.extend< + { + connection: { client: mssql.ConnectionPool; url: string; url2: string; db: NodeMsSqlDatabase }; + client: mssql.ConnectionPool; + url: string; + url2: string; + db: NodeMsSqlDatabase; + } +>({ + connection: [ + // oxlint-disable-next-line no-empty-pattern + async ({}, use) => { + const { client, close, url, url2, db } = await createClient(); + try { + await use({ client, url, url2, db }); + } finally { + await close(); + } + }, + { scope: 'file' }, + ], + client: [ + async ({ connection }, use) => { + await use(connection.client); + }, + { scope: 'file' }, + ], + url: [ + async ({ connection }, use) => { + await use(connection.url); + }, + { scope: 'file' }, + ], + url2: [ + async ({ connection }, use) => { + await use(connection.url2); + }, + { scope: 'file' }, + ], + db: [ + async ({ connection }, use) => { + await use(connection.db); + }, + { scope: 'file' }, + ], +}); diff --git a/integration-tests/tests/mssql/mssql.custom.test.ts b/integration-tests/tests/mssql/mssql.custom.test.ts new file mode 100644 index 0000000000..56b8f80b11 --- /dev/null +++ b/integration-tests/tests/mssql/mssql.custom.test.ts @@ -0,0 +1,638 @@ +import 'dotenv/config'; +import { asc, DefaultLogger, eq, Name, sql } from 'drizzle-orm'; +import { + alias, + customType, + date, + datetime2, + int, + mssqlTable, + mssqlTableCreator, + time, + varchar, +} from 'drizzle-orm/mssql-core'; +import { drizzle } from 'drizzle-orm/node-mssql'; +import type { NodeMsSqlDatabase } from 'drizzle-orm/node-mssql'; +import { migrate } from 'drizzle-orm/node-mssql/migrator'; +import type { ConnectionPool } from 'mssql'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { createClient } from './instrumentation'; + +const ENABLE_LOGGING = false; + +let db: NodeMsSqlDatabase; +let client: ConnectionPool; +let close: () => Promise; + +beforeAll(async () => { + const res = await createClient(); + client = res.client; + close = res.close; + db = drizzle({ client, logger: ENABLE_LOGGING ? new DefaultLogger() : undefined }); +}); + +afterAll(async () => { + await close?.(); + await client?.close().catch(console.error); +}); + +const customText = customType<{ data: string }>({ + dataType() { + return 'varchar(50)'; + }, +}); + +const customBoolean = customType<{ data: boolean }>({ + dataType() { + return 'bit'; + }, + fromDriver(value) { + if (typeof value === 'boolean') { + return value; + } + return value === 1; + }, +}); + +const customJson = (name: string) => + customType<{ data: TData; driverData: string }>({ + dataType() { + return 'nvarchar(50)'; + }, + toDriver(value: TData): string { + return JSON.stringify(value); + }, + fromDriver(value: string): TData { + return JSON.parse(value); + }, + })(name); + +const customTimestamp = customType< + { data: Date; driverData: string; config: { fsp: number } } +>({ + dataType(config) { + const precision = config?.fsp === undefined ? '' : ` (${config.fsp})`; + return `datetime2${precision}`; + }, + fromDriver(value: string): Date { + return new Date(value); + }, +}); + +const customBinary = customType<{ data: Buffer; driverData: Buffer; config: { length: number } }>({ + dataType(config) { + return config?.length === undefined + ? `binary` + : `binary(${config.length})`; + }, +}); + +const usersTable = mssqlTable('userstest', { + id: int('id').identity().primaryKey(), + name: customText('name').notNull(), + verified: customBoolean('verified').notNull().default(false), + jsonb: customJson('jsonb'), + createdAt: customTimestamp('created_at', { fsp: 2 }).notNull().default(sql`CURRENT_TIMESTAMP`), +}); + +const datesTable = mssqlTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { precision: 1 }), + datetime: datetime2('datetime', { precision: 2 }), + datetimeAsString: datetime2('datetime_as_string', { precision: 2, mode: 'string' }), +}); + +export const testTable = mssqlTable('test_table', { + id: customBinary('id', { length: 32 }).primaryKey(), + rawId: varchar('raw_id', { length: 64 }), +}); + +const usersMigratorTable = mssqlTable('users12', { + id: int('id').identity().primaryKey(), + name: varchar('name', { length: 50 }).notNull(), + email: varchar('email', { length: 50 }).notNull(), +}); + +beforeEach(async () => { + await db.execute(sql`drop table if exists [userstest]`); + await db.execute(sql`drop table if exists [datestable]`); + await db.execute(sql`drop table if exists [test_table]`); + // await ctx.db.execute(sql`create schema public`); + await db.execute( + sql` + create table [userstest] ( + [id] int identity primary key, + [name] varchar(50) not null, + [verified] bit not null default 0, + [jsonb] nvarchar(50), + [created_at] datetime2 not null default CURRENT_TIMESTAMP + ) + `, + ); + + await db.execute( + sql` + create table [datestable] ( + [date] date, + [date_as_string] date, + [time] time, + [datetime] datetime, + [datetime_as_string] datetime, + ) + `, + ); + + await db.execute( + sql` + create table [test_table] ( + [id] binary(32) primary key, + [raw_id] varchar(64) + ) + `, + ); +}); + +test('select all fields', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); // eslint-disable-line no-instanceof/no-instanceof + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('select sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select typed sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('insert returning sql', async () => { + const result = await db.insert(usersTable).values({ name: 'John' }); + + expect(result.rowsAffected[0]).toEqual(1); +}); + +test('delete returning sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users.rowsAffected[0]).toBe(1); +}); + +test('update returning sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users.rowsAffected[0]).toBe(1); +}); + +test('update with returning all fields', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers.rowsAffected[0]).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); // eslint-disable-line no-instanceof/no-instanceof + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); +}); + +test('update with returning partial', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers.rowsAffected[0]).toEqual(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test('delete with returning all fields', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser.rowsAffected[0]).toBe(1); +}); + +test('delete with returning partial', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser.rowsAffected[0]).toBe(1); +}); + +test('insert + select', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('json insert', async () => { + await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('insert with overridden default values', async () => { + await db.insert(usersTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('insert many', async () => { + await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('insert many with returning', async () => { + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result.rowsAffected[0]).toBe(4); +}); + +test('select with group by as field', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('select with group by as sql', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('select with group by as sql + column', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by as column + sql', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by complex query', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .offset(0).fetch(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('build query', async () => { + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select [id], [name] from [userstest] group by [userstest].[id], [userstest].[name]`, + params: [], + }); +}); + +test('insert sql', async () => { + await db.insert(usersTable).values({ name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('partial join with alias', async () => { + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ name: 'Ivan' }, { name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 2)) + .where(eq(usersTable.id, 1)); + + expect(result).toEqual([{ + user: { id: 1, name: 'Ivan' }, + customer: { id: 2, name: 'Hans' }, + }]); +}); + +test('full join with alias', async () => { + const mysqlTable = mssqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 50 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int primary key, name varchar(50) not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('select from alias', async () => { + const mysqlTable = mssqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTable('users', { + id: int('id').primaryKey(), + name: varchar('name', { length: 50 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int primary key, name varchar(50) not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('insert with spaces', async () => { + await db.insert(usersTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('prepared statement', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('prepared statement reuse', async () => { + const stmt = db.insert(usersTable).values({ + verified: true, + name: sql.placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test('prepared statement with placeholder in .where', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('migrator', async () => { + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists [drizzle].[__drizzle_migrations]`); + + await migrate(db, { migrationsFolder: './drizzle2/mssql' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table cities_migration`); + await db.execute(sql`drop table users_migration`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table [drizzle].[__drizzle_migrations]`); +}); + +test('insert via db.execute + select via db.execute', async () => { + await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result.recordset).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async () => { + const inserted = await db.execute( + db.insert(usersTable).values({ name: 'John' }), + ); + expect(inserted.rowsAffected[0]).toBe(1); +}); + +test('insert + select all possible dates', async () => { + const date = new Date('2022-11-11'); + + await db.insert(datesTable).values({ + date: date, + dateAsString: '2022-11-11', + time: new Date('1970-01-01T12:12:12.000Z'), + datetime: date, + datetimeAsString: '2022-11-11T12:12:12.000Z', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); // eslint-disable-line no-instanceof/no-instanceof + expect(res[0]?.datetime).toBeInstanceOf(Date); // eslint-disable-line no-instanceof/no-instanceof + expect(typeof res[0]?.dateAsString).toEqual('string'); + expect(typeof res[0]?.datetimeAsString).toEqual('string'); + + expect(res).toEqual([{ + date: new Date('2022-11-11'), + dateAsString: '2022-11-11', + time: new Date('1970-01-01T12:12:12.000Z'), + datetime: new Date('2022-11-11'), + datetimeAsString: '2022-11-11T12:12:12.000Z', + }]); +}); + +const tableWithEnums = mssqlTable('enums_test_case', { + id: int('id').primaryKey(), + enum1: varchar('enum1', { enum: ['a', 'b', 'c'], length: 50 }).notNull(), + enum2: varchar('enum2', { enum: ['a', 'b', 'c'], length: 50 }).default('a'), + enum3: varchar('enum3', { enum: ['a', 'b', 'c'], length: 50 }).notNull().default('b'), +}); + +test('Mssql enum test case #1', async () => { + await db.execute(sql`drop table if exists [enums_test_case]`); + + await db.execute(sql` + create table [enums_test_case] ( + [id] int primary key, + [enum1] varchar(50) not null, + [enum2] varchar(50) default 'a', + [enum3] varchar(50) not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums); + + await db.execute(sql`drop table [enums_test_case]`); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); +}); + +test('custom binary', async () => { + const id = uuid().replace(/-/g, ''); + await db.insert(testTable).values({ + id: Buffer.from(id), + rawId: id, + }); + + const res = await db.select().from(testTable); + + expect(res).toEqual([{ + id: Buffer.from(id), + rawId: id, + }]); +}); diff --git a/integration-tests/tests/mysql/mysql-prefixed.test.ts b/integration-tests/tests/mssql/mssql.prefixed.test.ts similarity index 64% rename from integration-tests/tests/mysql/mysql-prefixed.test.ts rename to integration-tests/tests/mssql/mssql.prefixed.test.ts index ac2c6755b6..a50ea03f2d 100644 --- a/integration-tests/tests/mysql/mysql-prefixed.test.ts +++ b/integration-tests/tests/mssql/mssql.prefixed.test.ts @@ -1,94 +1,28 @@ -import retry from 'async-retry'; -import type Docker from 'dockerode'; -import type { Equal } from 'drizzle-orm'; +import 'dotenv/config'; + import { asc, eq, getTableName, gt, inArray, Name, sql, TransactionRollbackError } from 'drizzle-orm'; import { alias, - boolean, date, - datetime, + datetime2, getViewConfig, int, - json, - mysqlEnum, - mysqlTable as mysqlTableRaw, - mysqlTableCreator, - mysqlView, - serial, + mssqlTable, + mssqlTable as mssqlTableRaw, + mssqlTableCreator, + mssqlView, text, time, - timestamp, uniqueIndex, - year, -} from 'drizzle-orm/mysql-core'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; -import { migrate } from 'drizzle-orm/mysql2/migrator'; -import * as mysql from 'mysql2/promise'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { Expect, toLocalDate } from '~/utils'; -import { createDockerDB } from './mysql-common'; - -const ENABLE_LOGGING = false; - -let db: MySql2Database; -let client: mysql.Connection; -let container: Docker.Container | undefined; - -beforeAll(async () => { - let connectionString; - if (process.env['MYSQL_CONNECTION_STRING']) { - connectionString = process.env['MYSQL_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } - client = await retry(async () => { - client = await mysql.createConnection(connectionString); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING }); -}); - -afterAll(async () => { - await client?.end(); - await container?.stop().catch(console.error); -}); - -const tablePrefix = 'drizzle_tests_'; - -const mysqlTable = mysqlTableCreator((name) => `${tablePrefix}${name}`); -const usersTable = mysqlTable('userstest', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), -}); - -const users2Table = mysqlTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').references(() => citiesTable.id), -}); - -const citiesTable = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -beforeEach(async () => { + varchar, +} from 'drizzle-orm/mssql-core'; +import { migrate } from 'drizzle-orm/node-mssql/migrator'; +import { expect } from 'vitest'; +import { type Equal, Expect } from '~/utils'; +import { test } from './instrumentation'; +import { citiesTable, users2Table, usersTable } from './schema'; + +test.beforeEach(async ({ db }) => { await db.execute(sql`drop table if exists ${usersTable}`); await db.execute(sql`drop table if exists ${users2Table}`); await db.execute(sql`drop table if exists ${citiesTable}`); @@ -96,46 +30,46 @@ beforeEach(async () => { await db.execute( sql` create table ${usersTable} ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() + [id] int identity primary key, + [name] varchar(30) not null, + [verified] bit not null default 0, + [jsonb] text, + [created_at] datetime not null default current_timestamp ) `, ); await db.execute( sql` - create table ${users2Table} ( - \`id\` serial primary key, - \`name\` text not null, - \`city_id\` int references ${citiesTable}(\`id\`) + create table ${citiesTable} ( + [id] int primary key, + [name] varchar(30) not null ) `, ); await db.execute( sql` - create table ${citiesTable} ( - \`id\` serial primary key, - \`name\` text not null + create table ${users2Table} ( + [id] int primary key, + [name] varchar(30) not null, + [city_id] int null foreign key references ${citiesTable}([id]) ) `, ); }); -test('select all fields', async () => { +test('select all fields', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); - expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(result[0]!.createdAt).toBeInstanceOf(Date); // eslint-disable-line no-instanceof/no-instanceof // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test('select sql', async () => { +test('select sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, @@ -144,7 +78,7 @@ test('select sql', async () => { expect(users).toEqual([{ name: 'JOHN' }]); }); -test('select typed sql', async () => { +test('select typed sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, @@ -153,14 +87,14 @@ test('select typed sql', async () => { expect(users).toEqual([{ name: 'JOHN' }]); }); -test('select distinct', async () => { - const usersDistinctTable = mysqlTable('users_distinct', { +test('select distinct', async ({ db }) => { + const usersDistinctTable = mssqlTable('users_distinct', { id: int('id').notNull(), - name: text('name').notNull(), + name: varchar('name', { length: 100 }).notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name varchar(100))`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, @@ -178,41 +112,41 @@ test('select distinct', async () => { expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); -test('insert returning sql', async () => { - const [result, _] = await db.insert(usersTable).values({ name: 'John' }); +test('insert returning sql', async ({ db }) => { + const result = await db.insert(usersTable).values({ name: 'John' }); - expect(result.insertId).toBe(1); + expect(result.rowsAffected[0]).toEqual(1); }); -test('delete returning sql', async () => { +test('delete returning sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - expect(users[0].affectedRows).toBe(1); + expect(users.rowsAffected[0]).toBe(1); }); -test('update returning sql', async () => { +test('update returning sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - expect(users[0].changedRows).toBe(1); + expect(users.rowsAffected[0]).toBe(1); }); -test('update with returning all fields', async () => { +test('update with returning all fields', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - expect(updatedUsers[0].changedRows).toBe(1); + expect(updatedUsers.rowsAffected[0]).toBe(1); - expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(users[0]!.createdAt).toBeInstanceOf(Date); // eslint-disable-line no-instanceof/no-instanceof // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); -test('update with returning partial', async () => { +test('update with returning partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); @@ -220,26 +154,26 @@ test('update with returning partial', async () => { eq(usersTable.id, 1), ); - expect(updatedUsers[0].changedRows).toBe(1); + expect(updatedUsers.rowsAffected[0]).toEqual(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); -test('delete with returning all fields', async () => { +test('delete with returning all fields', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - expect(deletedUser[0].affectedRows).toBe(1); + expect(deletedUser.rowsAffected[0]).toBe(1); }); -test('delete with returning partial', async () => { +test('delete with returning partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - expect(deletedUser[0].affectedRows).toBe(1); + expect(deletedUser.rowsAffected[0]).toBe(1); }); -test('insert + select', async () => { +test('insert + select', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); @@ -252,7 +186,7 @@ test('insert + select', async () => { ]); }); -test('json insert', async () => { +test('json insert', async ({ db }) => { await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, @@ -263,14 +197,14 @@ test('json insert', async () => { expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); -test('insert with overridden default values', async () => { +test('insert with overridden default values', async ({ db }) => { await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test('insert many', async () => { +test('insert many', async ({ db }) => { await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -292,7 +226,7 @@ test('insert many', async () => { ]); }); -test('insert many with returning', async () => { +test('insert many with returning', async ({ db }) => { const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -300,28 +234,28 @@ test('insert many with returning', async () => { { name: 'Austin', verified: true }, ]); - expect(result[0].affectedRows).toBe(4); + expect(result.rowsAffected[0]).toBe(4); }); -test('select with group by as field', async () => { +test('select with group by as field', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name); - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); -test('select with group by as sql', async () => { +test('select with group by as sql', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); -test('select with group by as sql + column', async () => { +test('select with group by as sql + column', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -330,7 +264,7 @@ test('select with group by as sql + column', async () => { expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); -test('select with group by as column + sql', async () => { +test('select with group by as column + sql', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -339,93 +273,40 @@ test('select with group by as column + sql', async () => { expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); -test('select with group by complex query', async () => { +test('select with group by complex query', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) - .limit(1); + .offset(0).fetch(1); expect(result).toEqual([{ name: 'Jane' }]); }); -test('build query', async () => { +test('build query', async ({ db }) => { const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ - sql: `select \`id\`, \`name\` from \`${getTableName(usersTable)}\` group by \`${ + sql: `select [id], [name] from [${getTableName(usersTable)}] group by [${getTableName(usersTable)}].[id], [${ getTableName(usersTable) - }\`.\`id\`, \`${getTableName(usersTable)}\`.\`name\``, + }].[name]`, params: [], }); }); -test('build query insert with onDuplicate', async () => { - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }) - .toSQL(); - - expect(query).toEqual({ - sql: `insert into \`${ - getTableName(usersTable) - }\` (\`id\`, \`name\`, \`verified\`, \`jsonb\`, \`created_at\`) values (default, ?, default, ?, default) on duplicate key update \`name\` = ?`, - params: ['John', '["foo","bar"]', 'John1'], - }); -}); - -test('insert with onDuplicate', async () => { - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - expect(res).toEqual([{ id: 1, name: 'John1' }]); -}); - -test('insert conflict', async () => { - await db.insert(usersTable) - .values({ name: 'John' }); - - await expect((async () => { - db.insert(usersTable).values({ id: 1, name: 'John1' }); - })()).resolves.not.toThrowError(); -}); - -test('insert conflict with ignore', async () => { - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .ignore() - .values({ id: 1, name: 'John1' }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - expect(res).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert sql', async () => { +test('insert sql', async ({ db }) => { await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('partial join with alias', async () => { +test('partial join with alias', async ({ db }) => { const customerAlias = alias(usersTable, 'customer'); - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + await db.insert(usersTable).values([{ name: 'Ivan' }, { name: 'Hans' }]); const result = await db .select({ user: { @@ -437,25 +318,25 @@ test('partial join with alias', async () => { name: customerAlias.name, }, }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); + .leftJoin(customerAlias, eq(customerAlias.id, 2)) + .where(eq(usersTable.id, 1)); expect(result).toEqual([{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, + user: { id: 1, name: 'Ivan' }, + customer: { id: 2, name: 'Hans' }, }]); }); -test('full join with alias', async () => { - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); +test('full join with alias', async ({ db }) => { + const mssqlTable = mssqlTableCreator((name) => `prefixed_${name}`); - const users = mysqlTable('users', { - id: serial('id').primaryKey(), + const users = mssqlTable('users', { + id: int('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + await db.execute(sql`create table ${users} (id int primary key, name text not null)`); const customers = alias(users, 'customer'); @@ -479,16 +360,16 @@ test('full join with alias', async () => { await db.execute(sql`drop table ${users}`); }); -test('select from alias', async () => { - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); +test('select from alias', async ({ db }) => { + const mssqlTable = mssqlTableCreator((name) => `prefixed_${name}`); - const users = mysqlTable('users', { - id: serial('id').primaryKey(), + const users = mssqlTable('users', { + id: int('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + await db.execute(sql`create table ${users} (id int primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); @@ -514,14 +395,14 @@ test('select from alias', async () => { await db.execute(sql`drop table ${users}`); }); -test('insert with spaces', async () => { +test('insert with spaces', async ({ db }) => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); -test('prepared statement', async () => { +test('prepared statement', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ id: usersTable.id, @@ -533,7 +414,7 @@ test('prepared statement', async () => { expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('prepared statement reuse', async () => { +test('prepared statement reuse', async ({ db }) => { const stmt = db.insert(usersTable).values({ verified: true, name: sql.placeholder('name'), @@ -563,7 +444,7 @@ test('prepared statement reuse', async () => { ]); }); -test('prepared statement with placeholder in .where', async () => { +test('prepared statement with placeholder in .where', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: usersTable.id, @@ -576,23 +457,21 @@ test('prepared statement with placeholder in .where', async () => { expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('migrator', async () => { - const usersMigratorTable = mysqlTableRaw('users12', { - id: serial('id').primaryKey(), +test('migrator', async ({ db }) => { + const usersMigratorTable = mssqlTableRaw('users12', { + id: int('id').identity().primaryKey(), name: text('name').notNull(), email: text('email').notNull(), - }, (table) => { - return { - name: uniqueIndex('').on(table.name).using('btree'), - }; - }); + }, (table) => [ + uniqueIndex('').on(table.name), + ]); await db.execute(sql.raw(`drop table if exists cities_migration`)); await db.execute(sql.raw(`drop table if exists users_migration`)); await db.execute(sql.raw(`drop table if exists users12`)); - await db.execute(sql.raw(`drop table if exists __drizzle_migrations`)); + await db.execute(sql.raw(`drop table if exists [drizzle].[__drizzle_migrations]`)); - await migrate(db, { migrationsFolder: './drizzle2/mysql' }); + await migrate(db, { migrationsFolder: './drizzle2/mssql' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); @@ -603,43 +482,41 @@ test('migrator', async () => { await db.execute(sql.raw(`drop table cities_migration`)); await db.execute(sql.raw(`drop table users_migration`)); await db.execute(sql.raw(`drop table users12`)); - await db.execute(sql.raw(`drop table __drizzle_migrations`)); + await db.execute(sql.raw(`drop table [drizzle].[__drizzle_migrations]`)); }); -test('insert via db.execute + select via db.execute', async () => { +test('insert via db.execute + select via db.execute', async ({ db }) => { await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); - expect(result[0]).toEqual([{ id: 1, name: 'John' }]); + expect(result.recordset[0]).toEqual({ id: 1, name: 'John' }); }); -test('insert via db.execute w/ query builder', async () => { +test('insert via db.execute w/ query builder', async ({ db }) => { const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); - expect(inserted[0].affectedRows).toBe(1); + expect(inserted.rowsAffected[0]).toBe(1); }); -test('insert + select all possible dates', async () => { - const datesTable = mysqlTable('datestable', { +test('insert + select all possible dates', async ({ db }) => { + const datesTable = mssqlTable('datestable', { date: date('date'), dateAsString: date('date_as_string', { mode: 'string' }), - time: time('time', { fsp: 1 }), - datetime: datetime('datetime', { fsp: 2 }), - datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), - year: year('year'), + time: time('time', { precision: 1 }), + datetime: datetime2('datetime', { precision: 2 }), + datetimeAsString: datetime2('datetime_as_string', { precision: 2, mode: 'string' }), }); await db.execute(sql`drop table if exists ${datesTable}`); await db.execute( sql` create table ${datesTable} ( - \`date\` date, - \`date_as_string\` date, - \`time\` time, - \`datetime\` datetime, - \`datetime_as_string\` datetime, - \`year\` year + [date] date, + [date_as_string] date, + [time] time(1), + [datetime] datetime2(2), + [datetime_as_string] datetime2(2) ) `, ); @@ -649,47 +526,45 @@ test('insert + select all possible dates', async () => { await db.insert(datesTable).values({ date: d, dateAsString: '2022-11-11', - time: '12:12:12', + time: new Date('1970-01-01T12:12:12.000Z'), datetime: d, - year: 22, - datetimeAsString: '2022-11-11 12:12:12', + datetimeAsString: '2022-11-11T12:12:12.000Z', }); const res = await db.select().from(datesTable); - expect(res[0]?.date).toBeInstanceOf(Date); - expect(res[0]?.datetime).toBeInstanceOf(Date); - expect(typeof res[0]?.dateAsString).toBe('string'); - expect(typeof res[0]?.datetimeAsString).toBe('string'); + expect(res[0]?.date).toBeInstanceOf(Date); // eslint-disable-line no-instanceof/no-instanceof + expect(res[0]?.datetime).toBeInstanceOf(Date); // eslint-disable-line no-instanceof/no-instanceof + expect(typeof res[0]?.dateAsString).toEqual('string'); + expect(typeof res[0]?.datetimeAsString).toEqual('string'); expect(res).toEqual([{ - date: toLocalDate(new Date('2022-11-11')), + date: (new Date('2022-11-11')), dateAsString: '2022-11-11', - time: '12:12:12', + time: new Date('1970-01-01T12:12:12.000Z'), datetime: new Date('2022-11-11'), - year: 2022, - datetimeAsString: '2022-11-11 12:12:12', + datetimeAsString: '2022-11-11T12:12:12.000Z', }]); await db.execute(sql`drop table ${datesTable}`); }); -test('Mysql enum test case #1', async () => { - const tableWithEnums = mysqlTable('enums_test_case', { - id: serial('id').primaryKey(), - enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), - enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), - enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), +test('Mysql enum test case #1', async ({ db }) => { + const tableWithEnums = mssqlTable('enums_test_case', { + id: int('id').primaryKey(), + enum1: varchar('enum1', { enum: ['a', 'b', 'c'], length: 50 }).notNull(), + enum2: varchar('enum2', { enum: ['a', 'b', 'c'], length: 50 }).default('a'), + enum3: varchar('enum3', { enum: ['a', 'b', 'c'], length: 50 }).notNull().default('b'), }); await db.execute(sql`drop table if exists ${tableWithEnums}`); await db.execute(sql` create table ${tableWithEnums} ( - \`id\` serial primary key, - \`enum1\` ENUM('a', 'b', 'c') not null, - \`enum2\` ENUM('a', 'b', 'c') default 'a', - \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + [id] int primary key, + [enum1] varchar(50) not null, + [enum2] varchar(50) default 'a', + [enum3] varchar(50) not null default 'b' ) `); @@ -710,11 +585,11 @@ test('Mysql enum test case #1', async () => { ]); }); -test('left join (flat object fields)', async () => { +test('left join (flat object fields)', async ({ db }) => { await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select({ userId: users2Table.id, @@ -730,11 +605,11 @@ test('left join (flat object fields)', async () => { ]); }); -test('left join (grouped fields)', async () => { +test('left join (grouped fields)', async ({ db }) => { await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select({ id: users2Table.id, @@ -764,11 +639,11 @@ test('left join (grouped fields)', async () => { ]); }); -test('left join (all fields)', async () => { +test('left join (all fields)', async ({ db }) => { await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select().from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); @@ -796,16 +671,16 @@ test('left join (all fields)', async () => { ]); }); -test('join subquery', async () => { - const coursesTable = mysqlTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), +test('join subquery', async ({ db }) => { + const coursesTable = mssqlTable('courses', { + id: int('id').identity().primaryKey(), + name: varchar('name', { length: 50 }).notNull(), categoryId: int('category_id').references(() => courseCategoriesTable.id), }); - const courseCategoriesTable = mysqlTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), + const courseCategoriesTable = mssqlTable('course_categories', { + id: int('id').identity().primaryKey(), + name: varchar('name', { length: 50 }).notNull(), }); await db.execute(sql`drop table if exists ${coursesTable}`); @@ -814,8 +689,8 @@ test('join subquery', async () => { await db.execute( sql` create table ${courseCategoriesTable} ( - \`id\` serial primary key, - \`name\` text not null + [id] int identity primary key, + [name] varchar(50) not null ) `, ); @@ -823,9 +698,9 @@ test('join subquery', async () => { await db.execute( sql` create table ${coursesTable} ( - \`id\` serial primary key, - \`name\` text not null, - \`category_id\` int references ${courseCategoriesTable}(\`id\`) + [id] int identity primary key, + [name] varchar(50) not null, + [category_id] int references ${courseCategoriesTable}([id]) ) `, ); @@ -848,7 +723,7 @@ test('join subquery', async () => { .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, + total: sql`count(${courseCategoriesTable.id})`.as('total'), }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) @@ -874,11 +749,11 @@ test('join subquery', async () => { ]); }); -test('with ... select', async () => { - const orders = mysqlTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull(), +test('with ... select', async ({ db }) => { + const orders = mssqlTable('orders', { + id: int('id').identity().primaryKey(), + region: varchar('region', { length: 50 }).notNull(), + product: varchar('product', { length: 50 }).notNull(), amount: int('amount').notNull(), quantity: int('quantity').notNull(), }); @@ -887,11 +762,11 @@ test('with ... select', async () => { await db.execute( sql` create table ${orders} ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null + [id] int identity primary key, + [region] varchar(50) not null, + [product] varchar(50) not null, + [amount] int not null, + [quantity] int not null ) `, ); @@ -940,8 +815,8 @@ test('with ... select', async () => { .select({ region: orders.region, product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, - productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + productUnits: sql`sum(${orders.quantity})`.as('product_units'), + productSales: sql`sum(${orders.amount})`.as('product_sales'), }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) @@ -978,11 +853,11 @@ test('with ... select', async () => { ]); }); -test('select from subquery sql', async () => { - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); +test('select from subquery sql', async ({ db }) => { + await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); const sq = db - .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) + .select({ name: sql`concat(${users2Table.name}, ' modified')`.as('name') }) .from(users2Table) .as('sq'); @@ -991,17 +866,17 @@ test('select from subquery sql', async () => { expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); -test('select a field without joining its table', () => { +test('select a field without joining its table', ({ db }) => { expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); }); -test('select all fields from subquery without alias', () => { +test('select all fields from subquery without alias', ({ db }) => { const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare()).toThrowError(); }); -test('select count()', async () => { +test('select count()', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); @@ -1009,25 +884,14 @@ test('select count()', async () => { expect(res).toEqual([{ count: 2 }]); }); -test('select for ...', () => { - { - const query = db.select().from(users2Table).for('update').toSQL(); - expect(query.sql).toMatch(/ for update$/); - } - { - const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); - expect(query.sql).toMatch(/ for share skip locked$/); - } - { - const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); - expect(query.sql).toMatch(/ for update nowait$/); - } -}); - -test('having', async () => { - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); +test('having', async ({ db }) => { + await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { + id: 3, + name: 'New York', + }]); - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { + id: 3, name: 'Jack', cityId: 2, }]); @@ -1040,8 +904,8 @@ test('having', async () => { }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) + .where(({ name }) => sql`len(${name}) >= 3`) + .groupBy(citiesTable.id, citiesTable.name) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); @@ -1059,30 +923,30 @@ test('having', async () => { ]); }); -test('view', async () => { - const newYorkers1 = mysqlView('new_yorkers') +test('view', async ({ db }) => { + const newYorkers1 = mssqlView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - const newYorkers2 = mysqlView('new_yorkers', { - id: serial('id').primaryKey(), + const newYorkers2 = mssqlView('new_yorkers', { + id: int('id').identity().primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - const newYorkers3 = mysqlView('new_yorkers', { - id: serial('id').primaryKey(), + const newYorkers3 = mssqlView('new_yorkers', { + id: int('id').identity().primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).existing(); await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); + await db.insert(citiesTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, ]); { @@ -1120,7 +984,7 @@ test('view', async () => { await db.execute(sql`drop view ${newYorkers1}`); }); -test('select from raw sql', async () => { +test('select from raw sql', async ({ db }) => { const result = await db.select({ id: sql`id`, name: sql`name`, @@ -1133,7 +997,7 @@ test('select from raw sql', async () => { ]); }); -test('select from raw sql with joins', async () => { +test('select from raw sql with joins', async ({ db }) => { const result = await db .select({ id: sql`users.id`, @@ -1151,7 +1015,7 @@ test('select from raw sql with joins', async () => { ]); }); -test('join on aliased sql from select', async () => { +test('join on aliased sql from select', async ({ db }) => { const result = await db .select({ userId: sql`users.id`.as('userId'), @@ -1170,7 +1034,7 @@ test('join on aliased sql from select', async () => { ]); }); -test('join on aliased sql from with clause', async () => { +test('join on aliased sql from with clause', async ({ db }) => { const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), @@ -1209,10 +1073,10 @@ test('join on aliased sql from with clause', async () => { ]); }); -test('prefixed table', async () => { - const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); +test('prefixed table', async ({ db }) => { + const mssqlTable = mssqlTableCreator((name) => `myprefix_${name}`); - const users = mysqlTable('test_prefixed_table_with_unique_name', { + const users = mssqlTable('test_prefixed_table_with_unique_name', { id: int('id').primaryKey(), name: text('name').notNull(), }); @@ -1232,38 +1096,21 @@ test('prefixed table', async () => { await db.execute(sql`drop table ${users}`); }); -test('orderBy with aliased column', () => { +test('orderBy with aliased column', ({ db }) => { const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - expect(query.sql).toBe(`select something as \`test\` from \`${getTableName(users2Table)}\` order by \`test\``); -}); - -test('timestamp timezone', async () => { - const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); - - await db.insert(usersTable).values({ name: 'With default times' }); - await db.insert(usersTable).values({ - name: 'Without default times', - createdAt: date, - }); - const users = await db.select().from(usersTable); - - // check that the timestamps are set correctly for default times - expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); - - // check that the timestamps are set correctly for non default times - expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); + expect(query.sql).toEqual(`select something as [test] from [${getTableName(users2Table)}] order by [test]`); }); -test('transaction', async () => { - const users = mysqlTable('users_transactions', { - id: serial('id').primaryKey(), +test('transaction', async ({ db }) => { + const users = mssqlTable('users_transactions', { + id: int('id').identity().primaryKey(), balance: int('balance').notNull(), }); - const products = mysqlTable('products_transactions', { - id: serial('id').primaryKey(), + const products = mssqlTable('products_transactions', { + id: int('id').identity().primaryKey(), price: int('price').notNull(), stock: int('stock').notNull(), }); @@ -1271,14 +1118,18 @@ test('transaction', async () => { await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); - await db.execute(sql`create table ${users} (id serial not null primary key, balance int not null)`); + await db.execute(sql`create table ${users} (id int identity not null primary key, balance int not null)`); await db.execute( - sql`create table ${products} (id serial not null primary key, price int not null, stock int not null)`, + sql`create table ${products} (id int identity not null primary key, price int not null, stock int not null)`, ); - const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); + await db.insert(users).values({ balance: 100 }); + const userId = (await db.select().from(users).then((rows) => rows[0]!))!.id; + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); - const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); + + await db.insert(products).values({ price: 10, stock: 10 }); + const productId = (await db.select().from(products).then((rows) => rows[0]!))!.id; const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); await db.transaction(async (tx) => { @@ -1294,16 +1145,16 @@ test('transaction', async () => { expect(result).toEqual([{ id: 1, balance: 90 }]); }); -test('transaction rollback', async () => { - const users = mysqlTable('users_transactions_rollback', { - id: serial('id').primaryKey(), +test('transaction rollback', async ({ db }) => { + const users = mssqlTable('users_transactions_rollback', { + id: int('id').identity().primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, balance int not null)`, + sql`create table ${users} (id int identity not null primary key, balance int not null)`, ); await expect((async () => { @@ -1320,16 +1171,16 @@ test('transaction rollback', async () => { expect(result).toEqual([]); }); -test('nested transaction', async () => { - const users = mysqlTable('users_nested_transactions', { - id: serial('id').primaryKey(), +test('nested transaction', async ({ db }) => { + const users = mssqlTable('users_nested_transactions', { + id: int('id').identity().primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, balance int not null)`, + sql`create table ${users} (id int identity not null primary key, balance int not null)`, ); await db.transaction(async (tx) => { @@ -1347,16 +1198,16 @@ test('nested transaction', async () => { expect(result).toEqual([{ id: 1, balance: 200 }]); }); -test('nested transaction rollback', async () => { - const users = mysqlTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), +test('nested transaction rollback', async ({ db }) => { + const users = mssqlTable('users_nested_transactions_rollback', { + id: int('id').identity().primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, balance int not null)`, + sql`create table ${users} (id int identity not null primary key, balance int not null)`, ); await db.transaction(async (tx) => { @@ -1377,16 +1228,16 @@ test('nested transaction rollback', async () => { expect(result).toEqual([{ id: 1, balance: 100 }]); }); -test('join subquery with join', async () => { - const internalStaff = mysqlTable('internal_staff', { +test('join subquery with join', async ({ db }) => { + const internalStaff = mssqlTable('internal_staff', { userId: int('user_id').notNull(), }); - const customUser = mysqlTable('custom_user', { + const customUser = mssqlTable('custom_user', { id: int('id').notNull(), }); - const ticket = mysqlTable('ticket', { + const ticket = mssqlTable('ticket', { staffId: int('staff_id').notNull(), }); @@ -1426,20 +1277,20 @@ test('join subquery with join', async () => { }]); }); -test('subquery with view', async () => { - const users = mysqlTable('users_subquery_view', { - id: serial('id').primaryKey(), +test('subquery with view', async ({ db }) => { + const users = mssqlTable('users_subquery_view', { + id: int('id').identity().primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + const newYorkers = mssqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + sql`create table ${users} (id int identity not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); @@ -1462,20 +1313,20 @@ test('subquery with view', async () => { ]); }); -test('join view as subquery', async () => { - const users = mysqlTable('users_join_view', { - id: serial('id').primaryKey(), +test('join view as subquery', async ({ db }) => { + const users = mssqlTable('users_join_view', { + id: int('id').identity().primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + const newYorkers = mssqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + sql`create table ${users} (id int identity not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); @@ -1513,18 +1364,19 @@ test('join view as subquery', async () => { await db.execute(sql`drop table ${users}`); }); -test('select iterator', async () => { - const users = mysqlTable('users_iterator', { - id: serial('id').primaryKey(), +test('select iterator', async ({ db }) => { + const users = mssqlTable('users_iterator', { + id: int('id').identity().primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key)`); + await db.execute(sql`create table ${users} (id int identity not null primary key)`); - await db.insert(users).values([{}, {}, {}]); + await db.insert(users).values({}); + await db.insert(users).values({}); + await db.insert(users).values({}); const iter = db.select().from(users).iterator(); - const result: typeof users.$inferSelect[] = []; for await (const row of iter) { @@ -1534,15 +1386,17 @@ test('select iterator', async () => { expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); -test('select iterator w/ prepared statement', async () => { - const users = mysqlTable('users_iterator', { - id: serial('id').primaryKey(), +test('select iterator w/ prepared statement', async ({ db }) => { + const users = mssqlTable('users_iterator', { + id: int('id').identity().primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key)`); + await db.execute(sql`create table ${users} (id int identity not null primary key)`); - await db.insert(users).values([{}, {}, {}]); + await db.insert(users).values({}); + await db.insert(users).values({}); + await db.insert(users).values({}); const prepared = db.select().from(users).prepare(); const iter = prepared.iterator(); @@ -1555,16 +1409,16 @@ test('select iterator w/ prepared statement', async () => { expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); -test('insert undefined', async () => { - const users = mysqlTable('users', { - id: serial('id').primaryKey(), +test('insert undefined', async ({ db }) => { + const users = mssqlTable('users', { + id: int('id').identity().primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, + sql`create table ${users} (id int identity not null primary key, name text)`, ); await expect((async () => { @@ -1574,16 +1428,16 @@ test('insert undefined', async () => { await db.execute(sql`drop table ${users}`); }); -test('update undefined', async () => { - const users = mysqlTable('users', { - id: serial('id').primaryKey(), +test('update undefined', async ({ db }) => { + const users = mssqlTable('users', { + id: int('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, + sql`create table ${users} (id int not null primary key, name text)`, ); await expect((async () => { diff --git a/integration-tests/tests/relational/vercel-v1.test.ts b/integration-tests/tests/mssql/mssql.rels.test.ts similarity index 90% rename from integration-tests/tests/relational/vercel-v1.test.ts rename to integration-tests/tests/mssql/mssql.rels.test.ts index 24f8928afe..78f584432e 100644 --- a/integration-tests/tests/relational/vercel-v1.test.ts +++ b/integration-tests/tests/mssql/mssql.rels.test.ts @@ -1,164 +1,80 @@ import 'dotenv/config'; -import { createClient, type VercelClient } from '@vercel/postgres'; -import Docker from 'dockerode'; -import { desc, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; -import { drizzle, type VercelPgDatabase } from 'drizzle-orm/vercel-postgres'; -import getPort from 'get-port'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './pg.schema.ts'; +import { desc, DrizzleError, eq, gt, gte, or, sql, TransactionRollbackError } from 'drizzle-orm'; +import { expect, expectTypeOf } from 'vitest'; +import { test } from './instrumentation'; +import * as schema from './mssql.schema'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; -const ENABLE_LOGGING = false; - /* Test cases: - querying nested relation without PK with additional fields */ -declare module 'vitest' { - export interface TestContext { - docker: Docker; - vpgContainer: Docker.Container; - vpgDb: VercelPgDatabase; - vpgClient: VercelClient; - } -} - -let globalDocker: Docker; -let pgContainer: Docker.Container; -let db: VercelPgDatabase; -let client: VercelClient; - -async function createDockerDB(): Promise { - const docker = (globalDocker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - pgContainer = await docker.createContainer({ - Image: image, - Env: [ - 'POSTGRES_PASSWORD=postgres', - 'POSTGRES_USER=postgres', - 'POSTGRES_DB=postgres', - ], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -beforeAll(async () => { - const connectionString = process.env['PG_CONNECTION_STRING'] ?? (await createDockerDB()); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = createClient({ connectionString }); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); - throw lastError; - } - db = drizzle(client, { schema, logger: ENABLE_LOGGING, casing: 'snake_case' }); -}); - -afterAll(async () => { - await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); -}); - -beforeEach(async (ctx) => { - ctx.vpgDb = db; - ctx.vpgClient = client; - ctx.docker = globalDocker; - ctx.vpgContainer = pgContainer; +test.beforeEach(async ({ db }) => { + await db.execute(sql`drop table if exists [users_to_groups]`); + await db.execute(sql`drop table if exists [comment_likes]`); + await db.execute(sql`drop table if exists [comments]`); + await db.execute(sql`drop table if exists [posts]`); + await db.execute(sql`drop table if exists [groups]`); + await db.execute(sql`drop table if exists [users]`); - await ctx.vpgDb.execute(sql`drop schema public cascade`); - await ctx.vpgDb.execute(sql`create schema public`); - await ctx.vpgDb.execute( + await db.execute( sql` - CREATE TABLE "users" ( - "id" serial PRIMARY KEY NOT NULL, - "name" text NOT NULL, - "verified" boolean DEFAULT false NOT NULL, - "invited_by" int REFERENCES "users"("id") + CREATE TABLE [users] ( + [id] int PRIMARY KEY NOT NULL, + [name] varchar(100) NOT NULL, + [verified] bit DEFAULT 0 NOT NULL, + [invited_by] int null foreign key REFERENCES [users]([id]) ); `, ); - await ctx.vpgDb.execute( + await db.execute( sql` - CREATE TABLE IF NOT EXISTS "groups" ( - "id" serial PRIMARY KEY NOT NULL, - "name" text NOT NULL, - "description" text + CREATE TABLE [groups] ( + [id] int PRIMARY KEY NOT NULL, + [name] varchar(100) NOT NULL, + [description] varchar(100) ); `, ); - await ctx.vpgDb.execute( + await db.execute( sql` - CREATE TABLE IF NOT EXISTS "users_to_groups" ( - "id" serial PRIMARY KEY NOT NULL, - "user_id" int REFERENCES "users"("id"), - "group_id" int REFERENCES "groups"("id") + CREATE TABLE [users_to_groups] ( + [id] int identity PRIMARY KEY NOT NULL, + [user_id] int foreign key REFERENCES [users]([id]), + [group_id] int foreign key REFERENCES [groups]([id]) ); `, ); - await ctx.vpgDb.execute( + await db.execute( sql` - CREATE TABLE IF NOT EXISTS "posts" ( - "id" serial PRIMARY KEY NOT NULL, - "content" text NOT NULL, - "owner_id" int REFERENCES "users"("id"), - "created_at" timestamp with time zone DEFAULT now() NOT NULL + CREATE TABLE [posts] ( + [id] int identity PRIMARY KEY NOT NULL, + [content] varchar(100) NOT NULL, + [owner_id] int null foreign key REFERENCES [users]([id]), + [created_at] datetime DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ); - await ctx.vpgDb.execute( + await db.execute( sql` - CREATE TABLE IF NOT EXISTS "comments" ( - "id" serial PRIMARY KEY NOT NULL, - "content" text NOT NULL, - "creator" int REFERENCES "users"("id"), - "post_id" int REFERENCES "posts"("id"), - "created_at" timestamp with time zone DEFAULT now() NOT NULL + CREATE TABLE [comments] ( + [id] int identity PRIMARY KEY NOT NULL, + [content] varchar(100) NOT NULL, + [creator] int null foreign key REFERENCES [users]([id]), + [post_id] int null foreign key REFERENCES [posts]([id]), + [created_at] datetime DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ); - await ctx.vpgDb.execute( + await db.execute( sql` - CREATE TABLE IF NOT EXISTS "comment_likes" ( - "id" serial PRIMARY KEY NOT NULL, - "creator" int REFERENCES "users"("id"), - "comment_id" int REFERENCES "comments"("id"), - "created_at" timestamp with time zone DEFAULT now() NOT NULL + CREATE TABLE [comment_likes] ( + [id] int identity PRIMARY KEY NOT NULL, + [creator] int null foreign key REFERENCES [users]([id]), + [comment_id] int null foreign key REFERENCES [comments]([id]), + [created_at] datetime DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ); @@ -168,9 +84,7 @@ beforeEach(async (ctx) => { [Find Many] One relation users+posts */ -test('[Find Many] Get users with posts', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -232,9 +146,7 @@ test('[Find Many] Get users with posts', async (t) => { }); }); -test('[Find Many] Get users with posts + limit posts', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + limit posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -305,9 +217,7 @@ test('[Find Many] Get users with posts + limit posts', async (t) => { }); }); -test('[Find Many] Get users with posts + limit posts and users', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -370,9 +280,7 @@ test('[Find Many] Get users with posts + limit posts and users', async (t) => { }); }); -test('[Find Many] Get users with posts + custom fields', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -463,9 +371,7 @@ test('[Find Many] Get users with posts + custom fields', async (t) => { }); }); -test('[Find Many] Get users with posts + custom fields + limits', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -521,9 +427,7 @@ test('[Find Many] Get users with posts + custom fields + limits', async (t) => { }); }); -test('[Find Many] Get users with posts + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -605,9 +509,7 @@ test('[Find Many] Get users with posts + orderBy', async (t) => { }); }); -test('[Find Many] Get users with posts + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -655,9 +557,7 @@ test('[Find Many] Get users with posts + where', async (t) => { }); }); -test('[Find Many] Get users with posts + where + partial', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -707,9 +607,7 @@ test('[Find Many] Get users with posts + where + partial', async (t) => { }); }); -test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -759,9 +657,7 @@ test('[Find Many] Get users with posts + where + partial. Did not select posts i }); }); -test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + where + partial(true + false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -808,9 +704,7 @@ test('[Find Many] Get users with posts + where + partial(true + false)', async ( }); }); -test('[Find Many] Get users with posts + where + partial(false)', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -861,9 +755,7 @@ test('[Find Many] Get users with posts + where + partial(false)', async (t) => { }); }); -test('[Find Many] Get users with posts in transaction', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts in transaction', async ({ db }) => { let usersWithPosts: { id: number; name: string; @@ -926,9 +818,7 @@ test('[Find Many] Get users with posts in transaction', async (t) => { }); }); -test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts in rollbacked transaction', async ({ db }) => { let usersWithPosts: { id: number; name: string; @@ -985,9 +875,7 @@ test('[Find Many] Get users with posts in rollbacked transaction', async (t) => }); // select only custom -test('[Find Many] Get only custom fields', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get only custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1031,29 +919,40 @@ test('[Find Many] Get only custom fields', async (t) => { expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); - expect(usersWithPosts).toContainEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1' }, { - lowerName: 'post1.2', - }, { lowerName: 'post1.3' }], + expect(usersWithPosts[0]?.lowerName).toEqual('dan'); + expect(usersWithPosts[1]?.lowerName).toEqual('andrew'); + expect(usersWithPosts[2]?.lowerName).toEqual('alex'); + + expect(usersWithPosts[0]?.posts).toContainEqual({ + lowerName: 'post1', }); - expect(usersWithPosts).toContainEqual({ - lowerName: 'andrew', - posts: [{ lowerName: 'post2' }, { - lowerName: 'post2.1', - }], + + expect(usersWithPosts[0]?.posts).toContainEqual({ + lowerName: 'post1.2', }); - expect(usersWithPosts).toContainEqual({ - lowerName: 'alex', - posts: [{ lowerName: 'post3' }, { - lowerName: 'post3.1', - }], + + expect(usersWithPosts[0]?.posts).toContainEqual({ + lowerName: 'post1.3', }); -}); -test('[Find Many] Get only custom fields + where', async (t) => { - const { vpgDb: db } = t; + expect(usersWithPosts[1]?.posts).toContainEqual({ + lowerName: 'post2', + }); + + expect(usersWithPosts[1]?.posts).toContainEqual({ + lowerName: 'post2.1', + }); + + expect(usersWithPosts[2]?.posts).toContainEqual({ + lowerName: 'post3', + }); + + expect(usersWithPosts[2]?.posts).toContainEqual({ + lowerName: 'post3.1', + }); +}); +test('[Find Many] Get only custom fields + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1103,9 +1002,7 @@ test('[Find Many] Get only custom fields + where', async (t) => { }); }); -test('[Find Many] Get only custom fields + where + limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get only custom fields + where + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1156,9 +1053,7 @@ test('[Find Many] Get only custom fields + where + limit', async (t) => { }); }); -test('[Find Many] Get only custom fields + where + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get only custom fields + where + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1210,9 +1105,7 @@ test('[Find Many] Get only custom fields + where + orderBy', async (t) => { }); // select only custom find one -test('[Find One] Get only custom fields', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get only custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1255,17 +1148,22 @@ test('[Find One] Get only custom fields', async (t) => { expect(usersWithPosts?.posts.length).toEqual(3); - expect(usersWithPosts).toEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1' }, { - lowerName: 'post1.2', - }, { lowerName: 'post1.3' }], + expect(usersWithPosts?.lowerName).toEqual('dan'); + + expect(usersWithPosts?.posts).toContainEqual({ + lowerName: 'post1', + }); + + expect(usersWithPosts?.posts).toContainEqual({ + lowerName: 'post1.2', }); -}); -test('[Find One] Get only custom fields + where', async (t) => { - const { vpgDb: db } = t; + expect(usersWithPosts?.posts).toContainEqual({ + lowerName: 'post1.3', + }); +}); +test('[Find One] Get only custom fields + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1316,9 +1214,7 @@ test('[Find One] Get only custom fields + where', async (t) => { }); }); -test('[Find One] Get only custom fields + where + limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get only custom fields + where + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1370,9 +1266,7 @@ test('[Find One] Get only custom fields + where + limit', async (t) => { }); }); -test('[Find One] Get only custom fields + where + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get only custom fields + where + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1425,51 +1319,38 @@ test('[Find One] Get only custom fields + where + orderBy', async (t) => { }); // columns {} -test('[Find Many] Get select {}', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); - const users = await db._query.usersTable.findMany({ - columns: {}, - }); - - expectTypeOf(users).toEqualTypeOf<{}[]>(); - - expect(users.length).toBe(3); - - expect(users[0]).toEqual({}); - expect(users[1]).toEqual({}); - expect(users[2]).toEqual({}); + await expect( + async () => + await db._query.usersTable.findMany({ + columns: {}, + }), + ).rejects.toThrow(DrizzleError); }); // columns {} -test('[Find One] Get select {}', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); - const users = await db._query.usersTable.findFirst({ - columns: {}, - }); - - expectTypeOf(users).toEqualTypeOf<{} | undefined>(); - - expect(users).toEqual({}); + await expect(async () => + await db._query.usersTable.findFirst({ + columns: {}, + }) + ).rejects.toThrow(DrizzleError); }); // deep select {} -test('[Find Many] Get deep select {}', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get deep select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1482,28 +1363,20 @@ test('[Find Many] Get deep select {}', async (t) => { { ownerId: 3, content: 'Post3' }, ]); - const users = await db._query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, + await expect(async () => + await db._query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + }, }, - }, - }); - - expectTypeOf(users).toEqualTypeOf<{ posts: {}[] }[]>(); - - expect(users.length).toBe(3); - - expect(users[0]).toEqual({ posts: [{}] }); - expect(users[1]).toEqual({ posts: [{}] }); - expect(users[2]).toEqual({ posts: [{}] }); + }) + ).rejects.toThrow(DrizzleError); }); // deep select {} -test('[Find One] Get deep select {}', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get deep select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1516,26 +1389,22 @@ test('[Find One] Get deep select {}', async (t) => { { ownerId: 3, content: 'Post3' }, ]); - const users = await db._query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, + await expect(async () => + await db._query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + }, }, - }, - }); - - expectTypeOf(users).toEqualTypeOf<{ posts: {}[] } | undefined>(); - - expect(users).toEqual({ posts: [{}] }); + }) + ).rejects.toThrow(DrizzleError); }); /* Prepared statements for users+posts */ -test('[Find Many] Get users with posts + prepared limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + prepared limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1555,10 +1424,10 @@ test('[Find Many] Get users with posts + prepared limit', async (t) => { const prepared = db._query.usersTable.findMany({ with: { posts: { - limit: placeholder('limit'), + limit: sql.placeholder('limit'), }, }, - }).prepare('query1'); + }).prepare(); const usersWithPosts = await prepared.execute({ limit: 1 }); @@ -1603,9 +1472,7 @@ test('[Find Many] Get users with posts + prepared limit', async (t) => { }); }); -test('[Find Many] Get users with posts + prepared limit + offset', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + prepared limit + offset', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1623,14 +1490,14 @@ test('[Find Many] Get users with posts + prepared limit + offset', async (t) => ]); const prepared = db._query.usersTable.findMany({ - limit: placeholder('uLimit'), - offset: placeholder('uOffset'), + limit: sql.placeholder('uLimit'), + offset: sql.placeholder('uOffset'), with: { posts: { - limit: placeholder('pLimit'), + limit: sql.placeholder('pLimit'), }, }, - }).prepare('query2'); + }).prepare(); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); @@ -1667,9 +1534,7 @@ test('[Find Many] Get users with posts + prepared limit + offset', async (t) => }); }); -test('[Find Many] Get users with posts + prepared where', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + prepared where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1684,13 +1549,13 @@ test('[Find Many] Get users with posts + prepared where', async (t) => { ]); const prepared = db._query.usersTable.findMany({ - where: (({ id }, { eq }) => eq(id, placeholder('id'))), + where: (({ id }, { eq }) => eq(id, sql.placeholder('id'))), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, - }).prepare('query3'); + }).prepare(); const usersWithPosts = await prepared.execute({ id: 1 }); @@ -1719,9 +1584,7 @@ test('[Find Many] Get users with posts + prepared where', async (t) => { }); }); -test('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with posts + prepared + limit + offset + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1739,16 +1602,16 @@ test('[Find Many] Get users with posts + prepared + limit + offset + where', asy ]); const prepared = db._query.usersTable.findMany({ - limit: placeholder('uLimit'), - offset: placeholder('uOffset'), - where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), + limit: sql.placeholder('uLimit'), + offset: sql.placeholder('uOffset'), + where: (({ id }, { eq, or }) => or(eq(id, sql.placeholder('id')), eq(id, 3))), with: { posts: { - where: (({ id }, { eq }) => eq(id, placeholder('pid'))), - limit: placeholder('pLimit'), + where: (({ id }, { eq }) => eq(id, sql.placeholder('pid'))), + limit: sql.placeholder('pLimit'), }, }, - }).prepare('query4'); + }).prepare(); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); @@ -1781,9 +1644,7 @@ test('[Find Many] Get users with posts + prepared + limit + offset + where', asy [Find One] One relation users+posts */ -test('[Find One] Get users with posts', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1828,9 +1689,7 @@ test('[Find One] Get users with posts', async (t) => { }); }); -test('[Find One] Get users with posts + limit posts', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + limit posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1881,9 +1740,7 @@ test('[Find One] Get users with posts + limit posts', async (t) => { }); }); -test('[Find One] Get users with posts no results found', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts no results found', async ({ db }) => { const usersWithPosts = await db._query.usersTable.findFirst({ with: { posts: { @@ -1910,9 +1767,7 @@ test('[Find One] Get users with posts no results found', async (t) => { expect(usersWithPosts).toBeUndefined(); }); -test('[Find One] Get users with posts + limit posts and users', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1963,9 +1818,7 @@ test('[Find One] Get users with posts + limit posts and users', async (t) => { }); }); -test('[Find One] Get users with posts + custom fields', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2009,24 +1862,35 @@ test('[Find One] Get users with posts + custom fields', async (t) => { expect(usersWithPosts!.posts.length).toEqual(3); - expect(usersWithPosts).toEqual({ + expect(usersWithPosts?.lowerName).toEqual('dan'); + expect(usersWithPosts?.id).toEqual(1); + expect(usersWithPosts?.verified).toEqual(false); + expect(usersWithPosts?.invitedBy).toEqual(null); + expect(usersWithPosts?.name).toEqual('Dan'); + + expect(usersWithPosts?.posts).toContainEqual({ id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - lowerName: 'dan', - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }, { - id: 2, - ownerId: 1, - content: 'Post1.2', - createdAt: usersWithPosts?.posts[1]?.createdAt, - }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt }], + ownerId: 1, + content: 'Post1', + createdAt: usersWithPosts?.posts[0]?.createdAt, }); -}); -test('[Find One] Get users with posts + custom fields + limits', async (t) => { - const { vpgDb: db } = t; + expect(usersWithPosts?.posts).toContainEqual({ + id: 2, + ownerId: 1, + content: 'Post1.2', + createdAt: usersWithPosts?.posts[1]?.createdAt, + }); + expect(usersWithPosts?.posts).toContainEqual({ + id: 3, + ownerId: 1, + content: 'Post1.3', + createdAt: usersWithPosts?.posts[2]?.createdAt, + }); +}); + +test('[Find One] Get users with posts + custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2082,9 +1946,7 @@ test('[Find One] Get users with posts + custom fields + limits', async (t) => { }); }); -test('[Find One] Get users with posts + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2141,9 +2003,7 @@ test('[Find One] Get users with posts + orderBy', async (t) => { }); }); -test('[Find One] Get users with posts + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2192,9 +2052,7 @@ test('[Find One] Get users with posts + where', async (t) => { }); }); -test('[Find One] Get users with posts + where + partial', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2245,9 +2103,7 @@ test('[Find One] Get users with posts + where + partial', async (t) => { }); }); -test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2298,9 +2154,7 @@ test('[Find One] Get users with posts + where + partial. Did not select posts id }); }); -test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + where + partial(true + false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2348,9 +2202,7 @@ test('[Find One] Get users with posts + where + partial(true + false)', async (t }); }); -test('[Find One] Get users with posts + where + partial(false)', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with posts + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2406,9 +2258,7 @@ test('[Find One] Get users with posts + where + partial(false)', async (t) => { One relation users+users. Self referencing */ -test('Get user with invitee', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2475,9 +2325,7 @@ test('Get user with invitee', async (t) => { }); }); -test('Get user + limit with invitee', async (t) => { - const { vpgDb: db } = t; - +test('Get user + limit with invitee', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, @@ -2529,9 +2377,7 @@ test('Get user + limit with invitee', async (t) => { }); }); -test('Get user with invitee and custom fields', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2607,9 +2453,7 @@ test('Get user with invitee and custom fields', async (t) => { }); }); -test('Get user with invitee and custom fields + limits', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2677,9 +2521,7 @@ test('Get user with invitee and custom fields + limits', async (t) => { }); }); -test('Get user with invitee + order by', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee + order by', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2745,9 +2587,7 @@ test('Get user with invitee + order by', async (t) => { }); }); -test('Get user with invitee + where', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2797,9 +2637,7 @@ test('Get user with invitee + where', async (t) => { }); }); -test('Get user with invitee + where + partial', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2850,9 +2688,7 @@ test('Get user with invitee + where + partial', async (t) => { }); }); -test('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee + where + partial. Did not select users id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2899,9 +2735,7 @@ test('Get user with invitee + where + partial. Did not select users id, but use }); }); -test('Get user with invitee + where + partial(true+false)', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee + where + partial(true+false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2954,9 +2788,7 @@ test('Get user with invitee + where + partial(true+false)', async (t) => { }); }); -test('Get user with invitee + where + partial(false)', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3013,9 +2845,7 @@ test('Get user with invitee + where + partial(false)', async (t) => { Two first-level relations users+users and users+posts */ -test('Get user with invitee and posts', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3099,9 +2929,7 @@ test('Get user with invitee and posts', async (t) => { }); }); -test('Get user with invitee and posts + limit posts and users', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3182,9 +3010,7 @@ test('Get user with invitee and posts + limit posts and users', async (t) => { }); }); -test('Get user with invitee and posts + limits + custom fields in each', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts + limits + custom fields in each', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3274,9 +3100,7 @@ test('Get user with invitee and posts + limits + custom fields in each', async ( }); }); -test('Get user with invitee and posts + custom fields in each', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts + custom fields in each', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3325,6 +3149,10 @@ test('Get user with invitee and posts + custom fields in each', async (t) => { response.sort((a, b) => (a.id > b.id) ? 1 : -1); + response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + response[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); @@ -3393,9 +3221,7 @@ test('Get user with invitee and posts + custom fields in each', async (t) => { }); }); -test('Get user with invitee and posts + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3498,9 +3324,7 @@ test('Get user with invitee and posts + orderBy', async (t) => { }); }); -test('Get user with invitee and posts + where', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3568,9 +3392,7 @@ test('Get user with invitee and posts + where', async (t) => { }); }); -test('Get user with invitee and posts + limit posts and users + where', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts + limit posts and users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3630,9 +3452,7 @@ test('Get user with invitee and posts + limit posts and users + where', async (t }); }); -test('Get user with invitee and posts + orderBy + where + custom', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts + orderBy + where + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3717,9 +3537,7 @@ test('Get user with invitee and posts + orderBy + where + custom', async (t) => }); }); -test('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { - const { vpgDb: db } = t; - +test('Get user with invitee and posts + orderBy + where + partial + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3815,9 +3633,7 @@ test('Get user with invitee and posts + orderBy + where + partial + custom', asy One two-level relation users+posts+comments */ -test('Get user with posts and posts with comments', async (t) => { - const { vpgDb: db } = t; - +test('Get user with posts and posts with comments', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3825,9 +3641,9 @@ test('Get user with posts and posts with comments', async (t) => { ]); await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1' }, - { id: 2, ownerId: 2, content: 'Post2' }, - { id: 3, ownerId: 3, content: 'Post3' }, + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ @@ -3972,9 +3788,7 @@ test('Get user with posts and posts with comments', async (t) => { One three-level relation users+posts+comments+comment_owner */ -test('Get user with posts and posts with comments and comments with owner', async (t) => { - const { vpgDb: db } = t; - +test('Get user with posts and posts with comments and comments with owner', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3982,9 +3796,9 @@ test('Get user with posts and posts with comments and comments with owner', asyn ]); await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1' }, - { id: 2, ownerId: 2, content: 'Post2' }, - { id: 3, ownerId: 3, content: 'Post3' }, + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ @@ -4116,9 +3930,7 @@ test('Get user with posts and posts with comments and comments with owner', asyn Users+users_to_groups+groups */ -test('[Find Many] Get users with groups', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4220,9 +4032,7 @@ test('[Find Many] Get users with groups', async (t) => { }); }); -test('[Find Many] Get groups with users', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get groups with users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4325,9 +4135,7 @@ test('[Find Many] Get groups with users', async (t) => { }); }); -test('[Find Many] Get users with groups + limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with groups + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4410,9 +4218,7 @@ test('[Find Many] Get users with groups + limit', async (t) => { }); }); -test('[Find Many] Get groups with users + limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get groups with users + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4495,9 +4301,7 @@ test('[Find Many] Get groups with users + limit', async (t) => { }); }); -test('[Find Many] Get users with groups + limit + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with groups + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4566,9 +4370,7 @@ test('[Find Many] Get users with groups + limit + where', async (t) => { }); }); -test('[Find Many] Get groups with users + limit + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get groups with users + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4638,9 +4440,7 @@ test('[Find Many] Get groups with users + limit + where', async (t) => { }); }); -test('[Find Many] Get users with groups + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4717,9 +4517,7 @@ test('[Find Many] Get users with groups + where', async (t) => { }); }); -test('[Find Many] Get groups with users + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get groups with users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4795,9 +4593,7 @@ test('[Find Many] Get groups with users + where', async (t) => { }); }); -test('[Find Many] Get users with groups + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with groups + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4899,9 +4695,7 @@ test('[Find Many] Get users with groups + orderBy', async (t) => { }); }); -test('[Find Many] Get groups with users + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get groups with users + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5004,9 +4798,7 @@ test('[Find Many] Get groups with users + orderBy', async (t) => { }); }); -test('[Find Many] Get users with groups + orderBy + limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find Many] Get users with groups + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5095,9 +4887,7 @@ test('[Find Many] Get users with groups + orderBy + limit', async (t) => { Users+users_to_groups+groups */ -test('[Find One] Get users with groups', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5161,9 +4951,7 @@ test('[Find One] Get users with groups', async (t) => { }); }); -test('[Find One] Get groups with users', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get groups with users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5227,9 +5015,7 @@ test('[Find One] Get groups with users', async (t) => { }); }); -test('[Find One] Get users with groups + limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with groups + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5294,9 +5080,7 @@ test('[Find One] Get users with groups + limit', async (t) => { }); }); -test('[Find One] Get groups with users + limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get groups with users + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5361,9 +5145,7 @@ test('[Find One] Get groups with users + limit', async (t) => { }); }); -test('[Find One] Get users with groups + limit + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with groups + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5429,9 +5211,7 @@ test('[Find One] Get users with groups + limit + where', async (t) => { }); }); -test('[Find One] Get groups with users + limit + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get groups with users + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5498,9 +5278,7 @@ test('[Find One] Get groups with users + limit + where', async (t) => { }); }); -test('[Find One] Get users with groups + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5560,9 +5338,7 @@ test('[Find One] Get users with groups + where', async (t) => { }); }); -test('[Find One] Get groups with users + where', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get groups with users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5628,9 +5404,7 @@ test('[Find One] Get groups with users + where', async (t) => { }); }); -test('[Find One] Get users with groups + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with groups + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5702,9 +5476,7 @@ test('[Find One] Get users with groups + orderBy', async (t) => { }); }); -test('[Find One] Get groups with users + orderBy', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get groups with users + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5770,9 +5542,7 @@ test('[Find One] Get groups with users + orderBy', async (t) => { }); }); -test('[Find One] Get users with groups + orderBy + limit', async (t) => { - const { vpgDb: db } = t; - +test('[Find One] Get users with groups + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5839,9 +5609,7 @@ test('[Find One] Get users with groups + orderBy + limit', async (t) => { }); }); -test('Get groups with users + orderBy + limit', async (t) => { - const { vpgDb: db } = t; - +test('Get groups with users + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5926,9 +5694,7 @@ test('Get groups with users + orderBy + limit', async (t) => { }); }); -test('Get users with groups + custom', async (t) => { - const { vpgDb: db } = t; - +test('Get users with groups + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6048,9 +5814,7 @@ test('Get users with groups + custom', async (t) => { }); }); -test('Get groups with users + custom', async (t) => { - const { vpgDb: db } = t; - +test('Get groups with users + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6171,7 +5935,7 @@ test('Get groups with users + custom', async (t) => { }); }); -test('.toSQL()', () => { +test('.toSQL()', ({ db }) => { const query = db._query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); diff --git a/integration-tests/tests/mssql/mssql.schema.ts b/integration-tests/tests/mssql/mssql.schema.ts new file mode 100644 index 0000000000..e1e7fa50d7 --- /dev/null +++ b/integration-tests/tests/mssql/mssql.schema.ts @@ -0,0 +1,103 @@ +import { type AnyMsSqlColumn, bit, datetime, int, mssqlTable, primaryKey, varchar } from 'drizzle-orm/mssql-core'; + +import { sql } from 'drizzle-orm'; +import { relations } from 'drizzle-orm/_relations'; + +export const usersTable = mssqlTable('users', { + id: int('id').primaryKey().notNull(), + name: varchar('name', { length: 100 }).notNull(), + verified: bit('verified').notNull().default(false), + invitedBy: int('invited_by').references((): AnyMsSqlColumn => usersTable.id), +}); +export const usersConfig = relations(usersTable, ({ one, many }) => ({ + invitee: one(usersTable, { + fields: [usersTable.invitedBy], + references: [usersTable.id], + }), + usersToGroups: many(usersToGroupsTable), + posts: many(postsTable), +})); + +export const groupsTable = mssqlTable('groups', { + id: int('id').primaryKey().notNull(), + name: varchar('name', { length: 100 }).notNull(), + description: varchar('description', { length: 100 }), +}); +export const groupsConfig = relations(groupsTable, ({ many }) => ({ + usersToGroups: many(usersToGroupsTable), +})); + +export const usersToGroupsTable = mssqlTable( + 'users_to_groups', + { + id: int('id').primaryKey().identity().notNull(), + userId: int('user_id').notNull().references(() => usersTable.id), + groupId: int('group_id').notNull().references(() => groupsTable.id), + }, + (t) => [ + primaryKey({ name: 'pk_1', columns: [t.userId, t.groupId] }), + ], +); +export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ + group: one(groupsTable, { + fields: [usersToGroupsTable.groupId], + references: [groupsTable.id], + }), + user: one(usersTable, { + fields: [usersToGroupsTable.userId], + references: [usersTable.id], + }), +})); + +export const postsTable = mssqlTable('posts', { + id: int('id').primaryKey().identity().notNull(), + content: varchar('content', { length: 100 }).notNull(), + ownerId: int('owner_id').references(() => usersTable.id), + createdAt: datetime('created_at') + .notNull().default(sql`current_timestamp`), +}); +export const postsConfig = relations(postsTable, ({ one, many }) => ({ + author: one(usersTable, { + fields: [postsTable.ownerId], + references: [usersTable.id], + }), + comments: many(commentsTable), +})); + +export const commentsTable = mssqlTable('comments', { + id: int('id').primaryKey().identity().notNull(), + content: varchar('content', { length: 100 }).notNull(), + creator: int('creator').references(() => usersTable.id), + postId: int('post_id').references(() => postsTable.id), + createdAt: datetime('created_at') + .notNull().default(sql`current_timestamp`), +}); +export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ + post: one(postsTable, { + fields: [commentsTable.postId], + references: [postsTable.id], + }), + author: one(usersTable, { + fields: [commentsTable.creator], + references: [usersTable.id], + }), + likes: many(commentLikesTable), +})); + +export const commentLikesTable = mssqlTable('comment_likes', { + id: int('id').primaryKey().identity().notNull(), + creator: int('creator').references(() => usersTable.id), + commentId: int('comment_id').references(() => commentsTable.id), + createdAt: datetime('created_at') + .notNull().default(sql`current_timestamp`), +}); +export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ + comment: one(commentsTable, { + fields: [commentLikesTable.commentId], + references: [commentsTable.id], + }), + author: one(usersTable, { + fields: [commentLikesTable.creator], + references: [usersTable.id], + }), +})); diff --git a/integration-tests/tests/mssql/mssql.test.ts b/integration-tests/tests/mssql/mssql.test.ts new file mode 100644 index 0000000000..a70f7e91d6 --- /dev/null +++ b/integration-tests/tests/mssql/mssql.test.ts @@ -0,0 +1,3945 @@ +import { + asc, + avg, + avgDistinct, + count, + countDistinct, + desc, + eq, + getTableColumns, + gt, + gte, + inArray, + max, + min, + Name, + sql, + sum, + sumDistinct, + TransactionRollbackError, +} from 'drizzle-orm'; +import { + alias, + except, + foreignKey, + getTableConfig, + getViewConfig, + int, + intersect, + mssqlTable, + mssqlTableCreator, + mssqlView, + primaryKey, + text, + union, + unionAll, + unique, + varchar, +} from 'drizzle-orm/mssql-core'; +import type { NodeMsSqlDatabase } from 'drizzle-orm/node-mssql'; +import { migrate } from 'drizzle-orm/node-mssql/migrator'; +import { expect } from 'vitest'; +import { type Equal, Expect } from '~/utils'; +import { test } from './instrumentation'; +import { + aggregateTable, + allPossibleColumns, + citiesSchemaTable, + citiesTable, + courseCategoriesTable, + coursesTable, + datesTable, + departments, + employees, + mySchema, + nvarcharWithJsonTable, + orders, + tableWithEnums, + users2SchemaTable, + users2Table, + usersMigratorTable, + usersOnUpdate, + usersSchemaTable, + usersTable, +} from './schema'; + +// const ENABLE_LOGGING = true; + +test.beforeEach(async ({ client }) => { + await client.query(`drop table if exists [userstest]`); + await client.query(`drop table if exists [nvarchar_with_json]`); + await client.query(`drop table if exists [users2]`); + await client.query(`drop table if exists [cities]`); + await client.query(`drop table if exists [mySchema].[userstest]`); + await client.query(`drop table if exists [mySchema].[users2]`); + await client.query(`drop table if exists [mySchema].[cities]`); + await client.query(`drop schema if exists [mySchema]`); + await client.query(`create schema [mySchema]`); + + await client.query(` + create table [userstest] ( + [id] int identity primary key, + [name] varchar(30) not null, + [verified] bit not null default 0, + [jsonb] text, + [created_at] datetime not null default current_timestamp + ) +`); + + await client.query(` + create table [nvarchar_with_json] ( + [id] int identity primary key, + [json] nvarchar(max) + );`); + + await client.query(` + create table [cities] ( + [id] int primary key, + [name] varchar(30) not null + )`); + + await client.query(` + create table [users2] ( + [id] int primary key, + [name] varchar(30) not null, + [city_id] int null foreign key references [cities]([id]) + )`); + + await client.query(` + create table [mySchema].[userstest] ( + [id] int identity primary key, + [name] varchar(100) not null, + [verified] bit not null default 0, + [jsonb] nvarchar(100), + [created_at] datetime2(2) not null default current_timestamp + )`); + + await client.query(` + create table [mySchema].[cities] ( + [id] int identity primary key, + [name] varchar(100) not null + )`); + + await client.query(` + create table [mySchema].[users2] ( + [id] int identity primary key, + [name] varchar(100) not null, + [city_id] int references [mySchema].[cities]([id]) + )`); +}); + +async function setupSetOperationTest(db: NodeMsSqlDatabase) { + await db.execute(sql`drop table if exists [users2]`); + await db.execute(sql`drop table if exists [cities]`); + await db.execute(sql` + create table [cities] ( + [id] int primary key, + [name] varchar(30) not null + ) + `); + + await db.execute(sql` + create table [users2] ( + [id] int primary key, + [name] varchar(30) not null, + [city_id] int foreign key references [cities]([id]) + ) + `); + + await db.insert(citiesTable).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); +} + +async function setupAggregateFunctionsTest(db: NodeMsSqlDatabase) { + await db.execute(sql`drop table if exists [aggregate_table]`); + await db.execute( + sql` + create table [aggregate_table] ( + [id] int identity primary key not null, + [name] varchar(30) not null, + [a] int, + [b] int, + [c] int, + [null_only] int + ); + `, + ); + await db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); +} + +test('table config: columns', async () => { + const table = mssqlTable('cities', { + id: int().primaryKey().identity(), + id1: int().primaryKey().identity({ increment: 2, seed: 3 }), + }, (t) => [ + foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), + ]); + + const tableConfig = getTableConfig(table); + + // @ts-ignore + // Drizzle ORM gives this value in runtime, but not in types. + // After sync with Andrew, we decided to fix this with Dan later + // That's due to architecture problems we have in columns and complex abstraction we should avoid + // for now we are sure this value is here + // If it's undefined - than users didn't provide any identity + // If it's an object with seed/increment and a) both are undefined - use default identity startegy + // b) some of them have values - use them + // Note: you can't have only one value. Either both are undefined or both are defined + // console.log(tableConfig.identity); + + expect(tableConfig.foreignKeys).toHaveLength(1); + expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); +}); + +test('table config: foreign keys name', async () => { + const table = mssqlTable('cities', { + id: int('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [ + foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), + ]); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.foreignKeys).toHaveLength(1); + expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); +}); + +test('table config: primary keys name', async () => { + const table = mssqlTable('cities', { + id: int('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [ + primaryKey({ columns: [t.id, t.name] }), + ]); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe(undefined); +}); + +test('table configs: unique third param', async () => { + const cities1Table = mssqlTable('cities1', { + id: int('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [ + unique().on(t.name, t.state), + unique('custom_name1').on(t.name, t.state), + ]); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe(undefined); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); +}); + +test('table configs: unique in column', async () => { + const cities1Table = mssqlTable('cities1', { + id: int('id').primaryKey(), + name: text('name').notNull().unique('unique_name'), + state: text('state').unique('custom'), + field: text('field').unique('custom_field'), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + expect(columnName?.uniqueName).toBe('unique_name'); + expect(columnName?.isUnique).toBeTruthy(); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBeTruthy(); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBeTruthy(); +}); + +test('select all fields', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('select sql', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select typed sql', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select distinct', async ({ db }) => { + const usersDistinctTable = mssqlTable('users_distinct', { + id: int('id').notNull(), + name: varchar('name', { length: 30 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name varchar(30))`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); +}); + +test('insert returning sql', async ({ db }) => { + const result = await db.insert(usersTable).values({ name: 'John' }); + + expect(result.rowsAffected[0]).toEqual(1); +}); + +test('delete returning sql', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users.rowsAffected[0]).toBe(1); +}); + +test('update returning sql', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users.rowsAffected[0]).toBe(1); +}); + +test('update with returning all fields', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers.rowsAffected[0]).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); +}); + +test('update with returning partial', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers.rowsAffected[0]).toEqual(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test('delete with returning all fields', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser.rowsAffected[0]).toBe(1); +}); + +test('delete with returning partial', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser.rowsAffected[0]).toBe(1); +}); + +test('insert + select', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('json insert', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('insert with overridden default values', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('insert many', async ({ db }) => { + await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('insert many with returning', async ({ db }) => { + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result.rowsAffected[0]).toBe(4); +}); + +test('select with group by as field', async ({ db }) => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name).orderBy(usersTable.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('select with group by as sql', async ({ db }) => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`).orderBy(usersTable.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('$default function', async ({ db }) => { + await db.execute(sql`drop table if exists [orders]`); + await db.execute( + sql` + create table [orders] ( + [id] int primary key, + [region] text not null, + [product] text not null, + [amount] int not null, + [quantity] int not null + ) + `, + ); + + await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); + const selectedOrder = await db.select().from(orders); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); +}); + +test('$default with empty array', async ({ db }) => { + await db.execute(sql`drop table if exists [s_orders]`); + await db.execute( + sql` + create table [s_orders] ( + [id] int identity primary key, + [region] text default ('Ukraine'), + [product] text not null + ) + `, + ); + + const users = mssqlTable('s_orders', { + id: int('id').identity().primaryKey(), + region: text('region').default('Ukraine'), + product: text('product').$defaultFn(() => 'random_string'), + }); + + await db.insert(users).values({}); + const selectedOrder = await db.select().from(users); + + expect(selectedOrder).toEqual([{ + id: 1, + region: 'Ukraine', + product: 'random_string', + }]); +}); + +test('select with group by as sql + column', async ({ db }) => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by as column + sql', async ({ db }) => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('select with group by complex query', async ({ db }) => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .offset(0).fetch(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('build query', async ({ db }) => { + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select [id], [name] from [userstest] group by [userstest].[id], [userstest].[name]`, + params: [], + }); +}); + +test('Query check: Insert all defaults in 1 row', async ({ db }) => { + const users = mssqlTable('users', { + id: int('id').identity().primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into [users] ([name], [state]) values (default, default)', + params: [], + }); +}); + +test('Query check: Insert all defaults in multiple rows', async ({ db }) => { + const users = mssqlTable('users', { + id: int('id').identity().primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into [users] ([name], [state]) values (default, default), (default, default)', + params: [], + }); +}); + +test('Insert all defaults in 1 row', async ({ db }) => { + const users = mssqlTable('empty_insert_single', { + id: int('id').identity().primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id int identity primary key, name text default ('Dan'), state text)`, + ); + + await db.insert(users).values({}); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); +}); + +test('Insert all defaults in multiple rows', async ({ db }) => { + const users = mssqlTable('empty_insert_multiple', { + id: int('id').identity().primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id int identity primary key, name text default ('Dan'), state text)`, + ); + + await db.insert(users).values([{}, {}]); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); +}); + +test('insert sql', async ({ db }) => { + await db.insert(usersTable).values({ name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('partial join with alias', async ({ db }) => { + const users = mssqlTable('usersForTest', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int primary key, name text not null)`); + + const customerAlias = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: users.id, + name: users.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(users) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('full join with alias', async ({ db }) => { + const mssqlTable = mssqlTableCreator((name) => `prefixed_${name}`); + + const users = mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('select from alias', async ({ db }) => { + const mssqlTable = mssqlTableCreator((name) => `prefixed_${name}`); + + const users = mssqlTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('insert with spaces', async ({ db }) => { + await db.insert(usersTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('prepared statement', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable).orderBy() + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('prepared statement reuse', async ({ db }) => { + const stmt = db.insert(usersTable).values({ + verified: true, + name: sql.placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test('prepared statement with placeholder in .where', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('migrator', async ({ db }) => { + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists [drizzle].[__drizzle_migrations]`); + + await migrate(db, { migrationsFolder: './drizzle2/mssql' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table [drizzle].[__drizzle_migrations]`); +}); + +test('migrator : --init', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)};`); + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)};`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mssql', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT + CASE + WHEN EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_SCHEMA = ${getTableConfig(usersMigratorTable).schema ?? 'dbo'} AND TABLE_NAME = ${ + getTableConfig(usersMigratorTable).name + }) + THEN 1 + ELSE 0 + END AS ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res.recordset[0]?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)};`); + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)};`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mssql-init', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT + CASE + WHEN EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_SCHEMA = ${getTableConfig(usersMigratorTable).schema ?? 'dbo'} AND TABLE_NAME = ${ + getTableConfig(usersMigratorTable).name + }) + THEN 1 + ELSE 0 + END AS ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res.recordset[0]?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)};`); + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)};`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, { + migrationsFolder: './drizzle2/mssql-init', + migrationsSchema, + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mssql', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT + CASE + WHEN EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_SCHEMA = ${getTableConfig(usersMigratorTable).schema ?? 'dbo'} AND TABLE_NAME = ${ + getTableConfig(usersMigratorTable).name + }) + THEN 1 + ELSE 0 + END AS ${sql.identifier('tableExists')};`); + + console.log(res); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(2); + expect(!!res.recordset[0]?.tableExists).toStrictEqual(true); +}); + +test('insert via db.execute + select via db.execute', async ({ db }) => { + await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result.recordset).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert via db.execute w/ query builder', async ({ db }) => { + const inserted = await db.execute( + db.insert(usersTable).values({ name: 'John' }), + ); + expect(inserted.rowsAffected[0]).toBe(1); +}); + +test('insert + select all possible dates', async ({ db }) => { + await db.execute(sql`drop table if exists [datestable]`); + await db.execute( + sql` + create table [datestable] ( + [date] date, + [date_as_string] date, + [time] time, + [time_as_string] time, + [datetime] datetime, + [datetime_as_string] datetime, + ) + `, + ); + + const date = new Date('2022-11-11'); + + await db.insert(datesTable).values({ + date: date, + dateAsString: '2022-11-11', + time: date, + timeAsString: '12:12:12', + datetime: date, + datetimeAsString: '2022-11-11 12:12:12', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: new Date('2022-11-11'), + dateAsString: '2022-11-11', + time: new Date('1970-01-01T00:00:00Z'), + datetime: new Date('2022-11-11'), + datetimeAsString: '2022-11-11T12:12:12.000Z', + timeAsString: '12:12:12.000', + }]); + + await db.execute(sql`drop table if exists [datestable]`); +}); + +test('Mssql enum test case #1', async ({ db }) => { + await db.execute(sql`drop table if exists [enums_test_case]`); + + await db.execute(sql` + create table [enums_test_case] ( + [id] int primary key, + [enum1] text not null, + [enum2] text default 'a', + [enum3] text not null default 'b' + ) + `); + + const tableWithEnums = mssqlTable('enums_test_case', { + id: int('id').primaryKey(), + enum1: varchar('enum1', { enum: ['a', 'b', 'c'] }).notNull(), + enum2: varchar('enum2', { enum: ['a', 'b', 'c'] }).default('a'), + enum3: varchar('enum3', { enum: ['a', 'b', 'c'] }).notNull().default('b'), + }); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums); + + await db.execute(sql`drop table [enums_test_case]`); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); +}); + +test('left join (flat object fields)', async ({ db }) => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); +}); + +test('left join (grouped fields)', async ({ db }) => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); +}); + +test('left join (all fields)', async ({ db }) => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select().from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId: 1, + }, + cities: { + id: 1, + name: 'Paris', + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: null, + }, + ]); +}); + +test('join subquery', async ({ db }) => { + await db.execute(sql`drop table if exists [courses]`); + await db.execute(sql`drop table if exists [course_categories]`); + + await db.execute( + sql` + create table [course_categories] ( + [id] int identity primary key, + [name] varchar(50) not null + ) + `, + ); + + await db.execute( + sql` + create table [courses] ( + [id] int identity primary key, + [name] varchar(50) not null, + [category_id] int references [course_categories]([id]) + ) + `, + ); + + await db.insert(courseCategoriesTable).values([ + { name: 'Category 1' }, + { name: 'Category 2' }, + { name: 'Category 3' }, + { name: 'Category 4' }, + ]); + + await db.insert(coursesTable).values([ + { name: 'Development', categoryId: 2 }, + { name: 'IT & Software', categoryId: 3 }, + { name: 'Marketing', categoryId: 4 }, + { name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`.as('count'), + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + + await db.execute(sql`drop table if exists [courses]`); + await db.execute(sql`drop table if exists [course_categories]`); +}); + +test('with ... select', async ({ db }) => { + await db.execute(sql`drop table if exists [orders]`); + await db.execute( + sql` + create table [orders] ( + [id] int primary key, + [region] varchar(50) not null, + [product] varchar(50) not null, + [amount] int not null, + [quantity] int not null + ) + `, + ); + + await db.insert(orders).values([ + { id: 1, region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { id: 2, region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { id: 3, region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { id: 4, region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { id: 5, region: 'US', product: 'A', amount: 30, quantity: 3 }, + { id: 6, region: 'US', product: 'A', amount: 40, quantity: 4 }, + { id: 7, region: 'US', product: 'B', amount: 40, quantity: 4 }, + { id: 8, region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as int)`, + productSales: sql`cast(sum(${orders.amount}) as int)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); +}); + +test('select from subquery sql', async ({ db }) => { + await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); + + const sq = db + .select({ name: sql`concat(${users2Table.name}, ' modified')`.as('name') }) + .from(users2Table) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); +}); + +test('select a field without joining its table', ({ db }) => { + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); +}); + +test('select all fields from subquery without alias', ({ db }) => { + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); +}); + +test('select count()', async ({ db }) => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); +}); + +test('having', async ({ db }) => { + await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { + id: 3, + name: 'New York', + }]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`len(${name}) >= 3`) + .groupBy(citiesTable.id, citiesTable.name) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); +}); + +test('view', async ({ db }) => { + const newYorkers1 = mssqlView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = mssqlView('new_yorkers', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = mssqlView('new_yorkers', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); +}); + +test('select from raw sql', async ({ db }) => { + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); +}); + +test('select from raw sql with joins', async ({ db }) => { + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); +}); + +test('join on aliased sql from select', async ({ db }) => { + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test('join on aliased sql from with clause', async ({ db }) => { + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test('prefixed table', async ({ db }) => { + const mssqlTable = mssqlTableCreator((name) => `myprefix_${name}`); + + const users = mssqlTable('test_prefixed_table_with_unique_name', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('orderBy with aliased column', ({ db }) => { + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toEqual('select something as [test] from [users2] order by [test]'); +}); + +test('timestamp timezone', async ({ db }) => { + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + await db.insert(usersTable).values({ name: 'With default times' }); + await db.insert(usersTable).values({ + name: 'Without default times', + createdAt: date, + }); + const users = await db.select().from(usersTable); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); +}); + +test('transaction', async ({ db }) => { + const users = mssqlTable('users_transactions', { + id: int('id').identity().primaryKey(), + balance: int('balance').notNull(), + }); + const products = mssqlTable('products_transactions', { + id: int('id').identity().primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute( + sql`create table users_transactions (id int identity not null primary key, balance int not null)`, + ); + await db.execute( + sql`create table products_transactions (id int identity not null primary key, price int not null, stock int not null)`, + ); + + await db.insert(users).values({ balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, 1)).then((rows) => rows[0]!); + await db.insert(products).values({ price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, 1)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); +}); + +test('transaction rollback', async ({ db }) => { + const users = mssqlTable('users_transactions_rollback', { + id: int('id').identity().primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_transactions_rollback (id int identity not null primary key, balance int not null)`, + ); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); + + await db.execute(sql`drop table ${users}`); +}); + +test('nested transaction', async ({ db }) => { + const users = mssqlTable('users_nested_transactions', { + id: int('id').identity().primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_nested_transactions (id int identity not null primary key, balance int not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + }); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 200 }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('nested transaction rollback', async ({ db }) => { + const users = mssqlTable('users_nested_transactions_rollback', { + id: int('id').identity().primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_nested_transactions_rollback (id int identity not null primary key, balance int not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await expect((async () => { + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 100 }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('join subquery with join', async ({ db }) => { + const internalStaff = mssqlTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = mssqlTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = mssqlTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table internal_staff (user_id integer not null)`); + await db.execute(sql`create table custom_user (id integer not null)`); + await db.execute(sql`create table ticket (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table ${ticket}`); +}); + +test('subquery with view', async ({ db }) => { + const users = mssqlTable('users_subquery_view', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = mssqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id int identity not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); +}); + +test('join view as subquery', async ({ db }) => { + const users = mssqlTable('users_join_view', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = mssqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id int identity not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); +}); + +test('select iterator', async ({ db }) => { + const users = mssqlTable('users_iterator', { + id: int('id').identity().primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int identity not null primary key)`); + + await db.insert(users).values({}); + await db.insert(users).values({}); + await db.insert(users).values({}); + + const iter = db.select().from(users).iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); +}); + +test('select iterator w/ prepared statement', async ({ db }) => { + const users = mssqlTable('users_iterator', { + id: int('id').identity({ increment: 1, seed: 1 }).primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int identity not null primary key)`); + + await db.insert(users).values({}); + await db.insert(users).values({}); + await db.insert(users).values({}); + + const prepared = db.select().from(users).prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); +}); + +test('insert undefined', async ({ db }) => { + const users = mssqlTable('usersForTests', { + id: int('id').identity().primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id int identity not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); +}); + +test('update undefined', async ({ db }) => { + const users = mssqlTable('usersForTests', { + id: int('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id int not null primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); +}); + +// test('utc config for datetime', async ({ db }) => { +// +// +// await db.execute(sql`drop table if exists [datestable]`); +// await db.execute( +// sql` +// create table [datestable] ( +// [datetime_utc] datetime, +// [datetime] datetime, +// [datetime_as_string] datetime +// ) +// `, +// ); +// const datesTable = mssqlTable('datestable', { +// datetimeUTC: datetime('datetime_utc', { mode: 'date' }), +// datetime: datetime('datetime'), +// datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), +// }); +// +// const dateObj = new Date('2022-11-11'); +// const dateUtc = new Date('2022-11-11T12:12:12.122Z'); +// +// await db.insert(datesTable).values({ +// datetimeUTC: dateUtc, +// datetime: dateObj, +// datetimeAsString: '2022-11-11 12:12:12', +// }); +// +// const res = await db.select().from(datesTable); +// +// const rawSelect = await db.execute(sql`select [datetime_utc] from [datestable]`); +// const selectedRow = rawSelect.recordset[0]; +// +// expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122'); +// expect(new Date(selectedRow.datetime_utc.replace(' ').toEqual('T') + 'Z'), dateUtc); +// +// t.assert(res[0]?.datetime instanceof Date); // eslint-disable-line no-instanceof/no-instanceof +// t.assert(res[0]?.datetimeUTC instanceof Date); // eslint-disable-line no-instanceof/no-instanceof +// t.assert(typeof res[0]?.datetimeAsString === 'string'); +// +// expect(res).toEqual([{ +// datetimeUTC: dateUtc, +// datetime: new Date('2022-11-11'), +// datetimeAsString: '2022-11-11 12:12:12', +// }]); +// +// await db.execute(sql`drop table if exists [datestable]`); +// }); + +test('set operations (union) from query builder with subquery', async ({ db }) => { + await setupSetOperationTest(db); + const sq = db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).as('sq'); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).union( + db.select().from(sq), + ).orderBy(asc(citiesTable.name)).offset(0).fetch(8); + + expect(result).toHaveLength(8); + + expect(result).toEqual([ + { id: 5, name: 'Ben' }, + { id: 3, name: 'Jack' }, + { id: 2, name: 'Jane' }, + { id: 6, name: 'Jill' }, + { id: 1, name: 'John' }, + { id: 2, name: 'London' }, + { id: 7, name: 'Mary' }, + { id: 1, name: 'New York' }, + ]); + + // union should throw if selected fields are not in the same order + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).union( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (union) as function', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(sql`name`); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (union all) from query builder', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable), + ).orderBy(asc(citiesTable.id)).offset(1).fetch(5); + + expect(result).toHaveLength(5); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); +}); + +test('set operations (union all) as function', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(citiesTable.id).offset(0).fetch(1); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(citiesTable.id).offset(0).fetch(1); + })()).rejects.toThrowError(); +}); + +test('set operations (intersect) from query builder', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (intersect) as function', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(citiesTable.id).offset(0).fetch(1); + + expect(result).toHaveLength(0); + + expect(result).toEqual([]); + + await expect((async () => { + intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(citiesTable.id).offset(0).fetch(1); + })()).rejects.toThrowError(); +}); + +test('set operations (except) from query builder', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); +}); + +test('set operations (except) as function', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await except( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(citiesTable.id).offset(0).fetch(3); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + except( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(citiesTable.id).offset(0).fetch(3); + })()).rejects.toThrowError(); +}); + +test('set operations (mixed) from query builder', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).where(eq(citiesTable.id, 2)), + ), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (mixed all) as function with subquery', async ({ db }) => { + await setupSetOperationTest(db); + + const sq = union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).orderBy(sq.id).offset(1).fetch(4); + + expect(result).toHaveLength(4); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + ]); + + await expect((async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); +}); + +test('test $onUpdateFn and $onUpdate works as $default', async ({ db }) => { + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id int identity not null primary key, + [name] text not null, + update_counter integer default 1 not null, + updated_at datetime, + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest }).from(usersOnUpdate); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 250; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } +}); + +test('test $onUpdateFn and $onUpdate works updating', async ({ db }) => { + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id int identity not null primary key, + [name] text not null, + update_counter integer default 1 not null, + updated_at datetime, + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John', alwaysNull: 'this will will be null after updating' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + const initial = await db.select({ updatedAt }).from(usersOnUpdate); + + await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest }).from(usersOnUpdate); + + expect(response).toEqual([ + { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 250; + + expect(initial[0]?.updatedAt?.valueOf()).not.toEqual(justDates[0]?.updatedAt?.valueOf()); + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } +}); + +test('aggregate function: count', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: count() }).from(table); + const result2 = await db.select({ value: count(table.a) }).from(table); + const result3 = await db.select({ value: countDistinct(table.name) }).from(table); + + expect(result1[0]?.value).toEqual(7); + expect(result2[0]?.value).toEqual(5); + expect(result3[0]?.value).toEqual(6); +}); + +test('aggregate function: avg', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: avg(table.b) }).from(table); + const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); + const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toEqual('33'); + expect(result2[0]?.value).toEqual(null); + expect(result3[0]?.value).toEqual('42'); +}); + +test('aggregate function: sum', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toEqual('200'); + expect(result2[0]?.value).toEqual(null); + expect(result3[0]?.value).toEqual('170'); +}); + +test('aggregate function: max', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: max(table.b) }).from(table); + const result2 = await db.select({ value: max(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toEqual(90); + expect(result2[0]?.value).toEqual(null); +}); + +test('aggregate function: min', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: min(table.b) }).from(table); + const result2 = await db.select({ value: min(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toEqual(10); + expect(result2[0]?.value).toEqual(null); +}); + +test('mySchema :: select all fields', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersSchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('mySchema :: select sql', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersSchemaTable.name})`, + }).from(usersSchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('mySchema :: select typed sql', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersSchemaTable.name})`, + }).from(usersSchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('mySchema :: select distinct', async ({ db }) => { + const usersDistinctTable = mssqlTable('users_distinct', { + id: int('id').notNull(), + name: varchar('name', { length: 30 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name varchar(30))`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); +}); + +test('mySchema :: insert returning sql', async ({ db }) => { + const result = await db.insert(usersSchemaTable).values({ name: 'John' }); + + expect(result.rowsAffected[0]).toEqual(1); +}); + +test('mySchema :: delete returning sql', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const result = await db.delete(usersSchemaTable).where(eq(usersTable.name, 'John')); + + expect(result.rowsAffected[0]).toBe(1); +}); + +test('mySchema :: update returning sql', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const result = await db.update(usersSchemaTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(result.rowsAffected[0]).toBe(1); +}); + +test('mySchema :: update with returning all fields', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersSchemaTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersSchemaTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers.rowsAffected[0]).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); +}); + +test('mySchema :: update with returning partial', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersSchemaTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersSchemaTable.id, name: usersTable.name }).from(usersSchemaTable).where( + eq(usersSchemaTable.id, 1), + ); + + expect(updatedUsers.rowsAffected[0]).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test('mySchema :: delete with returning all fields', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersSchemaTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser.rowsAffected[0]).toBe(1); +}); + +test('mySchema :: delete with returning partial', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersSchemaTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser.rowsAffected[0]).toBe(1); +}); + +test('mySchema :: insert + select', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersSchemaTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersSchemaTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersSchemaTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('mySchema :: json insert', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersSchemaTable.id, + name: usersSchemaTable.name, + jsonb: usersSchemaTable.jsonb, + }).from(usersSchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('mySchema :: insert with overridden default values', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersSchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('mySchema :: insert many', async ({ db }) => { + await db.insert(usersSchemaTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersSchemaTable.id, + name: usersSchemaTable.name, + jsonb: usersSchemaTable.jsonb, + verified: usersSchemaTable.verified, + }).from(usersSchemaTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('mySchema :: insert many with returning', async ({ db }) => { + const result = await db.insert(usersSchemaTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result.rowsAffected[0]).toBe(4); +}); + +test('mySchema :: select with group by as field', async ({ db }) => { + await db.insert(usersSchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersSchemaTable.name }).from(usersSchemaTable) + .groupBy(usersSchemaTable.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('mySchema :: select with group by as sql', async ({ db }) => { + await db.insert(usersSchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersSchemaTable.name }).from(usersSchemaTable) + .groupBy(sql`${usersSchemaTable.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('mySchema :: select with group by as sql + column', async ({ db }) => { + await db.insert(usersSchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersSchemaTable.name }).from(usersSchemaTable) + .groupBy(sql`${usersSchemaTable.name}`, usersSchemaTable.id); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('mySchema :: select with group by as column + sql', async ({ db }) => { + await db.insert(usersSchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersSchemaTable.name }).from(usersSchemaTable) + .groupBy(usersSchemaTable.id, sql`${usersSchemaTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test('mySchema :: select with group by complex query', async ({ db }) => { + await db.insert(usersSchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersSchemaTable.name }).from(usersSchemaTable) + .groupBy(usersSchemaTable.id, sql`${usersSchemaTable.name}`) + .orderBy(asc(usersSchemaTable.name)) + .offset(0) + .fetch(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('mySchema :: build query', async ({ db }) => { + const query = db.select({ id: usersSchemaTable.id, name: usersSchemaTable.name }).from(usersSchemaTable) + .groupBy(usersSchemaTable.id, usersSchemaTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: + `select [id], [name] from [mySchema].[userstest] group by [mySchema].[userstest].[id], [mySchema].[userstest].[name]`, + params: [], + }); +}); + +test('mySchema :: insert sql', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: sql`${'John'}` }); + const result = await db.select({ id: usersSchemaTable.id, name: usersSchemaTable.name }).from(usersSchemaTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('mySchema :: partial join with alias', async ({ db }) => { + const customerAlias = alias(usersSchemaTable, 'customer'); + + await db.insert(usersSchemaTable).values([{ name: 'Ivan' }, { name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersSchemaTable.id, + name: usersSchemaTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersSchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 2)) + .where(eq(usersSchemaTable.id, 1)); + + expect(result).toEqual([{ + user: { id: 1, name: 'Ivan' }, + customer: { id: 2, name: 'Hans' }, + }]); +}); + +test('mySchema :: full join with alias', async ({ db }) => { + const mysqlTable = mssqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('mySchema :: select from alias', async ({ db }) => { + const mysqlTable = mssqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id int primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('mySchema :: insert with spaces', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersSchemaTable.id, name: usersSchemaTable.name }).from(usersSchemaTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('mySchema :: prepared statement', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const statement = db.select({ + id: usersSchemaTable.id, + name: usersSchemaTable.name, + }).from(usersSchemaTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('mySchema :: prepared statement reuse', async ({ db }) => { + const stmt = db.insert(usersSchemaTable).values({ + verified: true, + name: sql.placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db.select({ + id: usersSchemaTable.id, + name: usersSchemaTable.name, + verified: usersSchemaTable.verified, + }).from(usersSchemaTable); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test('mySchema :: prepared statement with placeholder in .where', async ({ db }) => { + await db.insert(usersSchemaTable).values({ name: 'John' }); + const stmt = db.select({ + id: usersSchemaTable.id, + name: usersSchemaTable.name, + }).from(usersSchemaTable) + .where(eq(usersSchemaTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('mySchema :: insert via db.execute + select via db.execute', async ({ db }) => { + await db.execute(sql`insert into ${usersSchemaTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersSchemaTable}`); + expect(result.recordset).toEqual([{ id: 1, name: 'John' }]); +}); + +test('mySchema :: insert via db.execute w/ query builder', async ({ db }) => { + const inserted = await db.execute( + db.insert(usersSchemaTable).values({ name: 'John' }), + ); + expect(inserted.rowsAffected[0]).toBe(1); +}); + +test('mySchema :: select from tables with same name from different schema using alias', async ({ db }) => { + await db.execute(sql`drop table if exists [userstest]`); + await db.execute( + sql` + create table [userstest] ( + [id] int identity primary key, + [name] varchar(100) not null, + [verified] bit not null default 0, + [jsonb] nvarchar(100), + [created_at] datetime2(2) not null default current_timestamp + ) + `, + ); + + await db.insert(usersSchemaTable).values({ name: 'Ivan' }); + await db.insert(usersTable).values({ name: 'Hans' }); + + const customerAlias = alias(usersTable, 'customer'); + + const result = await db + .select().from(usersSchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 1)) + .where(eq(usersSchemaTable.id, 1)); + + expect(result).toEqual([{ + userstest: { + id: 1, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]?.userstest.createdAt, + }, + customer: { + id: 1, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]?.customer!.createdAt, + }, + }]); +}); + +test('mySchema :: Mysql enum test case #1', async ({ db }) => { + await db.execute(sql` + create table ${tableWithEnums} ( + [id] int primary key, + [enum1] varchar not null, + [enum2] varchar default 'a', + [enum3] varchar not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums); + + await db.execute(sql`drop table ${tableWithEnums}`); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); +}); + +test('mySchema :: view', async ({ db }) => { + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => qb.select().from(users2SchemaTable).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2SchemaTable} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesSchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2SchemaTable).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); +}); + +test('all possible columns', async ({ db }) => { + await db.execute(sql`DROP TABLE IF EXISTS [all_possible_columns]`); + // eslint-disable-next-line unicorn/template-indent + await db.execute(sql` + CREATE TABLE [all_possible_columns] ( + bigintBigint bigint, + bigintString bigint, + bigintNumber bigint, + bigintBigintDefault bigint DEFAULT 123, + bigintStringDefault bigint DEFAULT 123, + bigintNumberDefault bigint DEFAULT 123, + + binary binary, + binaryLength binary(1), + binaryDefault binary DEFAULT 0x01, + + bit bit, + bitDefault bit DEFAULT 0, + + char char, + charWithConfig char(3), + charDefault char DEFAULT '4', + + date date, + dateModeDate date, + dateModeString date, + dateDefault date DEFAULT '2025-04-18T00:00:00.000Z', + dateModeStringDefault date DEFAULT '2025-04-18T00:00:00.000Z', + + dateTime datetime, + dateTimeModeDate datetime, + dateTimeModeString datetime, + dateTimeDefault datetime DEFAULT '2025-04-18T00:00:00.000Z', + dateTimeModeStringDefault datetime DEFAULT '2025-04-18T00:00:00.000Z', + + dateTime2 datetime2, + dateTime2ModeDate datetime2, + dateTime2ModeString datetime2, + dateTime2WithPrecision datetime2(5), + dateTime2Default datetime2 DEFAULT '2025-04-18T00:00:00.000Z', + dateTime2ModeStringDefault datetime2 DEFAULT '2025-04-18T00:00:00.000Z', + dateTime2ModeStringWithPrecisionDefault datetime2(1) DEFAULT '2025-04-18T00:00:00.000Z', + + datetimeOffset datetimeoffset, + datetimeOffsetModeDate datetimeoffset, + datetimeOffsetModeString datetimeoffset, + datetimeOffsetDefault datetimeoffset DEFAULT '2025-04-18 03:00:00.000+3:00', + datetimeOffsetModeStringDefault datetimeoffset DEFAULT '2025-04-18 03:00:00.000+3:00', + datetimeOffsetModeStringWithPrecisionDefault datetimeoffset DEFAULT '2025-04-18 03:00:00.000+3:00', + + decimal decimal, + decimalWithPrecision decimal(3), + decimalWithConfig decimal(10,8), + decimalDefaultString decimal DEFAULT 1.312, + decimalDefaultNumber decimal DEFAULT 1.312, + + float float, + floatWithPrecision float(3), + floatDefault float DEFAULT 32.412, + + int int, + intDefault int DEFAULT 43, + + numeric decimal, + numericWithPrecision numeric(3), + numericWithConfig numeric(10,8), + numericDefault numeric DEFAULT 1.312, + numericDefaultNumber numeric DEFAULT 1.312, + + real real, + realDefault real DEFAULT 5231.4123, + + text text, + textEnum text, + textDefault text DEFAULT 'hello, world', + nText ntext, + nTextEnum ntext, + nTextDefault ntext DEFAULT 'hello, world', + + time time, + timeModeDate time, + timeModeString time, + timeWithPrecision time(3), + timeDefault time DEFAULT '2025-04-18T00:00:00.000Z', + timeModeDateDefault time DEFAULT '2025-04-18T00:00:00.000Z', + timeModeStringDefault time DEFAULT '00:00:00.000', + + smallint smallint, + smallintDefault smallint DEFAULT 331, + + tinyint tinyint, + tinyintDefault tinyint DEFAULT 23, + + varbinary varbinary, + varbinaryWithLength varbinary(100), + varbinaryDefault varbinary DEFAULT 0x01, + + varchar varchar, + varcharWithEnum varchar(3), + varcharWithLength varchar(3), + varcharDefault varchar, + varcharWithEnumDefault varchar DEFAULT '1', + + nchar nchar, + ncharWithEnum nchar(12), + ncharLength nchar(231), + ncharDefault nchar DEFAULT 'h', + + nvarchar nvarchar, + nvarcharWithEnum nvarchar(12), + nvarcharLength nvarchar(231), + nvarcharDefault nvarchar DEFAULT 'h', + nvarcharJson nvarchar(max) +);`); + + const currentDate = new Date('2025-04-18T00:00:00.000Z'); + // insert + await db.insert(allPossibleColumns).values({ + bigintBigint: BigInt(100), + bigintString: '100', + bigintNumber: 100, + bigintBigintDefault: undefined, + bigintStringDefault: undefined, + bigintNumberDefault: undefined, + + binary: Buffer.from('1'), + binaryLength: Buffer.from([0x01]), + binaryDefault: undefined, + + bit: true, + bitDefault: undefined, + + char: 'a', + charWithConfig: '342', + charDefault: undefined, + + date: currentDate, + dateModeDate: currentDate, + dateModeString: currentDate.toISOString(), + dateDefault: undefined, + dateModeStringDefault: undefined, + dateTime: currentDate, + dateTimeModeDate: currentDate, + dateTimeModeString: currentDate.toISOString(), + dateTimeDefault: undefined, + dateTimeModeStringDefault: undefined, + dateTime2: currentDate, + dateTime2ModeDate: currentDate, + dateTime2ModeString: currentDate.toISOString(), + dateTime2WithPrecision: currentDate, + dateTime2Default: undefined, + dateTime2ModeStringDefault: undefined, + dateTime2ModeStringWithPrecisionDefault: undefined, + datetimeOffset: currentDate, + datetimeOffsetModeDate: currentDate, + datetimeOffsetModeString: currentDate.toISOString(), + datetimeOffsetDefault: undefined, + datetimeOffsetModeStringDefault: undefined, + datetimeOffsetModeStringWithPrecisionDefault: undefined, + + decimal: '1.33', + decimalWithPrecision: '4.11', + decimalWithConfig: '41.34234526', + decimalDefaultString: undefined, + decimalDefaultNumber: undefined, + + float: 5234.132, + floatWithPrecision: 1.23, + floatDefault: undefined, + + int: 140, + intDefault: undefined, + + numeric: '33.2', + numericWithPrecision: '33.4', + numericWithConfig: '41.34512', + numericDefault: undefined, + numericDefaultNumber: undefined, + + real: 421.4, + realDefault: undefined, + + text: 'hello', + textEnum: 'this', + textDefault: undefined, + nText: 'hello', + nTextEnum: 'this', + nTextDefault: undefined, + + time: currentDate, + timeModeDate: currentDate, + timeModeString: '00:00:00.000', + timeWithPrecision: currentDate, + timeDefault: undefined, + timeModeDateDefault: undefined, + timeModeStringDefault: undefined, + + smallint: 1312, + smallintDefault: undefined, + + tinyint: 31, + tinyintDefault: undefined, + + varbinary: Buffer.from('1'), + varbinaryWithLength: Buffer.from([0x01]), + varbinaryDefault: undefined, + + varchar: 'v', + varcharWithEnum: '123', + varcharWithLength: '301', + varcharDefault: undefined, + varcharWithEnumDefault: undefined, + nvarcharJson: { hello: 'world' }, + nchar: 'n', + ncharWithEnum: 'hello, world', + ncharLength: 'some value', + ncharDefault: undefined, + + nvarchar: 'n', + nvarcharWithEnum: 'hello, world', + nvarcharLength: 'some value', + nvarcharDefault: undefined, + }); + + const res = await db.select().from(allPossibleColumns); + + expect(res.length).toBe(1); + expect(Buffer.isBuffer(res[0]?.binary)).toBe(true); + expect(Buffer.isBuffer(res[0]?.binaryLength)).toBe(true); + expect(Buffer.isBuffer(res[0]?.binaryDefault)).toBe(true); + expect(Buffer.isBuffer(res[0]?.varbinary)).toBe(true); + expect(Buffer.isBuffer(res[0]?.varbinaryWithLength)).toBe(true); + expect(Buffer.isBuffer(res[0]?.varbinaryDefault)).toBe(true); + + expect( + res.map((it) => ({ + ...it, + binary: it.binary ? it.binary.toString() : null, + binaryLength: it.binaryLength ? it.binaryLength.toString('hex') : null, + binaryDefault: it.binaryDefault ? it.binaryDefault.toString('hex') : null, + varbinary: it.varbinary ? it.varbinary.toString() : null, + varbinaryDefault: it.varbinaryDefault ? it.varbinaryDefault.toString('hex') : null, + varbinaryWithLength: it.varbinaryWithLength ? it.varbinaryWithLength.toString('hex') : null, + })), + ).toStrictEqual([ + { + bigintBigint: 100n, + bigintString: '100', + bigintNumber: 100, + bigintBigintDefault: 123n, + bigintStringDefault: '123', + bigintNumberDefault: 123, + + binary: '1', + binaryLength: '01', + binaryDefault: '01', + + bit: true, + bitDefault: false, + char: 'a', + charWithConfig: '342', + charDefault: '4', + date: currentDate, + dateModeDate: currentDate, + dateModeString: `${currentDate.getFullYear()}-${ + (currentDate.getMonth() + 1).toString().padStart(2, '0') + }-${currentDate.getDate()}`, + dateDefault: currentDate, + dateModeStringDefault: `${currentDate.getFullYear()}-${ + (currentDate.getMonth() + 1).toString().padStart(2, '0') + }-${currentDate.getDate()}`, + dateTime: currentDate, + dateTimeModeDate: currentDate, + dateTimeModeString: currentDate.toISOString(), + dateTimeDefault: currentDate, + dateTimeModeStringDefault: currentDate.toISOString(), + dateTime2: currentDate, + dateTime2ModeDate: currentDate, + dateTime2ModeString: currentDate.toISOString(), + dateTime2WithPrecision: currentDate, + dateTime2Default: currentDate, + dateTime2ModeStringDefault: currentDate.toISOString(), + dateTime2ModeStringWithPrecisionDefault: currentDate.toISOString(), + datetimeOffset: currentDate, + datetimeOffsetModeDate: currentDate, + datetimeOffsetModeString: currentDate.toISOString(), + datetimeOffsetDefault: currentDate, + datetimeOffsetModeStringDefault: currentDate.toISOString(), + datetimeOffsetModeStringWithPrecisionDefault: currentDate.toISOString(), + decimal: '1', + decimalWithPrecision: '4', + decimalWithConfig: '41.34234526', + decimalDefaultNumber: 1, + decimalDefaultString: '1', + float: 5234.132, + floatWithPrecision: 1.2300000190734863, + floatDefault: 32.412, + int: 140, + intDefault: 43, + numeric: '33', + numericWithPrecision: '33', + numericWithConfig: '41.34512', + numericDefault: '1', + numericDefaultNumber: 1, + real: 421.3999938964844, + realDefault: 5231.412109375, + text: 'hello', + textEnum: 'this', + textDefault: 'hello, world', + nText: 'hello', + nTextEnum: 'this', + nTextDefault: 'hello, world', + time: new Date(`1970-01-01T00:00:00.000Z`), // mssql returns date, and sets only hours:mm:ss for 1970 year + timeModeDate: new Date(`1970-01-01T00:00:00.000Z`), + timeModeString: `00:00:00.000`, + timeWithPrecision: new Date(`1970-01-01T00:00:00.000Z`), + timeDefault: new Date(`1970-01-01T00:00:00.000Z`), + timeModeDateDefault: new Date(`1970-01-01T00:00:00.000Z`), + timeModeStringDefault: '00:00:00.000', + smallint: 1312, + smallintDefault: 331, + tinyint: 31, + tinyintDefault: 23, + + varbinary: '1', + varbinaryWithLength: '01', + varbinaryDefault: '01', + + varchar: 'v', + varcharWithEnum: '123', + varcharWithLength: '301', + varcharDefault: null, + varcharWithEnumDefault: '1', + nchar: 'n', + ncharWithEnum: 'hello, world', + ncharLength: + 'some value ', + ncharDefault: 'h', + nvarchar: 'n', + nvarcharWithEnum: 'hello, world', + nvarcharLength: 'some value', + nvarcharDefault: 'h', + nvarcharJson: { hello: 'world' }, + }, + ]); +}); + +test('inner join', async ({ db }) => { + await db.execute(sql`DROP TABLE IF EXISTS ${employees};`); + await db.execute(sql`DROP TABLE IF EXISTS ${departments};`); + + await db.execute(sql` + CREATE TABLE employees ( + employeeID INT PRIMARY KEY IDENTITY(1,1), + name NVARCHAR(100), + departmentID INT + ); + `); + await db.execute(sql` + CREATE TABLE departments ( + departmentId INT PRIMARY KEY IDENTITY(1,1), + departmentName NVARCHAR(100) + ); + `); + + await db.insert(departments).values({ departmentName: 'Drizzle1' }); + await db.insert(departments).values({ departmentName: 'Drizzle2' }); + await db.insert(departments).values({ departmentName: 'Drizzle3' }); + await db.insert(departments).values({ departmentName: 'Drizzle4' }); + await db.insert(employees).values({ departmentId: 1, name: 'Andrew1' }); + await db.insert(employees).values({ departmentId: 2, name: 'Andrew2' }); + await db.insert(employees).values({ departmentId: 5, name: 'Andrew3' }); + + const res = await db.select({ employeeName: employees.name, department: departments.departmentName }).from( + employees, + ).innerJoin(departments, eq(departments.departmentId, employees.departmentId)); + + expect(res).toStrictEqual([{ employeeName: 'Andrew1', department: 'Drizzle1' }, { + employeeName: 'Andrew2', + department: 'Drizzle2', + }]); +}); + +test('right join', async ({ db }) => { + await db.execute(sql`DROP TABLE IF EXISTS ${employees};`); + await db.execute(sql`DROP TABLE IF EXISTS ${departments};`); + + await db.execute(sql` + CREATE TABLE employees ( + employeeID INT PRIMARY KEY IDENTITY(1,1), + name NVARCHAR(100), + departmentID INT + ); + `); + await db.execute(sql` + CREATE TABLE departments ( + departmentId INT PRIMARY KEY IDENTITY(1,1), + departmentName NVARCHAR(100) + ); + `); + + await db.insert(departments).values({ departmentName: 'Drizzle1' }); + await db.insert(departments).values({ departmentName: 'Drizzle2' }); + await db.insert(departments).values({ departmentName: 'Drizzle3' }); + await db.insert(departments).values({ departmentName: 'Drizzle4' }); + await db.insert(employees).values({ departmentId: 1, name: 'Andrew1' }); + await db.insert(employees).values({ departmentId: 2, name: 'Andrew2' }); + await db.insert(employees).values({ departmentId: 5, name: 'Andrew3' }); + + const res = await db.select({ employeeName: employees.name, department: departments.departmentName }).from( + employees, + ).rightJoin(departments, eq(departments.departmentId, employees.departmentId)); + + expect(res).toStrictEqual([{ employeeName: 'Andrew1', department: 'Drizzle1' }, { + employeeName: 'Andrew2', + department: 'Drizzle2', + }, { + employeeName: null, + department: 'Drizzle3', + }, { + employeeName: null, + department: 'Drizzle4', + }]); +}); + +test('full join', async ({ db }) => { + await db.execute(sql`DROP TABLE IF EXISTS ${employees};`); + await db.execute(sql`DROP TABLE IF EXISTS ${departments};`); + + await db.execute(sql` + CREATE TABLE employees ( + employeeID INT PRIMARY KEY IDENTITY(1,1), + name NVARCHAR(100), + departmentID INT + ); + `); + await db.execute(sql` + CREATE TABLE departments ( + departmentId INT PRIMARY KEY IDENTITY(1,1), + departmentName NVARCHAR(100) + ); + `); + + await db.insert(departments).values({ departmentName: 'Drizzle1' }); + await db.insert(departments).values({ departmentName: 'Drizzle2' }); + await db.insert(departments).values({ departmentName: 'Drizzle3' }); + await db.insert(departments).values({ departmentName: 'Drizzle4' }); + await db.insert(employees).values({ departmentId: 1, name: 'Andrew1' }); + await db.insert(employees).values({ departmentId: 2, name: 'Andrew2' }); + await db.insert(employees).values({ departmentId: 5, name: 'Andrew3' }); + + const res = await db.select({ employeeName: employees.name, department: departments.departmentName }).from( + employees, + ).fullJoin(departments, eq(departments.departmentId, employees.departmentId)); + + expect(res).toStrictEqual([ + { employeeName: 'Andrew1', department: 'Drizzle1' }, + { employeeName: 'Andrew2', department: 'Drizzle2' }, + { employeeName: 'Andrew3', department: null }, + { employeeName: null, department: 'Drizzle3' }, + { employeeName: null, department: 'Drizzle4' }, + ]); +}); + +test('select top', async ({ db }) => { + await db.insert(citiesTable).values({ id: 1, name: 'city1' }); + await db.insert(citiesTable).values({ id: 2, name: 'city2' }); + await db.insert(citiesTable).values({ id: 3, name: 'city3' }); + await db.insert(citiesTable).values({ id: 4, name: 'city4' }); + await db.insert(citiesTable).values({ id: 5, name: 'city5' }); + await db.insert(citiesTable).values({ id: 6, name: 'city6' }); + await db.insert(citiesTable).values({ id: 7, name: 'city7' }); + await db.insert(citiesTable).values({ id: 8, name: 'city8' }); + await db.insert(citiesTable).values({ id: 9, name: 'city9' }); + await db.insert(citiesTable).values({ id: 10, name: 'city10' }); + + const query = db.select().top(4).from(citiesTable); + + expect(query.toSQL()).toStrictEqual({ + sql: `select top(@par0) [id], [name] from [cities]`, + params: [4], + }); + + const res = await query; + + expect(res.length).toBe(4); + expect(res).toStrictEqual( + [ + { id: 1, name: 'city1' }, + { id: 2, name: 'city2' }, + { id: 3, name: 'city3' }, + { id: 4, name: 'city4' }, + ], + ); +}); + +test('select top prepared query', async ({ db }) => { + await db.insert(citiesTable).values({ id: 1, name: 'city1' }); + await db.insert(citiesTable).values({ id: 2, name: 'city2' }); + await db.insert(citiesTable).values({ id: 3, name: 'city3' }); + await db.insert(citiesTable).values({ id: 4, name: 'city4' }); + await db.insert(citiesTable).values({ id: 5, name: 'city5' }); + await db.insert(citiesTable).values({ id: 6, name: 'city6' }); + await db.insert(citiesTable).values({ id: 7, name: 'city7' }); + await db.insert(citiesTable).values({ id: 8, name: 'city8' }); + await db.insert(citiesTable).values({ id: 9, name: 'city9' }); + await db.insert(citiesTable).values({ id: 10, name: 'city10' }); + + const query = db.select().top(sql.placeholder('top')).from(citiesTable); + + const res = await query.execute({ top: 4 }); + + expect(res.length).toBe(4); + expect(res).toStrictEqual( + [ + { id: 1, name: 'city1' }, + { id: 2, name: 'city2' }, + { id: 3, name: 'city3' }, + { id: 4, name: 'city4' }, + ], + ); +}); + +test('select offset', async ({ db }) => { + await db.insert(citiesTable).values({ id: 1, name: 'city1' }); + await db.insert(citiesTable).values({ id: 2, name: 'city2' }); + await db.insert(citiesTable).values({ id: 3, name: 'city3' }); + await db.insert(citiesTable).values({ id: 4, name: 'city4' }); + await db.insert(citiesTable).values({ id: 5, name: 'city5' }); + await db.insert(citiesTable).values({ id: 6, name: 'city6' }); + await db.insert(citiesTable).values({ id: 7, name: 'city7' }); + await db.insert(citiesTable).values({ id: 8, name: 'city8' }); + await db.insert(citiesTable).values({ id: 9, name: 'city9' }); + await db.insert(citiesTable).values({ id: 10, name: 'city10' }); + + const query = db.select().from(citiesTable).orderBy(desc(citiesTable.id)).offset(9); + + expect(query.toSQL()).toStrictEqual({ + sql: `select [id], [name] from [cities] order by [cities].[id] desc offset @par0 rows`, + params: [9], + }); + + const res = await query; + + expect(res.length).toBe(1); + expect(res).toStrictEqual( + [ + { id: 1, name: 'city1' }, + ], + ); +}); + +test('select offset prepared query', async ({ db }) => { + await db.insert(citiesTable).values({ id: 1, name: 'city1' }); + await db.insert(citiesTable).values({ id: 2, name: 'city2' }); + await db.insert(citiesTable).values({ id: 3, name: 'city3' }); + await db.insert(citiesTable).values({ id: 4, name: 'city4' }); + await db.insert(citiesTable).values({ id: 5, name: 'city5' }); + await db.insert(citiesTable).values({ id: 6, name: 'city6' }); + await db.insert(citiesTable).values({ id: 7, name: 'city7' }); + await db.insert(citiesTable).values({ id: 8, name: 'city8' }); + await db.insert(citiesTable).values({ id: 9, name: 'city9' }); + await db.insert(citiesTable).values({ id: 10, name: 'city10' }); + + const query = db.select().from(citiesTable).orderBy(desc(citiesTable.id)).offset(sql.placeholder('offset')); + + const res = await query.execute({ offset: 9 }); + + expect(res.length).toBe(1); + expect(res).toStrictEqual( + [ + { id: 1, name: 'city1' }, + ], + ); +}); + +test('select offset and fetch', async ({ db }) => { + await db.insert(citiesTable).values({ id: 1, name: 'city1' }); + await db.insert(citiesTable).values({ id: 2, name: 'city2' }); + await db.insert(citiesTable).values({ id: 3, name: 'city3' }); + await db.insert(citiesTable).values({ id: 4, name: 'city4' }); + await db.insert(citiesTable).values({ id: 5, name: 'city5' }); + await db.insert(citiesTable).values({ id: 6, name: 'city6' }); + await db.insert(citiesTable).values({ id: 7, name: 'city7' }); + await db.insert(citiesTable).values({ id: 8, name: 'city8' }); + await db.insert(citiesTable).values({ id: 9, name: 'city9' }); + await db.insert(citiesTable).values({ id: 10, name: 'city10' }); + + const query = db.select().from(citiesTable).orderBy(desc(citiesTable.id)).offset(5).fetch(2); + + expect(query.toSQL()).toStrictEqual({ + sql: `select [id], [name] from [cities] order by [cities].[id] desc offset @par0 rows fetch next @par1 rows only`, + params: [5, 2], + }); + + const res = await query; + + expect(res.length).toBe(2); + expect(res).toStrictEqual( + [ + { id: 5, name: 'city5' }, + { id: 4, name: 'city4' }, + ], + ); +}); + +test('select offset and fetch prepared query', async ({ db }) => { + await db.insert(citiesTable).values({ id: 1, name: 'city1' }); + await db.insert(citiesTable).values({ id: 2, name: 'city2' }); + await db.insert(citiesTable).values({ id: 3, name: 'city3' }); + await db.insert(citiesTable).values({ id: 4, name: 'city4' }); + await db.insert(citiesTable).values({ id: 5, name: 'city5' }); + await db.insert(citiesTable).values({ id: 6, name: 'city6' }); + await db.insert(citiesTable).values({ id: 7, name: 'city7' }); + await db.insert(citiesTable).values({ id: 8, name: 'city8' }); + await db.insert(citiesTable).values({ id: 9, name: 'city9' }); + await db.insert(citiesTable).values({ id: 10, name: 'city10' }); + + const query = db.select().from(citiesTable).orderBy(desc(citiesTable.id)).offset(sql.placeholder('offset')).fetch( + sql.placeholder('fetch'), + ); + + const res = await query.execute({ offset: 5, fetch: 2 }); + + expect(res.length).toBe(2); + expect(res).toStrictEqual( + [ + { id: 5, name: 'city5' }, + { id: 4, name: 'city4' }, + ], + ); +}); + +test('insert with output', async ({ db }) => { + const fullOutput = await db.insert(citiesTable).output().values({ id: 1, name: 'city1' }); + const partialOutput = await db.insert(citiesTable).output({ + name: sql`${citiesTable.name} + 'hey'`, + id: citiesTable.id, + }) + .values({ + id: 2, + name: 'city1', + }); + + expect(fullOutput).toStrictEqual( + [ + { id: 1, name: 'city1' }, + ], + ); + + expect(partialOutput).toStrictEqual( + [ + { id: 2, name: 'city1hey' }, + ], + ); +}); + +test('delete with output', async ({ db }) => { + await db.insert(citiesTable).output().values([{ id: 1, name: 'city1' }, { id: 2, name: 'city2' }, { + id: 3, + name: 'city3', + }]); + const partialDeleteOutput = await db.delete(citiesTable).output({ + name: sql`${citiesTable.name} + 'hey'`, + id: citiesTable.id, + }).where(eq(citiesTable.id, 3)); + + expect(partialDeleteOutput).toStrictEqual( + [ + { id: 3, name: 'city3hey' }, + ], + ); + + const fullDeleteOutput = await db.delete(citiesTable).output(); + + expect(fullDeleteOutput).toStrictEqual( + [ + { id: 1, name: 'city1' }, + { id: 2, name: 'city2' }, + ], + ); +}); + +test('update with output', async ({ db }) => { + await db.insert(citiesTable).values([{ id: 1, name: 'city1' }, { id: 2, name: 'city2' }, { + id: 3, + name: 'city3', + }]); + + const updateOutput = await db.update(citiesTable).set({ + name: sql`${citiesTable.name} + 'hey'`, + }).output().where(eq(citiesTable.id, 3)); + + expect(updateOutput).toStrictEqual( + [ + { id: 3, name: 'city3hey' }, + ], + ); +}); + +test('update with output inserted true', async ({ db }) => { + await db.insert(citiesTable).values([{ id: 1, name: 'city1' }, { id: 2, name: 'city2' }, { + id: 3, + name: 'city3', + }]); + + const updateOutput = await db.update(citiesTable).set({ + name: sql`${citiesTable.name} + 'hey'`, + }).output({ inserted: true }).where(eq(citiesTable.id, 3)); + + expect(updateOutput).toStrictEqual( + [ + { inserted: { id: 3, name: 'city3hey' } }, + ], + ); +}); + +test('update with output deleted true', async ({ db }) => { + await db.insert(citiesTable).values([{ id: 1, name: 'city1' }, { id: 2, name: 'city2' }, { + id: 3, + name: 'city3', + }]); + + const updateOutput = await db.update(citiesTable).set({ + name: sql`${citiesTable.name} + 'hey'`, + }).output({ deleted: true }).where(eq(citiesTable.id, 3)); + + expect(updateOutput).toStrictEqual( + [ + { deleted: { id: 3, name: 'city3' } }, + ], + ); +}); + +test('update with output with both true', async ({ db }) => { + await db.insert(citiesTable).values([{ id: 1, name: 'city1' }, { id: 2, name: 'city2' }, { + id: 3, + name: 'city3', + }]); + + const updateOutput = await db.update(citiesTable).set({ + name: sql`${citiesTable.name} + 'hey'`, + }).output({ deleted: true, inserted: true }).where(eq(citiesTable.id, 3)); + + expect(updateOutput).toStrictEqual( + [ + { deleted: { id: 3, name: 'city3' }, inserted: { id: 3, name: 'city3hey' } }, + ], + ); +}); + +test('update with output with partial select', async ({ db }) => { + await db.insert(citiesTable).values([{ id: 1, name: 'city1' }, { id: 2, name: 'city2' }, { + id: 3, + name: 'city3', + }]); + + const updateOutput = await db.update(citiesTable).set({ + name: sql`${citiesTable.name} + 'hey'`, + }).output({ deleted: { id: citiesTable.id }, inserted: { name: citiesTable.name } }).where(eq(citiesTable.id, 3)); + + expect(updateOutput).toStrictEqual( + [ + { deleted: { id: 3 }, inserted: { name: 'city3hey' } }, + ], + ); +}); + +test('nvarchar with json mode', async ({ db }) => { + await db.insert(nvarcharWithJsonTable).values([{ json: { hello: 'world' } }]); + + const res = await db.select().from(nvarcharWithJsonTable); + + expect(res).toStrictEqual( + [ + { id: 1, json: { hello: 'world' } }, + ], + ); +}); + +test('column.as', async ({ db }) => { + const users = mssqlTable('users_column_as', { + id: int('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').references(() => cities.id), + }); + + const cities = mssqlTable('cities_column_as', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + const ucView = mssqlView('cities_users_column_as_view').as((qb) => + qb.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)) + ); + + await db.execute(sql`CREATE TABLE ${cities} ( + [id] INT PRIMARY KEY, + [name] TEXT NOT NULL + );`); + + await db.execute(sql`CREATE TABLE ${users} ( + [id] INT PRIMARY KEY, + [name] TEXT NOT NULL, + [city_id] INT REFERENCES ${cities}([id]) + );`); + + await db.execute( + sql`CREATE VIEW ${ucView} AS SELECT ${users.id} as [user_id], ${cities.id} as [city_id], ${users.name} as [user_name], ${cities.name} as [city_name] FROM ${users} LEFT JOIN ${cities} ON ${ + eq(cities.id, users.cityId) + };`, + ); + + const citiesInsRet = await db.insert(cities).output({ + cityId: cities.id.as('city_id'), + cityName: cities.name.as('city_name'), + }).values([{ + id: 1, + name: 'Firstistan', + }, { + id: 2, + name: 'Secondaria', + }]); + + expect(citiesInsRet).toStrictEqual(expect.arrayContaining([{ + cityId: 1, + cityName: 'Firstistan', + }, { + cityId: 2, + cityName: 'Secondaria', + }])); + + const usersInsRet = await db.insert(users).output({ + userId: users.id.as('user_id'), + userName: users.name.as('users_name'), + userCityId: users.cityId, + }).values([{ id: 1, name: 'First', cityId: 1 }, { + id: 2, + name: 'Second', + cityId: 2, + }, { + id: 3, + name: 'Third', + }]); + + expect(usersInsRet).toStrictEqual(expect.arrayContaining([{ userId: 1, userName: 'First', userCityId: 1 }, { + userId: 2, + userName: 'Second', + userCityId: 2, + }, { + userId: 3, + userName: 'Third', + userCityId: null, + }])); + + const joinSelectReturn = await db.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)); + + expect(joinSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewSelectReturn = await db.select().from(ucView); + + expect(viewSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewJoinReturn = await db.select({ + userId: ucView.userId.as('user_id_ucv'), + cityId: cities.id.as('city_id'), + userName: ucView.userName.as('user_name_ucv'), + cityName: cities.name.as('city_name'), + }).from(ucView).leftJoin(cities, eq(cities.id, ucView.cityId)); + + expect(viewJoinReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); +}); diff --git a/integration-tests/tests/mssql/replicas.test.ts b/integration-tests/tests/mssql/replicas.test.ts new file mode 100644 index 0000000000..09e81c8800 --- /dev/null +++ b/integration-tests/tests/mssql/replicas.test.ts @@ -0,0 +1,827 @@ +import { sql } from 'drizzle-orm'; +import { bit, datetime2, int, mssqlTable, text, varchar, withReplicas } from 'drizzle-orm/mssql-core'; +import { drizzle } from 'drizzle-orm/node-mssql'; +import { describe, expect, it, vi } from 'vitest'; + +const usersTable = mssqlTable('users', { + id: int('id' as string).primaryKey(), + name: text('name').notNull(), + verified: bit('verified').notNull().default(false), + jsonb: varchar('jsonb').$type(), + createdAt: datetime2('created_at').notNull().defaultGetDate(), +}); + +const users = mssqlTable('users', { + id: int('id' as string).primaryKey(), +}); + +describe('[select] read replicas postgres', () => { + it('primary select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query = db.$primary.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(query.toSQL().sql).toEqual('select [id] from [users]'); + + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); + + it('random replica select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query1 = db.select({ count: sql`count(*)`.as('count') }).from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(query1.toSQL().sql).toEqual('select count(*) as [count] from [users]'); + + const query2 = db.select().from(users); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select [id] from [users]'); + }); + + it('single read replica select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + + const query1 = db.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select [id] from [users]'); + + const query2 = db.select().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(query2.toSQL().sql).toEqual('select [id] from [users]'); + }); + + it('single read replica select + primary select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + + const query1 = db.select({ id: users.id }).from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select [id] from [users]'); + + const query2 = db.$primary.select().from(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select [id] from [users]'); + }); + + it('always first read select', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'select'); + const spyRead1 = vi.spyOn(read1, 'select'); + const spyRead2 = vi.spyOn(read2, 'select'); + + const query1 = db.select().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select [id] from [users]'); + + const query2 = db.select().from(users); + + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('select [id] from [users]'); + }); +}); + +describe('[selectDistinct] read replicas postgres', () => { + it('primary selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query = db.$primary.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query.toSQL().sql).toEqual('select distinct [id] from [users]'); + }); + + it('random replica selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select distinct [id] from [users]'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select distinct [id] from [users]'); + }); + + it('single read replica selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select distinct [id] from [users]'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(query2.toSQL().sql).toEqual('select distinct [id] from [users]'); + }); + + it('single read replica selectDistinct + primary selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query1.toSQL().sql).toEqual('select distinct [id] from [users]'); + + const query2 = db.$primary.selectDistinct().from(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual('select distinct [id] from [users]'); + }); + + it('always first read selectDistinct', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); + const spyRead1 = vi.spyOn(read1, 'selectDistinct'); + const spyRead2 = vi.spyOn(read2, 'selectDistinct'); + + const query1 = db.selectDistinct().from(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('select distinct [id] from [users]'); + + const query2 = db.selectDistinct().from(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('select distinct [id] from [users]'); + }); +}); + +describe('[with] read replicas postgres', () => { + it('primary with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + const obj1 = {} as any; + const obj2 = {} as any; + const obj3 = {} as any; + const obj4 = {} as any; + + db.$primary.with(obj1, obj2, obj3, obj4); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(obj1, obj2, obj3, obj4); + }); + + it('random replica with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.with(); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + }); + + it('single read replica with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.with(); + expect(spyRead1).toHaveBeenCalledTimes(2); + }); + + it('single read replica with + primary with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + + db.with(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary.with(); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read with', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, 'with'); + const spyRead1 = vi.spyOn(read1, 'with'); + const spyRead2 = vi.spyOn(read2, 'with'); + const obj1 = {} as any; + const obj2 = {} as any; + const obj3 = {} as any; + + db.with(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(obj1); + + db.with(obj2, obj3); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(obj2, obj3); + }); +}); + +describe('[update] replicas postgres', () => { + it('primary update', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'update'); + const spyRead1 = vi.spyOn(read1, 'update'); + const spyRead2 = vi.spyOn(read2, 'update'); + + const query1 = db.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual('update [users] set [id] = @par0'); + + const query2 = db.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query2.toSQL().sql).toEqual('update [users] set [id] = @par0'); + + const query3 = db.$primary.update(users).set({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query3.toSQL().sql).toEqual('update [users] set [id] = @par0'); + }); +}); + +describe('[delete] replicas postgres', () => { + it('primary delete', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'delete'); + const spyRead1 = vi.spyOn(read1, 'delete'); + const spyRead2 = vi.spyOn(read2, 'delete'); + + const query1 = db.delete(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(users); + expect(query1.toSQL().sql).toEqual('delete from [users]'); + + const query2 = db.delete(users); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, users); + expect(query2.toSQL().sql).toEqual('delete from [users]'); + + db.$primary.delete({} as any); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[insert] replicas postgres', () => { + it('primary insert', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'insert'); + const spyRead1 = vi.spyOn(read1, 'insert'); + const spyRead2 = vi.spyOn(read2, 'insert'); + + const query = db.insert(users).values({ id: 1 }); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(users); + expect(query.toSQL().sql).toEqual('insert into [users] ([id]) values (@par0)'); + + db.insert(users); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, users); + + db.$primary.insert({} as any); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[execute] replicas postgres', () => { + it('primary execute', async () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'execute'); + const spyRead1 = vi.spyOn(read1, 'execute'); + const spyRead2 = vi.spyOn(read2, 'execute'); + + expect(db.execute(sql``)).rejects.toThrow(); + + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(db.execute(sql``)).rejects.toThrow(); + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + + expect(db.execute(sql``)).rejects.toThrow(); + // try { + // db.execute(sql``); + // } catch { /* empty */ } + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[transaction] replicas postgres', () => { + it('primary transaction', async () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, 'transaction'); + const spyRead1 = vi.spyOn(read1, 'transaction'); + const spyRead2 = vi.spyOn(read2, 'transaction'); + const txFn1 = async (tx: any) => { + tx.select().from({} as any); + }; + + expect(db.transaction(txFn1)).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(txFn1); + + const txFn2 = async (tx: any) => { + tx.select().from({} as any); + }; + + expect(db.transaction(txFn2)).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenNthCalledWith(2, txFn2); + + expect(db.transaction(async (tx) => { + tx.select().from({} as any); + })).rejects.toThrow(); + + expect(spyPrimary).toHaveBeenCalledTimes(3); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[findFirst] read replicas postgres', () => { + it('primary findFirst', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findFirst'); + const spyRead2 = vi.spyOn(read2['_query']['usersTable'], 'findFirst'); + const obj = {} as any; + + db.$primary._query.usersTable.findFirst(obj); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(obj); + }); + + it('random replica findFirst', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findFirst'); + const spyRead2 = vi.spyOn(read2['_query']['usersTable'], 'findFirst'); + const par1 = {} as any; + + db._query.usersTable.findFirst(par1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(par1); + + const query = db._query.usersTable.findFirst(); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query.toSQL().sql).toEqual( + 'select top(@par0) [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + }); + + it('single read replica findFirst', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findFirst'); + + db._query.usersTable.findFirst(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db._query.usersTable.findFirst(); + expect(spyRead1).toHaveBeenCalledTimes(2); + }); + + it('single read replica findFirst + primary findFirst', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findFirst'); + + db._query.usersTable.findFirst(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary._query.usersTable.findFirst(); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read findFirst', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findFirst'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findFirst'); + const spyRead2 = vi.spyOn(read2['_query']['usersTable'], 'findFirst'); + + db._query.usersTable.findFirst(); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db._query.usersTable.findFirst(); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); + +describe('[findMany] read replicas postgres', () => { + it('primary findMany', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findMany'); + const spyRead2 = vi.spyOn(read2['_query']['usersTable'], 'findMany'); + const obj = {} as any; + + const query = db.$primary._query.usersTable.findMany(obj); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyPrimary).toHaveBeenCalledWith(obj); + expect(query.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + }); + + it('random replica findMany', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findMany'); + const spyRead2 = vi.spyOn(read2['_query']['usersTable'], 'findMany'); + const obj1 = {} as any; + const obj2 = {} as any; + + const query1 = db._query.usersTable.findMany(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(query1.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + expect(spyRead1).toHaveBeenCalledWith(obj1); + + const query2 = db._query.usersTable.findMany(obj2); + + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + expect(query2.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + expect(spyRead2).toHaveBeenCalledWith(obj2); + }); + + it('single read replica findMany', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findMany'); + const obj1 = {} as any; + const obj2 = {} as any; + + const query1 = db._query.usersTable.findMany(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledWith(obj1); + expect(query1.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + + const query2 = db._query.usersTable.findMany(obj2); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); + expect(query2.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + }); + + it('single read replica findMany + primary findMany', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findMany'); + const obj1 = {} as any; + const obj2 = {} as any; + + const query1 = db._query.usersTable.findMany(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledWith(obj1); + expect(query1.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + + const query2 = db.$primary._query.usersTable.findMany(obj2); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyPrimary).toHaveBeenNthCalledWith(1, obj2); + expect(query2.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + }); + + it('always first read findMany', () => { + const primaryDb = drizzle.mock({ schema: { usersTable } }); + const read1 = drizzle.mock({ schema: { usersTable } }); + const read2 = drizzle.mock({ schema: { usersTable } }); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb['_query']['usersTable'], 'findMany'); + const spyRead1 = vi.spyOn(read1['_query']['usersTable'], 'findMany'); + const spyRead2 = vi.spyOn(read2['_query']['usersTable'], 'findMany'); + const obj1 = {} as any; + const obj2 = {} as any; + + const query1 = db._query.usersTable.findMany(obj1); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledWith(obj1); + expect(query1.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + + const query2 = db._query.usersTable.findMany(obj2); + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); + expect(query2.toSQL().sql).toEqual( + 'select [id], [name], [verified], [jsonb], [created_at] from [users] [usersTable]', + ); + }); +}); diff --git a/integration-tests/tests/mssql/schema.ts b/integration-tests/tests/mssql/schema.ts new file mode 100644 index 0000000000..b41a337395 --- /dev/null +++ b/integration-tests/tests/mssql/schema.ts @@ -0,0 +1,263 @@ +import { sql } from 'drizzle-orm'; +import { + bigint, + binary, + bit, + char, + date, + datetime, + datetime2, + datetimeoffset, + decimal, + float, + int, + mssqlSchema, + mssqlTable, + nchar, + ntext, + numeric, + nvarchar, + real, + smallint, + text, + time, + tinyint, + uniqueIndex, + varbinary, + varchar, +} from 'drizzle-orm/mssql-core'; + +export const usersTable = mssqlTable('userstest', { + id: int('id').identity().primaryKey(), + name: varchar('name', { mode: 'text' }).notNull(), + verified: bit('verified').notNull().default(false), + jsonb: nvarchar('jsonb', { length: 300, mode: 'json' }).$type(), + createdAt: datetime('created_at').notNull().default(sql`CURRENT_TIMESTAMP`), +}); + +export const nvarcharWithJsonTable = mssqlTable('nvarchar_with_json', { + id: int('id').identity().primaryKey(), + json: nvarchar({ mode: 'json', length: 'max' }), +}); + +export const users2Table = mssqlTable('users2', { + id: int('id').primaryKey(), + name: varchar('name', { length: 30 }).notNull(), + cityId: int('city_id').default(sql`null`).references(() => citiesTable.id), +}); + +export const citiesTable = mssqlTable('cities', { + id: int().primaryKey(), + name: varchar({ length: 30 }).notNull(), +}); + +export const usersOnUpdate = mssqlTable('users_on_update', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: datetime('updated_at', { mode: 'date' }).$onUpdate(() => new Date()), + // uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper([name])`), + alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value +}); + +export const datesTable = mssqlTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { precision: 1 }), + timeAsString: time('time_as_string', { mode: 'string', precision: 1 }), + datetime: datetime('datetime'), + datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), +}); + +export const coursesTable = mssqlTable('courses', { + id: int().identity().primaryKey(), + name: text().notNull(), + categoryId: int('category_id').references(() => courseCategoriesTable.id), +}); + +export const courseCategoriesTable = mssqlTable('course_categories', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), +}); + +export const orders = mssqlTable('orders', { + id: int('id').primaryKey(), + region: varchar('region', { length: 50 }).notNull(), + product: varchar('product', { length: 50 }).notNull().$default(() => 'random_string'), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), +}); + +export const usersMigratorTable = mssqlTable('users12', { + id: int('id').identity().primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}, (table) => [ + uniqueIndex('').on(table.name), +]); + +// To test aggregate functions +export const aggregateTable = mssqlTable('aggregate_table', { + id: int('id').identity().notNull(), + name: varchar('name', { length: 30 }).notNull(), + a: int('a'), + b: int('b'), + c: int('c'), + nullOnly: int('null_only'), +}); + +export const mySchema = mssqlSchema('mySchema'); + +export const usersSchemaTable = mySchema.table('userstest', { + id: int('id').identity().primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + verified: bit('verified').notNull().default(false), + jsonb: nvarchar('jsonb', { mode: 'json', length: 100 }).$type(), + createdAt: datetime2('created_at', { precision: 2 }).notNull().defaultGetDate(), +}); + +export const users2SchemaTable = mySchema.table('users2', { + id: int('id').identity().primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + cityId: int('city_id').references(() => citiesTable.id), +}); + +export const citiesSchemaTable = mySchema.table('cities', { + id: int('id').identity().primaryKey(), + name: varchar('name', { length: 100 }).notNull(), +}); + +export const tableWithEnums = mySchema.table('enums_test_case', { + id: int('id').primaryKey(), + enum1: varchar('enum1', { enum: ['a', 'b', 'c'] }).notNull(), + enum2: varchar('enum2', { enum: ['a', 'b', 'c'] }).default('a'), + enum3: varchar('enum3', { enum: ['a', 'b', 'c'] }).notNull().default('b'), +}); + +export const employees = mssqlTable('employees', { + employeeId: int().identity({ increment: 1, seed: 1 }).primaryKey(), + name: nvarchar({ length: 100 }), + departmentId: int(), +}); + +export const departments = mssqlTable('departments', { + departmentId: int().primaryKey().identity({ increment: 1, seed: 1 }), + departmentName: nvarchar({ length: 100 }), +}); + +export const allPossibleColumns = mssqlTable('all_possible_columns', { + bigintBigint: bigint({ mode: 'bigint' }), + bigintString: bigint({ mode: 'string' }), + bigintNumber: bigint({ mode: 'number' }), + bigintBigintDefault: bigint({ mode: 'bigint' }).default(BigInt(123)), + bigintStringDefault: bigint({ mode: 'string' }).default('123'), + bigintNumberDefault: bigint({ mode: 'number' }).default(123), + binary: binary(), + binaryLength: binary({ length: 1 }), + binaryDefault: binary().default(Buffer.from([0x01])), + + bit: bit(), + bitDefault: bit().default(false), + + char: char(), + charWithConfig: char({ enum: ['123', '342'], length: 3 }), + charDefault: char().default('4'), + + nchar: nchar(), + ncharWithEnum: nchar({ enum: ['hello, world'], length: 12 }), + ncharLength: nchar({ length: 231 }), + ncharDefault: nchar().default('h'), + + date: date(), + dateModeDate: date({ mode: 'date' }), + dateModeString: date({ mode: 'string' }), + dateDefault: date().default(new Date('2025-04-17')), + dateModeStringDefault: date({ mode: 'string' }).default('2025-04-17'), + + dateTime: datetime(), + dateTimeModeDate: datetime({ mode: 'date' }), + dateTimeModeString: datetime({ mode: 'string' }), + dateTimeDefault: datetime().default(new Date('2025-04-17 13:54:28.227')), + dateTimeModeStringDefault: datetime({ mode: 'string' }).default(new Date('2025-04-17 13:54:28.227').toISOString()), + + dateTime2: datetime2(), + dateTime2ModeDate: datetime2({ mode: 'date' }), + dateTime2ModeString: datetime2({ mode: 'string' }), + dateTime2WithPrecision: datetime2({ precision: 5 }), + dateTime2Default: datetime2().default(new Date('2025-04-17 13:55:07.530')), + dateTime2ModeStringDefault: datetime2({ mode: 'string' }).default( + '2025-04-17 13:55:07.5300000', + ), + dateTime2ModeStringWithPrecisionDefault: datetime2({ mode: 'string', precision: 1 }).default( + '2025-04-17 13:55:07.5300000', + ), + + datetimeOffset: datetimeoffset(), + datetimeOffsetModeDate: datetimeoffset({ mode: 'date' }), + datetimeOffsetModeString: datetimeoffset({ mode: 'string' }), + datetimeOffsetDefault: datetimeoffset().default(new Date('2025-04-18 11:47:41.000+3:00')), + datetimeOffsetModeStringDefault: datetimeoffset({ mode: 'string' }).default('2025-04-18 11:47:41.000+3:00'), + datetimeOffsetModeStringWithPrecisionDefault: datetimeoffset({ mode: 'string', precision: 1 }).default( + '2025-04-18 11:47:41.000+3:00', + ), + + decimal: decimal(), + decimalWithPrecision: decimal({ precision: 3 }), + decimalWithConfig: decimal({ precision: 10, scale: 8 }), + decimalDefaultString: decimal().default('1.312'), + decimalDefaultNumber: decimal({ mode: 'number' }).default(1.3), + + float: float(), + floatWithPrecision: float({ precision: 3 }), + floatDefault: float().default(32.412), + + int: int(), + intDefault: int().default(43), + + numeric: numeric(), + numericWithPrecision: numeric({ precision: 3 }), + numericWithConfig: numeric({ precision: 10, scale: 8 }), + numericDefault: numeric().default('1.312'), + numericDefaultNumber: numeric({ mode: 'number' }).default(1.312), + + real: real(), + realDefault: real().default(5231.4123), + + text: text(), + textEnum: text({ enum: ['only', 'this', 'values'] }), + textDefault: text().default('hello, world'), + + nText: ntext(), + nTextEnum: ntext({ enum: ['only', 'this', 'values'] }), + nTextDefault: ntext().default('hello, world'), + + time: time(), + timeModeDate: time({ mode: 'date' }), + timeModeString: time({ mode: 'string' }), + timeWithPrecision: time({ precision: 3 }), + timeDefault: time().default(new Date('2025-10-10 14:17:56.470')), + timeModeDateDefault: time({ mode: 'date' }).default(new Date('2025-10-10 14:17:56.470')), + timeModeStringDefault: time({ mode: 'string' }).default('14:17:56.470'), + + smallint: smallint(), + smallintDefault: smallint().default(331), + + tinyint: tinyint(), + tinyintDefault: tinyint().default(23), + + varbinary: varbinary(), + varbinaryWithLength: varbinary({ length: 100 }), + varbinaryDefault: varbinary().default(Buffer.from([0x01])), + + varchar: varchar(), + varcharWithEnum: varchar({ enum: ['123', '312'], length: 3 }), + varcharWithLength: varchar({ length: 3 }), + varcharDefault: varchar().default('hello, world'), + varcharWithEnumDefault: varchar({ enum: ['1', '2'] }).default('1'), + + nvarchar: nvarchar(), + nvarcharWithEnum: nvarchar({ enum: ['hello, world'], length: 12 }), + nvarcharLength: nvarchar({ length: 231 }), + nvarcharDefault: nvarchar().default('h'), + nvarcharJson: nvarchar({ mode: 'json', length: 'max' }), +}); diff --git a/integration-tests/tests/mssql/schemaPrefixed.ts b/integration-tests/tests/mssql/schemaPrefixed.ts new file mode 100644 index 0000000000..5b502e74e1 --- /dev/null +++ b/integration-tests/tests/mssql/schemaPrefixed.ts @@ -0,0 +1,24 @@ +import { sql } from 'drizzle-orm'; +import { bit, datetime, int, mssqlTableCreator, nvarchar, varchar } from 'drizzle-orm/mssql-core'; + +const tablePrefix = 'drizzle_tests_'; +const mssqlTable = mssqlTableCreator((name) => `${tablePrefix}${name}`); + +export const usersTable = mssqlTable('userstest', { + id: int('id').identity().primaryKey(), + name: varchar('name', { length: 30 }).notNull(), + verified: bit('verified').notNull().default(false), + jsonb: nvarchar('jsonb', { length: 300, mode: 'json' }).$type(), + createdAt: datetime('created_at').notNull().default(sql`CURRENT_TIMESTAMP`), +}); + +export const users2Table = mssqlTable('users2', { + id: int('id').primaryKey(), + name: varchar('name', { length: 30 }).notNull(), + cityId: int('city_id').default(sql`null`).references(() => citiesTable.id), +}); + +export const citiesTable = mssqlTable('cities', { + id: int('id').primaryKey(), + name: varchar('name', { length: 30 }).notNull(), +}); diff --git a/integration-tests/tests/mysql/mysql-custom.test.ts b/integration-tests/tests/mysql/default/mysql-custom.test.ts similarity index 80% rename from integration-tests/tests/mysql/mysql-custom.test.ts rename to integration-tests/tests/mysql/default/mysql-custom.test.ts index 81cab6ce3f..d6806ac60d 100644 --- a/integration-tests/tests/mysql/mysql-custom.test.ts +++ b/integration-tests/tests/mysql/default/mysql-custom.test.ts @@ -1,5 +1,3 @@ -import retry from 'async-retry'; -import type Docker from 'dockerode'; import { asc, eq, Name, sql } from 'drizzle-orm'; import { alias, @@ -16,58 +14,11 @@ import { varchar, year, } from 'drizzle-orm/mysql-core'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; import { migrate } from 'drizzle-orm/mysql2/migrator'; -import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { expect } from 'vitest'; import { toLocalDate } from '~/utils'; -import { createDockerDB } from './mysql-common'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -let db: MySql2Database; -let client: mysql.Connection; -let container: Docker.Container | undefined; - -beforeAll(async () => { - let connectionString; - if (process.env['MYSQL_CONNECTION_STRING']) { - connectionString = process.env['MYSQL_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } - client = await retry(async () => { - client = await mysql.createConnection(connectionString); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - await client?.end(); - await container?.stop().catch(console.error); -}); - -beforeEach((ctx) => { - ctx.mysql = { - db, - }; -}); +import { mysqlTest as test } from '../instrumentation'; const customSerial = customType<{ data: number; notNull: true; default: true }>({ dataType() { @@ -160,7 +111,7 @@ const usersMigratorTable = mysqlTable('users12', { email: text('email').notNull(), }); -beforeEach(async () => { +test.beforeEach(async ({ db }) => { await db.execute(sql`drop table if exists \`userstest\``); await db.execute(sql`drop table if exists \`datestable\``); await db.execute(sql`drop table if exists \`test_table\``); @@ -201,9 +152,7 @@ beforeEach(async () => { ); }); -test('select all fields', async (ctx) => { - const { db } = ctx.mysql; - +test('select all fields', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); @@ -213,9 +162,7 @@ test('select all fields', async (ctx) => { expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test('select sql', async (ctx) => { - const { db } = ctx.mysql; - +test('select sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, @@ -224,9 +171,7 @@ test('select sql', async (ctx) => { expect(users).toEqual([{ name: 'JOHN' }]); }); -test('select typed sql', async (ctx) => { - const { db } = ctx.mysql; - +test('select typed sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, @@ -235,35 +180,27 @@ test('select typed sql', async (ctx) => { expect(users).toEqual([{ name: 'JOHN' }]); }); -test('insert returning sql', async (ctx) => { - const { db } = ctx.mysql; - +test('insert returning sql', async ({ db }) => { const [result, _] = await db.insert(usersTable).values({ name: 'John' }); expect(result.insertId).toBe(1); }); -test('delete returning sql', async (ctx) => { - const { db } = ctx.mysql; - +test('delete returning sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); -test('update returning sql', async (ctx) => { - const { db } = ctx.mysql; - +test('update returning sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); expect(users[0].changedRows).toBe(1); }); -test('update with returning all fields', async (ctx) => { - const { db } = ctx.mysql; - +test('update with returning all fields', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); @@ -277,9 +214,7 @@ test('update with returning all fields', async (ctx) => { expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); -test('update with returning partial', async (ctx) => { - const { db } = ctx.mysql; - +test('update with returning partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); @@ -292,27 +227,21 @@ test('update with returning partial', async (ctx) => { expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); -test('delete with returning all fields', async (ctx) => { - const { db } = ctx.mysql; - +test('delete with returning all fields', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); -test('delete with returning partial', async (ctx) => { - const { db } = ctx.mysql; - +test('delete with returning partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); -test('insert + select', async (ctx) => { - const { db } = ctx.mysql; - +test('insert + select', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); @@ -325,9 +254,7 @@ test('insert + select', async (ctx) => { ]); }); -test('json insert', async (ctx) => { - const { db } = ctx.mysql; - +test('json insert', async ({ db }) => { await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, @@ -338,18 +265,14 @@ test('json insert', async (ctx) => { expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); -test('insert with overridden default values', async (ctx) => { - const { db } = ctx.mysql; - +test('insert with overridden default values', async ({ db }) => { await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test('insert many', async (ctx) => { - const { db } = ctx.mysql; - +test('insert many', async ({ db }) => { await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -371,9 +294,7 @@ test('insert many', async (ctx) => { ]); }); -test('insert many with returning', async (ctx) => { - const { db } = ctx.mysql; - +test('insert many with returning', async ({ db }) => { const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -384,9 +305,7 @@ test('insert many with returning', async (ctx) => { expect(result[0].affectedRows).toBe(4); }); -test('select with group by as field', async (ctx) => { - const { db } = ctx.mysql; - +test('select with group by as field', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -395,9 +314,7 @@ test('select with group by as field', async (ctx) => { expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); -test('select with group by as sql', async (ctx) => { - const { db } = ctx.mysql; - +test('select with group by as sql', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -406,9 +323,7 @@ test('select with group by as sql', async (ctx) => { expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); -test('select with group by as sql + column', async (ctx) => { - const { db } = ctx.mysql; - +test('select with group by as sql + column', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -417,9 +332,7 @@ test('select with group by as sql + column', async (ctx) => { expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); -test('select with group by as column + sql', async (ctx) => { - const { db } = ctx.mysql; - +test('select with group by as column + sql', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -428,9 +341,7 @@ test('select with group by as column + sql', async (ctx) => { expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); -test('select with group by complex query', async (ctx) => { - const { db } = ctx.mysql; - +test('select with group by complex query', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -441,9 +352,7 @@ test('select with group by complex query', async (ctx) => { expect(result).toEqual([{ name: 'Jane' }]); }); -test('build query', async (ctx) => { - const { db } = ctx.mysql; - +test('build query', async ({ db }) => { const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); @@ -454,9 +363,7 @@ test('build query', async (ctx) => { }); }); -test('build query insert with onDuplicate', async (ctx) => { - const { db } = ctx.mysql; - +test('build query insert with onDuplicate', async ({ db }) => { const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }) @@ -469,9 +376,7 @@ test('build query insert with onDuplicate', async (ctx) => { }); }); -test('insert with onDuplicate', async (ctx) => { - const { db } = ctx.mysql; - +test('insert with onDuplicate', async ({ db }) => { await db.insert(usersTable) .values({ name: 'John' }); @@ -486,9 +391,7 @@ test('insert with onDuplicate', async (ctx) => { expect(res).toEqual([{ id: 1, name: 'John1' }]); }); -test('insert conflict', async (ctx) => { - const { db } = ctx.mysql; - +test('insert conflict', async ({ db }) => { await db.insert(usersTable) .values({ name: 'John' }); @@ -497,9 +400,7 @@ test('insert conflict', async (ctx) => { })()).resolves.not.toThrowError(); }); -test('insert conflict with ignore', async (ctx) => { - const { db } = ctx.mysql; - +test('insert conflict with ignore', async ({ db }) => { await db.insert(usersTable) .values({ name: 'John' }); @@ -514,16 +415,13 @@ test('insert conflict with ignore', async (ctx) => { expect(res).toEqual([{ id: 1, name: 'John' }]); }); -test('insert sql', async (ctx) => { - const { db } = ctx.mysql; - +test('insert sql', async ({ db }) => { await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('partial join with alias', async (ctx) => { - const { db } = ctx.mysql; +test('partial join with alias', async ({ db }) => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); @@ -547,9 +445,7 @@ test('partial join with alias', async (ctx) => { }]); }); -test('full join with alias', async (ctx) => { - const { db } = ctx.mysql; - +test('full join with alias', async ({ db }) => { const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { @@ -582,9 +478,7 @@ test('full join with alias', async (ctx) => { await db.execute(sql`drop table ${users}`); }); -test('select from alias', async (ctx) => { - const { db } = ctx.mysql; - +test('select from alias', async ({ db }) => { const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { @@ -619,18 +513,14 @@ test('select from alias', async (ctx) => { await db.execute(sql`drop table ${users}`); }); -test('insert with spaces', async (ctx) => { - const { db } = ctx.mysql; - +test('insert with spaces', async ({ db }) => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); -test('prepared statement', async (ctx) => { - const { db } = ctx.mysql; - +test('prepared statement', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ id: usersTable.id, @@ -642,9 +532,7 @@ test('prepared statement', async (ctx) => { expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('prepared statement reuse', async (ctx) => { - const { db } = ctx.mysql; - +test('prepared statement reuse', async ({ db }) => { const stmt = db.insert(usersTable).values({ verified: true, name: sql.placeholder('name'), @@ -674,9 +562,7 @@ test('prepared statement reuse', async (ctx) => { ]); }); -test('prepared statement with placeholder in .where', async (ctx) => { - const { db } = ctx.mysql; - +test('prepared statement with placeholder in .where', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: usersTable.id, @@ -689,9 +575,7 @@ test('prepared statement with placeholder in .where', async (ctx) => { expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('migrator', async (ctx) => { - const { db } = ctx.mysql; - +test('migrator', async ({ db }) => { await db.execute(sql`drop table if exists cities_migration`); await db.execute(sql`drop table if exists users_migration`); await db.execute(sql`drop table if exists users12`); @@ -711,27 +595,21 @@ test('migrator', async (ctx) => { await db.execute(sql`drop table __drizzle_migrations`); }); -test('insert via db.execute + select via db.execute', async (ctx) => { - const { db } = ctx.mysql; - +test('insert via db.execute + select via db.execute', async ({ db }) => { await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); -test('insert via db.execute w/ query builder', async (ctx) => { - const { db } = ctx.mysql; - +test('insert via db.execute w/ query builder', async ({ db }) => { const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); expect(inserted[0].affectedRows).toBe(1); }); -test('insert + select all possible dates', async (ctx) => { - const { db } = ctx.mysql; - +test('insert + select all possible dates', async ({ db }) => { const date = new Date('2022-11-11'); await db.insert(datesTable).values({ @@ -767,9 +645,7 @@ const tableWithEnums = mysqlTable('enums_test_case', { enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); -test('Mysql enum test case #1', async (ctx) => { - const { db } = ctx.mysql; - +test('Mysql enum test case #1', async ({ db }) => { await db.execute(sql`drop table if exists \`enums_test_case\``); await db.execute(sql` @@ -798,9 +674,7 @@ test('Mysql enum test case #1', async (ctx) => { ]); }); -test('custom binary', async (ctx) => { - const { db } = ctx.mysql; - +test('custom binary', async ({ db }) => { const id = uuid().replace(/-/g, ''); await db.insert(testTable).values({ id, diff --git a/integration-tests/tests/mysql/default/mysql-prefixed.test.ts b/integration-tests/tests/mysql/default/mysql-prefixed.test.ts new file mode 100644 index 0000000000..5fb1080d9c --- /dev/null +++ b/integration-tests/tests/mysql/default/mysql-prefixed.test.ts @@ -0,0 +1,1759 @@ +import type { Equal } from 'drizzle-orm'; +import { asc, eq, getTableName, gt, inArray, Name, sql, TransactionRollbackError } from 'drizzle-orm'; +import { + alias, + bigint, + boolean, + date, + datetime, + getViewConfig, + int, + json, + mysqlEnum, + mysqlTable as mysqlTableRaw, + mysqlTableCreator, + mysqlView, + serial, + text, + time, + timestamp, + uniqueIndex, + varchar, + year, +} from 'drizzle-orm/mysql-core'; +import { migrate } from 'drizzle-orm/mysql2/migrator'; +import { expect } from 'vitest'; +import { Expect, toLocalDate } from '~/utils'; +import { mysqlTest as test } from '../instrumentation'; + +const tablePrefix = 'drizzle_tests_'; + +const mysqlTable = mysqlTableCreator((name) => `${tablePrefix}${name}`); + +test.concurrent('select all fields', async ({ db, push }) => { + const users = mysqlTable('users_1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const result = await db.select().from(users); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test.concurrent('select sql', async ({ db, push }) => { + const users = mysqlTable('users_sql', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const result = await db.select({ + name: sql`upper(${users.name})`, + }).from(users); + + expect(result).toEqual([{ name: 'JOHN' }]); +}); + +test.concurrent('select typed sql', async ({ db, push }) => { + const users = mysqlTable('users_typed_sql', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const result = await db.select({ + name: sql`upper(${users.name})`, + }).from(users); + + expect(result).toEqual([{ name: 'JOHN' }]); +}); + +test.concurrent('select distinct', async ({ db, push }) => { + const usersDistinctTable = mysqlTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await push({ usersDistinctTable }); + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const result = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + expect(result).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); +}); + +test.concurrent('insert returning sql', async ({ db, push }) => { + const users = mysqlTable('users_insert_returning', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + const [result, _] = await db.insert(users).values({ name: 'John' }); + + expect(result.insertId).toBe(1); +}); + +test.concurrent('delete returning sql', async ({ db, push }) => { + const users = mysqlTable('users_delete_returning', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const result = await db.delete(users).where(eq(users.name, 'John')); + + expect(result[0].affectedRows).toBe(1); +}); + +test.concurrent('update returning sql', async ({ db, push }) => { + const users = mysqlTable('users_update_returning', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const result = await db.update(users).set({ name: 'Jane' }).where(eq(users.name, 'John')); + + expect(result[0].changedRows).toBe(1); +}); + +test.concurrent('update with returning all fields', async ({ db, push }) => { + const users = mysqlTable('users_update_all_fields', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const updatedUsers = await db.update(users).set({ name: 'Jane' }).where(eq(users.name, 'John')); + + const result = await db.select().from(users).where(eq(users.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test.concurrent('update with returning partial', async ({ db, push }) => { + const users = mysqlTable('users_update_partial', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const updatedUsers = await db.update(users).set({ name: 'Jane' }).where(eq(users.name, 'John')); + + const result = await db.select({ id: users.id, name: users.name }).from(users).where( + eq(users.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(result).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test.concurrent('delete with returning all fields', async ({ db, push }) => { + const users = mysqlTable('users_delete_all_fields', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const deletedUser = await db.delete(users).where(eq(users.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test.concurrent('delete with returning partial', async ({ db, push }) => { + const users = mysqlTable('users_delete_partial', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const deletedUser = await db.delete(users).where(eq(users.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); +}); + +test.concurrent('insert + select', async ({ db, push }) => { + const users = mysqlTable('users_insert_select_249', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const result = await db.select().from(users); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(users).values({ name: 'Jane' }); + const result2 = await db.select().from(users); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test.concurrent('json insert', async ({ db, push }) => { + const users = mysqlTable('users_json_insert_262', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: users.id, + name: users.name, + jsonb: users.jsonb, + }).from(users); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test.concurrent('insert with overridden default values', async ({ db, push }) => { + const users = mysqlTable('users_override_defaults_273', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John', verified: true }); + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test.concurrent('insert many', async ({ db, push }) => { + const users = mysqlTable('users_insert_many_307', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: users.id, + name: users.name, + jsonb: users.jsonb, + verified: users.verified, + }).from(users); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test.concurrent('insert many with returning', async ({ db, push }) => { + const users = mysqlTable('users_insert_many_returning_329', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + const result = await db.insert(users).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); +}); + +test.concurrent('select with group by as field', async ({ db, push }) => { + const users = mysqlTable('users_group_by_field_249', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(users.name); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test.concurrent('select with group by as sql', async ({ db, push }) => { + const users = mysqlTable('users_group_by_sql_250', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(sql`${users.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); +}); + +test.concurrent('select with group by as sql + column', async ({ db, push }) => { + const users = mysqlTable('users_group_by_sql_col_251', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(sql`${users.name}`, users.id); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test.concurrent('select with group by as column + sql', async ({ db, push }) => { + const users = mysqlTable('users_group_by_col_sql_252', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(users.id, sql`${users.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +}); + +test.concurrent('select with group by complex query', async ({ db, push }) => { + const users = mysqlTable('users_group_by_complex_253', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(users.id, sql`${users.name}`) + .orderBy(asc(users.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test.concurrent('build query', async ({ db, push }) => { + const users = mysqlTable('users_build_query_254', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + const query = db.select({ id: users.id, name: users.name }).from(users) + .groupBy(users.id, users.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`${getTableName(users)}\` group by \`${getTableName(users)}\`.\`id\`, \`${ + getTableName(users) + }\`.\`name\``, + params: [], + }); +}); + +test.concurrent('build query insert with onDuplicate', async ({ db, push }) => { + const users = mysqlTable('users_on_duplicate_255', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + const query = db.insert(users) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: `insert into \`${ + getTableName(users) + }\` (\`id\`, \`name\`, \`verified\`, \`jsonb\`, \`created_at\`) values (default, ?, default, ?, default) on duplicate key update \`name\` = ?`, + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test.concurrent('insert with onDuplicate', async ({ db, push }) => { + const users = mysqlTable('users_on_duplicate_test_256', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users) + .values({ name: 'John' }); + + await db.insert(users) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: users.id, name: users.name }).from(users).where( + eq(users.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); +}); + +test.concurrent('insert conflict', async ({ db, push }) => { + const users = mysqlTable('users_conflict_257', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users) + .values({ name: 'John' }); + + await expect((async () => { + db.insert(users).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); +}); + +test.concurrent('insert conflict with ignore', async ({ db, push }) => { + const users = mysqlTable('users_conflict_ignore_258', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users) + .values({ name: 'John' }); + + await db.insert(users) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: users.id, name: users.name }).from(users).where( + eq(users.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); + +test.concurrent('insert sql', async ({ db, push }) => { + const users = mysqlTable('users_insert_sql_561', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: sql`${'John'}` }); + const result = await db.select({ id: users.id, name: users.name }).from(users); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test.concurrent('partial join with alias', async ({ db, push }) => { + const users = mysqlTable('users_partial_join_567', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + const customerAlias = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: users.id, + name: users.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(users) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); +}); + +test.concurrent('full join with alias', async ({ db, push }) => { + const mysqlTableLocal = mysqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTableLocal('users_full_join_591', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users_full_join_591: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); +}); + +test.concurrent('select from alias', async ({ db, push }) => { + const mysqlTableLocal = mysqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTableLocal('users_select_alias_638', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); +}); + +test.concurrent('insert with spaces', async ({ db, push }) => { + const users = mysqlTable('users_insert_spaces_669', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: users.id, name: users.name }).from(users); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test.concurrent('prepared statement', async ({ db, push }) => { + const users = mysqlTable('users_prepared_676', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const statement = db.select({ + id: users.id, + name: users.name, + }).from(users) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test.concurrent('prepared statement reuse', async ({ db, push }) => { + const users = mysqlTable('users_prepared_reuse_688', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + const stmt = db.insert(users).values({ + verified: true, + name: sql.placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db.select({ + id: users.id, + name: users.name, + verified: users.verified, + }).from(users); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test.concurrent('prepared statement with placeholder in .where', async ({ db, push }) => { + const users = mysqlTable('users_745', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values({ name: 'John' }); + const stmt = db.select({ + id: users.id, + name: users.name, + }).from(users) + .where(eq(users.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test.concurrent('migrator', async ({ db, push }) => { + const usersMigratorTable = mysqlTableRaw('users12_758', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + email: text('email').notNull(), + }, (table) => [uniqueIndex('name_unique_idx').on(table.name).using('btree')]); + + await push({ usersMigratorTable }); + await migrate(db, { migrationsFolder: './drizzle2/mysql' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); +}); + +test.concurrent('insert via db.execute + select via db.execute', async ({ db, push }) => { + const users = mysqlTable('users_788', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.execute(sql`insert into ${users} (${new Name(users.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${users}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); +}); + +test.concurrent('insert via db.execute w/ query builder', async ({ db, push }) => { + const users = mysqlTable('users_795', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + const inserted = await db.execute( + db.insert(users).values({ name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); +}); + +test.concurrent('insert + select all possible dates', async ({ db, push }) => { + const datesTable = mysqlTable('datestable_802', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { fsp: 1 }), + datetime: datetime('datetime', { fsp: 2 }), + datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), + year: year('year'), + }); + + await push({ datesTable }); + + const d = new Date('2022-11-11'); + + await db.insert(datesTable).values({ + date: d, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: d, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12.0', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12.00', + }]); +}); + +test.concurrent('Mysql enum test case #1', async ({ db, push }) => { + const tableWithEnums = mysqlTable('enums_test_case_856', { + id: serial('id').primaryKey(), + enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), + }); + + await push({ tableWithEnums }); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); +}); + +test.concurrent('left join (flat object fields)', async ({ db, push }) => { + const users2 = mysqlTable('users2_892', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const cities = mysqlTable('cities_892', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users2, cities }); + await db.insert(cities) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const res = await db.select({ + userId: users2.id, + userName: users2.name, + cityId: cities.id, + cityName: cities.name, + }).from(users2) + .leftJoin(cities, eq(users2.cityId, cities.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); +}); + +test.concurrent('left join (grouped fields)', async ({ db, push }) => { + const users2 = mysqlTable('users2_912', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const cities = mysqlTable('cities_912', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users2, cities }); + await db.insert(cities) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const res = await db.select({ + id: users2.id, + user: { + name: users2.name, + nameUpper: sql`upper(${users2.name})`, + }, + city: { + id: cities.id, + name: cities.name, + nameUpper: sql`upper(${cities.name})`, + }, + }).from(users2) + .leftJoin(cities, eq(users2.cityId, cities.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); +}); + +test.concurrent('left join (all fields)', async ({ db, push }) => { + const users2 = mysqlTable('users2_946', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const cities = mysqlTable('cities_946', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users2, cities }); + await db.insert(cities) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const res = await db.select().from(users2) + .leftJoin(cities, eq(users2.cityId, cities.id)); + + expect(res).toEqual([ + { + users2_946: { + id: 1, + name: 'John', + cityId: 1, + }, + cities_946: { + id: 1, + name: 'Paris', + }, + }, + { + users2_946: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities_946: null, + }, + ]); +}); + +test.concurrent('join subquery', async ({ db, push }) => { + const coursesTable = mysqlTable('courses_978', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: bigint('category_id', { mode: 'number', unsigned: true }).references(() => courseCategoriesTable.id), + }); + + const courseCategoriesTable = mysqlTable('course_categories_978', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ coursesTable, courseCategoriesTable }); + + await db.insert(courseCategoriesTable).values([ + { name: 'Category 1' }, + { name: 'Category 2' }, + { name: 'Category 3' }, + { name: 'Category 4' }, + ]); + + await db.insert(coursesTable).values([ + { name: 'Development', categoryId: 2 }, + { name: 'IT & Software', categoryId: 3 }, + { name: 'Marketing', categoryId: 4 }, + { name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); +}); + +test.concurrent('with ... select', async ({ db, push }) => { + const orders = mysqlTable('orders_1056', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), + }); + + await push({ orders }); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, + productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); +}); + +test.concurrent('select from subquery sql', async ({ db, push }) => { + const users2 = mysqlTable('users2_1160', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + await push({ users2 }); + await db.insert(users2).values([{ name: 'John' }, { name: 'Jane' }]); + + const sq = db + .select({ name: sql`concat(${users2.name}, " modified")`.as('name') }) + .from(users2) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); +}); + +test.concurrent('select a field without joining its table', ({ db }) => { + const users = mysqlTable('users_1173', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + const users2 = mysqlTable('users2_1173', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + expect(() => db.select({ name: users2.name }).from(users).prepare()).toThrowError(); +}); + +test.concurrent('select all fields from subquery without alias', ({ db }) => { + const users2 = mysqlTable('users2_1177', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2.name})` }).from(users2)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); +}); + +test.concurrent('select count()', async ({ db, push }) => { + const users = mysqlTable('users_1183', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(users); + + expect(res).toEqual([{ count: 2 }]); +}); + +test.concurrent('select for ...', ({ db }) => { + const users2 = mysqlTable('users2_1191', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + { + const query = db.select().from(users2).for('update').toSQL(); + expect(query.sql).toMatch(/ for update$/); + } + { + const query = db.select().from(users2).for('share', { skipLocked: true }).toSQL(); + expect(query.sql).toMatch(/ for share skip locked$/); + } + { + const query = db.select().from(users2).for('update', { noWait: true }).toSQL(); + expect(query.sql).toMatch(/ for update nowait$/); + } +}); + +test.concurrent('having', async ({ db, push }) => { + const users2 = mysqlTable('users2_1206', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const cities = mysqlTable('cities_1206', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users2, cities }); + await db.insert(cities).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); + + await db.insert(users2).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: cities.id, + name: sql`upper(${cities.name})`.as('upper_name'), + usersCount: sql`count(${users2.id})`.as('users_count'), + }) + .from(cities) + .leftJoin(users2, eq(users2.cityId, cities.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(cities.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); +}); + +test.concurrent('view', async ({ db, push }) => { + const users2 = mysqlTable('users2_1241', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const cities = mysqlTable('cities_1241', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const newYorkers1 = mysqlView('new_yorkers_1241') + .as((qb) => qb.select().from(users2).where(eq(users2.cityId, 1))); + + const newYorkers2 = mysqlView('new_yorkers_1241', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2} where ${eq(users2.cityId, 1)}`); + + const newYorkers3 = mysqlView('new_yorkers_1241', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await push({ users2, cities }); + await db.execute(sql`create view new_yorkers_1241 as ${getViewConfig(newYorkers1).query}`); + + await db.insert(cities).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); +}); + +test.concurrent('select from raw sql', async ({ db }) => { + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); +}); + +test.concurrent('select from raw sql with joins', async ({ db }) => { + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); +}); + +test.concurrent('join on aliased sql from select', async ({ db }) => { + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect>; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test.concurrent('join on aliased sql from with clause', async ({ db }) => { + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect>; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test.concurrent('prefixed table', async ({ db, push }) => { + const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); + + const users = mysqlTable('test_prefixed_table_with_unique_name_1450', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test.concurrent('orderBy with aliased column', ({ db }) => { + const users2 = mysqlTable('users2_1473', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe(`select something as \`test\` from \`${getTableName(users2)}\` order by \`test\``); +}); + +test.concurrent('timestamp timezone', async ({ db, push }) => { + const users = mysqlTable('users_1481', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + await db.insert(users).values({ name: 'With default times' }); + await db.insert(users).values({ + name: 'Without default times', + createdAt: date, + }); + const usersResult = await db.select().from(users); + + // check that the timestamps are set correctly for default times + expect(Math.abs(usersResult[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(usersResult[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); +}); + +test('transaction', async ({ db, push }) => { + const users = mysqlTable('users_transactions_1498', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = mysqlTable('products_transactions_1498', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await push({ users, products }); + + const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); +}); + +test.concurrent('transaction rollback', async ({ db, push }) => { + const users = mysqlTable('users_transactions_rollback_1535', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await push({ users }); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); +}); + +test('nested transaction', async ({ db, push }) => { + const users = mysqlTable('users_nested_transactions_1561', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await push({ users }); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + }); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 200 }]); +}); + +test('nested transaction rollback', async ({ db, push }) => { + const users = mysqlTable('users_nested_transactions_rollback_1588', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await push({ users }); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await expect((async () => { + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 100 }]); +}); + +test.concurrent('join subquery with join', async ({ db, push }) => { + const internalStaff = mysqlTable('internal_staff_1618', { + userId: int('user_id').notNull(), + }); + + const customUser = mysqlTable('custom_user_1618', { + id: int('id').notNull(), + }); + + const ticket = mysqlTable('ticket_1618', { + staffId: int('staff_id').notNull(), + }); + + await push({ internalStaff, customUser, ticket }); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff_1618.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + ticket_1618: { staffId: 1 }, + internal_staff: { + internal_staff_1618: { userId: 1 }, + custom_user_1618: { id: 1 }, + }, + }]); +}); + +test.concurrent('subquery with view', async ({ db, push }) => { + const users = mysqlTable('users_subquery_view_1667', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = mysqlView('new_yorkers_1667').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await push({ users }); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq); + + await db.execute(sql`drop view ${newYorkers}`); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); +}); + +test.concurrent('join view as subquery', async ({ db, push }) => { + const users = mysqlTable('users_join_view_1703', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = mysqlView('new_yorkers_1703').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await push({ users }); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); + + expect(result).toEqual([ + { + users_join_view_1703: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view_1703: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view_1703: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view_1703: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); +}); + +test.concurrent('select iterator', async ({ db, push }) => { + const users = mysqlTable('users_iterator_1754', { + id: serial('id').primaryKey(), + }); + + await push({ users }); + await db.insert(users).values([{}, {}, {}]); + + const iter = db.select().from(users).iterator(); + + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); +}); + +test.concurrent('select iterator w/ prepared statement', async ({ db, push }) => { + const users = mysqlTable('users_iterator_1775', { + id: serial('id').primaryKey(), + }); + + await push({ users }); + await db.insert(users).values([{}, {}, {}]); + + const prepared = db.select().from(users).prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); +}); + +test.concurrent('insert undefined', async ({ db, push }) => { + const users = mysqlTable('users_1796', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await push({ users }); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); +}); + +test.concurrent('update undefined', async ({ db, push }) => { + const users = mysqlTable('users_1815', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await push({ users }); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); +}); diff --git a/integration-tests/tests/mysql/default/mysql-proxy.test.ts b/integration-tests/tests/mysql/default/mysql-proxy.test.ts new file mode 100644 index 0000000000..6cd7983d3d --- /dev/null +++ b/integration-tests/tests/mysql/default/mysql-proxy.test.ts @@ -0,0 +1,178 @@ +import { sql } from 'drizzle-orm'; +import { getTableConfig } from 'drizzle-orm/mysql-core'; +import { migrate } from 'drizzle-orm/mysql-proxy/migrator'; +import { describe, expect } from 'vitest'; +import { proxyTest } from '../instrumentation'; +import { proxyTest as test } from '../instrumentation'; +import { tests } from '../mysql-common'; +import { usersMigratorTable } from '../schema2'; + +const omit = new Set([ + 'select iterator w/ prepared statement', + 'select iterator', + 'nested transaction rollback', + 'nested transaction', + 'transaction rollback', + 'transaction', + 'transaction with options (set isolationLevel)', + 'RQB v2 transaction find first - no rows', + 'RQB v2 transaction find first - multiple rows', + 'RQB v2 transaction find first - with relation', + 'RQB v2 transaction find first - placeholders', + 'RQB v2 transaction find many - no rows', + 'RQB v2 transaction find many - multiple rows', + 'RQB v2 transaction find many - with relation', + 'RQB v2 transaction find many - placeholders', +]); + +tests(proxyTest, omit); + +describe('migrator', () => { + test('migrator', async ({ db, simulator }) => { + await db.execute(sql`drop table if exists ${sql.identifier('__drizzle_migrations')}`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { migrationsFolder: './drizzle2/mysql' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + }); + + test('migrator : --init', async ({ db, simulator }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/mysql', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - local migrations error', async ({ db, simulator }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/mysql-init', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - db migrations error', async ({ db, simulator }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/mysql', + migrationsTable, + }); + + const migratorRes = await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/mysql-init', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(true); + }); +}); diff --git a/integration-tests/tests/mysql/default/mysql.test.ts b/integration-tests/tests/mysql/default/mysql.test.ts new file mode 100644 index 0000000000..de7df1dbb2 --- /dev/null +++ b/integration-tests/tests/mysql/default/mysql.test.ts @@ -0,0 +1,129 @@ +import { sql } from 'drizzle-orm'; +import { getTableConfig } from 'drizzle-orm/mysql-core'; +import { migrate } from 'drizzle-orm/mysql2/migrator'; +import { describe, expect } from 'vitest'; +import { mysqlTest as test } from '../instrumentation'; +import { tests } from '../mysql-common'; +import { runTests } from '../mysql-common-cache'; +import { usersMigratorTable } from '../schema2'; + +runTests('mysql', test); +tests(test); + +describe('migrator', () => { + test('migrator', async ({ db }) => { + await db.execute(sql`drop table if exists ${sql.identifier('__drizzle_migrations')}`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, { migrationsFolder: './drizzle2/mysql' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + }); + + test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, { + migrationsFolder: './drizzle2/mysql', + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(true); + }); +}); diff --git a/integration-tests/tests/mysql/default/schema.test.ts b/integration-tests/tests/mysql/default/schema.test.ts new file mode 100644 index 0000000000..92066d55f6 --- /dev/null +++ b/integration-tests/tests/mysql/default/schema.test.ts @@ -0,0 +1,263 @@ +import { sql } from 'drizzle-orm'; +import { jsonb } from 'drizzle-orm/cockroach-core'; +import { + bigint, + boolean, + foreignKey, + getTableConfig, + index, + int, + json, + mediumint, + MySqlDialect, + mysqlTable, + mysqlTableCreator, + primaryKey, + serial, + smallint, + text, + timestamp, + tinyint, + unique, +} from 'drizzle-orm/mysql-core'; +import { drizzle } from 'drizzle-orm/mysql2'; +import { expect, test } from 'vitest'; + +const db = drizzle.mock(); + +test('table config: unsigned ints', async () => { + const unsignedInts = mysqlTable('cities1', { + bigint: bigint({ mode: 'number', unsigned: true }), + int: int({ unsigned: true }), + smallint: smallint({ unsigned: true }), + mediumint: mediumint({ unsigned: true }), + tinyint: tinyint({ unsigned: true }), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); + expect(intColumn.getSQLType()).toBe('int unsigned'); + expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); + expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); + expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); +}); + +test('table config: signed ints', async () => { + const unsignedInts = mysqlTable('cities1', { + bigint: bigint('bigint', { mode: 'number' }), + int: int('int'), + smallint: smallint('smallint'), + mediumint: mediumint('mediumint'), + tinyint: tinyint('tinyint'), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint'); + expect(intColumn.getSQLType()).toBe('int'); + expect(smallintColumn.getSQLType()).toBe('smallint'); + expect(mediumintColumn.getSQLType()).toBe('mediumint'); + expect(tinyintColumn.getSQLType()).toBe('tinyint'); +}); + +test('table config: foreign keys name', async () => { + const table = mysqlTable('cities', { + id: serial().primaryKey(), + name: text().notNull(), + state: text(), + }, (t) => [foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' })]); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.foreignKeys).toHaveLength(1); + expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); +}); + +test('table config: primary keys name', async () => { + const table = mysqlTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [primaryKey({ columns: [t.id, t.name] })]); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); +}); + +test('table configs: unique third param', async () => { + const cities1Table = mysqlTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [unique('custom_name').on(t.name, t.state), unique('custom_name1').on(t.name, t.state)]); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); +}); + +test('table configs: unique in column', async () => { + const cities1Table = mysqlTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull().unique(), + state: text('state').unique('custom'), + field: text('field').unique('custom_field'), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + expect(columnName?.uniqueName).toBe(undefined); + expect(columnName?.isUnique).toBeTruthy(); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBeTruthy(); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBeTruthy(); +}); + +test('prefixed', () => { + const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const dialect = new MySqlDialect(); + // await db.execute(`); + expect(dialect.sqlToQuery(sql`drop table if exists ${users}`)).toStrictEqual({ + sql: 'drop table if exists `prefixed_users`', + params: [], + }); + + expect(dialect.sqlToQuery(sql`create table ${users} (id serial primary key, name text not null)`)).toStrictEqual({ + sql: 'create table `prefixed_users` (id serial primary key, name text not null)', + params: [], + }); +}); + +test.concurrent('define constraints as array', async () => { + const table = mysqlTable('name', { + id: int(), + }, (t) => [ + index('name').on(t.id), + primaryKey({ columns: [t.id] }), + ]); + + const { indexes, primaryKeys } = getTableConfig(table); + + expect(indexes.length).toBe(1); + expect(primaryKeys.length).toBe(1); +}); + +test('define constraints as array inside third param', async () => { + const table = mysqlTable('name', { + id: int(), + }, (t) => [ + [index('name').on(t.id), primaryKey({ columns: [t.id] })], + ]); + + const { indexes, primaryKeys } = getTableConfig(table); + + expect(indexes.length).toBe(1); + expect(primaryKeys.length).toBe(1); +}); + +test.concurrent('build query', async () => { + const table = mysqlTable('table', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); + + const query = db.select({ id: table.id, name: table.name }).from(table) + .groupBy(table.id, table.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`table\` group by \`table\`.\`id\`, \`table\`.\`name\``, + params: [], + }); +}); + +test.concurrent('Query check: Insert all defaults in 1 row', async () => { + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', + params: [], + }); +}); + +test.concurrent('Query check: Insert all defaults in multiple rows', async () => { + const users = mysqlTable('table', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into `table` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', + params: [], + }); +}); + +test.concurrent('build query insert with onDuplicate', async () => { + const users = mysqlTable('users', { + id: serial().primaryKey(), + name: text().default('Dan'), + verified: boolean().default(false), + jsonb: jsonb(), + }); + + const query = db.insert(users) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `users` (`id`, `name`, `verified`, `jsonb`) values (default, ?, default, ?) on duplicate key update `name` = ?', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); diff --git a/integration-tests/tests/mysql/instrumentation.ts b/integration-tests/tests/mysql/instrumentation.ts new file mode 100644 index 0000000000..b4fbe66495 --- /dev/null +++ b/integration-tests/tests/mysql/instrumentation.ts @@ -0,0 +1,435 @@ +import { Client } from '@planetscale/database'; +import { connect, type Connection } from '@tidbcloud/serverless'; +import { getTableName, is, Table } from 'drizzle-orm'; +import type { MutationOption } from 'drizzle-orm/cache/core'; +import { Cache } from 'drizzle-orm/cache/core'; +import type { CacheConfig } from 'drizzle-orm/cache/core/types'; +import type { MySqlDatabase, MySqlSchema, MySqlTable, MySqlView } from 'drizzle-orm/mysql-core'; +import { drizzle as proxyDrizzle } from 'drizzle-orm/mysql-proxy'; +import type { AnyMySql2Connection } from 'drizzle-orm/mysql2'; +import { drizzle as mysql2Drizzle } from 'drizzle-orm/mysql2'; +import { drizzle as psDrizzle } from 'drizzle-orm/planetscale-serverless'; +import { drizzle as drizzleTidb } from 'drizzle-orm/tidb-serverless'; +import { type FunctionsVersioning, type InferCallbackType, seed } from 'drizzle-seed'; +import Keyv from 'keyv'; +import { createConnection } from 'mysql2/promise'; +import type * as mysql from 'mysql2/promise'; +import type { Mock } from 'vitest'; +import { test as base, vi } from 'vitest'; +import { relations } from './schema'; + +// eslint-disable-next-line drizzle-internal/require-entity-kind +export class TestCache extends Cache { + private globalTtl: number = 1000; + private usedTablesPerKey: Record = {}; + + constructor(private readonly strat: 'explicit' | 'all', private kv: Keyv = new Keyv()) { + super(); + } + + override strategy(): 'explicit' | 'all' { + return this.strat; + } + + override async get(key: string, _tables: string[], _isTag: boolean): Promise { + const res = await this.kv.get(key) ?? undefined; + return res; + } + override async put( + key: string, + response: any, + tables: string[], + isTag: boolean, + config?: CacheConfig, + ): Promise { + await this.kv.set(key, response, config ? config.ex : this.globalTtl); + for (const table of tables) { + const keys = this.usedTablesPerKey[table]; + if (keys === undefined) { + this.usedTablesPerKey[table] = [key]; + } else { + keys.push(key); + } + } + } + override async onMutate(params: MutationOption): Promise { + const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; + const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; + + const keysToDelete = new Set(); + + for (const table of tablesArray) { + const tableName = is(table, Table) ? getTableName(table) : table as string; + const keys = this.usedTablesPerKey[tableName] ?? []; + for (const key of keys) keysToDelete.add(key); + } + + if (keysToDelete.size > 0 || tagsArray.length > 0) { + for (const tag of tagsArray) { + await this.kv.delete(tag); + } + + for (const key of keysToDelete) { + await this.kv.delete(key); + for (const table of tablesArray) { + const tableName = is(table, Table) ? getTableName(table) : table as string; + this.usedTablesPerKey[tableName] = []; + } + } + } + } +} + +// eslint-disable-next-line drizzle-internal/require-entity-kind +class ServerSimulator { + constructor(private db: mysql.Connection) {} + + async query(sql: string, params: any[], method: 'all' | 'execute') { + if (method === 'all') { + try { + const result = await this.db.query({ + sql, + values: params, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result[0] as any }; + } catch (e: any) { + return { error: e }; + } + } else if (method === 'execute') { + try { + const result = await this.db.query({ + sql, + values: params, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result as any }; + } catch (e: any) { + return { error: e }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + async migrations(queries: string[]) { + await this.db.query('START TRANSACTION'); + try { + for (const query of queries) { + await this.db.query(query); + } + await this.db.query('COMMIT'); + } catch (e) { + await this.db.query('ROLLBACK'); + throw e; + } + + return {}; + } +} + +export type MysqlSchema = Record< + string, + MySqlTable | MySqlSchema | MySqlView +>; + +export type RefineCallbackT = ( + funcs: FunctionsVersioning, +) => InferCallbackType, Schema>; + +const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, + vendor: string, +) => { + const { diff } = await import('../../../drizzle-kit/tests/mysql/mocks' as string); + + const res = await diff({}, schema, []); + for (const s of res.sqlStatements) { + const patched = vendor === 'tidb' ? s.replace('(now())', '(now(2))') : s; + await query(patched, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +const _seed = async ( + db: MySqlDatabase, + schema: Schema, + refineCallback?: RefineCallbackT, +) => { + return refineCallback === undefined ? seed(db, schema) : seed(db, schema).refine(refineCallback); +}; + +const createProxyHandler = (client: mysql.Connection) => { + const serverSimulator = new ServerSimulator(client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + if (response.error !== undefined) { + throw response.error; + } + return { rows: response.data }; + } catch (e: any) { + console.error('Error from mysql proxy server:', e.message); + throw e; + } + }; + return proxyHandler; +}; +const prepareTest = (vendor: 'mysql' | 'planetscale' | 'tidb' | 'mysql-proxy') => { + return base.extend< + { + client: { + client: AnyMySql2Connection | Client | Connection; + query: (sql: string, params: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + // proxyHandler: (sql: string, params: any[], method: any) => Promise<{ + // rows: any; + // }>; + db: MySqlDatabase; + push: (schema: any) => Promise; + seed: ( + schema: Schema, + refineCallback?: (funcs: FunctionsVersioning) => InferCallbackType, Schema>, + ) => Promise; + drizzle: { + withCacheAll: { + db: MySqlDatabase; + put: Mock; + get: Mock; + onMutate: Mock; + invalidate: Mock; + }; + withCacheExplicit: { + db: MySqlDatabase; + put: Mock; + get: Mock; + onMutate: Mock; + invalidate: Mock; + }; + }; + } + >({ + client: [ + // oxlint-disable-next-line + async ({}, use) => { + if (vendor === 'mysql' || vendor === 'mysql-proxy') { + const envurl = process.env['MYSQL_CONNECTION_STRING']; + if (!envurl) throw new Error('No mysql url provided'); + const client = await createConnection({ + uri: envurl, + supportBigNumbers: true, + multipleStatements: true, + }); + await client.connect(); + await client.query('drop database drizzle; create database drizzle; use drizzle;'); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res[0] as any[]; + }; + const batch = async (statements: string[]) => { + return client.query(statements.map((x) => x.endsWith(';') ? x : `${x};`).join('\n')).then(() => '' as any); + }; + + await use({ client, query, batch }); + await client.end(); + client.destroy(); + return; + } + + if (vendor === 'planetscale') { + const envurl = process.env['PLANETSCALE_CONNECTION_STRING']; + if (!envurl) throw new Error('No mysql url provided'); + const client = new Client({ url: envurl }); + + const query = async (sql: string, params: any[] = []) => { + return client.execute(sql, params).then((x) => x.rows); + }; + + const batch = async (statements: string[]) => { + const queries = statements.map((x) => { + return client.execute(x); + }); + return Promise.all(queries).then(() => '' as any); + }; + + const tables = + (await query('SELECT table_name FROM information_schema.tables WHERE table_schema = DATABASE();')).map( + (x) => x['TABLE_NAME'], + ); + const views = + (await query('SELECT table_name FROM information_schema.views WHERE table_schema = DATABASE();')).map((x) => + x['TABLE_NAME'] + ); + + const dropViews = views.length === 0 + ? 'select 1;' + : `DROP VIEW IF EXISTS ${views.map((x) => `\`${x}\``).join(',')};`; + const dropTables = tables.length === 0 + ? 'select 1;' + : `DROP TABLE IF EXISTS ${tables.map((x) => `\`${x}\``).join(',')};`; + await query(dropViews); + await query('SET FOREIGN_KEY_CHECKS = 0;'); + await query(dropTables); + await query('SET FOREIGN_KEY_CHECKS = 1;'); + + await use({ client, query, batch }); + return; + } + + if (vendor === 'tidb') { + const connectionString = process.env['TIDB_CONNECTION_STRING']; + if (!connectionString) { + throw new Error('TIDB_CONNECTION_STRING is not set'); + } + + const tmpClient = connect({ url: connectionString }); + await tmpClient.execute('drop database if exists ci;'); + await tmpClient.execute('create database ci;'); + await tmpClient.execute('use ci;'); + + const client = connect({ url: connectionString, database: 'ci' }); + + const query = async (sql: string, params: any[] = []) => { + return client.execute(sql, params) as Promise; + }; + + const batch = async (statements: string[]) => { + const queries = statements.map((x) => { + return client.execute(x); + }); + return Promise.all(queries).then(() => '' as any); + }; + await use({ client, query, batch }); + return; + } + + throw new Error('error'); + }, + { scope: 'worker' }, + ], + db: [ + async ({ client }, use) => { + const db = vendor === 'mysql' + ? mysql2Drizzle({ client: client.client as AnyMySql2Connection, relations }) + : vendor === 'tidb' + ? drizzleTidb({ client: client.client as Connection, relations }) + : vendor === 'planetscale' + ? psDrizzle({ client: client.client as Client, relations }) + : proxyDrizzle(createProxyHandler(client.client as mysql.Connection), { + relations, + }); + + await use(db as any); + }, + { scope: 'worker' }, + ], + push: [ + async ({ client }, use) => { + const { query } = client; + const push = ( + schema: any, + ) => _push(query, schema, vendor); + + await use(push); + }, + { scope: 'worker' }, + ], + seed: [ + async ({ db }, use) => { + const seed = ( + schema: any, + refineCallback?: (funcs: FunctionsVersioning) => InferCallbackType, any>, + ) => _seed(db, schema, refineCallback); + + await use(seed); + }, + { scope: 'worker' }, + ], + drizzle: [ + async ({ client }, use) => { + const explicitCache = new TestCache('explicit'); + const allCache = new TestCache('all'); + const proxyHandler = createProxyHandler(client.client as mysql.Connection); + + const withCacheExplicit = vendor === 'mysql' + ? mysql2Drizzle({ client: client.client as any, cache: explicitCache }) + : vendor === 'tidb' + ? drizzleTidb({ client: client.client as Connection, relations, cache: explicitCache }) + : vendor === 'planetscale' + ? psDrizzle({ client: client.client as any, cache: explicitCache }) + : proxyDrizzle(proxyHandler, { cache: explicitCache }); + const withCacheAll = vendor === 'mysql' + ? mysql2Drizzle({ client: client.client as any, cache: allCache }) + : vendor === 'tidb' + ? drizzleTidb({ client: client.client as Connection, relations, cache: allCache }) + : vendor === 'planetscale' + ? psDrizzle({ client: client.client as any, cache: allCache }) + : proxyDrizzle(proxyHandler, { cache: allCache }); + + const drz = { + withCacheAll: { + db: withCacheAll, + put: vi.spyOn(allCache, 'put'), + get: vi.spyOn(allCache, 'get'), + onMutate: vi.spyOn(allCache, 'onMutate'), + invalidate: vi.spyOn(withCacheAll.$cache, 'invalidate'), + }, + withCacheExplicit: { + db: withCacheExplicit, + put: vi.spyOn(explicitCache, 'put'), + get: vi.spyOn(explicitCache, 'get'), + onMutate: vi.spyOn(explicitCache, 'onMutate'), + invalidate: vi.spyOn(withCacheExplicit.$cache, 'invalidate'), + }, + }; + + await use(drz as any); + + await withCacheAll.$cache.invalidate({}); + await withCacheExplicit.$cache.invalidate({}); + drz.withCacheAll.get.mockClear(); + drz.withCacheAll.put.mockClear(); + drz.withCacheAll.onMutate.mockClear(); + drz.withCacheAll.invalidate.mockClear(); + drz.withCacheExplicit.get.mockClear(); + drz.withCacheExplicit.put.mockClear(); + drz.withCacheExplicit.onMutate.mockClear(); + drz.withCacheExplicit.invalidate.mockClear(); + }, + { scope: 'test' }, + ], + }); +}; + +export const mysqlTest = prepareTest('mysql'); +export const planetscaleTest = prepareTest('planetscale'); +export const tidbTest = prepareTest('tidb'); +export const proxyTest = prepareTest('mysql-proxy').extend<{ simulator: ServerSimulator }>({ + simulator: [ + async ({ client: { client } }, use) => { + const simulator = new ServerSimulator(client as mysql.Connection); + await use(simulator); + }, + { scope: 'test' }, + ], +}); +export type Test = ReturnType; diff --git a/integration-tests/tests/mysql/mysql-common-1.ts b/integration-tests/tests/mysql/mysql-common-1.ts new file mode 100644 index 0000000000..85188aa436 --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common-1.ts @@ -0,0 +1,459 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { and, asc, eq, exists, inArray, notInArray, sql } from 'drizzle-orm'; +import { + alias, + boolean, + date, + datetime, + int, + json, + mysqlEnum, + mysqlTable, + serial, + text, + time, + timestamp, + year, +} from 'drizzle-orm/mysql-core'; +import { expect } from 'vitest'; +import { toLocalDate } from '~/utils'; +import type { Test } from './instrumentation'; +import { createUserTable } from './schema2'; + +export function tests(test: Test, exclude: Set = new Set([])) { + test.beforeEach(async ({ task, skip }) => { + if (exclude.has(task.name)) skip(); + }); + + test.concurrent('select all fields', async ({ db, push, seed }) => { + const users = createUserTable('users_1'); + + await push({ users }); + await db.insert(users).values({ id: 1, name: 'Agripina', createdAt: new Date() }); + + const result = await db.select().from(users); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toStrictEqual([{ + id: 1, + name: 'Agripina', + verified: false, + jsonb: null, + createdAt: result[0]!.createdAt, + }]); + }); + + test.concurrent('select sql', async ({ db, push, seed }) => { + const users = mysqlTable('users_2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { mode: 'date', fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 1 } })); + + const result = await db.select({ + name: sql`upper(${users.name})`, + }).from(users); + + expect(result).toStrictEqual([{ name: 'AGRIPINA' }]); + }); + + test.concurrent('select typed sql', async ({ db, push, seed }) => { + const users = mysqlTable('users_3', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { mode: 'date', fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 1 } })); + + const result = await db.select({ + name: sql`upper(${users.name})`, + }).from(users); + + expect(result).toEqual([{ name: 'AGRIPINA' }]); + }); + + test.concurrent('select with empty array in inArray', async ({ db, push, seed }) => { + const users = mysqlTable('users_4', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { mode: 'date', fsp: 2 }).notNull().defaultNow(), + }); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 3 } })); + + const result = await db + .select({ + name: sql`upper(${users.name})`, + }) + .from(users) + .where(inArray(users.id, [])); + + expect(result).toEqual([]); + }); + + test.concurrent('select with empty array in notInArray', async ({ db, push, seed }) => { + const users = createUserTable('users_5'); + await push({ users }); + await seed({ users }, () => ({ users: { count: 3 } })); + + const result = await db + .select({ + name: sql`upper(${users.name})`, + }) + .from(users) + .where(notInArray(users.id, [])); + + expect(result).toEqual([{ name: 'AGRIPINA' }, { name: 'CANDY' }, { name: 'ILSE' }]); + }); + + test.concurrent('select distinct', async ({ db, push, seed }) => { + const users = mysqlTable('users_6', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + await push({ users }); + await seed( + { users }, + (funcs: any) => ({ + users: { count: 3, columns: { id: funcs.valuesFromArray({ values: [1, 1, 2], isUnique: true }) } }, + }), + ); + + const result = await db.selectDistinct().from(users).orderBy( + users.id, + users.name, + ); + expect(result).toEqual([{ id: 1, name: 'Candy' }, { id: 1, name: 'Ilse' }, { id: 2, name: 'Agripina' }]); + }); + + test.concurrent('select with group by as field', async ({ db, push, seed }) => { + const users = createUserTable('users_7'); + await push({ users }); + await seed({ users }, (funcs: any) => ({ + users: { + count: 3, + columns: { name: funcs.valuesFromArray({ values: ['John', 'John', 'Jane'], isUnique: true }) }, + }, + })); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(users.name).orderBy(users.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test.concurrent('select with exists', async ({ db, push, seed }) => { + const users = createUserTable('users_8'); + const user = alias(users, 'user'); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 3 } })); + + const result = await db.select({ name: users.name }).from(users).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(users.name, 'Candy'), eq(user.id, users.id))), + ), + ); + + expect(result).toEqual([{ name: 'Candy' }]); + }); + + test.concurrent('select with group by as sql', async ({ db, push, seed }) => { + const users = createUserTable('users_9'); + await push({ users }); + await seed({ users }, (funcs: any) => ({ + users: { + columns: { name: funcs.valuesFromArray({ values: ['John', 'John', 'Jane'] }) }, + }, + })); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(sql`${users.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test.concurrent('select with group by as sql + column', async ({ db, push, seed }) => { + const users = createUserTable('users_10'); + await push({ users }); + await seed({ users }, () => ({ users: { count: 3 } })); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(sql`${users.name}`, users.id); + + expect(result).toEqual([{ name: 'Agripina' }, { name: 'Candy' }, { name: 'Ilse' }]); + }); + + test.concurrent('select with group by as column + sql', async ({ db, push, seed }) => { + const users = createUserTable('users_11'); + await push({ users }); + await seed({ users }, () => ({ users: { count: 3 } })); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(users.id, sql`${users.name}`); + + expect(result).toEqual([{ name: 'Agripina' }, { name: 'Candy' }, { name: 'Ilse' }]); + }); + + test.concurrent('select with group by complex query', async ({ db, push, seed }) => { + const users = createUserTable('users_12'); + await push({ users }); + await seed({ users }, (funcs: any) => ({ + users: { + count: 3, + columns: { name: funcs.valuesFromArray({ values: ['John', 'Jane', 'Jane'], isUnique: true }) }, + }, + })); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(users.id, sql`${users.name}`) + .orderBy(asc(users.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); + }); + + test.concurrent('partial join with alias', async ({ db, push, seed }) => { + const users = createUserTable('users_13'); + await push({ users }); + await seed({ users }, () => ({ users: { count: 2 } })); + + const customerAlias = alias(users, 'customer'); + const result = await db + .select({ + user: { + id: users.id, + name: users.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(users) + .leftJoin(customerAlias, eq(customerAlias.id, 2)) + .where(eq(users.id, 1)); + + expect(result).toEqual([{ + user: { id: 1, name: 'Agripina' }, + customer: { id: 2, name: 'Candy' }, + }]); + }); + + test.concurrent('prepared statement', async ({ db, push, seed }) => { + const users = createUserTable('users_14'); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 1 } })); + + const statement = db.select({ + id: users.id, + name: users.name, + }).from(users) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'Agripina' }]); + }); + + test.concurrent('prepared statement with placeholder in .where', async ({ db, push, seed }) => { + const users = createUserTable('users_15'); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 1 } })); + + const stmt = db.select({ + id: users.id, + name: users.name, + }).from(users) + .where(eq(users.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'Agripina' }]); + }); + + test.concurrent('prepared statement with placeholder in .limit', async ({ db, push, seed }) => { + const users = createUserTable('users_16'); + + await push({ users }); + await seed({ users }, (funcs: any) => ({ users: { count: 1 } })); + + const stmt = db + .select({ + id: users.id, + name: users.name, + }) + .from(users) + .where(eq(users.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) + .prepare(); + + const result = await stmt.execute({ id: 1, limit: 1 }); + + expect(result).toEqual([{ id: 1, name: 'Agripina' }]); + expect(result).toHaveLength(1); + }); + + test.concurrent('prepared statement with placeholder in .offset', async ({ db, push, seed }) => { + const users = createUserTable('users_17'); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 3 } })); + + const stmt = db + .select({ + id: users.id, + name: users.name, + }) + .from(users) + .limit(sql.placeholder('limit')) + .offset(sql.placeholder('offset')) + .prepare(); + + const result = await stmt.execute({ limit: 1, offset: 1 }); + + expect(result).toEqual([{ id: 2, name: 'Candy' }]); + }); + + test.concurrent('prepared statement built using $dynamic', async ({ db, push, seed }) => { + const users = createUserTable('users_18'); + + await push({ users }); + await seed({ users }, (funcs: any) => ({ users: { count: 3 } })); + + function withLimitOffset(qb: any) { + return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); + } + + const stmt = db + .select({ + id: users.id, + name: users.name, + }) + .from(users) + .$dynamic(); + withLimitOffset(stmt).prepare('stmt_limit'); + + const result = await stmt.execute({ limit: 1, offset: 1 }); + + expect(result).toEqual([{ id: 2, name: 'Candy' }]); + }); + + test.concurrent('insert + select all possible dates', async ({ db, push }) => { + const datesTable = mysqlTable('datestable_1', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { fsp: 1 }), + datetime: datetime('datetime', { fsp: 2 }), + datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), + timestamp: timestamp('timestamp', { fsp: 3 }), + timestampAsString: timestamp('timestamp_as_string', { fsp: 3, mode: 'string' }), + year: year('year'), + }); + + await push({ datesTable }); + + const testDate = new Date('2022-11-11'); + const testDateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); + + await db.insert(datesTable).values({ + date: testDate, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: testDate, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: testDateWithMilliseconds, + timestampAsString: '2022-11-11 12:12:12.123', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12.0', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12.00', + timestamp: new Date('2022-11-11 12:12:12.123'), + timestampAsString: '2022-11-11 12:12:12.123', + }]); + }); + + test.concurrent('Mysql enum as ts enum', async ({ db, push }) => { + enum Test { + a = 'a', + b = 'b', + c = 'c', + } + + const tableWithTsEnums = mysqlTable('enums_test_case_1', { + id: serial('id').primaryKey(), + enum1: mysqlEnum('enum1', Test).notNull(), + enum2: mysqlEnum('enum2', Test).default(Test.a), + enum3: mysqlEnum('enum3', Test).notNull().default(Test.b), + }); + + await push({ tableWithTsEnums }); + + await db.insert(tableWithTsEnums).values([ + { id: 1, enum1: Test.a, enum2: Test.b, enum3: Test.c }, + { id: 2, enum1: Test.a, enum3: Test.c }, + { id: 3, enum1: Test.a }, + ]); + + const res = await db.select().from(tableWithTsEnums); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); + }); + + test.concurrent('Mysql enum test case #1', async ({ db, push }) => { + const tableWithEnums = mysqlTable('enums_test_case_2', { + id: serial('id').primaryKey(), + enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), + }); + + await push({ tableWithEnums }); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); + }); +} diff --git a/integration-tests/tests/mysql/mysql-common-2.ts b/integration-tests/tests/mysql/mysql-common-2.ts new file mode 100644 index 0000000000..ca25f66a5a --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common-2.ts @@ -0,0 +1,689 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { eq, gt, inArray, lt, sql } from 'drizzle-orm'; +import { + boolean, + decimal, + getViewConfig, + int, + mysqlTable, + mysqlTableCreator, + mysqlView, + serial, + text, +} from 'drizzle-orm/mysql-core'; +import { expect } from 'vitest'; +import { Expect } from '~/utils'; +import type { Equal } from '~/utils'; +import type { Test } from './instrumentation'; +import { createOrdersTable } from './schema2'; + +export function tests(test: Test, exclude: Set = new Set([])) { + test.beforeEach(async ({ task, skip }) => { + if (exclude.has(task.name)) skip(); + }); + + test.concurrent('left join (flat object fields)', async ({ db, push, seed }) => { + const users = mysqlTable('users_19', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + const cities = mysqlTable('cities_19', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users, cities }); + await seed( + { users, cities }, + (funcs) => ({ + users: { count: 2, columns: { cityId: funcs.valuesFromArray({ values: [1, null as any] }) } }, + cities: { count: 1 }, + }), + ); + + const res = await db.select({ + userId: users.id, + userName: users.name, + cityId: cities.id, + cityName: cities.name, + }).from(users) + .leftJoin(cities, eq(users.cityId, cities.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'Agripina', cityId: 1, cityName: 'Lakeitha' }, + { userId: 2, userName: 'Candy', cityId: null, cityName: null }, + ]); + }); + + test.concurrent('left join (grouped fields)', async ({ db, push, seed }) => { + const users = mysqlTable('users_24', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + const cities = mysqlTable('cities_4', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users, cities }); + await seed( + { users, cities }, + (funcs) => ({ + users: { count: 2, columns: { cityId: funcs.valuesFromArray({ values: [1, null as any] }) } }, + cities: { count: 1 }, + }), + ); + + const res = await db.select({ + id: users.id, + user: { + name: users.name, + nameUpper: sql`upper(${users.name})`, + }, + city: { + id: cities.id, + name: cities.name, + nameUpper: sql`upper(${cities.name})`, + }, + }).from(users) + .leftJoin(cities, eq(users.cityId, cities.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'Agripina', nameUpper: 'AGRIPINA' }, + city: { id: 1, name: 'Lakeitha', nameUpper: 'LAKEITHA' }, + }, + { + id: 2, + user: { name: 'Candy', nameUpper: 'CANDY' }, + city: null, + }, + ]); + }); + + test.concurrent('left join (all fields)', async ({ db, push, seed }) => { + const users = mysqlTable('users_25', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + const cities = mysqlTable('cities_25', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users, cities }); + await seed( + { users, cities }, + (funcs) => ({ + users: { count: 2, columns: { cityId: funcs.valuesFromArray({ values: [1, null as any] }) } }, + cities: { count: 1 }, + }), + ); + + const res = await db.select().from(users) + .leftJoin(cities, eq(users.cityId, cities.id)); + + expect(res).toEqual([ + { + users_25: { + id: 1, + name: 'Agripina', + cityId: 1, + }, + cities_25: { + id: 1, + name: 'Lakeitha', + }, + }, + { + users_25: { + id: 2, + name: 'Candy', + cityId: null, + }, + cities_25: null, + }, + ]); + }); + + test.concurrent('join subquery', async ({ db, push }) => { + const courseCategories = mysqlTable('course_categories_1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const courses = mysqlTable('courses_1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: int('category_id'), + }); + + await push({ courseCategories, courses }); + + await db.insert(courseCategories).values([ + { name: 'Category 1' }, + { name: 'Category 2' }, + { name: 'Category 3' }, + { name: 'Category 4' }, + ]); + + await db.insert(courses).values([ + { name: 'Development', categoryId: 2 }, + { name: 'IT & Software', categoryId: 3 }, + { name: 'Marketing', categoryId: 4 }, + { name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategories.id, + category: courseCategories.name, + total: sql`count(${courseCategories.id})`, + }) + .from(courseCategories) + .groupBy(courseCategories.id, courseCategories.name) + .as('sq2'); + + const res = await db + .select({ + courseName: courses.name, + categoryId: sq2.categoryId, + }) + .from(courses) + .leftJoin(sq2, eq(courses.categoryId, sq2.categoryId)) + .orderBy(courses.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + }); + + test.concurrent('with ... select', async ({ db, push }) => { + const orders = createOrdersTable('orders_1'); + + await push({ orders }); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, + productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + }); + + test.concurrent('with ... update', async ({ db, push }) => { + const products = mysqlTable('products', { + id: serial('id').primaryKey(), + price: decimal('price', { + precision: 15, + scale: 2, + }).notNull(), + cheap: boolean('cheap').notNull().default(false), + }); + + await push({ products }); + + await db.insert(products).values([ + { price: '10.99' }, + { price: '25.85' }, + { price: '32.99' }, + { price: '2.50' }, + { price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)); + + const result = await db + .select({ + id: products.id, + }) + .from(products) + .where(eq(products.cheap, true)); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test.concurrent('with ... delete', async ({ db, push }) => { + const orders = createOrdersTable('orders_2'); + + await push({ orders }); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); + + const result = await db + .select({ + id: orders.id, + }) + .from(orders); + + expect(result).toEqual([ + { id: 1 }, + { id: 2 }, + { id: 3 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test.concurrent('select from subquery sql', async ({ db, push, seed }) => { + const users = mysqlTable('users_30', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 2 } })); + + const sq = db + .select({ name: sql`concat(${users.name}, " modified")`.as('name') }) + .from(users) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'Agripina modified' }, { name: 'Candy modified' }]); + }); + + test.concurrent('select a field without joining its table', ({ db, push }) => { + const users1 = mysqlTable('users_31', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2 = mysqlTable('users_32', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + push({ users1, users2 }); + + expect(() => db.select({ name: users2.name }).from(users1).prepare()).toThrowError(); + }); + + test.concurrent('select all fields from subquery without alias', async ({ db, push, seed }) => { + const users = mysqlTable('users_33', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 2 } })); + + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users.name})` }).from(users)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); + }); + + test.concurrent('select count()', async ({ db, push, seed }) => { + const users = mysqlTable('users_34', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + await seed({ users }, () => ({ users: { count: 2 } })); + + const res = await db.select({ count: sql`count(*)` }).from(users); + + expect(res).toEqual([{ count: 2 }]); + }); + + test.concurrent('select for ...', ({ db, push }) => { + const users = mysqlTable('users_35', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + push({ users }); + + { + const query = db.select().from(users).for('update').toSQL(); + expect(query.sql).toMatch(/ for update$/); + } + { + const query = db.select().from(users).for('share', { skipLocked: true }).toSQL(); + expect(query.sql).toMatch(/ for share skip locked$/); + } + { + const query = db.select().from(users).for('update', { noWait: true }).toSQL(); + expect(query.sql).toMatch(/ for update nowait$/); + } + }); + + test.concurrent('having', async ({ db, push, seed }) => { + const cities = mysqlTable('cities_37', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users = mysqlTable('users_37', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + await push({ cities, users }); + await seed({ cities, users }, (funcs: any) => ({ + cities: { count: 3 }, + users: { count: 3, columns: { cityId: funcs.valuesFromArray({ values: [1, 1, 2] }) } }, + })); + + const result = await db + .select({ + id: cities.id, + name: sql`upper(${cities.name})`.as('upper_name'), + usersCount: sql`count(${users.id})`.as('users_count'), + }) + .from(cities) + .leftJoin(users, eq(users.cityId, cities.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(cities.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 2, + name: 'HOVANES', + usersCount: 1, + }, + { + id: 1, + name: 'LAKEITHA', + usersCount: 2, + }, + ]); + }); + + test.concurrent('view', async ({ db, push, seed }) => { + const users = mysqlTable('users_39', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers1 = mysqlView('new_yorkers_1') + .as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + const newYorkers2 = mysqlView('new_yorkers_2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users} where ${eq(users.cityId, 1)}`); + + const newYorkers3 = mysqlView('new_yorkers_3', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await push({ users, newYorkers1, newYorkers2, newYorkers3 }); + await db.execute(sql`create view new_yorkers_3 as ${getViewConfig(newYorkers1).query}`); + await seed({ users }, (funcs: any) => ({ + users: { count: 3, columns: { cityId: funcs.valuesFromArray({ values: [1, 1, 2] }) } }, + })); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 2, name: 'Candy', cityId: 1 }, + { id: 3, name: 'Ilse', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 2, name: 'Candy', cityId: 1 }, + { id: 3, name: 'Ilse', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 2, name: 'Candy', cityId: 1 }, + { id: 3, name: 'Ilse', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'Candy' }, + { name: 'Ilse' }, + ]); + } + }); + + test.concurrent('select from raw sql', async ({ db }) => { + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + }); + + test.concurrent('select from raw sql with joins', async ({ db }) => { + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); + }); + + test.concurrent('join on aliased sql from select', async ({ db }) => { + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test.concurrent('join on aliased sql from with clause', async ({ db }) => { + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test.concurrent('prefixed table', async ({ db, push }) => { + const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); + + const users = mysqlTable('test_prefixed_table_with_unique_name', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); +} diff --git a/integration-tests/tests/mysql/mysql-common-3.ts b/integration-tests/tests/mysql/mysql-common-3.ts new file mode 100644 index 0000000000..9bfacefe30 --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common-3.ts @@ -0,0 +1,606 @@ +import 'dotenv/config'; +import { asc, eq, gt, sql, TransactionRollbackError } from 'drizzle-orm'; +import { datetime, int, mysqlTable, mysqlView, serial, text, union, unionAll } from 'drizzle-orm/mysql-core'; +import { expect } from 'vitest'; + +import type { Test } from './instrumentation'; +import { createCitiesTable, createUsers2Table, createUserTable } from './schema2'; + +export function tests(test: Test, exclude: Set = new Set([])) { + test.beforeEach(async ({ task, skip }) => { + if (exclude.has(task.name)) skip(); + }); + + test.concurrent('orderBy with aliased column', ({ db }) => { + const users2 = createUserTable('users2_41'); + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as `test` from `users2_41` order by `test`'); + }); + + test.concurrent('timestamp timezone', async ({ db, push }) => { + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + const users = createUserTable('users_48'); + await push({ users }); + await db.insert(users).values({ name: 'With default times' }); + await db.insert(users).values({ + name: 'Without default times', + createdAt: date, + }); + const result = await db.select().from(users); + + // check that the timestamps are set correctly for default times + expect(Math.abs(result[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(result[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); + }); + + test('transaction', async ({ db, push }) => { + const users = mysqlTable('users_transactions_48', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = mysqlTable('products_transactions_48', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await push({ users, products }); + + const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + }); + + test('transaction with options (set isolationLevel)', async ({ db, push }) => { + const users = mysqlTable('users_transactions_49', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = mysqlTable('products_transactions_49', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await push({ users, products }); + + const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }, { isolationLevel: 'serializable' }); + + const result = await db.select().from(users); + expect(result).toEqual([{ id: 1, balance: 90 }]); + }); + + test('transaction rollback', async ({ db, push }) => { + const users = mysqlTable('users_transactions_rollback_50', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await push({ users }); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); + }); + + test('nested transaction', async ({ db, push }) => { + const users = mysqlTable('users_nested_transactions_51', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await push({ users }); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + }); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 200 }]); + }); + + test('nested transaction rollback', async ({ db, push }) => { + const users = mysqlTable('users_52', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await push({ users }); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await expect((async () => { + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 100 }]); + }); + + test.concurrent('join subquery with join', async ({ db, push }) => { + const internalStaff = mysqlTable('users_53_internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = mysqlTable('users_53_custom_user', { + id: int('id').notNull(), + }); + + const ticket = mysqlTable('users_53_ticket', { + staffId: int('staff_id').notNull(), + }); + + await push({ internalStaff, customUser, ticket }); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.users_53_internal_staff.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + users_53_ticket: { staffId: 1 }, + internal_staff: { + users_53_internal_staff: { userId: 1 }, + users_53_custom_user: { id: 1 }, + }, + }]); + }); + + test.concurrent('subquery with view', async ({ db, push }) => { + const users = mysqlTable('users_54', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = mysqlView('users_54_new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await push({ users, newYorkers }); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); + }); + + test.concurrent('join view as subquery', async ({ db, push }) => { + const users = mysqlTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = mysqlView('users_55_new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await push({ users, newYorkers }); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + }); + + test.concurrent('select iterator', async ({ db, push }) => { + const users = mysqlTable('users_iterator_1', { + id: serial('id').primaryKey(), + }); + + await push({ users }); + + await db.insert(users).values([{}, {}, {}]); + + const iter = db.select().from(users).iterator(); + + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test.concurrent('select iterator w/ prepared statement', async ({ db, push }) => { + const users = mysqlTable('users_iterator_2', { + id: serial('id').primaryKey(), + }); + + await push({ users }); + + await db.insert(users).values([{}, {}, {}]); + + const prepared = db.select().from(users).prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test.concurrent('insert undefined', async ({ db, push }) => { + const users = mysqlTable('users_58', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await push({ users }); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + }); + + test.concurrent('update undefined', async ({ db, push }) => { + const users = mysqlTable('users_59', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await push({ users }); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); + }); + + test.concurrent('utc config for datetime', async ({ db, push, client }) => { + const query = client.query; + const datesTable = mysqlTable('datestable_2', { + datetimeUTC: datetime('datetime_utc', { fsp: 3, mode: 'date' }), + datetime: datetime('datetime', { fsp: 3 }), + datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), + }); + + await push({ datesTable }); + + await query(`SET time_zone = '+00:00'`, []); + + const dateObj = new Date('2022-11-11'); + const dateUtc = new Date('2022-11-11T12:12:12.122Z'); + + await db.insert(datesTable).values({ + datetimeUTC: dateUtc, + datetime: dateObj, + datetimeAsString: '2022-11-11 12:12:12', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + datetimeUTC: dateUtc, + datetime: new Date('2022-11-11'), + datetimeAsString: '2022-11-11 12:12:12', + }]); + }); + + test('set operations (union) from query builder with subquery', async ({ db, push }) => { + const cities = createCitiesTable('cities_38'); + const users2 = createUsers2Table('users2_38', cities); + await push({ cities, users2 }); + + await db.insert(cities).values([ + { id: 1, name: 'Paris' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + + const sq = db + .select({ id: users2.id, name: users2.name }) + .from(users2).as('sq'); + + const result = await db + .select({ id: cities.id, name: cities.name }) + .from(cities).union( + db.select().from(sq), + ).limit(8); + + expect(result).toStrictEqual([ + { id: 1, name: 'Paris' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + ]); + + // union should throw if selected fields are not in the same order + await expect((async () => { + db + .select({ id: cities.id, name: cities.name }) + .from(cities).union( + db + .select({ name: users2.name, id: users2.id }) + .from(users2), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (union) as function', async ({ db, push }) => { + const cities = createCitiesTable('cities_39'); + const users2 = createUsers2Table('users2_39', cities); + await push({ cities, users2 }); + + await db.insert(cities).values([ + { id: 1, name: 'Paris' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + + const result = await union( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'Paris' }, + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + union( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ name: users2.name, id: users2.id }) + .from(users2).where(eq(users2.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test('set operations (union all) from query builder', async ({ db, push }) => { + const cities = createCitiesTable('cities_40'); + await push({ cities }); + + await db.insert(cities).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + const result = await db + .select({ id: cities.id, name: cities.name }) + .from(cities).limit(2).unionAll( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).limit(2), + ).orderBy(asc(sql`id`)).limit(3); + + expect(result).toStrictEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: cities.id, name: cities.name }) + .from(cities).limit(2).unionAll( + db + .select({ name: cities.name, id: cities.id }) + .from(cities).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (union all) as function', async ({ db, push }) => { + const cities = createCitiesTable('cities_41'); + const users2 = createUsers2Table('users2_41', cities); + await push({ cities, users2 }); + + await db.insert(cities).values([ + { id: 1, name: 'Paris' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + + const result = await unionAll( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ).limit(1); + + expect(result).toStrictEqual([ + { id: 1, name: 'Paris' }, + ]); + + await expect((async () => { + unionAll( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + db + .select({ name: users2.name, id: users2.id }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (intersect) from query builder', async ({ db, push }) => { + const cities = createCitiesTable('cities_42'); + await push({ cities }); + + await db.insert(cities).values([ + { id: 1, name: 'Paris' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + const result = await db + .select({ id: cities.id, name: cities.name }) + .from(cities).intersect( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(gt(cities.id, 1)), + ); + + expect(result).toStrictEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ name: cities.name, id: cities.id }) + .from(cities).intersect( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(gt(cities.id, 1)), + ); + })()).rejects.toThrowError(); + }); +} diff --git a/integration-tests/tests/mysql/mysql-common-4.ts b/integration-tests/tests/mysql/mysql-common-4.ts new file mode 100644 index 0000000000..8c9f6faaca --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common-4.ts @@ -0,0 +1,488 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { asc, avg, avgDistinct, count, countDistinct, eq, gt, gte, max, min, sql, sum, sumDistinct } from 'drizzle-orm'; +import { except, exceptAll, intersect, intersectAll, union } from 'drizzle-orm/mysql-core'; +import { expect } from 'vitest'; + +import type { Test } from './instrumentation'; +import { createAggregateTable, createCitiesTable, createUsers2Table } from './schema2'; + +export function tests(test: Test, exclude: Set = new Set([])) { + test.beforeEach(async ({ task, skip }) => { + if (exclude.has(task.name)) skip(); + }); + + test.concurrent('set operations (intersect) as function', async ({ db, push }) => { + const cities = createCitiesTable('cities_43'); + const users2 = createUsers2Table('users2_43', cities); + await push({ cities, users2 }); + + const result = await intersect( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ).limit(1); + + expect(result).toStrictEqual([]); + + await expect((async () => { + intersect( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ name: users2.name, id: users2.id }) + .from(users2).where(eq(users2.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (intersect all) from query builder', async ({ db, push, seed }) => { + const cities = createCitiesTable('cities_44'); + await push({ cities }); + + await seed( + { cities }, + (funcs) => ({ cities: { count: 3, columns: { name: funcs.city() } } }), + ); + + const result = await db + .select({ id: cities.id, name: cities.name }) + .from(cities).limit(2).intersectAll( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).limit(2), + ).orderBy(asc(sql`id`)); + + expect(result).toStrictEqual([ + { id: 1, name: 'Hoogvliet' }, + { id: 2, name: 'South Milwaukee' }, + ]); + + await expect((async () => { + db + .select({ id: cities.id, name: cities.name }) + .from(cities).limit(2).intersectAll( + db + .select({ name: cities.name, id: cities.id }) + .from(cities).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (intersect all) as function', async ({ db, push, seed }) => { + const cities = createCitiesTable('cities_45'); + const users2 = createUsers2Table('users2_45', cities); + await push({ cities, users2 }); + + await seed( + { cities, users2 }, + (funcs) => ({ + cities: { count: 3, columns: { name: funcs.city() } }, + users2: { count: 8 }, + }), + ); + + const result = await intersectAll( + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ); + + expect(result).toStrictEqual([ + { id: 1, name: 'Melina' }, + ]); + + await expect((async () => { + intersectAll( + db + .select({ name: users2.name, id: users2.id }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (except) from query builder', async ({ db, push, seed }) => { + const cities = createCitiesTable('cities_46'); + await push({ cities }); + + await seed( + { cities }, + (funcs) => ({ cities: { count: 3, columns: { name: funcs.city() } } }), + ); + + const result = await db + .select() + .from(cities).except( + db + .select() + .from(cities).where(gt(cities.id, 1)), + ); + + expect(result).toStrictEqual([ + { id: 1, name: 'Hoogvliet' }, + ]); + }); + + test.concurrent('set operations (except) as function', async ({ db, push, seed }) => { + const cities = createCitiesTable('cities_47'); + const users2 = createUsers2Table('users2_47', cities); + await push({ cities, users2 }); + + await seed( + { cities, users2 }, + (funcs) => ({ + cities: { count: 3, columns: { name: funcs.city() } }, + users2: { count: 8 }, + }), + ); + + const result = await except( + db + .select({ id: cities.id, name: cities.name }) + .from(cities), + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ).limit(3); + + expect(result).toStrictEqual([ + { id: 2, name: 'South Milwaukee' }, + { id: 3, name: 'Bou Hadjar' }, + ]); + + await expect((async () => { + except( + db + .select({ name: cities.name, id: cities.id }) + .from(cities), + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ).limit(3); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (except all) from query builder', async ({ db, push }) => { + const cities = createCitiesTable('cities_50'); + await push({ cities }); + await db.insert(cities).values([ + { id: 1, name: 'Paris' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + const result = await db + .select() + .from(cities).exceptAll( + db + .select({ id: cities.id, name: cities.name }) + .from(cities).where(eq(cities.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toStrictEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select() + .from(cities).exceptAll( + db + .select({ name: cities.name, id: cities.id }) + .from(cities).where(eq(cities.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (except all) as function', async ({ db, push }) => { + const cities = createCitiesTable('cities_51'); + const users2 = createUsers2Table('users2_51', cities); + await push({ cities, users2 }); + await db.insert(users2).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 7, name: 'Mary' }, + ]); + + const result = await exceptAll( + db + .select({ id: users2.id, name: users2.name }) + .from(users2), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(gt(users2.id, 7)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ).limit(6).orderBy(asc(sql.identifier('id'))); + + expect(result).toStrictEqual([ + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 7, name: 'Mary' }, + ]); + + await expect((async () => { + exceptAll( + db + .select({ name: users2.name, id: users2.id }) + .from(users2), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(gt(users2.id, 7)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + ).limit(6); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (mixed) from query builder', async ({ db, push }) => { + const cities = createCitiesTable('cities_52'); + await push({ cities }); + await db.insert(cities).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + const result = await db + .select() + .from(cities).except( + ({ unionAll }) => + unionAll( + db + .select() + .from(cities).where(gt(cities.id, 1)), + db.select().from(cities).where(eq(cities.id, 2)), + ).orderBy(asc(cities.id)).limit(1).offset(1), + ); + + expect(result).toStrictEqual([ + { id: 1, name: 'New York' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select() + .from(cities).except( + ({ unionAll }) => + unionAll( + db + .select({ name: cities.name, id: cities.id }) + .from(cities).where(gt(cities.id, 1)), + db.select().from(cities).where(eq(cities.id, 2)), + ), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (mixed all) as function with subquery', async ({ db, push }) => { + const cities = createCitiesTable('cities_48'); + const users2 = createUsers2Table('users2_48', cities); + await push({ cities, users2 }); + await db.insert(cities).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + await db.insert(users2).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 7, name: 'Mary' }, + ]); + + const sq = except( + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(gte(users2.id, 5)), + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 7)), + ).orderBy(asc(sql.identifier('id'))).as('sq'); + + const result = await union( + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + db.select().from(sq).limit(1), + db + .select().from(cities).where(gt(cities.id, 1)), + ); + + expect(result).toHaveLength(4); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + { id: 5, name: 'Ben' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + union( + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(eq(users2.id, 1)), + except( + db + .select({ id: users2.id, name: users2.name }) + .from(users2).where(gte(users2.id, 5)), + db + .select({ name: users2.name, id: users2.id }) + .from(users2).where(eq(users2.id, 7)), + ).limit(1), + db + .select().from(cities).where(gt(cities.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('aggregate function: count', async ({ db, push }) => { + const aggregateTable = createAggregateTable('aggregate_table_1'); + + await push({ aggregateTable }); + await db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); + + const result1 = await db.select({ value: count() }).from(aggregateTable); + const result2 = await db.select({ value: count(aggregateTable.a) }).from(aggregateTable); + const result3 = await db.select({ value: countDistinct(aggregateTable.name) }).from(aggregateTable); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(5); + expect(result3[0]?.value).toBe(6); + }); + + test.concurrent('aggregate function: avg', async ({ db, push }) => { + const aggregateTable = createAggregateTable('aggregate_table_2'); + + await push({ aggregateTable }); + await db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); + const result1 = await db.select({ value: avg(aggregateTable.b) }).from(aggregateTable); + const result2 = await db.select({ value: avg(aggregateTable.nullOnly) }).from(aggregateTable); + const result3 = await db.select({ value: avgDistinct(aggregateTable.b) }).from(aggregateTable); + + expect(result1[0]?.value).toBe('33.3333'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('42.5000'); + }); + + test.concurrent('aggregate function: sum', async ({ db, push }) => { + const table = createAggregateTable('aggregate_table_3'); + await push({ table }); + await db.insert(table).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('170'); + }); + + test.concurrent('aggregate function: max', async ({ db, push }) => { + const aggregateTable = createAggregateTable('aggregate_table_4'); + await push({ table: aggregateTable }); + await db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); + + const result1 = await db.select({ value: max(aggregateTable.b) }).from(aggregateTable); + const result2 = await db.select({ value: max(aggregateTable.nullOnly) }).from(aggregateTable); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBe(null); + }); + + test.concurrent('aggregate function: min', async ({ db, push }) => { + const aggregateTable = createAggregateTable('aggregate_table_5'); + await push({ table: aggregateTable }); + await db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); + + const result1 = await db.select({ value: min(aggregateTable.b) }).from(aggregateTable); + const result2 = await db.select({ value: min(aggregateTable.nullOnly) }).from(aggregateTable); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBe(null); + }); +} diff --git a/integration-tests/tests/mysql/mysql-common-5.ts b/integration-tests/tests/mysql/mysql-common-5.ts new file mode 100644 index 0000000000..63fff3a105 --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common-5.ts @@ -0,0 +1,370 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { eq, sql } from 'drizzle-orm'; +import { alias, getViewConfig, int, mysqlTable, serial, text } from 'drizzle-orm/mysql-core'; +import { describe, expect } from 'vitest'; +import type { Test } from './instrumentation'; +import { citiesMySchemaTable, mySchema, users2MySchemaTable, usersMySchemaTable } from './schema2'; + +async function setupReturningFunctionsTest(batch: (s: string[]) => Promise) { + await batch([`drop table if exists \`users_default_fn\``]); + await batch([`create table \`users_default_fn\` ( + \`id\` varchar(256) primary key, + \`name\` text not null + );`]); +} + +export function tests(test: Test, exclude: Set = new Set([])) { + describe('mySchema_tests', () => { + test.beforeEach(async ({ task, skip, db }) => { + if (exclude.has(task.name) || (task.suite?.name && exclude.has(task.suite.name))) skip(); + await db.execute(sql`drop schema if exists \`mySchema\``); + await db.execute(sql`create schema if not exists \`mySchema\``); + + await db.execute( + sql` + create table \`mySchema\`.\`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.execute( + sql` + create table \`mySchema\`.\`cities\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table \`mySchema\`.\`users2\` ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int references \`mySchema\`.\`cities\`(\`id\`) + ) + `, + ); + }); + // mySchema tests + test('mySchema :: select all fields', async ({ db, push }) => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: select sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select typed sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test('mySchema :: select distinct', async ({ db }) => { + const usersDistinctTable = mysqlTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test('mySchema :: insert returning sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + const [result, _] = await db.insert(usersMySchemaTable).values({ name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test('mySchema :: delete returning sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test('mySchema :: update with returning partial', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( + eq(usersMySchemaTable.name, 'John'), + ); + + const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ) + .where( + eq(usersMySchemaTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test('mySchema :: delete with returning all fields', async ({ db }) => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test('mySchema :: insert + select', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersMySchemaTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersMySchemaTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test('mySchema :: insert with overridden default values', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersMySchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test('mySchema :: insert many', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + jsonb: usersMySchemaTable.jsonb, + verified: usersMySchemaTable.verified, + }).from(usersMySchemaTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test('mySchema :: select with group by as field', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.name); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test('mySchema :: select with group by as column + sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test('mySchema :: build query', async ({ db }) => { + const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: + `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`mySchema\`.\`userstest\`.\`id\`, \`mySchema\`.\`userstest\`.\`name\``, + params: [], + }); + }); + + test('mySchema :: insert with spaces', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test('mySchema :: prepared statement with placeholder in .where', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const stmt = db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }).from(usersMySchemaTable) + .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test('mySchema :: select from tables with same name from different schema using alias', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.execute(sql`drop table if exists \`userstest\``); + await db.execute( + sql` + create table \`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); + await db.insert(usersMySchemaTable).values({ id: 11, name: 'Hans' }); + + const customerAlias = alias(usersMySchemaTable, 'customer'); + + const result = await db + .select().from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersMySchemaTable.id, 10)); + + expect(result).toEqual([{ + userstest: { + id: 10, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.userstest.createdAt, + }, + customer: { + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]!.customer!.createdAt, + }, + }]); + }); + + test('mySchema :: view', async ({ db }) => { + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); + }); +} diff --git a/integration-tests/tests/mysql/mysql-common-6.ts b/integration-tests/tests/mysql/mysql-common-6.ts new file mode 100644 index 0000000000..4e1cdbef4f --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common-6.ts @@ -0,0 +1,847 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { eq, gt, like, not, sql } from 'drizzle-orm'; +import { int, mysqlTable, serial, text, varchar } from 'drizzle-orm/mysql-core'; +import { expect, expectTypeOf } from 'vitest'; +import type { Test } from './instrumentation'; +import { rqbPost, rqbUser } from './schema'; +import { createCitiesTable, createCountTestTable, createUsers2Table, createUserTable } from './schema2'; + +export function tests(test: Test, exclude: Set = new Set([])) { + let firstTime = true; + let resolveValue: (val: any) => void; + const promise = new Promise((resolve) => { + resolveValue = resolve; + }); + test.beforeEach(async ({ task, skip, client, db }) => { + if (firstTime) { + firstTime = false; + + await client.batch([ + `CREATE TABLE \`user_rqb_test\` ( + \`id\` SERIAL PRIMARY KEY, + \`name\` TEXT NOT NULL, + \`created_at\` TIMESTAMP NOT NULL + );`, + `CREATE TABLE \`post_rqb_test\` ( + \`id\` SERIAL PRIMARY KEY, + \`user_id\` BIGINT(20) UNSIGNED NOT NULL, + \`content\` TEXT, + \`created_at\` TIMESTAMP NOT NULL + );`, + `CREATE TABLE \`empty\` (\`id\` int);`, + ]); + + const date = new Date(120000); + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + resolveValue(''); + } + + await promise; + if (exclude.has(task.name)) skip(); + }); + + // .sequential is needed for beforeEach to be executed before all tests + test.concurrent('insert $returningId: serial as id', async ({ db, push }) => { + const users = createUserTable('users_60'); + await push({ users }); + const result = await db.insert(users).values({ name: 'John' }).$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + expect(result).toStrictEqual([{ id: 1 }]); + }); + + test.concurrent('insert $returningId: serial as id, not first column', async ({ db, push }) => { + const usersTableDefNotFirstColumn = mysqlTable('users2_52', { + name: text('name').notNull(), + id: serial('id').primaryKey(), + }); + + await push({ usersTableDefNotFirstColumn }); + + const result = await db.insert(usersTableDefNotFirstColumn).values({ name: 'John' }).$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + expect(result).toStrictEqual([{ id: 1 }]); + }); + + test.concurrent('insert $returningId: serial as id, batch insert', async ({ db, push }) => { + const users = createUserTable('users_61'); + await push({ users }); + + const result = await db.insert(users).values([{ name: 'John' }, { name: 'John1' }]).$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + expect(result).toStrictEqual([{ id: 1 }, { id: 2 }]); + }); + + test.concurrent('insert $returningId: $default as primary key', async ({ db, push }) => { + const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; + let iterator = 0; + + const usersTableDefFn = mysqlTable('users_default_fn_1', { + customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { + const value = uniqueKeys[iterator]!; + iterator++; + return value; + }), + name: text('name').notNull(), + }); + + await push({ usersTableDefFn }); + + const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) + // ^? + .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + customId: string; + }[]>(); + + expect(result).toStrictEqual([{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { + customId: 'dyqs529eom0iczo2efxzbcut', + }]); + }); + + test.concurrent('insert $returningId: $default as primary key with value', async ({ db, push }) => { + const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; + let iterator = 0; + + const usersTableDefFn = mysqlTable('users_default_fn_2', { + customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { + const value = uniqueKeys[iterator]!; + iterator++; + return value; + }), + name: text('name').notNull(), + }); + + await push({ usersTableDefFn }); + + const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) + // ^? + .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + customId: string; + }[]>(); + + expect(result).toStrictEqual([{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); + }); + + test.concurrent('$count separate', async ({ db, push }) => { + const countTestTable = createCountTestTable('count_test_1'); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.$count(countTestTable); + + expect(count).toStrictEqual(4); + }); + + test.concurrent('$count embedded', async ({ db, push }) => { + const countTestTable = createCountTestTable('count_test_2'); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.select({ + count: db.$count(countTestTable), + }).from(countTestTable); + + expect(count).toStrictEqual([ + { count: 4 }, + { count: 4 }, + { count: 4 }, + { count: 4 }, + ]); + }); + + test.concurrent('$count separate reuse', async ({ db, push }) => { + const countTestTable = createCountTestTable('count_test_3'); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = db.$count(countTestTable); + + const count1 = await count; + + await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); + + const count2 = await count; + + await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); + + const count3 = await count; + + expect(count1).toStrictEqual(4); + expect(count2).toStrictEqual(5); + expect(count3).toStrictEqual(6); + }); + + test.concurrent('$count embedded reuse', async ({ db, push }) => { + const countTestTable = createCountTestTable('count_test_4'); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = db.select({ + count: db.$count(countTestTable), + }).from(countTestTable); + + const count1 = await count; + + await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); + + const count2 = await count; + + await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); + + const count3 = await count; + + expect(count1).toStrictEqual([ + { count: 4 }, + { count: 4 }, + { count: 4 }, + { count: 4 }, + ]); + expect(count2).toStrictEqual([ + { count: 5 }, + { count: 5 }, + { count: 5 }, + { count: 5 }, + { count: 5 }, + ]); + expect(count3).toStrictEqual([ + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + ]); + }); + + test.concurrent('$count separate with filters', async ({ db, push }) => { + const countTestTable = createCountTestTable('count_test_5'); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.$count(countTestTable, gt(countTestTable.id, 1)); + + expect(count).toStrictEqual(3); + }); + + test.concurrent('$count embedded with filters', async ({ db, push }) => { + const countTestTable = createCountTestTable('count_test_6'); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.select({ + count: db.$count(countTestTable, gt(countTestTable.id, 1)), + }).from(countTestTable); + + expect(count).toStrictEqual([ + { count: 3 }, + { count: 3 }, + { count: 3 }, + { count: 3 }, + ]); + }); + + test.concurrent('limit 0', async ({ db, push }) => { + const users = createUserTable('users_62'); + await push({ users }); + await db.insert(users).values({ name: 'John' }); + + const result = await db + .select() + .from(users) + .limit(0); + + expect(result).toEqual([]); + }); + + test.concurrent('limit -1', async ({ db, push }) => { + const users = createUserTable('users_631'); + await push({ users }); + await db.insert(users).values({ name: 'John' }); + + const result = await db + .select() + .from(users) + .limit(-1); + + expect(result.length).toBeGreaterThan(0); + }); + + test.concurrent('cross join', async ({ db, push, seed }) => { + const users = createUserTable('users_63'); + const cities = createCitiesTable('cities_63'); + + await push({ users, cities }); + await seed({ users, cities }, (funcs) => ({ + users: { count: 2, columns: { name: funcs.firstName() } }, + cities: { count: 2, columns: { name: funcs.city() } }, + })); + + const result = await db + .select({ + user: users.name, + city: cities.name, + }) + .from(users) + .crossJoin(cities) + .orderBy(users.name, cities.name); + + expect(result).toStrictEqual([ + { city: 'Hoogvliet', user: 'Agripina' }, + { city: 'South Milwaukee', user: 'Agripina' }, + { city: 'Hoogvliet', user: 'Candy' }, + { city: 'South Milwaukee', user: 'Candy' }, + ]); + }); + + test.concurrent('left join (lateral)', async ({ db, push }) => { + const cities = createCitiesTable('cities_64'); + const users2 = createUsers2Table('users2_64', cities); + + await push({ cities, users2 }); + + await db + .insert(cities) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const sq = db + .select({ + userId: users2.id, + userName: users2.name, + cityId: users2.cityId, + }) + .from(users2) + .where(eq(users2.cityId, cities.id)) + .as('sq'); + + const res = await db + .select({ + cityId: cities.id, + cityName: cities.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(cities) + .leftJoinLateral(sq, sql`true`); + + expect(res).toStrictEqual([ + { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, + { cityId: 2, cityName: 'London', userId: null, userName: null }, + ]); + }); + + test.concurrent('inner join (lateral)', async ({ db, push }) => { + const cities = createCitiesTable('cities_65'); + const users2 = createUsers2Table('users2_65', cities); + + await push({ cities, users2 }); + + await db.insert(cities).values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + await db.insert(users2).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const sq = db + .select({ + userId: users2.id, + userName: users2.name, + cityId: users2.cityId, + }) + .from(users2) + .where(eq(users2.cityId, cities.id)) + .as('sq'); + + const res = await db + .select({ + cityId: cities.id, + cityName: cities.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(cities) + .innerJoinLateral(sq, sql`true`); + + expect(res).toStrictEqual([ + { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, + ]); + }); + + test.concurrent('cross join (lateral)', async ({ db, push }) => { + const cities = createCitiesTable('cities_66'); + const users2 = createUsers2Table('users2_66', cities); + + await push({ cities, users2 }); + + await db + .insert(cities) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }, { id: 3, name: 'Berlin' }]); + + await db.insert(users2).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }, { + name: 'Patrick', + cityId: 2, + }]); + + const sq = db + .select({ + userId: users2.id, + userName: users2.name, + cityId: users2.cityId, + }) + .from(users2) + .where(not(like(cities.name, 'L%'))) + .as('sq'); + + const res = await db + .select({ + cityId: cities.id, + cityName: cities.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(cities) + .crossJoinLateral(sq) + .orderBy(cities.id, sq.userId); + + expect(res).toStrictEqual([ + { + cityId: 1, + cityName: 'Paris', + userId: 1, + userName: 'John', + }, + { + cityId: 1, + cityName: 'Paris', + userId: 2, + userName: 'Jane', + }, + { + cityId: 1, + cityName: 'Paris', + userId: 3, + userName: 'Patrick', + }, + { + cityId: 3, + cityName: 'Berlin', + userId: 1, + userName: 'John', + }, + { + cityId: 3, + cityName: 'Berlin', + userId: 2, + userName: 'Jane', + }, + { + cityId: 3, + cityName: 'Berlin', + userId: 3, + userName: 'Patrick', + }, + ]); + }); + + test.concurrent('RQB v2 simple find first - no rows', async ({ db }) => { + const result = await db.query.empty.findFirst(); + expect(result).toStrictEqual(undefined); + }); + + test.concurrent('RQB v2 simple find first - multiple rows', async ({ db }) => { + const result = await db.query.rqbUser.findFirst({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: new Date(120000), + name: 'Second', + }); + }); + + test.concurrent('RQB v2 simple find first - with relation', async ({ db }) => { + const result = await db.query.rqbUser.findFirst({ + with: { + posts: { + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual({ + id: 1, + createdAt: new Date(120000), + name: 'First', + posts: [{ + id: 1, + userId: 1, + createdAt: new Date(120000), + content: null, + }, { + id: 2, + userId: 1, + createdAt: new Date(120000), + content: 'Has message this time', + }], + }); + }); + + test('RQB v2 simple find first - placeholders', async ({ db }) => { + const query = db.query.rqbUser.findFirst({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: new Date(120000), + name: 'Second', + }); + }); + + test('RQB v2 simple find many - no rows', async ({ db }) => { + const result = await db.query.empty.findMany(); + + expect(result).toStrictEqual([]); + }); + + test('RQB v2 simple find many - multiple rows', async ({ db }) => { + const result = await db.query.rqbUser.findMany({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: new Date(120000), + name: 'Second', + }, { + id: 1, + createdAt: new Date(120000), + name: 'First', + }]); + }); + + test('RQB v2 simple find many - with relation', async ({ db }) => { + const result = await db.query.rqbPost.findMany({ + with: { + author: true, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual([{ + id: 1, + userId: 1, + createdAt: new Date(120000), + content: null, + author: { + id: 1, + createdAt: new Date(120000), + name: 'First', + }, + }, { + id: 2, + userId: 1, + createdAt: new Date(120000), + content: 'Has message this time', + author: { + id: 1, + createdAt: new Date(120000), + name: 'First', + }, + }]); + }); + + test('RQB v2 simple find many - placeholders', async ({ db }) => { + const query = db.query.rqbUser.findMany({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: new Date(120000), + name: 'Second', + }]); + }); + + test('RQB v2 transaction find first - no rows', async ({ db }) => { + await db.transaction(async (db) => { + const result = await db.query.empty.findFirst(); + + expect(result).toStrictEqual(undefined); + }); + }); + + test('RQB v2 transaction find first - multiple rows', async ({ db }) => { + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findFirst({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: new Date(120000), + name: 'Second', + }); + }); + }); + + test('RQB v2 transaction find first - with relation', async ({ db }) => { + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findFirst({ + with: { + posts: { + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual({ + id: 1, + createdAt: new Date(120000), + name: 'First', + posts: [{ + id: 1, + userId: 1, + createdAt: new Date(120000), + content: null, + }, { + id: 2, + userId: 1, + createdAt: new Date(120000), + content: 'Has message this time', + }], + }); + }); + }); + + test('RQB v2 transaction find first - placeholders', async ({ db }) => { + await db.transaction(async (db) => { + const query = db.query.rqbUser.findFirst({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: new Date(120000), + name: 'Second', + }); + }); + }); + + test('RQB v2 transaction find many - no rows', async ({ db }) => { + await db.transaction(async (db) => { + const result = await db.query.empty.findMany(); + + expect(result).toStrictEqual([]); + }); + }); + + test('RQB v2 transaction find many - multiple rows', async ({ db }) => { + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findMany({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: new Date(120000), + name: 'Second', + }, { + id: 1, + createdAt: new Date(120000), + name: 'First', + }]); + }); + }); + + test('RQB v2 transaction find many - with relation', async ({ db }) => { + await db.transaction(async (db) => { + const result = await db.query.rqbPost.findMany({ + with: { + author: true, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual([{ + id: 1, + userId: 1, + createdAt: new Date(120000), + content: null, + author: { + id: 1, + createdAt: new Date(120000), + name: 'First', + }, + }, { + id: 2, + userId: 1, + createdAt: new Date(120000), + content: 'Has message this time', + author: { + id: 1, + createdAt: new Date(120000), + name: 'First', + }, + }]); + }); + }); + + test('RQB v2 transaction find many - placeholders', async ({ db }) => { + await db.transaction(async (db) => { + const query = db.query.rqbUser.findMany({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: new Date(120000), + name: 'Second', + }]); + }); + }); +} diff --git a/integration-tests/tests/mysql/mysql-common-7.ts b/integration-tests/tests/mysql/mysql-common-7.ts new file mode 100644 index 0000000000..8f141095d5 --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common-7.ts @@ -0,0 +1,954 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { and, asc, count, eq, inArray, sql } from 'drizzle-orm'; +import { + bigint, + getTableConfig, + index, + int, + mysqlTable, + mysqlView, + primaryKey, + serial, + text, + timestamp, + varchar, +} from 'drizzle-orm/mysql-core'; +import { expect, expectTypeOf } from 'vitest'; +import type { Test } from './instrumentation'; +import { allTypesTable, createCitiesTable, createUsers2Table } from './schema2'; + +export function tests(test: Test, exclude: Set = new Set([])) { + test.beforeEach(async ({ task, skip }) => { + if (exclude.has(task.name)) skip(); + }); + + test('select from a many subquery', async ({ db, push }) => { + const citiesTable = createCitiesTable('cities_many_subquery'); + const users2Table = createUsers2Table('users_2_many_subquery', citiesTable); + + await push({ citiesTable, users2Table }); + + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 2 }, + ]); + + const res = await db.select({ + population: db.select({ count: count().as('count') }).from(users2Table).where( + eq(users2Table.cityId, citiesTable.id), + ).as( + 'population', + ), + name: citiesTable.name, + }).from(citiesTable); + + expectTypeOf(res).toEqualTypeOf< + { + population: number; + name: string; + }[] + >(); + + expect(res).toStrictEqual([{ + population: 1, + name: 'Paris', + }, { + population: 2, + name: 'London', + }]); + }); + + test('select from a one subquery', async ({ db, push }) => { + const citiesTable = createCitiesTable('cities_one_subquery'); + const users2Table = createUsers2Table('users_2_one_subquery', citiesTable); + + await push({ citiesTable, users2Table }); + + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 2 }, + ]); + + const res = await db.select({ + cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) + .as( + 'cityName', + ), + name: users2Table.name, + }).from(users2Table); + + expectTypeOf(res).toEqualTypeOf< + { + cityName: string; + name: string; + }[] + >(); + + expect(res).toStrictEqual([{ + cityName: 'Paris', + name: 'John', + }, { + cityName: 'London', + name: 'Jane', + }, { + cityName: 'London', + name: 'Jack', + }]); + }); + + test('test $onUpdateFn and $onUpdate works with sql value', async ({ db, push }) => { + const users = mysqlTable('users_on_update', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updatedAt: timestamp('updated_at', { + fsp: 6, + }) + .notNull() + .$onUpdate(() => sql`current_timestamp`), + }); + + await push({ users }); + + await db.insert(users).values({ + name: 'John', + }); + const insertResp = await db.select({ updatedAt: users.updatedAt }).from(users); + await new Promise((resolve) => setTimeout(resolve, 1000)); + + const now = Date.now(); + await new Promise((resolve) => setTimeout(resolve, 1000)); + await db.update(users).set({ + name: 'John', + }); + const updateResp = await db.select({ updatedAt: users.updatedAt }).from(users); + + expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); + expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); + }); + + test.concurrent('all types', async ({ db, push }) => { + await push({ allTypesTable }); + + await db.insert(allTypesTable).values({ + serial: 1, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + binary: '1', + boolean: true, + char: 'c', + date: new Date(1741743161623), + dateStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + datetime: new Date(1741743161623), + datetimeStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + decimal: '47521', + decimalNum: 9007199254740991, + decimalBig: 5044565289845416380n, + double: 15.35325689124218, + enum: 'enV1', + float: 1.048596, + real: 1.048596, + text: 'C4-', + int: 621, + json: { + str: 'strval', + arr: ['str', 10], + }, + medInt: 560, + smallInt: 14, + time: '04:13:22', + timestamp: new Date(1741743161623), + timestampStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + tinyInt: 7, + varbin: '1010110101001101', + varchar: 'VCHAR', + year: 2025, + blob: Buffer.from('string'), + longblob: Buffer.from('string'), + mediumblob: Buffer.from('string'), + tinyblob: Buffer.from('string'), + stringblob: 'string', + stringlongblob: 'string', + stringmediumblob: 'string', + stringtinyblob: 'string', + }); + + const rawRes = await db.select().from(allTypesTable); + + type ExpectedType = { + serial: number; + bigint53: number | null; + bigint64: bigint | null; + bigintString: string | null; + binary: string | null; + boolean: boolean | null; + char: string | null; + date: Date | null; + dateStr: string | null; + datetime: Date | null; + datetimeStr: string | null; + decimal: string | null; + decimalNum: number | null; + decimalBig: bigint | null; + double: number | null; + float: number | null; + int: number | null; + json: unknown; + medInt: number | null; + smallInt: number | null; + real: number | null; + text: string | null; + time: string | null; + timestamp: Date | null; + timestampStr: string | null; + tinyInt: number | null; + varbin: string | null; + varchar: string | null; + year: number | null; + enum: 'enV1' | 'enV2' | null; + blob: Buffer | null; + tinyblob: Buffer | null; + mediumblob: Buffer | null; + longblob: Buffer | null; + stringblob: string | null; + stringtinyblob: string | null; + stringmediumblob: string | null; + stringlongblob: string | null; + }[]; + + const expectedRes: ExpectedType = [ + { + serial: 1, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + binary: '1', + boolean: true, + char: 'c', + date: new Date('2025-03-12T00:00:00.000Z'), + dateStr: '2025-03-12', + datetime: new Date('2025-03-12T01:32:42.000Z'), + datetimeStr: '2025-03-12 01:32:41', + decimal: '47521', + decimalNum: 9007199254740991, + decimalBig: 5044565289845416380n, + double: 15.35325689124218, + float: 1.0486, + int: 621, + json: { arr: ['str', 10], str: 'strval' }, + medInt: 560, + smallInt: 14, + real: 1.048596, + text: 'C4-', + time: '04:13:22', + timestamp: new Date('2025-03-12T01:32:42.000Z'), + timestampStr: '2025-03-12 01:32:41', + tinyInt: 7, + varbin: '1010110101001101', + varchar: 'VCHAR', + year: 2025, + enum: 'enV1', + blob: Buffer.from('string'), + longblob: Buffer.from('string'), + mediumblob: Buffer.from('string'), + tinyblob: Buffer.from('string'), + stringblob: 'string', + stringlongblob: 'string', + stringmediumblob: 'string', + stringtinyblob: 'string', + }, + ]; + + expectTypeOf(rawRes).toEqualTypeOf(); + expect(rawRes).toStrictEqual(expectedRes); + }); + + test.concurrent('insert into ... select', async ({ db, push }) => { + const notifications = mysqlTable('notifications', { + id: int('id').primaryKey().autoincrement(), + sentAt: timestamp('sent_at').notNull().defaultNow(), + message: text('message').notNull(), + }); + const users = mysqlTable('users_64', { + id: int('id').primaryKey().autoincrement(), + name: text('name').notNull(), + }); + const userNotications = mysqlTable('user_notifications', { + userId: int('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), + notificationId: int('notification_id').notNull().references(() => notifications.id, { onDelete: 'cascade' }), + }, (t) => [primaryKey({ columns: [t.userId, t.notificationId] })]); + + await push({ notifications, users, userNotications }); + + await db + .insert(notifications) + .values({ message: 'You are one of the 3 lucky winners!' }); + const newNotification = await db + .select({ id: notifications.id }) + .from(notifications) + .then((result) => result[0]); + + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + await db + .insert(userNotications) + .select( + db + .select({ + userId: users.id, + notificationId: sql`(${newNotification!.id})`.as('notification_id'), + }) + .from(users) + .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) + .orderBy(asc(users.id)), + ); + const sentNotifications = await db.select().from(userNotications); + + expect(sentNotifications).toStrictEqual([ + { userId: 1, notificationId: newNotification!.id }, + { userId: 3, notificationId: newNotification!.id }, + { userId: 5, notificationId: newNotification!.id }, + ]); + }); + + test.concurrent('insert into ... select with keys in different order', async ({ db, push }) => { + const users1 = mysqlTable('users_65', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2 = mysqlTable('users_66', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users1, users2 }); + + expect( + () => + db + .insert(users1) + .select( + db + .select({ + name: users2.name, + id: users2.id, + }) + .from(users2), + ), + ).toThrowError(); + }); + + test.concurrent('MySqlTable :: select with `use index` hint', async ({ db, push }) => { + const users = mysqlTable('users_67', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }, () => [usersTableNameIndex]); + const usersTableNameIndex = index('users_name_index_67').on(users.name); + + await push({ users }); + + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + const result = await db.select() + .from(users, { + useIndex: [usersTableNameIndex], + }) + .where(eq(users.name, 'David')); + + expect(result).toHaveLength(1); + expect(result).toEqual([{ id: 4, name: 'David' }]); + }); + + test.concurrent('MySqlTable :: select with `use index` hint on 1 index', async ({ db, push }) => { + const users = mysqlTable('users_68', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }, () => [usersTableNameIndex]); + const usersTableNameIndex = index('users_name_index_68').on(users.name); + + await push({ users }); + + const query = db.select() + .from(users, { + useIndex: usersTableNameIndex, + }) + .where(eq(users.name, 'David')) + .toSQL(); + + expect(query.sql).to.include('USE INDEX (users_name_index_68)'); + }); + + test.concurrent('MySqlTable :: select with `use index` hint on multiple indexes', async ({ db, push }) => { + const users = mysqlTable('users_69', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + age: int('age').notNull(), + }, () => [usersTableNameIndex, usersTableAgeIndex]); + const usersTableNameIndex = index('users_name_index_69').on(users.name); + const usersTableAgeIndex = index('users_age_index_69').on(users.age); + + await push({ users }); + + const query = db.select() + .from(users, { + useIndex: [usersTableNameIndex, usersTableAgeIndex], + }) + .where(eq(users.name, 'David')) + .toSQL(); + + expect(query.sql).to.include('USE INDEX (users_name_index_69, users_age_index_69)'); + }); + + test.concurrent('MySqlTable :: select with `use index` hint on not existed index', async ({ db, push }) => { + const users = mysqlTable('users_70', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }, () => [usersTableNameIndex]); + const usersTableNameIndex = index('users_name_index_70').on(users.name); + + await push({ users }); + + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + await expect((async () => { + return await db.select() + .from(users, { + useIndex: ['some_other_index'], + }) + .where(eq(users.name, 'David')); + })()).rejects.toThrowError(); + }); + + test.concurrent( + 'MySqlTable :: select with `use index` + `force index` incompatible hints', + async ({ db, push }) => { + const users = mysqlTable('users_71', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + age: int('age').notNull(), + }, () => [usersTableNameIndex, usersTableAgeIndex]); + const usersTableNameIndex = index('users_name_index_71').on(users.name); + const usersTableAgeIndex = index('users_age_index_71').on(users.age); + + await push({ users }); + + await db.insert(users).values([ + { name: 'Alice', age: 18 }, + { name: 'Bob', age: 19 }, + { name: 'Charlie', age: 20 }, + { name: 'David', age: 21 }, + { name: 'Eve', age: 22 }, + ]); + + await expect((async () => { + return await db.select() + .from(users, { + useIndex: [usersTableNameIndex], + forceIndex: [usersTableAgeIndex], + }) + .where(eq(users.name, 'David')); + })()).rejects.toThrowError(); + }, + ); + + test.concurrent('MySqlTable :: select with join `use index` hint', async ({ db, push }) => { + const users = mysqlTable('users_72', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_72', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { onDelete: 'cascade' }) + .notNull(), + }, () => [postsTableUserIdIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_72').on(posts.userId); + + await push({ users, posts }); + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + await db.insert(posts).values([ + { text: 'Alice post', userId: 1 }, + { text: 'Bob post', userId: 2 }, + { text: 'Charlie post', userId: 3 }, + { text: 'David post', userId: 4 }, + { text: 'Eve post', userId: 5 }, + ]); + + const result = await db.select({ + userId: users.id, + name: users.name, + postId: posts.id, + text: posts.text, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.userId), { + useIndex: [postsTableUserIdIndex], + }) + .where(and( + eq(users.name, 'David'), + eq(posts.text, 'David post'), + )); + + expect(result).toHaveLength(1); + expect(result).toEqual([{ userId: 4, name: 'David', postId: 4, text: 'David post' }]); + }); + + test.concurrent('MySqlTable :: select with join `use index` hint on 1 index', async ({ db, push }) => { + const users = mysqlTable('users_73', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_73', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { onDelete: 'cascade' }) + .notNull(), + }, () => [postsTableUserIdIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_73').on(posts.userId); + + await push({ users, posts }); + + const query = db.select({ + userId: users.id, + name: users.name, + postId: posts.id, + text: posts.text, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.userId), { + useIndex: postsTableUserIdIndex, + }) + .where(and( + eq(users.name, 'David'), + eq(posts.text, 'David post'), + )).toSQL(); + + expect(query.sql).to.include('USE INDEX (posts_user_id_index_73)'); + }); + + test.concurrent('MySqlTable :: select with cross join `use index` hint', async ({ db, push }) => { + const users = mysqlTable('users_74', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_74', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { onDelete: 'cascade' }) + .notNull(), + }, () => [postsTableUserIdIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_74').on(posts.userId); + + await push({ users, posts }); + + await db.insert(users).values([ + { id: 1, name: 'Alice' }, + { id: 2, name: 'Bob' }, + ]); + + await db.insert(posts).values([ + { id: 1, text: 'Alice post', userId: 1 }, + { id: 2, text: 'Bob post', userId: 2 }, + ]); + + const result = await db.select() + .from(users) + .crossJoin(posts, { + useIndex: [postsTableUserIdIndex], + }) + .orderBy(users.id, posts.id); + + expect(result).toStrictEqual([{ + users_74: { id: 1, name: 'Alice' }, + posts_74: { id: 1, text: 'Alice post', userId: 1 }, + }, { + users_74: { id: 1, name: 'Alice' }, + posts_74: { id: 2, text: 'Bob post', userId: 2 }, + }, { + users_74: { id: 2, name: 'Bob' }, + posts_74: { id: 1, text: 'Alice post', userId: 1 }, + }, { + users_74: { id: 2, name: 'Bob' }, + posts_74: { id: 2, text: 'Bob post', userId: 2 }, + }]); + }); + + test.concurrent('MySqlTable :: select with cross join `use index` hint on 1 index', async ({ db, push }) => { + const users = mysqlTable('users_75', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_75', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { onDelete: 'cascade' }) + .notNull(), + }, () => [postsTableUserIdIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_75').on(posts.userId); + + await push({ users, posts }); + + const query = db.select({ + userId: users.id, + name: users.name, + postId: posts.id, + text: posts.text, + }) + .from(users) + .crossJoin(posts, { + useIndex: postsTableUserIdIndex, + }) + .where(and( + eq(users.name, 'David'), + eq(posts.text, 'David post'), + )).toSQL(); + + expect(query.sql).to.include('USE INDEX (posts_user_id_index_75)'); + }); + + test.concurrent('MySqlTable :: select with join `use index` hint on multiple indexes', async ({ db, push }) => { + const users = mysqlTable('users_76', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_76', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { onDelete: 'cascade' }) + .notNull(), + }, () => [postsTableUserIdIndex, postsTableTextIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_76').on(posts.userId); + const postsTableTextIndex = index('posts_text_index_76').on(posts.text); + + await push({ users, posts }); + + const query = db.select({ + userId: users.id, + name: users.name, + postId: posts.id, + text: posts.text, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.userId), { + useIndex: [postsTableUserIdIndex, postsTableTextIndex], + }) + .where(and( + eq(users.name, 'David'), + eq(posts.text, 'David post'), + )).toSQL(); + + expect(query.sql).to.include('USE INDEX (posts_user_id_index_76, posts_text_index_76)'); + }); + + test.concurrent('MySqlTable :: select with join `use index` hint on not existed index', async ({ db, push }) => { + const users = mysqlTable('users_77', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_77', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { onDelete: 'cascade' }) + .notNull(), + }, () => [postsTableUserIdIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_77').on(posts.userId); + + await push({ users, posts }); + + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + await db.insert(posts).values([ + { text: 'Alice post', userId: 1 }, + { text: 'Bob post', userId: 2 }, + { text: 'Charlie post', userId: 3 }, + { text: 'David post', userId: 4 }, + { text: 'Eve post', userId: 5 }, + ]); + + await expect((async () => { + return await db.select({ + userId: users.id, + name: users.name, + postId: posts.id, + text: posts.text, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.userId), { + useIndex: ['some_other_index'], + }) + .where(and( + eq(users.name, 'David'), + eq(posts.text, 'David post'), + )); + })()).rejects.toThrowError(); + }); + + test.concurrent( + 'MySqlTable :: select with join `use index` + `force index` incompatible hints', + async ({ db, push }) => { + const users = mysqlTable('users_78', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_78', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { + onDelete: 'cascade', + }) + .notNull(), + }, () => [postsTableUserIdIndex, postsTableTextIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_78').on(posts.userId); + const postsTableTextIndex = index('posts_text_index_78').on(posts.text); + + await push({ users, posts }); + + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + await db.insert(posts).values([ + { text: 'Alice post', userId: 1 }, + { text: 'Bob post', userId: 2 }, + { text: 'Charlie post', userId: 3 }, + { text: 'David post', userId: 4 }, + { text: 'Eve post', userId: 5 }, + ]); + + await expect((async () => { + return await db.select({ + userId: users.id, + name: users.name, + postId: posts.id, + text: posts.text, + }) + .from(users) + .leftJoin(posts, eq(users.id, posts.userId), { + useIndex: [postsTableUserIdIndex], + forceIndex: [postsTableTextIndex], + }) + .where(and( + eq(users.name, 'David'), + eq(posts.text, 'David post'), + )); + })()).rejects.toThrowError(); + }, + ); + + test.concurrent('MySqlTable :: select with Subquery join `use index`', async ({ db, push }) => { + const users = mysqlTable('users_79', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_79', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { onDelete: 'cascade' }) + .notNull(), + }, () => [postsTableUserIdIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_79').on(posts.userId); + + await push({ users, posts }); + + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + await db.insert(posts).values([ + { text: 'Alice post', userId: 1 }, + { text: 'Bob post', userId: 2 }, + { text: 'Charlie post', userId: 3 }, + { text: 'David post', userId: 4 }, + { text: 'Eve post', userId: 5 }, + ]); + + const sq = db.select().from(posts, { useIndex: [postsTableUserIdIndex] }).where(eq(posts.userId, 1)).as('sq'); + + const result = await db.select({ + userId: users.id, + name: users.name, + postId: sq.id, + text: sq.text, + }) + .from(users) + .leftJoin(sq, eq(users.id, sq.userId)) + .where(eq(users.name, 'Alice')); + + expect(result).toHaveLength(1); + expect(result).toEqual([{ userId: 1, name: 'Alice', postId: 1, text: 'Alice post' }]); + }); + + test.concurrent('MySqlTable :: select with Subquery join with `use index` in join', async ({ db, push }) => { + const users = mysqlTable('users_80', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }); + + const posts = mysqlTable('posts_80', { + id: serial('id').primaryKey(), + text: varchar('text', { length: 100 }).notNull(), + userId: bigint('user_id', { mode: 'number', unsigned: true }).references(() => users.id, { onDelete: 'cascade' }) + .notNull(), + }, () => [postsTableUserIdIndex]); + const postsTableUserIdIndex = index('posts_user_id_index_80').on(posts.userId); + + await push({ users, posts }); + + const sq = db.select().from(posts).where(eq(posts.userId, 1)).as('sq'); + + const query = db.select({ + userId: users.id, + name: users.name, + postId: sq.id, + text: sq.text, + }) + .from(users) + // @ts-expect-error + .leftJoin(sq, eq(users.id, sq.userId, { useIndex: [postsTableUserIdIndex] })) + .where(eq(users.name, 'Alice')) + .toSQL(); + + expect(query.sql).not.include('USE INDEX'); + }); + + test.concurrent('View :: select with `use index` hint', async ({ db, push }) => { + const users = mysqlTable('users_81', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }, () => [usersTableNameIndex]); + + const usersTableNameIndex = index('users_name_index_81').on(users.name); + + const usersView = mysqlView('users_view_81').as((qb) => qb.select().from(users)); + + await push({ users, usersView }); + + // @ts-expect-error + const query = db.select().from(usersView, { + useIndex: [usersTableNameIndex], + }).toSQL(); + + expect(query.sql).not.include('USE INDEX'); + }); + + test.concurrent('Subquery :: select with `use index` hint', async ({ db, push }) => { + const users = mysqlTable('users_82', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 100 }).notNull(), + }, () => [usersTableNameIndex]); + const usersTableNameIndex = index('users_name_index_82').on(users.name); + + await push({ users }); + + const sq = db.select().from(users).as('sq'); + + // @ts-expect-error + const query = db.select().from(sq, { + useIndex: [usersTableNameIndex], + }).toSQL(); + + expect(query.sql).not.include('USE INDEX'); + }); + + test.concurrent('sql operator as cte', async ({ db, push }) => { + const users = mysqlTable('users_83', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(sql`select * from ${users} where ${users.name} = 'John'`); + const result1 = await db.with(sq1).select().from(sq1); + + const sq2 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); + const result2 = await db.with(sq2).select().from(sq1); + + expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); + expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); + }); + + test.concurrent('contraint names config', async ({ db, push }) => { + const users = mysqlTable('users_84', { + id: int('id').unique(), + id1: int('id1').unique('custom_name'), + }); + + await push({ users }); + + const tableConf = getTableConfig(users); + + expect(tableConf.columns.find((it) => it.name === 'id')!.uniqueName).toBe(undefined); + expect(tableConf.columns.find((it) => it.name === 'id1')!.uniqueName).toBe('custom_name'); + }); +} diff --git a/integration-tests/tests/mysql/mysql-common-8.ts b/integration-tests/tests/mysql/mysql-common-8.ts new file mode 100644 index 0000000000..45bcf13666 --- /dev/null +++ b/integration-tests/tests/mysql/mysql-common-8.ts @@ -0,0 +1,757 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { and, asc, eq, getTableColumns, gt, Name, sql } from 'drizzle-orm'; +import { + alias, + bigint, + boolean, + datetime, + int, + mysqlEnum, + mysqlTable, + mysqlTableCreator, + mysqlView, + serial, + text, + timestamp, +} from 'drizzle-orm/mysql-core'; +import { expect } from 'vitest'; +import type { Test } from './instrumentation'; +import { createUsersOnUpdateTable, createUserTable, usersMigratorTable } from './schema2'; + +export function tests(test: Test, exclude: Set = new Set([])) { + test.beforeEach(async ({ task, skip }) => { + if (exclude.has(task.name)) skip(); + }); + + test.concurrent('insert+update+delete returning sql', async ({ db, push }) => { + const users = createUserTable('users_85'); + await push({ users }); + + const res0 = await db.insert(users).values({ name: 'John' }); + const res1 = await db.update(users).set({ name: 'Jane' }).where(eq(users.name, 'John')); + const res2 = await db.delete(users).where(eq(users.name, 'Jane')); + + const insertId = res0.insertId ? Number(res0.insertId) : res0[0].insertId; + const changedRows = res1.rowsAffected ?? res1[0].changedRows; + const affectedRows = res2.rowsAffected ?? res2[0].affectedRows; + + expect(insertId).toBe(1); + expect(changedRows).toBe(1); + expect(affectedRows).toBe(1); + }); + + test.concurrent('update with returning all fields + partial', async ({ db, push }) => { + const users = createUserTable('users_86'); + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const updatedUsers = await db.update(users).set({ name: 'Jane' }).where(eq(users.name, 'John')); + + const result = await db.select().from(users).where(eq(users.id, 1)); + + const countRows = updatedUsers[0]?.changedRows ?? updatedUsers.rowsAffected; + expect(countRows).toBe(1); + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(result).toStrictEqual([{ + id: 1, + name: 'Jane', + verified: false, + jsonb: null, + createdAt: result[0]!.createdAt, + }]); + }); + + test.concurrent('update with returning partial', async ({ db, push }) => { + const users = createUserTable('users_87'); + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const updatedUsers = await db.update(users).set({ name: 'Jane' }).where(eq(users.name, 'John')); + + const result = await db.select({ id: users.id, name: users.name }).from(users).where( + eq(users.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(result).toStrictEqual([{ id: 1, name: 'Jane' }]); + }); + + test.concurrent('delete with returning all fields', async ({ db, push }) => { + const users = createUserTable('users_88'); + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const deletedUser = await db.delete(users).where(eq(users.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test.concurrent('delete with returning partial', async ({ db, push }) => { + const users = createUserTable('users_89'); + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const deletedUser = await db.delete(users).where(eq(users.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test.concurrent('insert + select', async ({ db, push }) => { + const users = createUserTable('users_90'); + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const result = await db.select().from(users); + expect(result).toStrictEqual([{ + id: 1, + name: 'John', + verified: false, + jsonb: null, + createdAt: result[0]!.createdAt, + }]); + + await db.insert(users).values({ name: 'Jane' }); + const result2 = await db.select().from(users); + expect(result2).toStrictEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test.concurrent('json insert', async ({ db, push }) => { + const users = createUserTable('users_91'); + await push({ users }); + + await db.insert(users).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: users.id, + name: users.name, + jsonb: users.jsonb, + }).from(users); + + expect(result).toStrictEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + }); + + test.concurrent('insert with overridden default values', async ({ db, push }) => { + const users = createUserTable('users_92'); + await push({ users }); + + await db.insert(users).values({ name: 'John', verified: true }); + const result = await db.select().from(users); + + expect(result).toStrictEqual([{ + id: 1, + name: 'John', + verified: true, + jsonb: null, + createdAt: result[0]!.createdAt, + }]); + }); + + test.concurrent('insert many', async ({ db, push }) => { + const users = createUserTable('users_93'); + await push({ users }); + + await db.insert(users).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: users.id, + name: users.name, + jsonb: users.jsonb, + verified: users.verified, + }).from(users); + + expect(result).toStrictEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test.concurrent('insert many with returning', async ({ db, push }) => { + const users = createUserTable('users_94'); + await push({ users }); + + const result = await db.insert(users).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); + }); + test.concurrent('$default function', async ({ db, push }) => { + const orders = mysqlTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), + }); + await push({ orders }); + + await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); + const selectedOrder = await db.select().from(orders); + + expect(selectedOrder).toStrictEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test.concurrent('$default with empty array', async ({ db, push }) => { + const sOrders = mysqlTable('s_orders', { + id: serial('id').primaryKey(), + region: text('region').default('Ukraine'), + product: text('product').$defaultFn(() => 'random_string'), + }); + await push({ sOrders }); + + await db.insert(sOrders).values({}); + const selectedOrder = await db.select().from(sOrders); + + expect(selectedOrder).toStrictEqual([{ + id: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + // here + + test.concurrent('Insert all defaults in 1 row', async ({ db, push }) => { + const users = mysqlTable('empty_insert_single_97', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await push({ users }); + + await db.insert(users).values({}); + + const res = await db.select().from(users); + + expect(res).toStrictEqual([{ id: 1, name: 'Dan', state: null }]); + }); + + test.concurrent('Insert all defaults in multiple rows', async ({ db, push }) => { + const users = mysqlTable('empty_insert_multiple_97', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await push({ users }); + + await db.insert(users).values([{}, {}]); + + const res = await db.select().from(users); + + expect(res).toStrictEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); + }); + + test.concurrent('insert with onDuplicate', async ({ db, push }) => { + const users = createUserTable('users_98'); + await push({ users }); + + await db.insert(users) + .values({ name: 'John' }); + + await db.insert(users) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: users.id, name: users.name }).from(users).where( + eq(users.id, 1), + ); + + expect(res).toStrictEqual([{ id: 1, name: 'John1' }]); + }); + + test.concurrent('insert conflict', async ({ db, push }) => { + const users = createUserTable('users_99'); + await push({ users }); + + await db.insert(users) + .values({ name: 'John' }); + + await expect((async () => { + await db.insert(users).values({ id: 1, name: 'John1' }); + })()).rejects.toThrowError(); + }); + + test.concurrent('insert conflict with ignore', async ({ db, push }) => { + const users = createUserTable('users_100'); + await push({ users }); + + await db.insert(users) + .values({ name: 'John' }); + + await db.insert(users) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: users.id, name: users.name }).from(users).where( + eq(users.id, 1), + ); + + expect(res).toStrictEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('insert sql', async ({ db, push }) => { + const users = createUserTable('users_101'); + await push({ users }); + + await db.insert(users).values({ name: sql`${'John'}` }); + const result = await db.select({ id: users.id, name: users.name }).from(users); + expect(result).toStrictEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('full join with alias', async ({ db, push }) => { + const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); + const users = mysqlTable('users_102', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toStrictEqual([{ + users_102: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + }); + + test.concurrent('select from alias', async ({ db, push }) => { + const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); + + const users = mysqlTable('users_103', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toStrictEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + }); + + test.concurrent('insert with spaces', async ({ db, push }) => { + const users = createUserTable('users_104'); + await push({ users }); + + await db.insert(users).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: users.id, name: users.name }).from(users); + + expect(result).toStrictEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test.concurrent('insert: placeholders on columns with encoder', async ({ db, push }) => { + const users = createUserTable('users_105'); + await push({ users }); + + const date = new Date('2024-08-07T15:30:00Z'); + + const statement = db.insert(users).values({ + name: 'John', + createdAt: sql.placeholder('createdAt'), + }).prepare(); + + await statement.execute({ createdAt: date }); + + const result = await db + .select({ + id: users.id, + createdAt: users.createdAt, + }) + .from(users); + + expect(result).toStrictEqual([ + { id: 1, createdAt: date }, + ]); + }); + + test.concurrent('prepared statement reuse', async ({ db, push }) => { + const users = createUserTable('users_106'); + await push({ users }); + + const stmt = db.insert(users).values({ + verified: true, + name: sql.placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db.select({ + id: users.id, + name: users.name, + verified: users.verified, + }).from(users); + + expect(result).toStrictEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); + }); + + test.concurrent('insert via db.execute + select via db.execute', async ({ db, push }) => { + const users = createUserTable('users_108'); + await push({ users }); + + await db.execute(sql`insert into ${users} (${new Name(users.name.name)}) values (${'John'})`); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${users}`); + expect(result[0]).toStrictEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('insert via db.execute w/ query builder', async ({ db, push }) => { + const users = createUserTable('users_109'); + await push({ users }); + + const inserted = await db.execute( + db.insert(users).values({ name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); + }); + + test.concurrent('Mysql enum as ts enum', async ({ db, push }) => { + enum Test { + a = 'a', + b = 'b', + c = 'c', + } + + const tableWithTsEnums = mysqlTable('enums_test_case_109', { + id: serial('id').primaryKey(), + enum1: mysqlEnum('enum1', Test).notNull(), + enum2: mysqlEnum('enum2', Test).default(Test.a), + enum3: mysqlEnum('enum3', Test).notNull().default(Test.b), + }); + + await push({ tableWithTsEnums }); + + await db.insert(tableWithTsEnums).values([ + { id: 1, enum1: Test.a, enum2: Test.b, enum3: Test.c }, + { id: 2, enum1: Test.a, enum3: Test.c }, + { id: 3, enum1: Test.a }, + ]); + + const res = await db.select().from(tableWithTsEnums); + + expect(res).toStrictEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); + }); + test.concurrent('test $onUpdateFn and $onUpdate works as $default', async ({ db, push }) => { + const usersOnUpdate = createUsersOnUpdateTable('users_on_update_1'); + await push({ usersOnUpdate }); + + await db.insert(usersOnUpdate).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest }).from(usersOnUpdate); + + expect(response).toStrictEqual([ + { name: 'John', id: 1, updateCounter: 1, uppercaseName: 'JOHN', alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, + ]); + const msDelay = 750; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test.concurrent('test $onUpdateFn and $onUpdate works updating', async ({ db, push }) => { + const usersOnUpdate = createUsersOnUpdateTable('users_on_update_2'); + await push({ usersOnUpdate }); + + await db.insert(usersOnUpdate).values([ + { name: 'John', alwaysNull: 'this will will be null after updating' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + const initial = await db.select({ updatedAt }).from(usersOnUpdate); + + await db.update(usersOnUpdate).set({ name: 'Angel', uppercaseName: null }).where(eq(usersOnUpdate.id, 1)); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest }).from(usersOnUpdate); + + expect(response).toStrictEqual([ + { name: 'Angel', id: 1, updateCounter: 2, uppercaseName: null, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, + ]); + const msDelay = 750; + + expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test.concurrent('Object keys as column names', async ({ db, push }) => { + // Tests the following: + // Column with required config + // Column with optional config without providing a value + // Column with optional config providing a value + // Column without config + const users = mysqlTable('users_114', { + id: bigint({ mode: 'number' }).autoincrement().primaryKey(), + createdAt: timestamp(), + updatedAt: timestamp({ fsp: 3 }), + admin: boolean(), + }); + + await push({ users }); + + await db.insert(users).values([ + { createdAt: sql`now() - interval 30 day`, updatedAt: sql`now() - interval 1 day`, admin: true }, + { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 30 day`, admin: true }, + { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 1 day`, admin: false }, + ]); + const result = await db + .select({ id: users.id, admin: users.admin }) + .from(users) + .where( + and( + gt(users.createdAt, sql`now() - interval 7 day`), + gt(users.updatedAt, sql`now() - interval 7 day`), + ), + ); + + expect(result).toStrictEqual([ + { id: 3, admin: false }, + ]); + }); + + test.concurrent('update with limit and order by', async ({ db, push }) => { + const users = createUserTable('users_112'); + await push({ users }); + + await db.insert(users).values([ + { name: 'Barry', verified: false }, + { name: 'Alan', verified: false }, + { name: 'Carl', verified: false }, + ]); + + await db.update(users).set({ verified: true }).limit(2).orderBy(asc(users.name)); + + const result = await db.select({ name: users.name, verified: users.verified }).from(users).orderBy( + asc(users.name), + ); + expect(result).toStrictEqual([ + { name: 'Alan', verified: true }, + { name: 'Barry', verified: true }, + { name: 'Carl', verified: false }, + ]); + }); + + test.concurrent('delete with limit and order by', async ({ db, push }) => { + const users = createUserTable('users_113'); + await push({ users }); + + await db.insert(users).values([ + { name: 'Barry', verified: false }, + { name: 'Alan', verified: false }, + { name: 'Carl', verified: false }, + ]); + + await db.delete(users).where(eq(users.verified, false)).limit(1).orderBy(asc(users.name)); + + const result = await db.select({ name: users.name, verified: users.verified }).from(users).orderBy( + asc(users.name), + ); + expect(result).toStrictEqual([ + { name: 'Barry', verified: false }, + { name: 'Carl', verified: false }, + ]); + }); + + test.concurrent('column.as', async ({ db, push }) => { + const users = mysqlTable('users_column_as', { + id: int('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').references(() => cities.id), + }); + + const cities = mysqlTable('cities_column_as', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + const ucView = mysqlView('cities_users_column_as_view').as((qb) => + qb.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)) + ); + + await push({ users, cities, ucView }); + + try { + await db.insert(cities).values([{ + id: 1, + name: 'Firstistan', + }, { + id: 2, + name: 'Secondaria', + }]); + + await db.insert(users).values([{ id: 1, name: 'First', cityId: 1 }, { + id: 2, + name: 'Second', + cityId: 2, + }, { + id: 3, + name: 'Third', + }]); + + const joinSelectReturn = await db.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)); + + expect(joinSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewSelectReturn = await db.select().from(ucView); + + expect(viewSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewJoinReturn = await db.select({ + userId: ucView.userId.as('user_id_ucv'), + cityId: cities.id.as('city_id'), + userName: ucView.userName.as('user_name_ucv'), + cityName: cities.name.as('city_name'), + }).from(ucView).leftJoin(cities, eq(cities.id, ucView.cityId)); + + expect(viewJoinReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + } finally { + await db.execute(sql`DROP TABLE ${users}`).catch(() => null); + await db.execute(sql`DROP TABLE ${cities}`).catch(() => null); + await db.execute(sql`DROP VIEW ${ucView}`).catch(() => null); + } + }); +} diff --git a/integration-tests/tests/mysql/mysql-common-cache.ts b/integration-tests/tests/mysql/mysql-common-cache.ts index 9a7a2f1d7c..333244aa2f 100644 --- a/integration-tests/tests/mysql/mysql-common-cache.ts +++ b/integration-tests/tests/mysql/mysql-common-cache.ts @@ -1,90 +1,9 @@ -import { eq, getTableName, is, sql, Table } from 'drizzle-orm'; -import type { MutationOption } from 'drizzle-orm/cache/core'; -import { Cache } from 'drizzle-orm/cache/core'; -import type { CacheConfig } from 'drizzle-orm/cache/core/types'; -import type { MySqlDatabase } from 'drizzle-orm/mysql-core'; +import { eq, sql } from 'drizzle-orm'; import { alias, boolean, int, json, mysqlTable, serial, text, timestamp } from 'drizzle-orm/mysql-core'; -import Keyv from 'keyv'; -import { beforeEach, describe, expect, test, vi } from 'vitest'; - -// eslint-disable-next-line drizzle-internal/require-entity-kind -export class TestGlobalCache extends Cache { - private globalTtl: number = 1000; - private usedTablesPerKey: Record = {}; - - constructor(private kv: Keyv = new Keyv()) { - super(); - } - - override strategy(): 'explicit' | 'all' { - return 'all'; - } - override async get(key: string, _tables: string[], _isTag: boolean): Promise { - const res = await this.kv.get(key) ?? undefined; - return res; - } - override async put( - key: string, - response: any, - tables: string[], - isTag: boolean, - config?: CacheConfig, - ): Promise { - await this.kv.set(key, response, config ? config.ex : this.globalTtl); - for (const table of tables) { - const keys = this.usedTablesPerKey[table]; - if (keys === undefined) { - this.usedTablesPerKey[table] = [key]; - } else { - keys.push(key); - } - } - } - override async onMutate(params: MutationOption): Promise { - const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; - const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; - - const keysToDelete = new Set(); - - for (const table of tablesArray) { - const tableName = is(table, Table) ? getTableName(table) : table as string; - const keys = this.usedTablesPerKey[tableName] ?? []; - for (const key of keys) keysToDelete.add(key); - } - - if (keysToDelete.size > 0 || tagsArray.length > 0) { - for (const tag of tagsArray) { - await this.kv.delete(tag); - } - - for (const key of keysToDelete) { - await this.kv.delete(key); - for (const table of tablesArray) { - const tableName = is(table, Table) ? getTableName(table) : table as string; - this.usedTablesPerKey[tableName] = []; - } - } - } - } -} - -// eslint-disable-next-line drizzle-internal/require-entity-kind -export class TestCache extends TestGlobalCache { - override strategy(): 'explicit' | 'all' { - return 'explicit'; - } -} +import { describe, expect } from 'vitest'; +import type { Test } from './instrumentation'; -declare module 'vitest' { - interface TestContext { - cachedMySQL: { - db: MySqlDatabase; - dbGlobalCached: MySqlDatabase; - }; - } -} - -const usersTable = mysqlTable('users', { +const usersTable = mysqlTable('users_for_cache', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), @@ -92,258 +11,189 @@ const usersTable = mysqlTable('users', { createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }); -const postsTable = mysqlTable('posts', { +const postsTable = mysqlTable('posts_for_cache', { id: serial().primaryKey(), description: text().notNull(), userId: int('city_id').references(() => usersTable.id), }); -export function tests() { - describe('common_cache', () => { - beforeEach(async (ctx) => { - const { db, dbGlobalCached } = ctx.cachedMySQL; - await db.execute(sql`drop table if exists users`); - await db.execute(sql`drop table if exists posts`); - await db.$cache?.invalidate({ tables: 'users' }); - await dbGlobalCached.$cache?.invalidate({ tables: 'users' }); - // public users - await db.execute( - sql` - create table users ( +export function runTests(vendor: 'mysql' | 'planetscale', test: Test) { + describe('cache:', () => { + test.beforeEach(async ({ client }) => { + await client.batch([ + `drop table if exists users_for_cache, posts_for_cache`, + ]); + await client.batch([ + `create table users_for_cache ( id serial primary key, name text not null, verified boolean not null default false, jsonb json, created_at timestamp not null default now() - ) - `, - ); - await db.execute( - sql` - create table posts ( + )`, + `create table posts_for_cache ( id serial primary key, description text not null, user_id int - ) - `, - ); + )`, + ]); }); - test('test force invalidate', async (ctx) => { - const { db } = ctx.cachedMySQL; + test('test force invalidate', async ({ drizzle }) => { + const { db, invalidate } = drizzle.withCacheExplicit; - const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); - await db.$cache?.invalidate({ tables: 'users' }); - expect(spyInvalidate).toHaveBeenCalledTimes(1); + await db.$cache?.invalidate({ tables: 'users_for_cache' }); + expect(invalidate).toHaveBeenCalledTimes(1); }); - test('default global config - no cache should be hit', async (ctx) => { - const { db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('default global config - no cache should be hit', async ({ drizzle }) => { + const { db, put, get, invalidate } = drizzle.withCacheExplicit; await db.select().from(usersTable); - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(0); + expect(get).toHaveBeenCalledTimes(0); + expect(invalidate).toHaveBeenCalledTimes(0); }); - test('default global config + enable cache on select: get, put', async (ctx) => { - const { db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('default global config + enable cache on select: get, put', async ({ drizzle }) => { + const { db, put, get, invalidate } = drizzle.withCacheExplicit; await db.select().from(usersTable).$withCache(); - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(1); + expect(get).toHaveBeenCalledTimes(1); + expect(invalidate).toHaveBeenCalledTimes(0); }); - test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { - const { db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('default global config + enable cache on select + write: get, put, onMutate', async ({ drizzle }) => { + const { db, put, get, onMutate: invalidate } = drizzle.withCacheExplicit; await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(1); + expect(get).toHaveBeenCalledTimes(1); + expect(invalidate).toHaveBeenCalledTimes(0); - spyPut.mockClear(); - spyGet.mockClear(); - spyInvalidate.mockClear(); + put.mockClear(); + get.mockClear(); + invalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(1); + expect(put).toHaveBeenCalledTimes(0); + expect(get).toHaveBeenCalledTimes(0); + expect(invalidate).toHaveBeenCalledTimes(1); }); - test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { - const { db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('default global config + enable cache on select + disable invalidate: get, put', async ({ drizzle }) => { + const { db, put, get, invalidate } = drizzle.withCacheExplicit; await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(1); + expect(get).toHaveBeenCalledTimes(1); + expect(invalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); + // TODO: check? }); - test('global: true + disable cache', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('global: true + disable cache', async ({ drizzle }) => { + const { db, put, get, invalidate } = drizzle.withCacheAll; await db.select().from(usersTable).$withCache(false); - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(0); + expect(get).toHaveBeenCalledTimes(0); + expect(invalidate).toHaveBeenCalledTimes(0); }); - test('global: true - cache should be hit', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('global: true - cache should be hit', async ({ drizzle }) => { + const { db, put, get, invalidate } = drizzle.withCacheAll; await db.select().from(usersTable); - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(1); + expect(get).toHaveBeenCalledTimes(1); + expect(invalidate).toHaveBeenCalledTimes(0); }); - test('global: true - cache: false on select - no cache hit', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('global: true - cache: false on select - no cache hit', async ({ drizzle }) => { + const { db, put, get, invalidate } = drizzle.withCacheAll; await db.select().from(usersTable).$withCache(false); - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(0); + expect(get).toHaveBeenCalledTimes(0); + expect(invalidate).toHaveBeenCalledTimes(0); }); - test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('global: true - disable invalidate - cache hit + no invalidate', async ({ drizzle }) => { + const { db, put, get, onMutate: invalidate } = drizzle.withCacheAll; await db.select().from(usersTable).$withCache({ autoInvalidate: false }); - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(1); + expect(get).toHaveBeenCalledTimes(1); + expect(invalidate).toHaveBeenCalledTimes(0); - spyPut.mockClear(); - spyGet.mockClear(); - spyInvalidate.mockClear(); + put.mockClear(); + get.mockClear(); + invalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(1); + expect(put).toHaveBeenCalledTimes(0); + expect(get).toHaveBeenCalledTimes(0); + expect(invalidate).toHaveBeenCalledTimes(1); }); - test('global: true - with custom tag', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedMySQL; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + test('global: true - with custom tag', async ({ drizzle }) => { + const { db, put, get, invalidate } = drizzle.withCacheAll; await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); + expect(put).toHaveBeenCalledTimes(1); + expect(get).toHaveBeenCalledTimes(1); + expect(invalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); + // TODO: check? }); // check select used tables - test('check simple select used tables', (ctx) => { - const { db } = ctx.cachedMySQL; + test('check simple select used tables', ({ drizzle }) => { + const { db } = drizzle.withCacheExplicit; // @ts-expect-error - expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); + expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users_for_cache']); // @ts-expect-error - expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); + expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users_for_cache']); }); + // check select+join used tables - test('select+join', (ctx) => { - const { db } = ctx.cachedMySQL; + test('select+join', ({ drizzle }) => { + const { db } = drizzle.withCacheExplicit; // @ts-expect-error expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) - .toStrictEqual(['users', 'posts']); + .toStrictEqual(['users_for_cache', 'posts_for_cache']); expect( // @ts-expect-error db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), - ).toStrictEqual(['users', 'posts']); + ).toStrictEqual(['users_for_cache', 'posts_for_cache']); }); + // check select+2join used tables - test('select+2joins', (ctx) => { - const { db } = ctx.cachedMySQL; + test('select+2joins', ({ drizzle }) => { + const { db } = drizzle.withCacheExplicit; expect( db.select().from(usersTable).leftJoin( @@ -356,24 +206,24 @@ export function tests() { // @ts-expect-error .getUsedTables(), ) - .toStrictEqual(['users', 'posts']); + .toStrictEqual(['users_for_cache', 'posts_for_cache']); expect( db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), // @ts-expect-error ).getUsedTables(), - ).toStrictEqual(['users', 'posts']); + ).toStrictEqual(['users_for_cache', 'posts_for_cache']); }); // select subquery used tables - test('select+join', (ctx) => { - const { db } = ctx.cachedMySQL; + test('select+join', ({ drizzle }) => { + const { db } = drizzle.withCacheExplicit; const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); db.select().from(sq); // @ts-expect-error - expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); + expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users_for_cache']); }); }); } diff --git a/integration-tests/tests/mysql/mysql-common.ts b/integration-tests/tests/mysql/mysql-common.ts index 6040e6e85e..6cfe099da8 100644 --- a/integration-tests/tests/mysql/mysql-common.ts +++ b/integration-tests/tests/mysql/mysql-common.ts @@ -1,6119 +1,27 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ import 'dotenv/config'; -import Docker from 'dockerode'; -import { - and, - asc, - avg, - avgDistinct, - count, - countDistinct, - eq, - exists, - getTableColumns, - gt, - gte, - inArray, - like, - lt, - max, - min, - Name, - not, - notInArray, - sql, - sum, - sumDistinct, - TransactionRollbackError, -} from 'drizzle-orm'; -import type { MySqlDatabase } from 'drizzle-orm/mysql-core'; -import { - alias, - bigint, - binary, - boolean, - char, - date, - datetime, - decimal, - double, - except, - exceptAll, - float, - foreignKey, - getTableConfig, - getViewConfig, - index, - int, - intersect, - intersectAll, - json, - mediumint, - mysqlEnum, - mysqlSchema, - mysqlTable, - mysqlTableCreator, - mysqlView, - primaryKey, - real, - serial, - smallint, - text, - time, - timestamp, - tinyint, - union, - unionAll, - unique, - uniqueIndex, - uniqueKeyName, - varbinary, - varchar, - year, -} from 'drizzle-orm/mysql-core'; -import type { MySqlRemoteDatabase } from 'drizzle-orm/mysql-proxy'; -import { migrate } from 'drizzle-orm/mysql2/migrator'; -import getPort from 'get-port'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; -import { Expect, toLocalDate } from '~/utils.ts'; -import type { Equal } from '~/utils.ts'; -import type relations from './relations'; -import { clear, init, rqbPost, rqbUser } from './schema'; -type TestMySQLDB = MySqlDatabase; - -declare module 'vitest' { - interface TestContext { - mysql: { - db: TestMySQLDB; - }; - mysqlProxy: { - db: MySqlRemoteDatabase; - }; - } -} - -const ENABLE_LOGGING = false; - -const allTypesTable = mysqlTable('all_types', { - serial: serial('serial'), - bigint53: bigint('bigint53', { - mode: 'number', - }), - bigint64: bigint('bigint64', { - mode: 'bigint', - }), - bigintString: bigint('bigint_string', { - mode: 'string', - }), - binary: binary('binary'), - boolean: boolean('boolean'), - char: char('char'), - date: date('date', { - mode: 'date', - }), - dateStr: date('date_str', { - mode: 'string', - }), - datetime: datetime('datetime', { - mode: 'date', - }), - datetimeStr: datetime('datetime_str', { - mode: 'string', - }), - decimal: decimal('decimal'), - decimalNum: decimal('decimal_num', { - scale: 30, - mode: 'number', - }), - decimalBig: decimal('decimal_big', { - scale: 30, - mode: 'bigint', - }), - double: double('double'), - float: float('float'), - int: int('int'), - json: json('json'), - medInt: mediumint('med_int'), - smallInt: smallint('small_int'), - real: real('real'), - text: text('text'), - time: time('time'), - timestamp: timestamp('timestamp', { - mode: 'date', - }), - timestampStr: timestamp('timestamp_str', { - mode: 'string', - }), - tinyInt: tinyint('tiny_int'), - varbin: varbinary('varbin', { - length: 16, - }), - varchar: varchar('varchar', { - length: 255, - }), - year: year('year'), - enum: mysqlEnum('enum', ['enV1', 'enV2']), -}); - -const usersTable = mysqlTable('userstest', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), -}); - -const users2Table = mysqlTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').references(() => citiesTable.id), -}); - -const citiesTable = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const usersOnUpdate = mysqlTable('users_on_update', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), - updatedAt: datetime('updated_at', { mode: 'date', fsp: 3 }).$onUpdate(() => new Date()), - uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), - alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value -}); - -const datesTable = mysqlTable('datestable', { - date: date('date'), - dateAsString: date('date_as_string', { mode: 'string' }), - time: time('time', { fsp: 1 }), - datetime: datetime('datetime', { fsp: 2 }), - datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), - timestamp: timestamp('timestamp', { fsp: 3 }), - timestampAsString: timestamp('timestamp_as_string', { fsp: 3, mode: 'string' }), - year: year('year'), -}); - -const coursesTable = mysqlTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: int('category_id').references(() => courseCategoriesTable.id), -}); - -const courseCategoriesTable = mysqlTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = mysqlTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: int('amount').notNull(), - quantity: int('quantity').notNull(), -}); - -const usersMigratorTable = mysqlTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => { - return { - name: uniqueIndex('').on(table.name).using('btree'), - }; -}); - -// To test aggregate functions -const aggregateTable = mysqlTable('aggregate_table', { - id: serial('id').notNull(), - name: text('name').notNull(), - a: int('a'), - b: int('b'), - c: int('c'), - nullOnly: int('null_only'), -}); - -// To test another schema and multischema -const mySchema = mysqlSchema(`mySchema`); - -const usersMySchemaTable = mySchema.table('userstest', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), -}); - -const users2MySchemaTable = mySchema.table('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').references(() => citiesTable.id), -}); - -const citiesMySchemaTable = mySchema.table('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -let mysqlContainer: Docker.Container; -export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - await new Promise((resolve) => setTimeout(resolve, 4000)); - - return { connectionString: `mysql://root:mysql@127.0.0.1:${port}/drizzle`, container: mysqlContainer }; -} - -afterAll(async () => { - await mysqlContainer?.stop().catch(console.error); -}); - -export function tests(driver?: string) { - describe('common', () => { - // afterAll(async () => { - // await mysqlContainer?.stop().catch(console.error); - // }); - - beforeEach(async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`drop table if exists userstest`); - await db.execute(sql`drop table if exists users2`); - await db.execute(sql`drop table if exists cities`); - await db.execute(sql`drop table if exists \`all_types\``); - - if (driver !== 'planetscale') { - await db.execute(sql`drop schema if exists \`mySchema\``); - await db.execute(sql`create schema if not exists \`mySchema\``); - } - - await db.execute( - sql` - create table userstest ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb json, - created_at timestamp not null default now() - ) - `, - ); - - await db.execute( - sql` - create table users2 ( - id serial primary key, - name text not null, - city_id int references cities(id) - ) - `, - ); - - await db.execute( - sql` - create table cities ( - id serial primary key, - name text not null - ) - `, - ); - - if (driver !== 'planetscale') { - // mySchema - await db.execute( - sql` - create table \`mySchema\`.\`userstest\` ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await db.execute( - sql` - create table \`mySchema\`.\`cities\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.execute( - sql` - create table \`mySchema\`.\`users2\` ( - \`id\` serial primary key, - \`name\` text not null, - \`city_id\` int references \`mySchema\`.\`cities\`(\`id\`) - ) - `, - ); - } - }); - - async function setupReturningFunctionsTest(db: MySqlDatabase) { - await db.execute(sql`drop table if exists \`users_default_fn\``); - await db.execute( - sql` - create table \`users_default_fn\` ( - \`id\` varchar(256) primary key, - \`name\` text not null - ); - `, - ); - } - - async function setupSetOperationTest(db: TestMySQLDB) { - await db.execute(sql`drop table if exists \`users2\``); - await db.execute(sql`drop table if exists \`cities\``); - await db.execute( - sql` - create table \`users2\` ( - \`id\` serial primary key, - \`name\` text not null, - \`city_id\` int references \`cities\`(\`id\`) - ) - `, - ); - - await db.execute( - sql` - create table \`cities\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.insert(citiesTable).values([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await db.insert(users2Table).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 2 }, - { id: 3, name: 'Jack', cityId: 3 }, - { id: 4, name: 'Peter', cityId: 3 }, - { id: 5, name: 'Ben', cityId: 2 }, - { id: 6, name: 'Jill', cityId: 1 }, - { id: 7, name: 'Mary', cityId: 2 }, - { id: 8, name: 'Sally', cityId: 1 }, - ]); - } - - async function setupAggregateFunctionsTest(db: TestMySQLDB) { - await db.execute(sql`drop table if exists \`aggregate_table\``); - await db.execute( - sql` - create table \`aggregate_table\` ( - \`id\` integer primary key auto_increment not null, - \`name\` text not null, - \`a\` integer, - \`b\` integer, - \`c\` integer, - \`null_only\` integer - ); - `, - ); - await db.insert(aggregateTable).values([ - { name: 'value 1', a: 5, b: 10, c: 20 }, - { name: 'value 1', a: 5, b: 20, c: 30 }, - { name: 'value 2', a: 10, b: 50, c: 60 }, - { name: 'value 3', a: 20, b: 20, c: null }, - { name: 'value 4', a: null, b: 90, c: 120 }, - { name: 'value 5', a: 80, b: 10, c: null }, - { name: 'value 6', a: null, b: null, c: 150 }, - ]); - } - - test('table config: unsigned ints', async () => { - const unsignedInts = mysqlTable('cities1', { - bigint: bigint('bigint', { mode: 'number', unsigned: true }), - int: int('int', { unsigned: true }), - smallint: smallint('smallint', { unsigned: true }), - mediumint: mediumint('mediumint', { unsigned: true }), - tinyint: tinyint('tinyint', { unsigned: true }), - }); - - const tableConfig = getTableConfig(unsignedInts); - - const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; - const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; - const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; - const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; - const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; - - expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); - expect(intColumn.getSQLType()).toBe('int unsigned'); - expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); - expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); - expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); - }); - - test('table config: signed ints', async () => { - const unsignedInts = mysqlTable('cities1', { - bigint: bigint('bigint', { mode: 'number' }), - int: int('int'), - smallint: smallint('smallint'), - mediumint: mediumint('mediumint'), - tinyint: tinyint('tinyint'), - }); - - const tableConfig = getTableConfig(unsignedInts); - - const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; - const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; - const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; - const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; - const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; - - expect(bigintColumn.getSQLType()).toBe('bigint'); - expect(intColumn.getSQLType()).toBe('int'); - expect(smallintColumn.getSQLType()).toBe('smallint'); - expect(mediumintColumn.getSQLType()).toBe('mediumint'); - expect(tinyintColumn.getSQLType()).toBe('tinyint'); - }); - - test('table config: foreign keys name', async () => { - const table = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), - })); - - const tableConfig = getTableConfig(table); - - expect(tableConfig.foreignKeys).toHaveLength(1); - expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); - }); - - test('table config: primary keys name', async () => { - const table = mysqlTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); - - const tableConfig = getTableConfig(table); - - expect(tableConfig.primaryKeys).toHaveLength(1); - expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); - }); - - test('table configs: unique third param', async () => { - const cities1Table = mysqlTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state), - f1: unique('custom_name1').on(t.name, t.state), - })); - - const tableConfig = getTableConfig(cities1Table); - - expect(tableConfig.uniqueConstraints).toHaveLength(2); - - expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); - expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); - - expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); - expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); - }); - - test('table configs: unique in column', async () => { - const cities1Table = mysqlTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull().unique(), - state: text('state').unique('custom'), - field: text('field').unique('custom_field'), - }); - - const tableConfig = getTableConfig(cities1Table); - - const columnName = tableConfig.columns.find((it) => it.name === 'name'); - expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); - expect(columnName?.isUnique).toBeTruthy(); - - const columnState = tableConfig.columns.find((it) => it.name === 'state'); - expect(columnState?.uniqueName).toBe('custom'); - expect(columnState?.isUnique).toBeTruthy(); - - const columnField = tableConfig.columns.find((it) => it.name === 'field'); - expect(columnField?.uniqueName).toBe('custom_field'); - expect(columnField?.isUnique).toBeTruthy(); - }); - - test('select all fields', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - expect(result[0]!.createdAt).toBeInstanceOf(Date); - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - }); - - test('select sql', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - expect(users).toEqual([{ name: 'JOHN' }]); - }); - - test('select typed sql', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - expect(users).toEqual([{ name: 'JOHN' }]); - }); - - test('select with empty array in inArray', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const result = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable) - .where(inArray(usersTable.id, [])); - - expect(result).toEqual([]); - }); - - test('select with empty array in notInArray', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const result = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable) - .where(notInArray(usersTable.id, [])); - - expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); - }); - - test('select distinct', async (ctx) => { - const { db } = ctx.mysql; - - const usersDistinctTable = mysqlTable('users_distinct', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - }); - - test('insert returning sql', async (ctx) => { - const { db } = ctx.mysql; - - const [result, _] = await db.insert(usersTable).values({ name: 'John' }); - - expect(result.insertId).toBe(1); - }); - - test('delete returning sql', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - expect(users[0].affectedRows).toBe(1); - }); - - test('update returning sql', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - expect(users[0].changedRows).toBe(1); - }); - - test('update with returning all fields', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - - expect(updatedUsers[0].changedRows).toBe(1); - - expect(users[0]!.createdAt).toBeInstanceOf(Date); - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); - expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); - }); - - test('update with returning partial', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - expect(updatedUsers[0].changedRows).toBe(1); - - expect(users).toEqual([{ id: 1, name: 'Jane' }]); - }); - - test('delete with returning all fields', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - expect(deletedUser[0].affectedRows).toBe(1); - }); - - test('delete with returning partial', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - expect(deletedUser[0].affectedRows).toBe(1); - }); - - test('insert + select', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - expect(result2).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); - }); - - test('json insert', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); - }); - - test('insert with overridden default values', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); - }); - - test('insert many', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable); - - expect(result).toEqual([ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); - }); - - test('insert many with returning', async (ctx) => { - const { db } = ctx.mysql; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - - expect(result[0].affectedRows).toBe(4); - }); - - test('select with group by as field', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); - }); - - test('select with exists', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const user = alias(usersTable, 'user'); - const result = await db.select({ name: usersTable.name }).from(usersTable).where( - exists( - db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), - ), - ); - - expect(result).toEqual([{ name: 'John' }]); - }); - - test('select with group by as sql', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); - }); - - test('$default function', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); - const selectedOrder = await db.select().from(orders); - - expect(selectedOrder).toEqual([{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); - }); - - test('$default with empty array', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`s_orders\``); - await db.execute( - sql` - create table \`s_orders\` ( - \`id\` serial primary key, - \`region\` text default ('Ukraine'), - \`product\` text not null - ) - `, - ); - - const users = mysqlTable('s_orders', { - id: serial('id').primaryKey(), - region: text('region').default('Ukraine'), - product: text('product').$defaultFn(() => 'random_string'), - }); - - await db.insert(users).values({}); - const selectedOrder = await db.select().from(users); - - expect(selectedOrder).toEqual([{ - id: 1, - region: 'Ukraine', - product: 'random_string', - }]); - }); - - test('select with group by as sql + column', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - }); - - test('select with group by as column + sql', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - }); - - test('select with group by complex query', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - expect(result).toEqual([{ name: 'Jane' }]); - }); - - test('build query', async (ctx) => { - const { db } = ctx.mysql; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - expect(query).toEqual({ - sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, - params: [], - }); - }); - - test('Query check: Insert all defaults in 1 row', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - expect(query).toEqual({ - sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', - params: [], - }); - }); - - test('Query check: Insert all defaults in multiple rows', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state').default('UA'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', - params: [], - }); - }); - - test('Insert all defaults in 1 row', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('empty_insert_single', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, - ); - - await db.insert(users).values({}); - - const res = await db.select().from(users); - - expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); - }); - - test('Insert all defaults in multiple rows', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('empty_insert_multiple', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, - ); - - await db.insert(users).values([{}, {}]); - - const res = await db.select().from(users); - - expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); - }); - - test('build query insert with onDuplicate', async (ctx) => { - const { db } = ctx.mysql; - - const query = db.insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', - params: ['John', '["foo","bar"]', 'John1'], - }); - }); - - test('insert with onDuplicate', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - expect(res).toEqual([{ id: 1, name: 'John1' }]); - }); - - test('insert conflict', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await expect((async () => { - await db.insert(usersTable).values({ id: 1, name: 'John1' }); - })()).rejects.toThrowError(); - }); - - test('insert conflict with ignore', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable) - .values({ name: 'John' }); - - await db.insert(usersTable) - .ignore() - .values({ id: 1, name: 'John1' }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - expect(res).toEqual([{ id: 1, name: 'John' }]); - }); - - test('insert sql', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - expect(result).toEqual([{ id: 1, name: 'John' }]); - }); - - test('partial join with alias', async (ctx) => { - const { db } = ctx.mysql; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - expect(result).toEqual([{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); - }); - - test('full join with alias', async (ctx) => { - const { db } = ctx.mysql; - - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)); - - expect(result).toEqual([{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); - }); - - test('select from alias', async (ctx) => { - const { db } = ctx.mysql; - - const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - expect(result).toEqual([{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); - }); - - test('insert with spaces', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); - }); - - test('prepared statement', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const statement = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .prepare(); - const result = await statement.execute(); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - }); - - test('insert: placeholders on columns with encoder', async (ctx) => { - const { db } = ctx.mysql; - - const date = new Date('2024-08-07T15:30:00Z'); - - const statement = db.insert(usersTable).values({ - name: 'John', - createdAt: sql.placeholder('createdAt'), - }).prepare(); - - await statement.execute({ createdAt: date }); - - const result = await db - .select({ - id: usersTable.id, - createdAt: usersTable.createdAt, - }) - .from(usersTable); - - expect(result).toEqual([ - { id: 1, createdAt: date }, - ]); - }); - - test('prepared statement reuse', async (ctx) => { - const { db } = ctx.mysql; - - const stmt = db.insert(usersTable).values({ - verified: true, - name: sql.placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable); - - expect(result).toEqual([ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); - }); - - test('prepared statement with placeholder in .where', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, sql.placeholder('id'))) - .prepare(); - const result = await stmt.execute({ id: 1 }); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - }); - - test('prepared statement with placeholder in .limit', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, sql.placeholder('id'))) - .limit(sql.placeholder('limit')) - .prepare(); - - const result = await stmt.execute({ id: 1, limit: 1 }); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - expect(result).toHaveLength(1); - }); - - test('prepared statement with placeholder in .offset', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .limit(sql.placeholder('limit')) - .offset(sql.placeholder('offset')) - .prepare(); - - const result = await stmt.execute({ limit: 1, offset: 1 }); - - expect(result).toEqual([{ id: 2, name: 'John1' }]); - }); - - test('prepared statement built using $dynamic', async (ctx) => { - const { db } = ctx.mysql; - - function withLimitOffset(qb: any) { - return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); - } - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .$dynamic(); - withLimitOffset(stmt).prepare('stmt_limit'); - - const result = await stmt.execute({ limit: 1, offset: 1 }); - - expect(result).toEqual([{ id: 2, name: 'John1' }]); - expect(result).toHaveLength(1); - }); - - test('migrator', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists cities_migration`); - await db.execute(sql`drop table if exists users_migration`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, { migrationsFolder: './drizzle2/mysql' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table cities_migration`); - await db.execute(sql`drop table users_migration`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table __drizzle_migrations`); - }); - - test('insert via db.execute + select via db.execute', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); - - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); - expect(result[0]).toEqual([{ id: 1, name: 'John' }]); - }); - - test('insert via db.execute w/ query builder', async (ctx) => { - const { db } = ctx.mysql; - - const inserted = await db.execute( - db.insert(usersTable).values({ name: 'John' }), - ); - expect(inserted[0].affectedRows).toBe(1); - }); - - test('insert + select all possible dates', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`datestable\``); - await db.execute( - sql` - create table \`datestable\` ( - \`date\` date, - \`date_as_string\` date, - \`time\` time, - \`datetime\` datetime, - \`datetime_as_string\` datetime, - \`timestamp\` timestamp(3), - \`timestamp_as_string\` timestamp(3), - \`year\` year - ) - `, - ); - - const date = new Date('2022-11-11'); - const dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); - - await db.insert(datesTable).values({ - date: date, - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: date, - year: 22, - datetimeAsString: '2022-11-11 12:12:12', - timestamp: dateWithMilliseconds, - timestampAsString: '2022-11-11 12:12:12.123', - }); - - const res = await db.select().from(datesTable); - - expect(res[0]?.date).toBeInstanceOf(Date); - expect(res[0]?.datetime).toBeInstanceOf(Date); - expect(typeof res[0]?.dateAsString).toBe('string'); - expect(typeof res[0]?.datetimeAsString).toBe('string'); - - expect(res).toEqual([{ - date: toLocalDate(new Date('2022-11-11')), - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: new Date('2022-11-11'), - year: 2022, - datetimeAsString: '2022-11-11 12:12:12', - timestamp: new Date('2022-11-11 12:12:12.123'), - timestampAsString: '2022-11-11 12:12:12.123', - }]); - - await db.execute(sql`drop table if exists \`datestable\``); - }); - - const tableWithEnums = mysqlTable('enums_test_case', { - id: serial('id').primaryKey(), - enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), - enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), - enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), - }); - - test('Mysql enum as ts enum', async (ctx) => { - enum Test { - a = 'a', - b = 'b', - c = 'c', - } - - const tableWithTsEnums = mysqlTable('enums_test_case', { - id: serial('id').primaryKey(), - enum1: mysqlEnum('enum1', Test).notNull(), - enum2: mysqlEnum('enum2', Test).default(Test.a), - enum3: mysqlEnum('enum3', Test).notNull().default(Test.b), - }); - - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`enums_test_case\``); - - await db.execute(sql` - create table \`enums_test_case\` ( - \`id\` serial primary key, - \`enum1\` ENUM('a', 'b', 'c') not null, - \`enum2\` ENUM('a', 'b', 'c') default 'a', - \`enum3\` ENUM('a', 'b', 'c') not null default 'b' - ) - `); - - await db.insert(tableWithTsEnums).values([ - { id: 1, enum1: Test.a, enum2: Test.b, enum3: Test.c }, - { id: 2, enum1: Test.a, enum3: Test.c }, - { id: 3, enum1: Test.a }, - ]); - - const res = await db.select().from(tableWithTsEnums); - - await db.execute(sql`drop table \`enums_test_case\``); - - expect(res).toEqual([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, - { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, - ]); - }); - - test('Mysql enum test case #1', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`enums_test_case\``); - - await db.execute(sql` - create table \`enums_test_case\` ( - \`id\` serial primary key, - \`enum1\` ENUM('a', 'b', 'c') not null, - \`enum2\` ENUM('a', 'b', 'c') default 'a', - \`enum3\` ENUM('a', 'b', 'c') not null default 'b' - ) - `); - - await db.insert(tableWithEnums).values([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum3: 'c' }, - { id: 3, enum1: 'a' }, - ]); - - const res = await db.select().from(tableWithEnums); - - await db.execute(sql`drop table \`enums_test_case\``); - - expect(res).toEqual([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, - { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, - ]); - }); - - test('left join (flat object fields)', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - expect(res).toEqual([ - { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); - }); - - test('left join (grouped fields)', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - expect(res).toEqual([ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); - }); - - test('left join (all fields)', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const res = await db.select().from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); - - expect(res).toEqual([ - { - users2: { - id: 1, - name: 'John', - cityId: 1, - }, - cities: { - id: 1, - name: 'Paris', - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); - }); - - test('select from a many subquery', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 2 }, - ]); - - const res = await db.select({ - population: db.select({ count: count().as('count') }).from(users2Table).where( - eq(users2Table.cityId, citiesTable.id), - ).as( - 'population', - ), - name: citiesTable.name, - }).from(citiesTable); - - expectTypeOf(res).toEqualTypeOf< - { - population: number; - name: string; - }[] - >(); - - expect(res).toStrictEqual([{ - population: 1, - name: 'Paris', - }, { - population: 2, - name: 'London', - }]); - }); - - test('select from a one subquery', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 2 }, - ]); - - const res = await db.select({ - cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) - .as( - 'cityName', - ), - name: users2Table.name, - }).from(users2Table); - - expectTypeOf(res).toEqualTypeOf< - { - cityName: string; - name: string; - }[] - >(); - - expect(res).toStrictEqual([{ - cityName: 'Paris', - name: 'John', - }, { - cityName: 'London', - name: 'Jane', - }, { - cityName: 'London', - name: 'Jack', - }]); - }); - - test('join subquery', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`courses\``); - await db.execute(sql`drop table if exists \`course_categories\``); - - await db.execute( - sql` - create table \`course_categories\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.execute( - sql` - create table \`courses\` ( - \`id\` serial primary key, - \`name\` text not null, - \`category_id\` int references \`course_categories\`(\`id\`) - ) - `, - ); - - await db.insert(courseCategoriesTable).values([ - { name: 'Category 1' }, - { name: 'Category 2' }, - { name: 'Category 3' }, - { name: 'Category 4' }, - ]); - - await db.insert(coursesTable).values([ - { name: 'Development', categoryId: 2 }, - { name: 'IT & Software', categoryId: 3 }, - { name: 'Marketing', categoryId: 4 }, - { name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - expect(res).toEqual([ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); - - await db.execute(sql`drop table if exists \`courses\``); - await db.execute(sql`drop table if exists \`course_categories\``); - }); - - test('with ... select', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, - productSales: sql`cast(sum(${orders.amount}) as unsigned)`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - expect(result).toEqual([ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); - }); - - test('with ... update', async (ctx) => { - const { db } = ctx.mysql; - - const products = mysqlTable('products', { - id: serial('id').primaryKey(), - price: decimal('price', { - precision: 15, - scale: 2, - }).notNull(), - cheap: boolean('cheap').notNull().default(false), - }); - - await db.execute(sql`drop table if exists ${products}`); - await db.execute(sql` - create table ${products} ( - id serial primary key, - price decimal(15, 2) not null, - cheap boolean not null default false - ) - `); - - await db.insert(products).values([ - { price: '10.99' }, - { price: '25.85' }, - { price: '32.99' }, - { price: '2.50' }, - { price: '4.59' }, - ]); - - const averagePrice = db - .$with('average_price') - .as( - db - .select({ - value: sql`avg(${products.price})`.as('value'), - }) - .from(products), - ); - - await db - .with(averagePrice) - .update(products) - .set({ - cheap: true, - }) - .where(lt(products.price, sql`(select * from ${averagePrice})`)); - - const result = await db - .select({ - id: products.id, - }) - .from(products) - .where(eq(products.cheap, true)); - - expect(result).toEqual([ - { id: 1 }, - { id: 4 }, - { id: 5 }, - ]); - }); - - test('with ... delete', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const averageAmount = db - .$with('average_amount') - .as( - db - .select({ - value: sql`avg(${orders.amount})`.as('value'), - }) - .from(orders), - ); - - await db - .with(averageAmount) - .delete(orders) - .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); - - const result = await db - .select({ - id: orders.id, - }) - .from(orders); - - expect(result).toEqual([ - { id: 1 }, - { id: 2 }, - { id: 3 }, - { id: 4 }, - { id: 5 }, - ]); - }); - - test('select from subquery sql', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); - - const sq = db - .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) - .from(users2Table) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); - }); - - test('select a field without joining its table', (ctx) => { - const { db } = ctx.mysql; - - expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); - }); - - test('select all fields from subquery without alias', (ctx) => { - const { db } = ctx.mysql; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - expect(() => db.select().from(sq).prepare()).toThrowError(); - }); - - test('select count()', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - expect(res).toEqual([{ count: 2 }]); - }); - - test('select for ...', (ctx) => { - const { db } = ctx.mysql; - - { - const query = db.select().from(users2Table).for('update').toSQL(); - expect(query.sql).toMatch(/ for update$/); - } - { - const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); - expect(query.sql).toMatch(/ for share skip locked$/); - } - { - const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); - expect(query.sql).toMatch(/ for update nowait$/); - } - }); - - test('having', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - expect(result).toEqual([ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); - }); - - test('view', async (ctx) => { - const { db } = ctx.mysql; - - const newYorkers1 = mysqlView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = mysqlView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = mysqlView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - expect(result).toEqual([ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); - }); - - test('select from raw sql', async (ctx) => { - const { db } = ctx.mysql; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - expect(result).toEqual([ - { id: 1, name: 'John' }, - ]); - }); - - test('select from raw sql with joins', async (ctx) => { - const { db } = ctx.mysql; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - expect(result).toEqual([ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); - }); - - test('join on aliased sql from select', async (ctx) => { - const { db } = ctx.mysql; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect< - Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> - >; - - expect(result).toEqual([ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); - }); - - test('join on aliased sql from with clause', async (ctx) => { - const { db } = ctx.mysql; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect< - Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> - >; - - expect(result).toEqual([ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); - }); - - test('prefixed table', async (ctx) => { - const { db } = ctx.mysql; - - const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); - - const users = mysqlTable('test_prefixed_table_with_unique_name', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); - }); - - test('orderBy with aliased column', (ctx) => { - const { db } = ctx.mysql; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - expect(query.sql).toBe('select something as `test` from `users2` order by `test`'); - }); - - test('timestamp timezone', async (ctx) => { - const { db } = ctx.mysql; - - const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); - - await db.insert(usersTable).values({ name: 'With default times' }); - await db.insert(usersTable).values({ - name: 'Without default times', - createdAt: date, - }); - const users = await db.select().from(usersTable); - - // check that the timestamps are set correctly for default times - expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); - - // check that the timestamps are set correctly for non default times - expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); - }); - - test('transaction', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_transactions', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - const products = mysqlTable('products_transactions', { - id: serial('id').primaryKey(), - price: int('price').notNull(), - stock: int('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, - ); - - const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); - const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); - const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); - const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); - }); - - test('transaction with options (set isolationLevel)', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_transactions', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - const products = mysqlTable('products_transactions', { - id: serial('id').primaryKey(), - price: int('price').notNull(), - stock: int('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, - ); - - const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); - const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); - const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); - const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }, { isolationLevel: 'serializable' }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); - }); - - test('transaction rollback', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, - ); - - await expect((async () => { - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }); - })()).rejects.toThrowError(TransactionRollbackError); - - const result = await db.select().from(users); - - expect(result).toEqual([]); - - await db.execute(sql`drop table ${users}`); - }); - - test('nested transaction', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance int not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); - }); - - test('nested transaction rollback', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance int not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await expect((async () => { - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - tx.rollback(); - }); - })()).rejects.toThrowError(TransactionRollbackError); - }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); - }); - - test('join subquery with join', async (ctx) => { - const { db } = ctx.mysql; - - const internalStaff = mysqlTable('internal_staff', { - userId: int('user_id').notNull(), - }); - - const customUser = mysqlTable('custom_user', { - id: int('id').notNull(), - }); - - const ticket = mysqlTable('ticket', { - staffId: int('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - expect(mainQuery).toEqual([{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); - }); - - test('subquery with view', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq); - - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); - }); - - test('join view as subquery', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 1 }, - { name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); - - expect(result).toEqual([ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); - }); - - test('select iterator', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_iterator', { - id: serial('id').primaryKey(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key)`); - - await db.insert(users).values([{}, {}, {}]); - - const iter = db.select().from(users).iterator(); - - const result: typeof users.$inferSelect[] = []; - - for await (const row of iter) { - result.push(row); - } - - expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); - }); - - test('select iterator w/ prepared statement', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users_iterator', { - id: serial('id').primaryKey(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key)`); - - await db.insert(users).values([{}, {}, {}]); - - const prepared = db.select().from(users).prepare(); - const iter = prepared.iterator(); - const result: typeof users.$inferSelect[] = []; - - for await (const row of iter) { - result.push(row); - } - - expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); - }); - - test('insert undefined', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await expect((async () => { - await db.insert(users).values({ name: undefined }); - })()).resolves.not.toThrowError(); - - await db.execute(sql`drop table ${users}`); - }); - - test('update undefined', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await expect((async () => { - await db.update(users).set({ name: undefined }); - })()).rejects.toThrowError(); - - await expect((async () => { - await db.update(users).set({ id: 1, name: undefined }); - })()).resolves.not.toThrowError(); - - await db.execute(sql`drop table ${users}`); - }); - - test('utc config for datetime', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists \`datestable\``); - await db.execute( - sql` - create table \`datestable\` ( - \`datetime_utc\` datetime(3), - \`datetime\` datetime(3), - \`datetime_as_string\` datetime - ) - `, - ); - const datesTable = mysqlTable('datestable', { - datetimeUTC: datetime('datetime_utc', { fsp: 3, mode: 'date' }), - datetime: datetime('datetime', { fsp: 3 }), - datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), - }); - - const dateObj = new Date('2022-11-11'); - const dateUtc = new Date('2022-11-11T12:12:12.122Z'); - - await db.insert(datesTable).values({ - datetimeUTC: dateUtc, - datetime: dateObj, - datetimeAsString: '2022-11-11 12:12:12', - }); - - const res = await db.select().from(datesTable); - - const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); - const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; - - expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122'); - expect(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z')).toEqual(dateUtc); - - expect(res[0]?.datetime).toBeInstanceOf(Date); - expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); - expect(typeof res[0]?.datetimeAsString).toBe('string'); - - expect(res).toEqual([{ - datetimeUTC: dateUtc, - datetime: new Date('2022-11-11'), - datetimeAsString: '2022-11-11 12:12:12', - }]); - - await db.execute(sql`drop table if exists \`datestable\``); - }); - - test('set operations (union) from query builder with subquery', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - const sq = db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).as('sq'); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).union( - db.select().from(sq), - ).limit(8); - - expect(result).toHaveLength(8); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - { id: 1, name: 'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'Jack' }, - { id: 4, name: 'Peter' }, - { id: 5, name: 'Ben' }, - ]); - - // union should throw if selected fields are not in the same order - await expect((async () => { - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).union( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (union) as function', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await union( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - { id: 1, name: 'John' }, - ]); - - await expect((async () => { - union( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (union all) from query builder', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)).limit(3); - - expect(result).toHaveLength(3); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - ]); - - await expect((async () => { - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).unionAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)); - })()).rejects.toThrowError(); - }); - - test('set operations (union all) as function', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - - expect(result).toHaveLength(1); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - ]); - - await expect((async () => { - unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - })()).rejects.toThrowError(); - }); - - test('set operations (intersect) from query builder', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await expect((async () => { - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (intersect) as function', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - - expect(result).toHaveLength(0); - - expect(result).toEqual([]); - - await expect((async () => { - intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - })()).rejects.toThrowError(); - }); - - test('set operations (intersect all) from query builder', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).intersectAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - ]); - - await expect((async () => { - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).intersectAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)); - })()).rejects.toThrowError(); - }); - - test('set operations (intersect all) as function', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await intersectAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - expect(result).toHaveLength(1); - - expect(result).toEqual([ - { id: 1, name: 'John' }, - ]); - - await expect((async () => { - intersectAll( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (except) from query builder', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).except( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - expect(result).toHaveLength(1); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - ]); - }); - - test('set operations (except) as function', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await except( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable), - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(3); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await expect((async () => { - except( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable), - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(3); - })()).rejects.toThrowError(); - }); - - test('set operations (except all) from query builder', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).exceptAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await expect((async () => { - db - .select() - .from(citiesTable).exceptAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - ).orderBy(asc(sql`id`)); - })()).rejects.toThrowError(); - }); - - test('set operations (except all) as function', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await exceptAll( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gt(users2Table.id, 7)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(6).orderBy(asc(sql.identifier('id'))); - - expect(result).toHaveLength(6); - - expect(result).toEqual([ - { id: 2, name: 'Jane' }, - { id: 3, name: 'Jack' }, - { id: 4, name: 'Peter' }, - { id: 5, name: 'Ben' }, - { id: 6, name: 'Jill' }, - { id: 7, name: 'Mary' }, - ]); - - await expect((async () => { - exceptAll( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gt(users2Table.id, 7)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(6); - })()).rejects.toThrowError(); - }); - - test('set operations (mixed) from query builder', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).except( - ({ unionAll }) => - unionAll( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - db.select().from(citiesTable).where(eq(citiesTable.id, 2)), - ).orderBy(asc(citiesTable.id)).limit(1).offset(1), - ); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - { id: 3, name: 'Tampa' }, - ]); - - await expect((async () => { - db - .select() - .from(citiesTable).except( - ({ unionAll }) => - unionAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - db.select().from(citiesTable).where(eq(citiesTable.id, 2)), - ), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (mixed all) as function with subquery', async (ctx) => { - const { db } = ctx.mysql; - - await setupSetOperationTest(db); - - const sq = except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 7)), - ).orderBy(asc(sql.identifier('id'))).as('sq'); - - const result = await union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db.select().from(sq).limit(1), - db - .select().from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - expect(result).toHaveLength(4); - - expect(result).toEqual([ - { id: 1, name: 'John' }, - { id: 5, name: 'Ben' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await expect((async () => { - union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 7)), - ).limit(1), - db - .select().from(citiesTable).where(gt(citiesTable.id, 1)), - ); - })()).rejects.toThrowError(); - }); - - test('aggregate function: count', async (ctx) => { - const { db } = ctx.mysql; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: count() }).from(table); - const result2 = await db.select({ value: count(table.a) }).from(table); - const result3 = await db.select({ value: countDistinct(table.name) }).from(table); - - expect(result1[0]?.value).toBe(7); - expect(result2[0]?.value).toBe(5); - expect(result3[0]?.value).toBe(6); - }); - - test('aggregate function: avg', async (ctx) => { - const { db } = ctx.mysql; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: avg(table.b) }).from(table); - const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); - const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); - - expect(result1[0]?.value).toBe('33.3333'); - expect(result2[0]?.value).toBe(null); - expect(result3[0]?.value).toBe('42.5000'); - }); - - test('aggregate function: sum', async (ctx) => { - const { db } = ctx.mysql; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: sum(table.b) }).from(table); - const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); - const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); - - expect(result1[0]?.value).toBe('200'); - expect(result2[0]?.value).toBe(null); - expect(result3[0]?.value).toBe('170'); - }); - - test('aggregate function: max', async (ctx) => { - const { db } = ctx.mysql; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: max(table.b) }).from(table); - const result2 = await db.select({ value: max(table.nullOnly) }).from(table); - - expect(result1[0]?.value).toBe(90); - expect(result2[0]?.value).toBe(null); - }); - - test('aggregate function: min', async (ctx) => { - const { db } = ctx.mysql; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: min(table.b) }).from(table); - const result2 = await db.select({ value: min(table.nullOnly) }).from(table); - - expect(result1[0]?.value).toBe(10); - expect(result2[0]?.value).toBe(null); - }); - - test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial not null primary key, - name text not null, - update_counter integer default 1 not null, - updated_at datetime(3), - uppercase_name text, - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate); - - const response = await db.select({ ...rest }).from(usersOnUpdate); - - expect(response).toEqual([ - { name: 'John', id: 1, updateCounter: 1, uppercaseName: 'JOHN', alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, - ]); - const msDelay = 750; - - for (const eachUser of justDates) { - expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); - } - }); - - test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial not null primary key, - name text not null, - update_counter integer default 1 not null, - updated_at datetime(3), - uppercase_name text, - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { name: 'John', alwaysNull: 'this will will be null after updating' }, - { name: 'Jane' }, - { name: 'Jack' }, - { name: 'Jill' }, - ]); - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - const initial = await db.select({ updatedAt }).from(usersOnUpdate); - - await db.update(usersOnUpdate).set({ name: 'Angel', uppercaseName: null }).where(eq(usersOnUpdate.id, 1)); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate); - - const response = await db.select({ ...rest }).from(usersOnUpdate); - - expect(response).toEqual([ - { name: 'Angel', id: 1, updateCounter: 2, uppercaseName: null, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, - ]); - const msDelay = 750; - - expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); - - for (const eachUser of justDates) { - expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); - } - }); - - // mySchema tests - test('mySchema :: select all fields', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const result = await db.select().from(usersMySchemaTable); - - expect(result[0]!.createdAt).toBeInstanceOf(Date); - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - }); - - test('mySchema :: select sql', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersMySchemaTable.name})`, - }).from(usersMySchemaTable); - - expect(users).toEqual([{ name: 'JOHN' }]); - }); - - test('mySchema :: select typed sql', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersMySchemaTable.name})`, - }).from(usersMySchemaTable); - - expect(users).toEqual([{ name: 'JOHN' }]); - }); - - test('mySchema :: select distinct', async (ctx) => { - const { db } = ctx.mysql; - - const usersDistinctTable = mysqlTable('users_distinct', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - }); - - test('mySchema :: insert returning sql', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - const [result, _] = await db.insert(usersMySchemaTable).values({ name: 'John' }); - - expect(result.insertId).toBe(1); - }); - - test('mySchema :: delete returning sql', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); - - expect(users[0].affectedRows).toBe(1); - }); - - test('mySchema :: update with returning partial', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( - eq(usersMySchemaTable.name, 'John'), - ); - - const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( - usersMySchemaTable, - ) - .where( - eq(usersMySchemaTable.id, 1), - ); - - expect(updatedUsers[0].changedRows).toBe(1); - - expect(users).toEqual([{ id: 1, name: 'Jane' }]); - }); - - test('mySchema :: delete with returning all fields', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); - - expect(deletedUser[0].affectedRows).toBe(1); - }); - - test('mySchema :: insert + select', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const result = await db.select().from(usersMySchemaTable); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersMySchemaTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersMySchemaTable); - expect(result2).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); - }); - - test('mySchema :: insert with overridden default values', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersMySchemaTable); - - expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); - }); - - test('mySchema :: insert many', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersMySchemaTable.id, - name: usersMySchemaTable.name, - jsonb: usersMySchemaTable.jsonb, - verified: usersMySchemaTable.verified, - }).from(usersMySchemaTable); - - expect(result).toEqual([ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); - }); - - test('mySchema :: select with group by as field', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) - .groupBy(usersMySchemaTable.name); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); - }); - - test('mySchema :: select with group by as column + sql', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) - .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - }); - - test('mySchema :: build query', async (ctx) => { - const { db } = ctx.mysql; - - const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) - .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) - .toSQL(); - - expect(query).toEqual({ - sql: - `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`mySchema\`.\`userstest\`.\`id\`, \`mySchema\`.\`userstest\`.\`name\``, - params: [], - }); - }); - - test('mySchema :: insert with spaces', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: sql`'Jo h n'` }); - const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( - usersMySchemaTable, - ); - - expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); - }); - - test('mySchema :: prepared statement with placeholder in .where', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const stmt = db.select({ - id: usersMySchemaTable.id, - name: usersMySchemaTable.name, - }).from(usersMySchemaTable) - .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) - .prepare(); - const result = await stmt.execute({ id: 1 }); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - }); - - test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { - const { db } = ctx.mysql; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.execute(sql`drop table if exists \`userstest\``); - await db.execute( - sql` - create table \`userstest\` ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); - await db.insert(usersTable).values({ id: 11, name: 'Hans' }); - - const customerAlias = alias(usersTable, 'customer'); - - const result = await db - .select().from(usersMySchemaTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersMySchemaTable.id, 10)); - - expect(result).toEqual([{ - userstest: { - id: 10, - name: 'Ivan', - verified: false, - jsonb: null, - createdAt: result[0]!.userstest.createdAt, - }, - customer: { - id: 11, - name: 'Hans', - verified: false, - jsonb: null, - createdAt: result[0]!.customer!.createdAt, - }, - }]); - }); - - test('insert $returningId: serial as id', async (ctx) => { - const { db } = ctx.mysql; - - const result = await db.insert(usersTable).values({ name: 'John' }).$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - }[]>(); - - expect(result).toStrictEqual([{ id: 1 }]); - }); - - test('insert $returningId: serial as id, not first column', async (ctx) => { - const { db } = ctx.mysql; - - const usersTableDefNotFirstColumn = mysqlTable('users2', { - name: text('name').notNull(), - id: serial('id').primaryKey(), - }); - - const result = await db.insert(usersTableDefNotFirstColumn).values({ name: 'John' }).$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - }[]>(); - - expect(result).toStrictEqual([{ id: 1 }]); - }); - - test('insert $returningId: serial as id, batch insert', async (ctx) => { - const { db } = ctx.mysql; - - const result = await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]).$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - }[]>(); - - expect(result).toStrictEqual([{ id: 1 }, { id: 2 }]); - }); - - test('insert $returningId: $default as primary key', async (ctx) => { - const { db } = ctx.mysql; - - const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; - let iterator = 0; - - const usersTableDefFn = mysqlTable('users_default_fn', { - customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { - const value = uniqueKeys[iterator]!; - iterator++; - return value; - }), - name: text('name').notNull(), - }); - - await setupReturningFunctionsTest(db); - - const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) - // ^? - .$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - customId: string; - }[]>(); - - expect(result).toStrictEqual([{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { - customId: 'dyqs529eom0iczo2efxzbcut', - }]); - }); - - test('insert $returningId: $default as primary key with value', async (ctx) => { - const { db } = ctx.mysql; - - const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; - let iterator = 0; - - const usersTableDefFn = mysqlTable('users_default_fn', { - customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { - const value = uniqueKeys[iterator]!; - iterator++; - return value; - }), - name: text('name').notNull(), - }); - - await setupReturningFunctionsTest(db); - - const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) - // ^? - .$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - customId: string; - }[]>(); - - expect(result).toStrictEqual([{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); - }); - - test('mySchema :: view', async (ctx) => { - const { db } = ctx.mysql; - - const newYorkers1 = mySchema.view('new_yorkers') - .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); - - const newYorkers2 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); - - const newYorkers3 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); - - await db.insert(users2MySchemaTable).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 1 }, - { name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); - expect(result).toEqual([ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); - }); - - test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - updatedAt: timestamp('updated_at', { - fsp: 6, - }) - .notNull() - .$onUpdate(() => sql`current_timestamp`), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute( - sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` text not null, - \`updated_at\` timestamp not null - ) - `, - ); - - await db.insert(users).values({ - name: 'John', - }); - const insertResp = await db.select({ updatedAt: users.updatedAt }).from(users); - await new Promise((resolve) => setTimeout(resolve, 1000)); - - const now = Date.now(); - await new Promise((resolve) => setTimeout(resolve, 1000)); - await db.update(users).set({ - name: 'John', - }); - const updateResp = await db.select({ updatedAt: users.updatedAt }).from(users); - - expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); - expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); - }); - - test('$count separate', async (ctx) => { - const { db } = ctx.mysql; - - const countTestTable = mysqlTable('count_test', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${countTestTable}`); - await db.execute(sql`create table ${countTestTable} (id int, name text)`); - - await db.insert(countTestTable).values([ - { id: 1, name: 'First' }, - { id: 2, name: 'Second' }, - { id: 3, name: 'Third' }, - { id: 4, name: 'Fourth' }, - ]); - - const count = await db.$count(countTestTable); - - await db.execute(sql`drop table ${countTestTable}`); - - expect(count).toStrictEqual(4); - }); - - test('$count embedded', async (ctx) => { - const { db } = ctx.mysql; - - const countTestTable = mysqlTable('count_test', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${countTestTable}`); - await db.execute(sql`create table ${countTestTable} (id int, name text)`); - - await db.insert(countTestTable).values([ - { id: 1, name: 'First' }, - { id: 2, name: 'Second' }, - { id: 3, name: 'Third' }, - { id: 4, name: 'Fourth' }, - ]); - - const count = await db.select({ - count: db.$count(countTestTable), - }).from(countTestTable); - - await db.execute(sql`drop table ${countTestTable}`); - - expect(count).toStrictEqual([ - { count: 4 }, - { count: 4 }, - { count: 4 }, - { count: 4 }, - ]); - }); - - test('$count separate reuse', async (ctx) => { - const { db } = ctx.mysql; - - const countTestTable = mysqlTable('count_test', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${countTestTable}`); - await db.execute(sql`create table ${countTestTable} (id int, name text)`); - - await db.insert(countTestTable).values([ - { id: 1, name: 'First' }, - { id: 2, name: 'Second' }, - { id: 3, name: 'Third' }, - { id: 4, name: 'Fourth' }, - ]); - - const count = db.$count(countTestTable); - - const count1 = await count; - - await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); - - const count2 = await count; - - await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); - - const count3 = await count; - - await db.execute(sql`drop table ${countTestTable}`); - - expect(count1).toStrictEqual(4); - expect(count2).toStrictEqual(5); - expect(count3).toStrictEqual(6); - }); - - test('$count embedded reuse', async (ctx) => { - const { db } = ctx.mysql; - - const countTestTable = mysqlTable('count_test', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${countTestTable}`); - await db.execute(sql`create table ${countTestTable} (id int, name text)`); - - await db.insert(countTestTable).values([ - { id: 1, name: 'First' }, - { id: 2, name: 'Second' }, - { id: 3, name: 'Third' }, - { id: 4, name: 'Fourth' }, - ]); - - const count = db.select({ - count: db.$count(countTestTable), - }).from(countTestTable); - - const count1 = await count; - - await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); - - const count2 = await count; - - await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); - - const count3 = await count; - - await db.execute(sql`drop table ${countTestTable}`); - - expect(count1).toStrictEqual([ - { count: 4 }, - { count: 4 }, - { count: 4 }, - { count: 4 }, - ]); - expect(count2).toStrictEqual([ - { count: 5 }, - { count: 5 }, - { count: 5 }, - { count: 5 }, - { count: 5 }, - ]); - expect(count3).toStrictEqual([ - { count: 6 }, - { count: 6 }, - { count: 6 }, - { count: 6 }, - { count: 6 }, - { count: 6 }, - ]); - }); - - test('$count separate with filters', async (ctx) => { - const { db } = ctx.mysql; - - const countTestTable = mysqlTable('count_test', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${countTestTable}`); - await db.execute(sql`create table ${countTestTable} (id int, name text)`); - - await db.insert(countTestTable).values([ - { id: 1, name: 'First' }, - { id: 2, name: 'Second' }, - { id: 3, name: 'Third' }, - { id: 4, name: 'Fourth' }, - ]); - - const count = await db.$count(countTestTable, gt(countTestTable.id, 1)); - - await db.execute(sql`drop table ${countTestTable}`); - - expect(count).toStrictEqual(3); - }); - - test('$count embedded with filters', async (ctx) => { - const { db } = ctx.mysql; - - const countTestTable = mysqlTable('count_test', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${countTestTable}`); - await db.execute(sql`create table ${countTestTable} (id int, name text)`); - - await db.insert(countTestTable).values([ - { id: 1, name: 'First' }, - { id: 2, name: 'Second' }, - { id: 3, name: 'Third' }, - { id: 4, name: 'Fourth' }, - ]); - - const count = await db.select({ - count: db.$count(countTestTable, gt(countTestTable.id, 1)), - }).from(countTestTable); - - await db.execute(sql`drop table ${countTestTable}`); - - expect(count).toStrictEqual([ - { count: 3 }, - { count: 3 }, - { count: 3 }, - { count: 3 }, - ]); - }); - - test('limit 0', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select() - .from(usersTable) - .limit(0); - - expect(users).toEqual([]); - }); - - test('limit -1', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select() - .from(usersTable) - .limit(-1); - - expect(users.length).toBeGreaterThan(0); - }); - - test('define constraints as array', async (ctx) => { - const { db } = ctx.mysql; - - const table = mysqlTable('name', { - id: int(), - }, (t) => [ - index('name').on(t.id), - primaryKey({ columns: [t.id], name: 'custom' }), - ]); - - const { indexes, primaryKeys } = getTableConfig(table); - - expect(indexes.length).toBe(1); - expect(primaryKeys.length).toBe(1); - }); - - test('define constraints as array inside third param', async (ctx) => { - const { db } = ctx.mysql; - - const table = mysqlTable('name', { - id: int(), - }, (t) => [ - [index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' })], - ]); - - const { indexes, primaryKeys } = getTableConfig(table); - - expect(indexes.length).toBe(1); - expect(primaryKeys.length).toBe(1); - }); - - test('update with limit and order by', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([ - { name: 'Barry', verified: false }, - { name: 'Alan', verified: false }, - { name: 'Carl', verified: false }, - ]); - - await db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name)); - - const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( - asc(usersTable.name), - ); - expect(result).toStrictEqual([ - { name: 'Alan', verified: true }, - { name: 'Barry', verified: true }, - { name: 'Carl', verified: false }, - ]); - }); - - test('delete with limit and order by', async (ctx) => { - const { db } = ctx.mysql; - - await db.insert(usersTable).values([ - { name: 'Barry', verified: false }, - { name: 'Alan', verified: false }, - { name: 'Carl', verified: false }, - ]); - - await db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name)); - - const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( - asc(usersTable.name), - ); - expect(result).toStrictEqual([ - { name: 'Barry', verified: false }, - { name: 'Carl', verified: false }, - ]); - }); - - test('Object keys as column names', async (ctx) => { - const { db } = ctx.mysql; - - // Tests the following: - // Column with required config - // Column with optional config without providing a value - // Column with optional config providing a value - // Column without config - const users = mysqlTable('users', { - id: bigint({ mode: 'number' }).autoincrement().primaryKey(), - createdAt: timestamp(), - updatedAt: timestamp({ fsp: 3 }), - admin: boolean(), - }); - - await db.execute(sql`drop table if exists users`); - await db.execute( - sql` - create table users ( - \`id\` bigint auto_increment primary key, - \`createdAt\` timestamp, - \`updatedAt\` timestamp(3), - \`admin\` boolean - ) - `, - ); - - await db.insert(users).values([ - { createdAt: sql`now() - interval 30 day`, updatedAt: sql`now() - interval 1 day`, admin: true }, - { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 30 day`, admin: true }, - { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 1 day`, admin: false }, - ]); - const result = await db - .select({ id: users.id, admin: users.admin }) - .from(users) - .where( - and( - gt(users.createdAt, sql`now() - interval 7 day`), - gt(users.updatedAt, sql`now() - interval 7 day`), - ), - ); - - expect(result).toEqual([ - { id: 3, admin: false }, - ]); - - await db.execute(sql`drop table users`); - }); - - test('cross join', async (ctx) => { - const { db } = ctx.mysql; - - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Jane' }, - ]); - - await db - .insert(citiesTable) - .values([ - { name: 'Seattle' }, - { name: 'New York City' }, - ]); - - const result = await db - .select({ - user: usersTable.name, - city: citiesTable.name, - }) - .from(usersTable) - .crossJoin(citiesTable) - .orderBy(usersTable.name, citiesTable.name); - - expect(result).toStrictEqual([ - { city: 'New York City', user: 'Jane' }, - { city: 'Seattle', user: 'Jane' }, - { city: 'New York City', user: 'John' }, - { city: 'Seattle', user: 'John' }, - ]); - }); - - test('left join (lateral)', async (ctx) => { - const { db } = ctx.mysql; - - await db - .insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const sq = db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: users2Table.cityId, - }) - .from(users2Table) - .where(eq(users2Table.cityId, citiesTable.id)) - .as('sq'); - - const res = await db - .select({ - cityId: citiesTable.id, - cityName: citiesTable.name, - userId: sq.userId, - userName: sq.userName, - }) - .from(citiesTable) - .leftJoinLateral(sq, sql`true`); - - expect(res).toStrictEqual([ - { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, - { cityId: 2, cityName: 'London', userId: null, userName: null }, - ]); - }); - - test('inner join (lateral)', async (ctx) => { - const { db } = ctx.mysql; - - await db - .insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const sq = db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: users2Table.cityId, - }) - .from(users2Table) - .where(eq(users2Table.cityId, citiesTable.id)) - .as('sq'); - - const res = await db - .select({ - cityId: citiesTable.id, - cityName: citiesTable.name, - userId: sq.userId, - userName: sq.userName, - }) - .from(citiesTable) - .innerJoinLateral(sq, sql`true`); - - expect(res).toStrictEqual([ - { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, - ]); - }); - - test('cross join (lateral)', async (ctx) => { - const { db } = ctx.mysql; - - await db - .insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }, { id: 3, name: 'Berlin' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }, { - name: 'Patrick', - cityId: 2, - }]); - - const sq = db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: users2Table.cityId, - }) - .from(users2Table) - .where(not(like(citiesTable.name, 'L%'))) - .as('sq'); - - const res = await db - .select({ - cityId: citiesTable.id, - cityName: citiesTable.name, - userId: sq.userId, - userName: sq.userName, - }) - .from(citiesTable) - .crossJoinLateral(sq) - .orderBy(citiesTable.id, sq.userId); - - expect(res).toStrictEqual([ - { - cityId: 1, - cityName: 'Paris', - userId: 1, - userName: 'John', - }, - { - cityId: 1, - cityName: 'Paris', - userId: 2, - userName: 'Jane', - }, - { - cityId: 1, - cityName: 'Paris', - userId: 3, - userName: 'Patrick', - }, - { - cityId: 3, - cityName: 'Berlin', - userId: 1, - userName: 'John', - }, - { - cityId: 3, - cityName: 'Berlin', - userId: 2, - userName: 'Jane', - }, - { - cityId: 3, - cityName: 'Berlin', - userId: 3, - userName: 'Patrick', - }, - ]); - }); - - test('RQB v2 simple find first - no rows', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find first - multiple rows', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find first - with relation', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - id: 1, - createdAt: date, - name: 'First', - posts: [{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find first - placeholders', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find many - no rows', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find many - multiple rows', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }, { - id: 1, - createdAt: date, - name: 'First', - }]); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find many - with relation', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }]); - } finally { - await clear(db); - } - }); - - test('RQB v2 simple find many - placeholders', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }]); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find first - no rows', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find first - multiple rows', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find first - with relation', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - id: 1, - createdAt: date, - name: 'First', - posts: [{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find first - placeholders', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find many - no rows', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find many - multiple rows', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }, { - id: 1, - createdAt: date, - name: 'First', - }]); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find many - with relation', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }]); - }); - } finally { - await clear(db); - } - }); - - test('RQB v2 transaction find many - placeholders', async (ctx) => { - const { db } = ctx.mysql; - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }]); - }); - } finally { - await clear(db); - } - }); - - test('all types', async (ctx) => { - const { db } = ctx.mysql; - - await db.execute(sql` - CREATE TABLE \`all_types\` ( - \`serial\` serial AUTO_INCREMENT, - \`bigint53\` bigint, - \`bigint64\` bigint, - \`bigint_string\` bigint, - \`binary\` binary, - \`boolean\` boolean, - \`char\` char, - \`date\` date, - \`date_str\` date, - \`datetime\` datetime, - \`datetime_str\` datetime, - \`decimal\` decimal, - \`decimal_num\` decimal(30), - \`decimal_big\` decimal(30), - \`double\` double, - \`float\` float, - \`int\` int, - \`json\` json, - \`med_int\` mediumint, - \`small_int\` smallint, - \`real\` real, - \`text\` text, - \`time\` time, - \`timestamp\` timestamp, - \`timestamp_str\` timestamp, - \`tiny_int\` tinyint, - \`varbin\` varbinary(16), - \`varchar\` varchar(255), - \`year\` year, - \`enum\` enum('enV1','enV2') - ); - `); - - await db.insert(allTypesTable).values({ - serial: 1, - bigint53: 9007199254740991, - bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', - binary: '1', - boolean: true, - char: 'c', - date: new Date(1741743161623), - dateStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - datetime: new Date(1741743161623), - datetimeStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - decimal: '47521', - decimalNum: 9007199254740991, - decimalBig: 5044565289845416380n, - double: 15.35325689124218, - enum: 'enV1', - float: 1.048596, - real: 1.048596, - text: 'C4-', - int: 621, - json: { - str: 'strval', - arr: ['str', 10], - }, - medInt: 560, - smallInt: 14, - time: '04:13:22', - timestamp: new Date(1741743161623), - timestampStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - tinyInt: 7, - varbin: '1010110101001101', - varchar: 'VCHAR', - year: 2025, - }); - - const rawRes = await db.select().from(allTypesTable); - - type ExpectedType = { - serial: number; - bigint53: number | null; - bigint64: bigint | null; - bigintString: string | null; - binary: string | null; - boolean: boolean | null; - char: string | null; - date: Date | null; - dateStr: string | null; - datetime: Date | null; - datetimeStr: string | null; - decimal: string | null; - decimalNum: number | null; - decimalBig: bigint | null; - double: number | null; - float: number | null; - int: number | null; - json: unknown; - medInt: number | null; - smallInt: number | null; - real: number | null; - text: string | null; - time: string | null; - timestamp: Date | null; - timestampStr: string | null; - tinyInt: number | null; - varbin: string | null; - varchar: string | null; - year: number | null; - enum: 'enV1' | 'enV2' | null; - }[]; - - const expectedRes: ExpectedType = [ - { - serial: 1, - bigint53: 9007199254740991, - bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', - binary: '1', - boolean: true, - char: 'c', - date: new Date('2025-03-12T00:00:00.000Z'), - dateStr: '2025-03-12', - datetime: new Date('2025-03-12T01:32:42.000Z'), - datetimeStr: '2025-03-12 01:32:41', - decimal: '47521', - decimalNum: 9007199254740991, - decimalBig: 5044565289845416380n, - double: 15.35325689124218, - float: 1.0486, - int: 621, - json: { arr: ['str', 10], str: 'strval' }, - medInt: 560, - smallInt: 14, - real: 1.048596, - text: 'C4-', - time: '04:13:22', - timestamp: new Date('2025-03-12T01:32:42.000Z'), - timestampStr: '2025-03-12 01:32:41', - tinyInt: 7, - varbin: '1010110101001101', - varchar: 'VCHAR', - year: 2025, - enum: 'enV1', - }, - ]; - - expectTypeOf(rawRes).toEqualTypeOf(); - expect(rawRes).toStrictEqual(expectedRes); - }); - }); - - test('insert into ... select', async (ctx) => { - const { db } = ctx.mysql; - - const notifications = mysqlTable('notifications', { - id: serial('id').primaryKey(), - sentAt: timestamp('sent_at').notNull().defaultNow(), - message: text('message').notNull(), - }); - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - const userNotications = mysqlTable('user_notifications', { - userId: int('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), - notificationId: int('notification_id').notNull().references(() => notifications.id, { onDelete: 'cascade' }), - }, (t) => ({ - pk: primaryKey({ columns: [t.userId, t.notificationId] }), - })); - - await db.execute(sql`drop table if exists ${notifications}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${userNotications}`); - await db.execute(sql` - create table ${notifications} ( - \`id\` serial primary key, - \`sent_at\` timestamp not null default now(), - \`message\` text not null - ) - `); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` text not null - ) - `); - await db.execute(sql` - create table ${userNotications} ( - \`user_id\` int references users(id) on delete cascade, - \`notification_id\` int references notifications(id) on delete cascade, - primary key (user_id, notification_id) - ) - `); - - await db - .insert(notifications) - .values({ message: 'You are one of the 3 lucky winners!' }); - const newNotification = await db - .select({ id: notifications.id }) - .from(notifications) - .then((result) => result[0]); - - await db.insert(users).values([ - { name: 'Alice' }, - { name: 'Bob' }, - { name: 'Charlie' }, - { name: 'David' }, - { name: 'Eve' }, - ]); - - await db - .insert(userNotications) - .select( - db - .select({ - userId: users.id, - notificationId: sql`(${newNotification!.id})`.as('notification_id'), - }) - .from(users) - .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) - .orderBy(asc(users.id)), - ); - const sentNotifications = await db.select().from(userNotications); - - expect(sentNotifications).toStrictEqual([ - { userId: 1, notificationId: newNotification!.id }, - { userId: 3, notificationId: newNotification!.id }, - { userId: 5, notificationId: newNotification!.id }, - ]); - }); - - test('insert into ... select with keys in different order', async (ctx) => { - const { db } = ctx.mysql; - - const users1 = mysqlTable('users1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - const users2 = mysqlTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users1}`); - await db.execute(sql`drop table if exists ${users2}`); - await db.execute(sql` - create table ${users1} ( - \`id\` serial primary key, - \`name\` text not null - ) - `); - await db.execute(sql` - create table ${users2} ( - \`id\` serial primary key, - \`name\` text not null - ) - `); - - expect( - () => - db - .insert(users1) - .select( - db - .select({ - name: users2.name, - id: users2.id, - }) - .from(users2), - ), - ).toThrowError(); - }); - - test('MySqlTable :: select with `use index` hint', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }, () => [usersTableNameIndex]); - const usersTableNameIndex = index('users_name_index').on(users.name); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql`create index users_name_index ON users(name)`); - - await db.insert(users).values([ - { name: 'Alice' }, - { name: 'Bob' }, - { name: 'Charlie' }, - { name: 'David' }, - { name: 'Eve' }, - ]); - - const result = await db.select() - .from(users, { - useIndex: [usersTableNameIndex], - }) - .where(eq(users.name, 'David')); - - expect(result).toHaveLength(1); - expect(result).toEqual([{ id: 4, name: 'David' }]); - }); - - test('MySqlTable :: select with `use index` hint on 1 index', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }, () => [usersTableNameIndex]); - const usersTableNameIndex = index('users_name_index').on(users.name); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql`create index users_name_index ON users(name)`); - - const query = db.select() - .from(users, { - useIndex: usersTableNameIndex, - }) - .where(eq(users.name, 'David')) - .toSQL(); - - expect(query.sql).to.include('USE INDEX (users_name_index)'); - }); - - test('MySqlTable :: select with `use index` hint on multiple indexes', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - age: int('age').notNull(), - }, () => [usersTableNameIndex, usersTableAgeIndex]); - const usersTableNameIndex = index('users_name_index').on(users.name); - const usersTableAgeIndex = index('users_age_index').on(users.age); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null, - \`age\` int not null - ) - `); - await db.execute(sql`create index users_name_index ON users(name)`); - await db.execute(sql`create index users_age_index ON users(age)`); - - const query = db.select() - .from(users, { - useIndex: [usersTableNameIndex, usersTableAgeIndex], - }) - .where(eq(users.name, 'David')) - .toSQL(); - - expect(query.sql).to.include('USE INDEX (users_name_index, users_age_index)'); - }); - - test('MySqlTable :: select with `use index` hint on not existed index', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }, () => [usersTableNameIndex]); - const usersTableNameIndex = index('users_name_index').on(users.name); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql`create index users_name_index ON users(name)`); - - await db.insert(users).values([ - { name: 'Alice' }, - { name: 'Bob' }, - { name: 'Charlie' }, - { name: 'David' }, - { name: 'Eve' }, - ]); - - await expect((async () => { - return await db.select() - .from(users, { - useIndex: ['some_other_index'], - }) - .where(eq(users.name, 'David')); - })()).rejects.toThrowError(); - }); - - test('MySqlTable :: select with `use index` + `force index` incompatible hints', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - age: int('age').notNull(), - }, () => [usersTableNameIndex, usersTableAgeIndex]); - const usersTableNameIndex = index('users_name_index').on(users.name); - const usersTableAgeIndex = index('users_age_index').on(users.age); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null, - \`age\` int not null - ) - `); - await db.execute(sql`create index users_name_index ON users(name)`); - await db.execute(sql`create index users_age_index ON users(age)`); - - await db.insert(users).values([ - { name: 'Alice', age: 18 }, - { name: 'Bob', age: 19 }, - { name: 'Charlie', age: 20 }, - { name: 'David', age: 21 }, - { name: 'Eve', age: 22 }, - ]); - - await expect((async () => { - return await db.select() - .from(users, { - useIndex: [usersTableNameIndex], - forceIndex: [usersTableAgeIndex], - }) - .where(eq(users.name, 'David')); - })()).rejects.toThrowError(); - }); - - test('MySqlTable :: select with join `use index` hint', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - - await db.insert(users).values([ - { name: 'Alice' }, - { name: 'Bob' }, - { name: 'Charlie' }, - { name: 'David' }, - { name: 'Eve' }, - ]); - - await db.insert(posts).values([ - { text: 'Alice post', userId: 1 }, - { text: 'Bob post', userId: 2 }, - { text: 'Charlie post', userId: 3 }, - { text: 'David post', userId: 4 }, - { text: 'Eve post', userId: 5 }, - ]); - - const result = await db.select({ - userId: users.id, - name: users.name, - postId: posts.id, - text: posts.text, - }) - .from(users) - .leftJoin(posts, eq(users.id, posts.userId), { - useIndex: [postsTableUserIdIndex], - }) - .where(and( - eq(users.name, 'David'), - eq(posts.text, 'David post'), - )); - - expect(result).toHaveLength(1); - expect(result).toEqual([{ userId: 4, name: 'David', postId: 4, text: 'David post' }]); - }); - - test('MySqlTable :: select with join `use index` hint on 1 index', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - - const query = db.select({ - userId: users.id, - name: users.name, - postId: posts.id, - text: posts.text, - }) - .from(users) - .leftJoin(posts, eq(users.id, posts.userId), { - useIndex: postsTableUserIdIndex, - }) - .where(and( - eq(users.name, 'David'), - eq(posts.text, 'David post'), - )).toSQL(); - - expect(query.sql).to.include('USE INDEX (posts_user_id_index)'); - }); - - test('MySqlTable :: select with cross join `use index` hint', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - - await db.insert(users).values([ - { id: 1, name: 'Alice' }, - { id: 2, name: 'Bob' }, - ]); - - await db.insert(posts).values([ - { id: 1, text: 'Alice post', userId: 1 }, - { id: 2, text: 'Bob post', userId: 2 }, - ]); - - const result = await db.select() - .from(users) - .crossJoin(posts, { - useIndex: [postsTableUserIdIndex], - }) - .orderBy(users.id, posts.id); - - expect(result).toStrictEqual([{ - users: { id: 1, name: 'Alice' }, - posts: { id: 1, text: 'Alice post', userId: 1 }, - }, { - users: { id: 1, name: 'Alice' }, - posts: { id: 2, text: 'Bob post', userId: 2 }, - }, { - users: { id: 2, name: 'Bob' }, - posts: { id: 1, text: 'Alice post', userId: 1 }, - }, { - users: { id: 2, name: 'Bob' }, - posts: { id: 2, text: 'Bob post', userId: 2 }, - }]); - }); - - test('MySqlTable :: select with cross join `use index` hint on 1 index', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - - const query = db.select({ - userId: users.id, - name: users.name, - postId: posts.id, - text: posts.text, - }) - .from(users) - .crossJoin(posts, { - useIndex: postsTableUserIdIndex, - }) - .where(and( - eq(users.name, 'David'), - eq(posts.text, 'David post'), - )).toSQL(); - - expect(query.sql).to.include('USE INDEX (posts_user_id_index)'); - }); - - test('MySqlTable :: select with join `use index` hint on multiple indexes', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex, postsTableTextIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - const postsTableTextIndex = index('posts_text_index').on(posts.text); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - await db.execute(sql`create index posts_text_index ON posts(text)`); - - const query = db.select({ - userId: users.id, - name: users.name, - postId: posts.id, - text: posts.text, - }) - .from(users) - .leftJoin(posts, eq(users.id, posts.userId), { - useIndex: [postsTableUserIdIndex, postsTableTextIndex], - }) - .where(and( - eq(users.name, 'David'), - eq(posts.text, 'David post'), - )).toSQL(); - - expect(query.sql).to.include('USE INDEX (posts_user_id_index, posts_text_index)'); - }); - - test('MySqlTable :: select with join `use index` hint on not existed index', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - - await db.insert(users).values([ - { name: 'Alice' }, - { name: 'Bob' }, - { name: 'Charlie' }, - { name: 'David' }, - { name: 'Eve' }, - ]); - - await db.insert(posts).values([ - { text: 'Alice post', userId: 1 }, - { text: 'Bob post', userId: 2 }, - { text: 'Charlie post', userId: 3 }, - { text: 'David post', userId: 4 }, - { text: 'Eve post', userId: 5 }, - ]); - - await expect((async () => { - return await db.select({ - userId: users.id, - name: users.name, - postId: posts.id, - text: posts.text, - }) - .from(users) - .leftJoin(posts, eq(users.id, posts.userId), { - useIndex: ['some_other_index'], - }) - .where(and( - eq(users.name, 'David'), - eq(posts.text, 'David post'), - )); - })()).rejects.toThrowError(); - }); - - test('MySqlTable :: select with join `use index` + `force index` incompatible hints', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex, postsTableTextIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - const postsTableTextIndex = index('posts_text_index').on(posts.text); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - await db.execute(sql`create index posts_text_index ON posts(text)`); - - await db.insert(users).values([ - { name: 'Alice' }, - { name: 'Bob' }, - { name: 'Charlie' }, - { name: 'David' }, - { name: 'Eve' }, - ]); - - await db.insert(posts).values([ - { text: 'Alice post', userId: 1 }, - { text: 'Bob post', userId: 2 }, - { text: 'Charlie post', userId: 3 }, - { text: 'David post', userId: 4 }, - { text: 'Eve post', userId: 5 }, - ]); - - await expect((async () => { - return await db.select({ - userId: users.id, - name: users.name, - postId: posts.id, - text: posts.text, - }) - .from(users) - .leftJoin(posts, eq(users.id, posts.userId), { - useIndex: [postsTableUserIdIndex], - forceIndex: [postsTableTextIndex], - }) - .where(and( - eq(users.name, 'David'), - eq(posts.text, 'David post'), - )); - })()).rejects.toThrowError(); - }); - - test('MySqlTable :: select with Subquery join `use index`', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - - await db.insert(users).values([ - { name: 'Alice' }, - { name: 'Bob' }, - { name: 'Charlie' }, - { name: 'David' }, - { name: 'Eve' }, - ]); - - await db.insert(posts).values([ - { text: 'Alice post', userId: 1 }, - { text: 'Bob post', userId: 2 }, - { text: 'Charlie post', userId: 3 }, - { text: 'David post', userId: 4 }, - { text: 'Eve post', userId: 5 }, - ]); - - const sq = db.select().from(posts, { useIndex: [postsTableUserIdIndex] }).where(eq(posts.userId, 1)).as('sq'); - - const result = await db.select({ - userId: users.id, - name: users.name, - postId: sq.id, - text: sq.text, - }) - .from(users) - .leftJoin(sq, eq(users.id, sq.userId)) - .where(eq(users.name, 'Alice')); - - expect(result).toHaveLength(1); - expect(result).toEqual([{ userId: 1, name: 'Alice', postId: 1, text: 'Alice post' }]); - }); - - test('MySqlTable :: select with Subquery join with `use index` in join', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }); - - const posts = mysqlTable('posts', { - id: serial('id').primaryKey(), - text: varchar('text', { length: 100 }).notNull(), - userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), - }, () => [postsTableUserIdIndex]); - const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); - - await db.execute(sql`drop table if exists ${posts}`); - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql` - create table ${posts} ( - \`id\` serial primary key, - \`text\` varchar(100) not null, - \`user_id\` int not null references users(id) on delete cascade - ) - `); - await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); - - const sq = db.select().from(posts).where(eq(posts.userId, 1)).as('sq'); - - const query = db.select({ - userId: users.id, - name: users.name, - postId: sq.id, - text: sq.text, - }) - .from(users) - // @ts-expect-error - .leftJoin(sq, eq(users.id, sq.userId, { useIndex: [postsTableUserIdIndex] })) - .where(eq(users.name, 'Alice')) - .toSQL(); - - expect(query.sql).not.include('USE INDEX'); - }); - - test('View :: select with `use index` hint', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }, () => [usersTableNameIndex]); - - const usersTableNameIndex = index('users_name_index').on(users.name); - - const usersView = mysqlView('users_view').as((qb) => qb.select().from(users)); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql`create index users_name_index ON users(name)`); - await db.execute(sql`create view ${usersView} as select * from ${users}`); - - // @ts-expect-error - const query = db.select().from(usersView, { - useIndex: [usersTableNameIndex], - }).toSQL(); - - expect(query.sql).not.include('USE INDEX'); - - await db.execute(sql`drop view ${usersView}`); - }); - - test('Subquery :: select with `use index` hint', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: varchar('name', { length: 100 }).notNull(), - }, () => [usersTableNameIndex]); - const usersTableNameIndex = index('users_name_index').on(users.name); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` varchar(100) not null - ) - `); - await db.execute(sql`create index users_name_index ON users(name)`); - - const sq = db.select().from(users).as('sq'); - - // @ts-expect-error - const query = db.select().from(sq, { - useIndex: [usersTableNameIndex], - }).toSQL(); - - expect(query.sql).not.include('USE INDEX'); - }); - - test('sql operator as cte', async (ctx) => { - const { db } = ctx.mysql; - - const users = mysqlTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); - await db.insert(users).values([ - { name: 'John' }, - { name: 'Jane' }, - ]); - - const sq1 = db.$with('sq', { - userId: users.id, - data: { - name: users.name, - }, - }).as(sql`select * from ${users} where ${users.name} = 'John'`); - const result1 = await db.with(sq1).select().from(sq1); - - const sq2 = db.$with('sq', { - userId: users.id, - data: { - name: users.name, - }, - }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); - const result2 = await db.with(sq2).select().from(sq1); - - expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); - expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); - }); +import type { Test } from './instrumentation'; +import { tests as tests1 } from './mysql-common-1'; +import { tests as tests2 } from './mysql-common-2'; +import { tests as tests3 } from './mysql-common-3'; +import { tests as tests4 } from './mysql-common-4'; +import { tests as tests5 } from './mysql-common-5'; +import { tests as tests6 } from './mysql-common-6'; +import { tests as tests7 } from './mysql-common-7'; +import { tests as tests8 } from './mysql-common-8'; + +export function tests(test: Test, exclude: Set = new Set([])) { + test.beforeEach(async ({ task, skip }) => { + if (exclude.has(task.name)) skip(); + }); + + tests1(test, exclude); + tests2(test, exclude); + tests3(test, exclude); + tests4(test, exclude); + tests5(test, exclude); + tests6(test, exclude); + tests7(test, exclude); + tests8(test, exclude); } diff --git a/integration-tests/tests/mysql/mysql-planetscale.test.ts b/integration-tests/tests/mysql/mysql-planetscale.test.ts deleted file mode 100644 index c9ec2bd535..0000000000 --- a/integration-tests/tests/mysql/mysql-planetscale.test.ts +++ /dev/null @@ -1,92 +0,0 @@ -import { Client } from '@planetscale/database'; -import type { PlanetScaleDatabase } from 'drizzle-orm/planetscale-serverless'; -import { drizzle } from 'drizzle-orm/planetscale-serverless'; -import { beforeAll, beforeEach } from 'vitest'; -import { skipTests } from '~/common'; -import { tests } from './mysql-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './mysql-common-cache'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -let db: PlanetScaleDatabase; -let dbGlobalCached: PlanetScaleDatabase; -let cachedDb: PlanetScaleDatabase; - -beforeAll(async () => { - const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING']! }); - db = drizzle(client, { - logger: ENABLE_LOGGING, - relations, - }); - cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); -}); - -beforeEach((ctx) => { - ctx.mysql = { - db, - }; - ctx.cachedMySQL = { - db: cachedDb, - dbGlobalCached, - }; -}); - -skipTests([ - 'mySchema :: view', - 'mySchema :: select from tables with same name from different schema using alias', - 'mySchema :: prepared statement with placeholder in .where', - 'mySchema :: insert with spaces', - 'mySchema :: select with group by as column + sql', - 'mySchema :: select with group by as field', - 'mySchema :: insert many', - 'mySchema :: insert with overridden default values', - 'mySchema :: insert + select', - 'mySchema :: delete with returning all fields', - 'mySchema :: update with returning partial', - 'mySchema :: delete returning sql', - 'mySchema :: insert returning sql', - 'mySchema :: select typed sql', - 'mySchema :: select sql', - 'mySchema :: select all fields', - 'test $onUpdateFn and $onUpdate works updating', - 'test $onUpdateFn and $onUpdate works as $default', - 'set operations (mixed all) as function with subquery', - 'set operations (mixed) from query builder', - 'set operations (except all) as function', - 'set operations (except all) from query builder', - 'set operations (except) as function', - 'set operations (except) from query builder', - 'set operations (intersect all) as function', - 'set operations (intersect all) from query builder', - 'set operations (intersect) as function', - 'set operations (intersect) from query builder', - 'select iterator w/ prepared statement', - 'select iterator', - 'subquery with view', - 'join on aliased sql from with clause', - 'with ... delete', - 'with ... update', - 'with ... select', - - // to redefine in this file - 'utc config for datetime', - 'transaction', - 'transaction with options (set isolationLevel)', - 'having', - 'select count()', - 'insert via db.execute w/ query builder', - 'insert via db.execute + select via db.execute', - 'insert many with returning', - 'delete with returning partial', - 'delete with returning all fields', - 'update with returning partial', - 'update with returning all fields', - 'update returning sql', - 'delete returning sql', - 'insert returning sql', -]); - -tests('planetscale'); -cacheTests(); diff --git a/integration-tests/tests/mysql/mysql-proxy.test.ts b/integration-tests/tests/mysql/mysql-proxy.test.ts deleted file mode 100644 index 8f94c2598a..0000000000 --- a/integration-tests/tests/mysql/mysql-proxy.test.ts +++ /dev/null @@ -1,148 +0,0 @@ -import retry from 'async-retry'; -import type { MySqlRemoteDatabase } from 'drizzle-orm/mysql-proxy'; -import { drizzle as proxyDrizzle } from 'drizzle-orm/mysql-proxy'; -import * as mysql from 'mysql2/promise'; -import { afterAll, beforeAll, beforeEach } from 'vitest'; -import { skipTests } from '~/common'; -import { createDockerDB, tests } from './mysql-common'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -// eslint-disable-next-line drizzle-internal/require-entity-kind -class ServerSimulator { - constructor(private db: mysql.Connection) {} - - async query(sql: string, params: any[], method: 'all' | 'execute') { - if (method === 'all') { - try { - const result = await this.db.query({ - sql, - values: params, - rowsAsArray: true, - typeCast: function(field: any, next: any) { - if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { - return field.string(); - } - return next(); - }, - }); - - return { data: result[0] as any }; - } catch (e: any) { - return { error: e }; - } - } else if (method === 'execute') { - try { - const result = await this.db.query({ - sql, - values: params, - typeCast: function(field: any, next: any) { - if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { - return field.string(); - } - return next(); - }, - }); - - return { data: result as any }; - } catch (e: any) { - return { error: e }; - } - } else { - return { error: 'Unknown method value' }; - } - } - - async migrations(queries: string[]) { - await this.db.query('START TRANSACTION'); - try { - for (const query of queries) { - await this.db.query(query); - } - await this.db.query('COMMIT'); - } catch (e) { - await this.db.query('ROLLBACK'); - throw e; - } - - return {}; - } -} - -let db: MySqlRemoteDatabase; -let client: mysql.Connection; -let serverSimulator: ServerSimulator; - -beforeAll(async () => { - let connectionString; - if (process.env['MYSQL_CONNECTION_STRING']) { - connectionString = process.env['MYSQL_CONNECTION_STRING']; - } else { - const { connectionString: conStr } = await createDockerDB(); - connectionString = conStr; - } - client = await retry(async () => { - client = await mysql.createConnection({ - uri: connectionString, - supportBigNumbers: true, - }); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - serverSimulator = new ServerSimulator(client); - db = proxyDrizzle(async (sql, params, method) => { - try { - const response = await serverSimulator.query(sql, params, method); - - if (response.error !== undefined) { - throw response.error; - } - - return { rows: response.data }; - } catch (e: any) { - console.error('Error from mysql proxy server:', e.message); - throw e; - } - }, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - await client?.end(); -}); - -beforeEach((ctx) => { - ctx.mysql = { - db, - }; -}); - -skipTests([ - 'select iterator w/ prepared statement', - 'select iterator', - 'nested transaction rollback', - 'nested transaction', - 'transaction rollback', - 'transaction', - 'transaction with options (set isolationLevel)', - 'migrator', - 'RQB v2 transaction find first - no rows', - 'RQB v2 transaction find first - multiple rows', - 'RQB v2 transaction find first - with relation', - 'RQB v2 transaction find first - placeholders', - 'RQB v2 transaction find many - no rows', - 'RQB v2 transaction find many - multiple rows', - 'RQB v2 transaction find many - with relation', - 'RQB v2 transaction find many - placeholders', -]); - -tests(); diff --git a/integration-tests/tests/mysql-returning.test.ts b/integration-tests/tests/mysql/mysql-returning.test.ts similarity index 98% rename from integration-tests/tests/mysql-returning.test.ts rename to integration-tests/tests/mysql/mysql-returning.test.ts index 557d8a756b..3c66397305 100644 --- a/integration-tests/tests/mysql-returning.test.ts +++ b/integration-tests/tests/mysql/mysql-returning.test.ts @@ -83,7 +83,7 @@ test.before(async (t) => { await ctx.mysqlContainer?.stop().catch(console.error); throw lastError; } - ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING ? new DefaultLogger() : undefined }); + ctx.db = drizzle({ client: ctx.client, logger: ENABLE_LOGGING ? new DefaultLogger() : undefined }); }); test.after.always(async (t) => { diff --git a/integration-tests/tests/relational/mysql-v1.test.ts b/integration-tests/tests/mysql/mysql-v1.test.ts similarity index 99% rename from integration-tests/tests/relational/mysql-v1.test.ts rename to integration-tests/tests/mysql/mysql-v1.test.ts index ccf2e80771..e14d8490cb 100644 --- a/integration-tests/tests/relational/mysql-v1.test.ts +++ b/integration-tests/tests/mysql/mysql-v1.test.ts @@ -6,7 +6,7 @@ import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './mysql.schema.ts'; +import * as schema from './mysql.schema'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable, usersV1, usersTableV1 } = schema; @@ -83,7 +83,7 @@ beforeAll(async () => { await mysqlContainer?.stop().catch(console.error); throw lastError; } - db = drizzle(client, { schema, logger: ENABLE_LOGGING, mode: 'default', casing: 'snake_case' }); + db = drizzle({ client, schema, logger: ENABLE_LOGGING, mode: 'default', casing: 'snake_case' }); }); afterAll(async () => { diff --git a/integration-tests/tests/relational/issues-schemas/duplicates/mysql/mysql.duplicates.test.ts b/integration-tests/tests/mysql/mysql.duplicates.test.ts similarity index 98% rename from integration-tests/tests/relational/issues-schemas/duplicates/mysql/mysql.duplicates.test.ts rename to integration-tests/tests/mysql/mysql.duplicates.test.ts index 0ee22792fa..2fdce707d9 100644 --- a/integration-tests/tests/relational/issues-schemas/duplicates/mysql/mysql.duplicates.test.ts +++ b/integration-tests/tests/mysql/mysql.duplicates.test.ts @@ -6,7 +6,7 @@ import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './mysql.duplicates.ts'; +import * as schema from './mysql.duplicates'; const ENABLE_LOGGING = false; @@ -71,7 +71,7 @@ beforeAll(async () => { await mysqlContainer?.stop().catch(console.error); throw lastError; } - db = drizzle(client, { schema, logger: ENABLE_LOGGING, mode: 'default' }); + db = drizzle({ client, schema, logger: ENABLE_LOGGING, mode: 'default' }); }); afterAll(async () => { diff --git a/integration-tests/tests/relational/issues-schemas/duplicates/mysql/mysql.duplicates.ts b/integration-tests/tests/mysql/mysql.duplicates.ts similarity index 90% rename from integration-tests/tests/relational/issues-schemas/duplicates/mysql/mysql.duplicates.ts rename to integration-tests/tests/mysql/mysql.duplicates.ts index d77ca90be3..93f7a26e1e 100644 --- a/integration-tests/tests/relational/issues-schemas/duplicates/mysql/mysql.duplicates.ts +++ b/integration-tests/tests/mysql/mysql.duplicates.ts @@ -24,9 +24,7 @@ export const artists = mysqlTable( website: varchar('website', { length: 255 }).notNull(), spotifyId: varchar('spotify_id', { length: 32 }), }, - (table) => ({ - nameEnIndex: index('artists__name_en__idx').on(table.nameEn), - }), + (table) => [index('artists__name_en__idx').on(table.nameEn)], ); export const members = mysqlTable('members', { @@ -52,12 +50,12 @@ export const artistsToMembers = mysqlTable( memberId: int('member_id').notNull(), artistId: int('artist_id').notNull(), }, - (table) => ({ - memberArtistIndex: index('artist_to_member__artist_id__member_id__idx').on( + (table) => [ + index('artist_to_member__artist_id__member_id__idx').on( table.memberId, table.artistId, ), - }), + ], ); export const albums = mysqlTable( @@ -77,10 +75,7 @@ export const albums = mysqlTable( image: varchar('image', { length: 255 }).notNull(), spotifyId: varchar('spotify_id', { length: 32 }), }, - (table) => ({ - artistIndex: index('albums__artist_id__idx').on(table.artistId), - nameIndex: index('albums__name__idx').on(table.name), - }), + (table) => [index('albums__artist_id__idx').on(table.artistId), index('albums__name__idx').on(table.name)], ); // relations diff --git a/integration-tests/tests/relational/mysql.planetscale-v1.test.ts b/integration-tests/tests/mysql/mysql.planetscale-v1.test.ts similarity index 99% rename from integration-tests/tests/relational/mysql.planetscale-v1.test.ts rename to integration-tests/tests/mysql/mysql.planetscale-v1.test.ts index 983827ff60..60da29fca1 100644 --- a/integration-tests/tests/relational/mysql.planetscale-v1.test.ts +++ b/integration-tests/tests/mysql/mysql.planetscale-v1.test.ts @@ -4,7 +4,7 @@ import { Client } from '@planetscale/database'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { drizzle, type PlanetScaleDatabase } from 'drizzle-orm/planetscale-serverless'; import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './mysql.schema.ts'; +import * as schema from './mysql.schema'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; @@ -19,13 +19,17 @@ let db: PlanetScaleDatabase; beforeAll(async () => { db = drizzle( - new Client({ - url: process.env['PLANETSCALE_CONNECTION_STRING']!, - // host: process.env['DATABASE_HOST']!, - // username: process.env['DATABASE_USERNAME']!, - // password: process.env['DATABASE_PASSWORD']!, - }), - { schema, logger: ENABLE_LOGGING, casing: 'snake_case' }, + { + client: new Client({ + url: process.env['PLANETSCALE_CONNECTION_STRING']!, + // host: process.env['DATABASE_HOST']!, + // username: process.env['DATABASE_USERNAME']!, + // password: process.env['DATABASE_PASSWORD']!, + }), + schema, + logger: ENABLE_LOGGING, + casing: 'snake_case', + }, ); await Promise.all([ diff --git a/integration-tests/tests/relational/mysql.planetscale.test.ts b/integration-tests/tests/mysql/mysql.planetscale.test.ts similarity index 99% rename from integration-tests/tests/relational/mysql.planetscale.test.ts rename to integration-tests/tests/mysql/mysql.planetscale.test.ts index efd0cfb7bc..078a426c12 100644 --- a/integration-tests/tests/relational/mysql.planetscale.test.ts +++ b/integration-tests/tests/mysql/mysql.planetscale.test.ts @@ -5,7 +5,7 @@ import { DrizzleError, sql, TransactionRollbackError } from 'drizzle-orm'; import { alias } from 'drizzle-orm/mysql-core'; import { drizzle, type PlanetScaleDatabase } from 'drizzle-orm/planetscale-serverless'; import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './mysql.relations.ts'; +import relations from './mysql.relations'; import { allTypesTable, commentsTable, @@ -14,7 +14,7 @@ import { postsTable, usersTable, usersToGroupsTable, -} from './mysql.schema.ts'; +} from './mysql.schema'; const ENABLE_LOGGING = false; @@ -22,10 +22,14 @@ let db: PlanetScaleDatabase; beforeAll(async () => { db = drizzle( - new Client({ - url: process.env['PLANETSCALE_CONNECTION_STRING']!, - }), - { relations, logger: ENABLE_LOGGING, casing: 'snake_case' }, + { + client: new Client({ + url: process.env['PLANETSCALE_CONNECTION_STRING']!, + }), + relations, + logger: ENABLE_LOGGING, + casing: 'snake_case', + }, ); await Promise.all([ diff --git a/integration-tests/tests/relational/mysql.relations.ts b/integration-tests/tests/mysql/mysql.relations.ts similarity index 99% rename from integration-tests/tests/relational/mysql.relations.ts rename to integration-tests/tests/mysql/mysql.relations.ts index c5807c2469..ef8c9fb86e 100644 --- a/integration-tests/tests/relational/mysql.relations.ts +++ b/integration-tests/tests/mysql/mysql.relations.ts @@ -1,5 +1,5 @@ import { defineRelations } from 'drizzle-orm'; -import * as schema from './mysql.schema.ts'; +import * as schema from './mysql.schema'; export default defineRelations(schema, (r) => ({ usersView: { diff --git a/integration-tests/tests/relational/mysql.schema.ts b/integration-tests/tests/mysql/mysql.schema.ts similarity index 99% rename from integration-tests/tests/relational/mysql.schema.ts rename to integration-tests/tests/mysql/mysql.schema.ts index 3f2d2cbad7..86f3adc39e 100644 --- a/integration-tests/tests/relational/mysql.schema.ts +++ b/integration-tests/tests/mysql/mysql.schema.ts @@ -89,9 +89,7 @@ export const usersToGroupsTable = mysqlTable( () => groupsTable.id, ), }, - (t) => ({ - pk: primaryKey(t.userId, t.groupId), - }), + (t) => [primaryKey({ columns: [t.userId, t.groupId] })], ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { diff --git a/integration-tests/tests/mysql/mysql.test.ts b/integration-tests/tests/mysql/mysql.test.ts index 15aef3f48e..795508e510 100644 --- a/integration-tests/tests/mysql/mysql.test.ts +++ b/integration-tests/tests/mysql/mysql.test.ts @@ -1,62 +1,12940 @@ -import retry from 'async-retry'; -import type { MySql2Database } from 'drizzle-orm/mysql2'; -import { drizzle } from 'drizzle-orm/mysql2'; +import 'dotenv/config'; +import Docker from 'dockerode'; +import { DrizzleError, sql, TransactionRollbackError } from 'drizzle-orm'; +import { alias } from 'drizzle-orm/mysql-core'; +import { drizzle, type MySql2Database } from 'drizzle-orm/mysql2'; +import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; -import { afterAll, beforeAll, beforeEach } from 'vitest'; -import { createDockerDB, tests } from './mysql-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './mysql-common-cache'; -import relations from './relations'; +import { v4 as uuid } from 'uuid'; +import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; +import relations from './mysql.relations'; +import { + allTypesTable, + commentsTable, + courseOfferings, + customTypesTable, + groupsTable, + postsTable, + schemaGroups, + schemaPosts, + schemaUsers, + schemaUsersToGroups, + studentGrades, + students, + usersTable, + usersToGroupsTable, +} from './mysql.schema'; const ENABLE_LOGGING = false; +declare module 'vitest' { + export interface TestContext { + docker: Docker; + mysqlContainer: Docker.Container; + mysqlDbV2: MySql2Database; + mysqlClient: mysql.Connection; + } +} + +let globalDocker: Docker; +let mysqlContainer: Docker.Container; let db: MySql2Database; -let dbGlobalCached: MySql2Database; -let cachedDb: MySql2Database; let client: mysql.Connection; +async function createDockerDB(): Promise { + const docker = (globalDocker = new Docker()); + const port = await getPort({ port: 3306 }); + const image = 'mysql:8'; + + const pullStream = await docker.pull(image); + await new Promise((resolve, reject) => + docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) + ); + + mysqlContainer = await docker.createContainer({ + Image: image, + Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], + name: `drizzle-integration-tests-${uuid()}`, + HostConfig: { + AutoRemove: true, + PortBindings: { + '3306/tcp': [{ HostPort: `${port}` }], + }, + }, + }); + + await mysqlContainer.start(); + + return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; +} + beforeAll(async () => { - let connectionString; - if (process.env['MYSQL_CONNECTION_STRING']) { - connectionString = process.env['MYSQL_CONNECTION_STRING']; - } else { - const { connectionString: conStr } = await createDockerDB(); - connectionString = conStr; - } - client = await retry(async () => { - client = await mysql.createConnection({ - uri: connectionString!, - supportBigNumbers: true, + const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(); + + const sleep = 1000; + let timeLeft = 30000; + let connected = false; + let lastError: unknown | undefined; + do { + try { + client = await mysql.createConnection({ uri: connectionString, supportBigNumbers: true, bigNumberStrings: true }); + await client.connect(); + connected = true; + break; + } catch (e) { + lastError = e; + await new Promise((resolve) => setTimeout(resolve, sleep)); + timeLeft -= sleep; + } + } while (timeLeft > 0); + if (!connected) { + console.error('Cannot connect to MySQL'); + await client?.end().catch(console.error); + await mysqlContainer?.stop().catch(console.error); + throw lastError; + } + db = drizzle({ client, relations, logger: ENABLE_LOGGING, mode: 'default', casing: 'snake_case' }); +}); + +afterAll(async () => { + await client?.end().catch(console.error); + await mysqlContainer?.stop().catch(console.error); +}); + +beforeEach(async (ctx) => { + ctx.mysqlDbV2 = db; + ctx.mysqlClient = client; + ctx.docker = globalDocker; + ctx.mysqlContainer = mysqlContainer; + + await ctx.mysqlDbV2.execute(sql`drop table if exists \`users\``); + await ctx.mysqlDbV2.execute(sql`drop view if exists \`rqb_users_view\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`rqb_test_schema\`.\`users\``); + await ctx.mysqlDbV2.execute(sql`drop view if exists \`rqb_test_schema\`.\`users_sch_view\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`groups\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`rqb_test_schema\`.\`groups\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`users_to_groups\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`rqb_test_schema\`.\`users_to_groups\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`posts\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`rqb_test_schema\`.\`posts\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`comments\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`comment_likes\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`all_types\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`custom_types\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`course_offerings\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`student_grades\``); + await ctx.mysqlDbV2.execute(sql`drop table if exists \`students\``); + + await ctx.mysqlDbV2.execute(sql`create schema if not exists \`rqb_test_schema\``); + + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`users\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`name\` text NOT NULL, + \`verified\` boolean DEFAULT false NOT NULL, + \`invited_by\` bigint REFERENCES \`users\`(\`id\`) + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`rqb_test_schema\`.\`users\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`name\` text NOT NULL, + \`verified\` boolean DEFAULT false NOT NULL, + \`invited_by\` bigint REFERENCES \`rqb_test_schema\`.\`users\`(\`id\`) + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`groups\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`name\` text NOT NULL, + \`description\` text + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`rqb_test_schema\`.\`groups\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`name\` text NOT NULL, + \`description\` text + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`users_to_groups\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`user_id\` bigint REFERENCES \`users\`(\`id\`), + \`group_id\` bigint REFERENCES \`groups\`(\`id\`) + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`rqb_test_schema\`.\`users_to_groups\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`user_id\` bigint REFERENCES \`rqb_test_schema\`.\`users\`(\`id\`), + \`group_id\` bigint REFERENCES \`rqb_test_schema\`.\`groups\`(\`id\`) + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`posts\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`content\` text NOT NULL, + \`owner_id\` bigint REFERENCES \`users\`(\`id\`), + \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`rqb_test_schema\`.\`posts\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`content\` text NOT NULL, + \`owner_id\` bigint REFERENCES \`rqb_test_schema\`.\`users\`(\`id\`), + \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE ALGORITHM = undefined + SQL SECURITY definer + VIEW \`rqb_test_schema\`.\`users_sch_view\` AS (select \`rqb_test_schema\`.\`users\`.\`id\`, \`rqb_test_schema\`.\`users\`.\`name\`, \`rqb_test_schema\`.\`users\`.\`verified\`, \`rqb_test_schema\`.\`users\`.\`invited_by\`, \`rqb_test_schema\`.\`posts\`.\`content\`, \`rqb_test_schema\`.\`posts\`.\`created_at\`, (select count(*) from \`rqb_test_schema\`.\`users\` as \`count_source\` where \`rqb_test_schema\`.\`users\`.\`id\` <> 2) as \`count\` from \`rqb_test_schema\`.\`users\` left join \`rqb_test_schema\`.\`posts\` on \`rqb_test_schema\`.\`users\`.\`id\` = \`rqb_test_schema\`.\`posts\`.\`owner_id\`); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE ALGORITHM = undefined + SQL SECURITY definer + VIEW \`rqb_users_view\` AS (select \`users\`.\`id\`, \`users\`.\`name\`, \`users\`.\`verified\`, \`users\`.\`invited_by\`, \`posts\`.\`content\`, \`posts\`.\`created_at\`, (select count(*) from \`users\` as \`count_source\` where \`users\`.\`id\` <> 2) as \`count\` from \`users\` left join \`posts\` on \`users\`.\`id\` = \`posts\`.\`owner_id\`); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`comments\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`content\` text NOT NULL, + \`creator\` bigint REFERENCES \`users\`(\`id\`), + \`post_id\` bigint REFERENCES \`posts\`(\`id\`), + \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`comment_likes\` ( + \`id\` serial PRIMARY KEY NOT NULL, + \`creator\` bigint REFERENCES \`users\`(\`id\`), + \`comment_id\` bigint REFERENCES \`comments\`(\`id\`), + \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`course_offerings\` ( + \`course_id\` integer NOT NULL, + \`semester\` varchar(10) NOT NULL, + CONSTRAINT \`course_offerings_pkey\` PRIMARY KEY(\`course_id\`,\`semester\`) + ) + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`student_grades\` ( + \`student_id\` integer NOT NULL, + \`course_id\` integer NOT NULL, + \`semester\` varchar(10) NOT NULL, + \`grade\` char(2), + CONSTRAINT \`student_grades_pkey\` PRIMARY KEY(\`student_id\`,\`course_id\`,\`semester\`) + ); + `, + ); + await ctx.mysqlDbV2.execute( + sql` + CREATE TABLE \`students\` ( + \`student_id\` serial PRIMARY KEY NOT NULL, + \`name\` text NOT NULL + ); + `, + ); +}); + +test('[Find Many] Get users with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: true, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithPosts.length).eq(3); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + expect(usersWithPosts[2]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[2]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + limit posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithPosts.length).eq(3); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + expect(usersWithPosts[2]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[2]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + limit posts and users', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + limit: 2, + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithPosts.length).eq(2); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + custom fields', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: true, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lowerName: string; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithPosts.length).toEqual(3); + expect(usersWithPosts[0]?.posts.length).toEqual(3); + expect(usersWithPosts[1]?.posts.length).toEqual(2); + expect(usersWithPosts[2]?.posts.length).toEqual(2); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + lowerName: 'dan', + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { + id: 2, + ownerId: 1, + content: 'Post1.2', + createdAt: usersWithPosts[0]?.posts[1]?.createdAt, + }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + lowerName: 'andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { + id: 5, + ownerId: 2, + content: 'Post2.1', + createdAt: usersWithPosts[1]?.posts[1]?.createdAt, + }], + }); + expect(usersWithPosts[2]).toEqual({ + id: 3, + name: 'Alex', + lowerName: 'alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { + id: 7, + ownerId: 3, + content: 'Post3.1', + createdAt: usersWithPosts[2]?.posts[1]?.createdAt, + }], + }); +}); + +test('[Find Many] Get users with posts + custom fields + limits', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + limit: 1, + with: { + posts: { + limit: 1, + }, + }, + extras: ({ + lowerName: (usersTable, { sql }) => sql`lower(${usersTable.name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lowerName: string; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).toEqual(1); + expect(usersWithPosts[0]?.posts.length).toEqual(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + lowerName: 'dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: '1' }, + { ownerId: 1, content: '2' }, + { ownerId: 1, content: '3' }, + { ownerId: 2, content: '4' }, + { ownerId: 2, content: '5' }, + { ownerId: 3, content: '6' }, + { ownerId: 3, content: '7' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: { + orderBy: { + content: 'desc', + }, + }, + }, + orderBy: { + id: 'desc', + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(3); + expect(usersWithPosts[0]?.posts.length).eq(2); + expect(usersWithPosts[1]?.posts.length).eq(2); + expect(usersWithPosts[2]?.posts.length).eq(3); + + expect(usersWithPosts[2]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { + id: 2, + ownerId: 1, + content: '2', + createdAt: usersWithPosts[2]?.posts[1]?.createdAt, + }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ + id: 5, + ownerId: 2, + content: '5', + createdAt: usersWithPosts[1]?.posts[1]?.createdAt, + }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts[0]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ + id: 7, + ownerId: 3, + content: '7', + createdAt: usersWithPosts[0]?.posts[1]?.createdAt, + }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + where: { + id: 1, + }, + with: { + posts: { + where: { + id: 1, + }, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + where + partial', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + id: true, + content: true, + }, + where: { + id: 1, + }, + }, + }, + where: { + id: 1, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + posts: { + id: number; + content: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + posts: [{ id: 1, content: 'Post1' }], + }); +}); + +test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + id: true, + content: true, + }, + where: { + id: 1, + }, + }, + }, + where: { + id: 1, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + posts: { + id: number; + content: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + posts: [{ id: 1, content: 'Post1' }], + }); +}); + +test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: { + id: true, + name: false, + }, + with: { + posts: { + columns: { + id: true, + content: false, + }, + where: { + id: 1, + }, + }, + }, + where: { + id: 1, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + posts: { + id: number; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + posts: [{ id: 1 }], + }); +}); + +test('[Find Many] Get users with posts + where + partial(false)', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: { + name: false, + }, + with: { + posts: { + columns: { + content: false, + }, + where: { + id: 1, + }, + }, + }, + where: { + id: 1, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts in transaction', async (t) => { + const { mysqlDbV2: db } = t; + + let usersWithPosts: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[] = []; + + await db.transaction(async (tx) => { + await tx.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await tx.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + usersWithPosts = await tx.query.usersTable.findMany({ + where: { + id: 1, + }, + with: { + posts: { + where: { + id: 1, + }, + }, + }, }); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { + const { mysqlDbV2: db } = t; + + let usersWithPosts: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[] = []; + + await expect(db.transaction(async (tx) => { + await tx.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await tx.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + tx.rollback(); + + usersWithPosts = await tx.query.usersTable.findMany({ + where: { + id: 1, + }, + with: { + posts: { + where: { + id: 1, + }, + }, + }, + }); + })).rejects.toThrowError(new TransactionRollbackError()); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(0); +}); + +test('[Find Many] Get only custom fields', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1' }, + { id: 2, ownerId: 1, content: 'Post1.2' }, + { id: 3, ownerId: 1, content: 'Post1.3' }, + { id: 4, ownerId: 2, content: 'Post2' }, + { id: 5, ownerId: 2, content: 'Post2.1' }, + { id: 6, ownerId: 3, content: 'Post3' }, + { id: 7, ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + extras: ({ + lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), + }), + }, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + lowerName: string; + posts: { + lowerName: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).toEqual(3); + expect(usersWithPosts[0]?.posts.length).toEqual(3); + expect(usersWithPosts[1]?.posts.length).toEqual(2); + expect(usersWithPosts[2]?.posts.length).toEqual(2); + + expect(usersWithPosts[0]?.lowerName).toEqual('dan'); + expect(usersWithPosts[1]?.lowerName).toEqual('andrew'); + expect(usersWithPosts[2]?.lowerName).toEqual('alex'); + + expect(usersWithPosts[0]?.posts).toContainEqual({ + lowerName: 'post1', + }); + + expect(usersWithPosts[0]?.posts).toContainEqual({ + lowerName: 'post1.2', + }); + + expect(usersWithPosts[0]?.posts).toContainEqual({ + lowerName: 'post1.3', + }); + + expect(usersWithPosts[1]?.posts).toContainEqual({ + lowerName: 'post2', + }); + + expect(usersWithPosts[1]?.posts).toContainEqual({ + lowerName: 'post2.1', + }); + + expect(usersWithPosts[2]?.posts).toContainEqual({ + lowerName: 'post3', + }); + + expect(usersWithPosts[2]?.posts).toContainEqual({ + lowerName: 'post3.1', + }); +}); + +test('[Find Many] Get only custom fields + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + where: { + id: { + gte: 2, + }, + }, + extras: ({ + lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: { + id: 1, }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + lowerName: string; + posts: { + lowerName: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).toEqual(1); + expect(usersWithPosts[0]?.posts.length).toEqual(2); + + expect(usersWithPosts).toContainEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); -afterAll(async () => { - await client?.end(); +test('[Find Many] Get only custom fields + where + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + where: { + id: { + gte: 2, + }, + }, + limit: 1, + extras: ({ + lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: { + id: 1, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + lowerName: string; + posts: { + lowerName: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).toEqual(1); + expect(usersWithPosts[0]?.posts.length).toEqual(1); + + expect(usersWithPosts).toContainEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.2' }], + }); +}); + +test('[Find Many] Get only custom fields + where + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + where: { + id: { + gte: 2, + }, + }, + orderBy: { + id: 'desc', + }, + extras: ({ + lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: { + id: 1, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + lowerName: string; + posts: { + lowerName: string; + }[]; + }[]>(); + + expect(usersWithPosts.length).toEqual(1); + expect(usersWithPosts[0]?.posts.length).toEqual(2); + + expect(usersWithPosts).toContainEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], + }); }); -beforeEach((ctx) => { - ctx.mysql = { - db, - }; - ctx.cachedMySQL = { - db: cachedDb, - dbGlobalCached, - }; +test('[Find One] Get only custom fields', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + extras: ({ + lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), + }), + }, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + lowerName: string; + posts: { + lowerName: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts?.posts.length).toEqual(3); + + expect(usersWithPosts?.lowerName).toEqual('dan'); + + expect(usersWithPosts?.posts).toContainEqual({ + lowerName: 'post1', + }); + + expect(usersWithPosts?.posts).toContainEqual({ + lowerName: 'post1.2', + }); + + expect(usersWithPosts?.posts).toContainEqual({ + lowerName: 'post1.3', + }); }); -cacheTests(); -tests(); +test('[Find One] Get only custom fields + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + where: { + id: { + gte: 2, + }, + }, + extras: ({ + lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: { + id: 1, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + lowerName: string; + posts: { + lowerName: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts?.posts.length).toEqual(2); + + expect(usersWithPosts).toEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], + }); +}); + +test('[Find One] Get only custom fields + where + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + where: { + id: { + gte: 2, + }, + }, + limit: 1, + extras: ({ + lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: { + id: 1, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + lowerName: string; + posts: { + lowerName: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts?.posts.length).toEqual(1); + + expect(usersWithPosts).toEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.2' }], + }); +}); + +test('[Find One] Get only custom fields + where + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + where: { + id: { + gte: 2, + }, + }, + orderBy: { + id: 'desc', + }, + extras: ({ + lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), + }), + }, + }, + where: { + id: 1, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + lowerName: string; + posts: { + lowerName: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts?.posts.length).toEqual(2); + + expect(usersWithPosts).toEqual({ + lowerName: 'dan', + posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], + }); +}); + +test('[Find Many] Get select {}', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await expect( + async () => + await db.query.usersTable.findMany({ + columns: {}, + }), + ).rejects.toThrow(DrizzleError); +}); + +test('[Find One] Get select {}', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await expect(async () => + await db.query.usersTable.findFirst({ + columns: {}, + }) + ).rejects.toThrow(DrizzleError); +}); + +test('[Find Many] Get deep select {}', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + await expect(async () => + await db.query.usersTable.findMany({ + columns: {}, + with: { + posts: { + columns: {}, + }, + }, + }) + ).rejects.toThrow(DrizzleError); +}); +test('[Find One] Get deep select {}', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + await expect(async () => + await db.query.usersTable.findFirst({ + columns: {}, + with: { + posts: { + columns: {}, + }, + }, + }) + ).rejects.toThrow(DrizzleError); +}); + +test('[Find Many] Get users with posts + prepared limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const prepared = db.query.usersTable.findMany({ + with: { + posts: { + limit: sql.placeholder('limit'), + }, + }, + }).prepare(); + + const usersWithPosts = await prepared.execute({ limit: 1 }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(3); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + expect(usersWithPosts[2]?.posts.length).eq(1); + + expect(usersWithPosts).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + prepared limit + offset', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const prepared = db.query.usersTable.findMany({ + limit: sql.placeholder('uLimit'), + offset: sql.placeholder('uOffset'), + with: { + posts: { + limit: sql.placeholder('pLimit'), + }, + }, + }).prepare(); + + const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(2); + expect(usersWithPosts[0]?.posts.length).eq(1); + expect(usersWithPosts[1]?.posts.length).eq(1); + + expect(usersWithPosts).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); + expect(usersWithPosts).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + prepared where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const prepared = db.query.usersTable.findMany({ + where: { + id: { + eq: sql.placeholder('id'), + }, + }, + with: { + posts: { + where: { + id: 1, + }, + }, + }, + }).prepare(); + + const usersWithPosts = await prepared.execute({ id: 1 }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const prepared = db.query.usersTable.findMany({ + limit: sql.placeholder('uLimit'), + offset: sql.placeholder('uOffset'), + where: { + id: { + OR: [ + { + eq: sql.placeholder('id'), + }, + 3, + ], + }, + }, + with: { + posts: { + where: { + id: { + eq: sql.placeholder('pid'), + }, + }, + limit: sql.placeholder('pLimit'), + }, + }, + }).prepare(); + + const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts.length).eq(1); + expect(usersWithPosts[0]?.posts.length).eq(1); + + expect(usersWithPosts).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: true, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts + limit posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts no results found', async (t) => { + const { mysqlDbV2: db } = t; + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts).toBeUndefined(); +}); + +test('[Find One] Get users with posts + limit posts and users', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts + custom fields', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: true, + }, + extras: ({ + lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lowerName: string; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).toEqual(3); + + expect(usersWithPosts?.lowerName).toEqual('dan'); + expect(usersWithPosts?.id).toEqual(1); + expect(usersWithPosts?.verified).toEqual(false); + expect(usersWithPosts?.invitedBy).toEqual(null); + expect(usersWithPosts?.name).toEqual('Dan'); + + expect(usersWithPosts?.posts).toContainEqual({ + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: usersWithPosts?.posts[0]?.createdAt, + }); + + expect(usersWithPosts?.posts).toContainEqual({ + id: 2, + ownerId: 1, + content: 'Post1.2', + createdAt: usersWithPosts?.posts[1]?.createdAt, + }); + + expect(usersWithPosts?.posts).toContainEqual({ + id: 3, + ownerId: 1, + content: 'Post1.3', + createdAt: usersWithPosts?.posts[2]?.createdAt, + }); +}); + +test('[Find One] Get users with posts + custom fields + limits', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.2' }, + { ownerId: 1, content: 'Post1.3' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + limit: 1, + }, + }, + extras: ({ + lowerName: (usersTable, { sql }) => sql`lower(${usersTable.name})`.as('name_lower'), + }), + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lowerName: string; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).toEqual(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + lowerName: 'dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: '1' }, + { ownerId: 1, content: '2' }, + { ownerId: 1, content: '3' }, + { ownerId: 2, content: '4' }, + { ownerId: 2, content: '5' }, + { ownerId: 3, content: '6' }, + { ownerId: 3, content: '7' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + with: { + posts: { + orderBy: { + content: 'desc', + }, + }, + }, + orderBy: { + id: 'desc', + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(2); + + expect(usersWithPosts).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ + id: 7, + ownerId: 3, + content: '7', + createdAt: usersWithPosts?.posts[1]?.createdAt, + }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + where: { + id: 1, + }, + with: { + posts: { + where: { + id: 1, + }, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('[Find One] Get users with posts + where + partial', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + id: true, + content: true, + }, + where: { + id: 1, + }, + }, + }, + where: { + id: 1, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + posts: { + id: number; + content: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + posts: [{ id: 1, content: 'Post1' }], + }); +}); + +test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: { + id: true, + name: true, + }, + with: { + posts: { + columns: { + id: true, + content: true, + }, + where: { + id: 1, + }, + }, + }, + where: { + id: 1, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + name: string; + posts: { + id: number; + content: string; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + name: 'Dan', + posts: [{ id: 1, content: 'Post1' }], + }); +}); + +test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: { + id: true, + name: false, + }, + with: { + posts: { + columns: { + id: true, + content: false, + }, + where: { + id: 1, + }, + }, + }, + where: { + id: 1, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + posts: { + id: number; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + posts: [{ id: 1 }], + }); +}); + +test('[Find One] Get users with posts + where + partial(false)', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findFirst({ + columns: { + name: false, + }, + with: { + posts: { + columns: { + content: false, + }, + where: { + id: 1, + }, + }, + }, + where: { + id: 1, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf< + { + id: number; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + ownerId: number | null; + createdAt: Date; + }[]; + } | undefined + >(); + + expect(usersWithPosts!.posts.length).eq(1); + + expect(usersWithPosts).toEqual({ + id: 1, + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], + }); +}); + +test('Get user with invitee', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + with: { + invitee: true, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithInvitee.length).eq(4); + expect(usersWithInvitee[0]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).toBeNull(); + expect(usersWithInvitee[2]?.invitee).not.toBeNull(); + expect(usersWithInvitee[3]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee[3]).toEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + }); +}); + +test('Get user + limit with invitee', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew', invitedBy: 1 }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + with: { + invitee: true, + }, + limit: 2, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); +}); + +test('Get user with invitee and custom fields', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), + with: { + invitee: { + extras: ({ lower: (invitee, { sql }) => sql`lower(${invitee.name})`.as('lower_name') }), + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithInvitee.length).eq(4); + expect(usersWithInvitee[0]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).toBeNull(); + expect(usersWithInvitee[2]?.invitee).not.toBeNull(); + expect(usersWithInvitee[3]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee[3]).toEqual({ + id: 4, + name: 'John', + lower: 'john', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, + }); +}); + +test('Get user with invitee and custom fields + limits', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), + limit: 3, + with: { + invitee: { + extras: ({ lower: (invitee, { sql }) => sql`lower(${invitee.name})`.as('lower_name') }), + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithInvitee.length).eq(3); + expect(usersWithInvitee[0]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).toBeNull(); + expect(usersWithInvitee[2]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, + }); +}); + +test('Get user with invitee + order by', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + orderBy: { + id: 'desc', + }, + with: { + invitee: true, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(4); + expect(usersWithInvitee[3]?.invitee).toBeNull(); + expect(usersWithInvitee[2]?.invitee).toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + + expect(usersWithInvitee[3]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee[0]).toEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + }); +}); + +test('Get user with invitee + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: { + id: { + OR: [3, 4], + }, + }, + with: { + invitee: true, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee).toContainEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + }); +}); + +test('Get user with invitee + where + partial', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: { + id: { + OR: [3, 4], + }, + }, + columns: { + id: true, + name: true, + }, + with: { + invitee: { + columns: { + id: true, + name: true, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + invitee: { + id: number; + name: string; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + id: 3, + name: 'Alex', + invitee: { id: 1, name: 'Dan' }, + }); + expect(usersWithInvitee).toContainEqual({ + id: 4, + name: 'John', + invitee: { id: 2, name: 'Andrew' }, + }); +}); + +test('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: { + id: { + OR: [3, 4], + }, + }, + columns: { + name: true, + }, + with: { + invitee: { + columns: { + id: true, + name: true, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + name: string; + invitee: { + id: number; + name: string; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + name: 'Alex', + invitee: { id: 1, name: 'Dan' }, + }); + expect(usersWithInvitee).toContainEqual({ + name: 'John', + invitee: { id: 2, name: 'Andrew' }, + }); +}); + +test('Get user with invitee + where + partial(true+false)', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: { + id: { + OR: [3, 4], + }, + }, + columns: { + id: true, + name: true, + verified: false, + }, + with: { + invitee: { + columns: { + id: true, + name: true, + verified: false, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + invitee: { + id: number; + name: string; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + id: 3, + name: 'Alex', + invitee: { id: 1, name: 'Dan' }, + }); + expect(usersWithInvitee).toContainEqual({ + id: 4, + name: 'John', + invitee: { id: 2, name: 'Andrew' }, + }); +}); + +test('Get user with invitee + where + partial(false)', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + where: { + id: { + OR: [3, 4], + }, + }, + columns: { + verified: false, + }, + with: { + invitee: { + columns: { + name: false, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + invitedBy: number | null; + invitee: { + id: number; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(usersWithInvitee.length).eq(2); + expect(usersWithInvitee[0]?.invitee).not.toBeNull(); + expect(usersWithInvitee[1]?.invitee).not.toBeNull(); + + expect(usersWithInvitee).toContainEqual({ + id: 3, + name: 'Alex', + invitedBy: 1, + invitee: { id: 1, verified: false, invitedBy: null }, + }); + expect(usersWithInvitee).toContainEqual({ + id: 4, + name: 'John', + invitedBy: 2, + invitee: { id: 2, verified: false, invitedBy: null }, + }); +}); + +test('Get user with invitee and posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + invitee: true, + posts: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(4); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).toBeNull(); + expect(response[2]?.invitee).not.toBeNull(); + expect(response[3]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test('Get user with invitee and posts + limit posts and users', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 3, + with: { + invitee: true, + posts: { + limit: 1, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(3); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).toBeNull(); + expect(response[2]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], + }); +}); + +test('Get user with invitee and posts + limits + custom fields in each', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 3, + extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), + with: { + invitee: { + extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_invitee_name') }), + }, + posts: { + limit: 1, + extras: ({ lower: (posts, { sql }) => sql`lower(${posts.content})`.as('lower_content') }), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + lower: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(3); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).toBeNull(); + expect(response[2]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, + posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], + }); +}); + +test('Get user with invitee and posts + custom fields in each', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const response = await db.query.usersTable.findMany({ + extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), + with: { + invitee: { + extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), + }, + posts: { + extras: ({ lower: (posts, { sql }) => sql`lower(${posts.content})`.as('lower_name') }), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + lower: string; + invitedBy: number | null; + posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + lower: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + response[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(4); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).toBeNull(); + expect(response[2]?.invitee).not.toBeNull(); + expect(response[3]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(2); + expect(response[1]?.posts.length).eq(2); + expect(response[2]?.posts.length).eq(2); + expect(response[3]?.posts.length).eq(0); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { + id: 2, + ownerId: 1, + content: 'Post1.1', + lower: 'post1.1', + createdAt: response[0]?.posts[1]?.createdAt, + }], + }); + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { + id: 4, + ownerId: 2, + content: 'Post2.1', + lower: 'post2.1', + createdAt: response[1]?.posts[1]?.createdAt, + }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, + posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { + id: 6, + ownerId: 3, + content: 'Post3.1', + lower: 'post3.1', + createdAt: response[2]?.posts[1]?.createdAt, + }], + }); + expect(response).toContainEqual({ + id: 4, + name: 'John', + lower: 'john', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test('Get user with invitee and posts + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: { + id: 'desc', + }, + with: { + invitee: true, + posts: { + orderBy: { + id: 'desc', + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(response.length).eq(4); + + expect(response[3]?.invitee).toBeNull(); + expect(response[2]?.invitee).toBeNull(); + expect(response[1]?.invitee).not.toBeNull(); + expect(response[0]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(0); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(2); + expect(response[3]?.posts.length).eq(2); + + expect(response[3]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: response[3]?.posts[1]?.createdAt, + }], + }); + expect(response[2]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { + id: 3, + ownerId: 2, + content: 'Post2', + createdAt: response[2]?.posts[1]?.createdAt, + }], + }); + expect(response[1]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ + id: 5, + ownerId: 3, + content: 'Post3', + createdAt: response[3]?.posts[1]?.createdAt, + }], + }); + expect(response[0]).toEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test('Get user with invitee and posts + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + where: { + id: { + OR: [2, 3], + }, + }, + with: { + invitee: true, + posts: { + where: { + ownerId: 2, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(2); + + expect(response[0]?.invitee).toBeNull(); + expect(response[1]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(0); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + invitee: null, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], + }); + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test('Get user with invitee and posts + limit posts and users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + { ownerId: 3, content: 'Post3.1' }, + ]); + + const response = await db.query.usersTable.findMany({ + where: { + id: { + OR: [3, 4], + }, + }, + limit: 1, + with: { + invitee: true, + posts: { + where: { + ownerId: 3, + }, + limit: 1, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(response.length).eq(1); + + expect(response[0]?.invitee).not.toBeNull(); + expect(response[0]?.posts.length).eq(1); + + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], + }); +}); + +test('Get user with invitee and posts + orderBy + where + custom', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: { + id: 'desc', + }, + where: { + id: { + OR: [3, 4], + }, + }, + extras: ({ + lower: (usersTable) => sql`lower(${usersTable.name})`.as('lower_name'), + }), + with: { + invitee: true, + posts: { + where: { + ownerId: 3, + }, + orderBy: { + id: 'desc', + }, + extras: ({ + lower: (postsTable) => sql`lower(${postsTable.content})`.as('lower_name'), + }), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lower: string; + posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; + invitee: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + expect(response.length).eq(2); + + expect(response[1]?.invitee).not.toBeNull(); + expect(response[0]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(0); + expect(response[1]?.posts.length).eq(1); + + expect(response[1]).toEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: 1, + invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + posts: [{ + id: 5, + ownerId: 3, + content: 'Post3', + lower: 'post3', + createdAt: response[1]?.posts[0]?.createdAt, + }], + }); + expect(response[0]).toEqual({ + id: 4, + name: 'John', + lower: 'john', + verified: false, + invitedBy: 2, + invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, + posts: [], + }); +}); + +test('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1' }, + { ownerId: 1, content: 'Post1.1' }, + { ownerId: 2, content: 'Post2' }, + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 3, content: 'Post3' }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: { + id: 'desc', + }, + where: { + id: { + OR: [3, 4], + }, + }, + extras: ({ + lower: (usersTable) => sql`lower(${usersTable.name})`.as('lower_name'), + }), + columns: { + id: true, + name: true, + }, + with: { + invitee: { + columns: { + id: true, + name: true, + }, + extras: ({ + lower: (usersTable) => sql`lower(${usersTable.name})`.as('lower_name'), + }), + }, + posts: { + columns: { + id: true, + content: true, + }, + where: { + ownerId: 3, + }, + orderBy: { + id: 'desc', + }, + extras: ({ + lower: (postsTable) => sql`lower(${postsTable.content})`.as('lower_name'), + }), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + lower: string; + posts: { id: number; lower: string; content: string }[]; + invitee: { + id: number; + name: string; + lower: string; + } | null; + }[] + >(); + + expect(response.length).eq(2); + + expect(response[1]?.invitee).not.toBeNull(); + expect(response[0]?.invitee).not.toBeNull(); + + expect(response[0]?.posts.length).eq(0); + expect(response[1]?.posts.length).eq(1); + + expect(response[1]).toEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + invitee: { id: 1, name: 'Dan', lower: 'dan' }, + posts: [{ + id: 5, + content: 'Post3', + lower: 'post3', + }], + }); + expect(response[0]).toEqual({ + id: 4, + name: 'John', + lower: 'john', + invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, + posts: [], + }); +}); + +test('Get user with posts and posts with comments', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1' }, + { id: 2, ownerId: 2, content: 'Post2' }, + { id: 3, ownerId: 3, content: 'Post3' }, + ]); + + await db.insert(commentsTable).values([ + { postId: 1, content: 'Comment1', creator: 2 }, + { postId: 2, content: 'Comment2', creator: 2 }, + { postId: 3, content: 'Comment3', creator: 3 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + posts: { + with: { + comments: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + comments: { + id: number; + content: string; + createdAt: Date; + creator: number | null; + postId: number | null; + }[]; + }[]; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(3); + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response[0]?.posts[0]?.comments.length).eq(1); + expect(response[1]?.posts[0]?.comments.length).eq(1); + expect(response[2]?.posts[0]?.comments.length).eq(1); + + expect(response[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: response[0]?.posts[0]?.createdAt, + comments: [ + { + id: 1, + content: 'Comment1', + creator: 2, + postId: 1, + createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); + expect(response[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: response[1]?.posts[0]?.createdAt, + comments: [ + { + id: 2, + content: 'Comment2', + creator: 2, + postId: 2, + createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); + // expect(response[2]).toEqual({ + // id: 3, + // name: 'Alex', + // verified: false, + // invitedBy: null, + // posts: [{ + // id: 3, + // ownerId: 3, + // content: 'Post3', + // createdAt: response[2]?.posts[0]?.createdAt, + // comments: [ + // { + // id: , + // content: 'Comment3', + // creator: 3, + // postId: 3, + // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, + // }, + // ], + // }], + // }); +}); + +test('Get user with posts and posts with comments and comments with owner', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1' }, + { id: 2, ownerId: 2, content: 'Post2' }, + { id: 3, ownerId: 3, content: 'Post3' }, + ]); + + await db.insert(commentsTable).values([ + { postId: 1, content: 'Comment1', creator: 2 }, + { postId: 2, content: 'Comment2', creator: 2 }, + { postId: 3, content: 'Comment3', creator: 3 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + posts: { + with: { + comments: { + with: { + author: true, + }, + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + comments: { + id: number; + content: string; + createdAt: Date; + creator: number | null; + postId: number | null; + author: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).eq(3); + expect(response[0]?.posts.length).eq(1); + expect(response[1]?.posts.length).eq(1); + expect(response[2]?.posts.length).eq(1); + + expect(response[0]?.posts[0]?.comments.length).eq(1); + expect(response[1]?.posts[0]?.comments.length).eq(1); + expect(response[2]?.posts[0]?.comments.length).eq(1); + + expect(response[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: response[0]?.posts[0]?.createdAt, + comments: [ + { + id: 1, + content: 'Comment1', + creator: 2, + author: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + postId: 1, + createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); + expect(response[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: response[1]?.posts[0]?.createdAt, + comments: [ + { + id: 2, + content: 'Comment2', + creator: 2, + author: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + postId: 2, + createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); +}); + +test('Get user with posts and posts with comments and comments with owner where exists', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1' }, + { id: 2, ownerId: 2, content: 'Post2' }, + { id: 3, ownerId: 3, content: 'Post3' }, + ]); + + await db.insert(commentsTable).values([ + { postId: 1, content: 'Comment1', creator: 2 }, + { postId: 2, content: 'Comment2', creator: 2 }, + { postId: 3, content: 'Comment3', creator: 3 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + posts: { + with: { + comments: { + with: { + author: true, + }, + }, + }, + }, + }, + where: { + RAW: ({ id }, { exists, eq }) => + exists(db.select({ one: sql`1` }).from(alias(usersTable, 'alias')).where(eq(sql`1`, id))), + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + comments: { + id: number; + content: string; + createdAt: Date; + creator: number | null; + postId: number | null; + author: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]; + }[]>(); + + expect(response.length).eq(1); + expect(response[0]?.posts.length).eq(1); + + expect(response[0]?.posts[0]?.comments.length).eq(1); + + expect(response[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [{ + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: response[0]?.posts[0]?.createdAt, + comments: [ + { + id: 1, + content: 'Comment1', + creator: 2, + author: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + postId: 1, + createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, + }, + ], + }], + }); +}); + +test('[Find Many] Get users with groups', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + usersToGroups: { + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + expect(response[2]?.usersToGroups.length).toEqual(2); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }, { + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test('[Find Many] Get groups with users', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + with: { + usersToGroups: { + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(2); + expect(response[2]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }, { + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find Many] Get users with groups + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 2, + with: { + usersToGroups: { + limit: 1, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test('[Find Many] Get groups with users + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + limit: 2, + with: { + usersToGroups: { + limit: 1, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find Many] Get users with groups + limit + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 1, + where: { + id: { + OR: [1, 2], + }, + }, + with: { + usersToGroups: { + where: { + groupId: 1, + }, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(1); + + expect(response[0]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); +}); + +test('[Find Many] Get groups with users + limit + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + limit: 1, + where: { + id: { + gt: 1, + }, + }, + with: { + usersToGroups: { + where: { + userId: 2, + }, + limit: 1, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(1); + + expect(response[0]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find Many] Get users with groups + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + where: { + id: { + OR: [1, 2], + }, + }, + with: { + usersToGroups: { + where: { + groupId: 2, + }, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(0); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test('[Find Many] Get groups with users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + where: { + id: { + gt: 1, + }, + }, + with: { + usersToGroups: { + where: { + userId: 2, + }, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(0); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [], + }); +}); + +test('[Find Many] Get users with groups + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: { + id: 'desc', + }, + with: { + usersToGroups: { + orderBy: { + groupId: 'desc', + }, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(2); + expect(response[1]?.usersToGroups.length).toEqual(1); + expect(response[2]?.usersToGroups.length).toEqual(1); + + expect(response[2]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); + + expect(response[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); + + expect(response[0]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }, { + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test('[Find Many] Get groups with users + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + orderBy: { + id: 'desc', + }, + with: { + usersToGroups: { + orderBy: { + userId: 'desc', + }, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[]>(); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(2); + expect(response[2]?.usersToGroups.length).toEqual(1); + + expect(response[2]).toEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response[1]).toEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }, { + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response[0]).toEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find Many] Get users with groups + orderBy + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: { + id: 'desc', + }, + limit: 2, + with: { + usersToGroups: { + limit: 1, + orderBy: { + groupId: 'desc', + }, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + }[]>(); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); + + expect(response[0]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }], + }); +}); + +test('[Find One] Get users with groups', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + with: { + usersToGroups: { + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); +}); + +test('[Find One] Get groups with users', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + with: { + usersToGroups: { + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find One] Get users with groups + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + with: { + usersToGroups: { + limit: 1, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); +}); + +test('[Find One] Get groups with users + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + with: { + usersToGroups: { + limit: 1, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find One] Get users with groups + limit + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + where: { + id: { + OR: [1, 2], + }, + }, + with: { + usersToGroups: { + where: { + groupId: 1, + }, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + description: null, + }, + }], + }); +}); + +test('[Find One] Get groups with users + limit + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + where: { + id: { + gt: 1, + }, + }, + with: { + usersToGroups: { + where: { + userId: 2, + }, + limit: 1, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find One] Get users with groups + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + where: { + id: { + OR: [1, 2], + }, + }, + with: { + usersToGroups: { + where: { + groupId: 2, + }, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(0); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + usersToGroups: [], + }); +}); + +test('[Find One] Get groups with users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + where: { + id: { + gt: 1, + }, + }, + with: { + usersToGroups: { + where: { + userId: 2, + }, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find One] Get users with groups + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + orderBy: { + id: 'desc', + }, + with: { + usersToGroups: { + orderBy: { + groupId: 'desc', + }, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(2); + + expect(response).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }, { + group: { + id: 2, + name: 'Group2', + description: null, + }, + }], + }); +}); + +test('[Find One] Get groups with users + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + orderBy: { + id: 'desc', + }, + with: { + usersToGroups: { + orderBy: { + userId: 'desc', + }, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('[Find One] Get users with groups + orderBy + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + orderBy: { + id: 'desc', + }, + with: { + usersToGroups: { + limit: 1, + orderBy: { + groupId: 'desc', + }, + columns: {}, + with: { + group: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + }; + }[]; + } | undefined + >(); + + expect(response?.usersToGroups.length).toEqual(1); + + expect(response).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + description: null, + }, + }], + }); +}); + +test('Get groups with users + orderBy + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + orderBy: { + id: 'desc', + }, + limit: 2, + with: { + usersToGroups: { + limit: 1, + orderBy: { + userId: 'desc', + }, + columns: {}, + with: { + user: true, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }; + }[]; + }[] + >(); + + expect(response.length).toEqual(2); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + + expect(response[1]).toEqual({ + id: 2, + name: 'Group2', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response[0]).toEqual({ + id: 3, + name: 'Group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('Get users with groups + custom', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + extras: ({ + lower: (usersTable) => sql`lower(${usersTable.name})`.as('lower_name'), + }), + with: { + usersToGroups: { + columns: {}, + with: { + group: { + extras: ({ + lower: (groupsTable) => sql`lower(${groupsTable.name})`.as('lower_name'), + }), + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lower: string; + usersToGroups: { + group: { + id: number; + name: string; + description: string | null; + lower: string; + }; + }[]; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(1); + expect(response[2]?.usersToGroups.length).toEqual(2); + + expect(response).toContainEqual({ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 1, + name: 'Group1', + lower: 'group1', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + usersToGroups: [{ + group: { + id: 3, + name: 'Group3', + lower: 'group3', + description: null, + }, + }, { + group: { + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + }, + }], + }); +}); + +test('Get groups with users + custom', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + extras: ({ + lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), + }), + with: { + usersToGroups: { + columns: {}, + with: { + user: { + extras: ({ + lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), + }), + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + lower: string; + usersToGroups: { + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lower: string; + }; + }[]; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response.length).toEqual(3); + + expect(response[0]?.usersToGroups.length).toEqual(1); + expect(response[1]?.usersToGroups.length).toEqual(2); + expect(response[2]?.usersToGroups.length).toEqual(1); + + expect(response).toContainEqual({ + id: 1, + name: 'Group1', + lower: 'group1', + description: null, + usersToGroups: [{ + user: { + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + usersToGroups: [{ + user: { + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + }, + }, { + user: { + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + }, + }], + }); + + expect(response).toContainEqual({ + id: 3, + name: 'Group3', + lower: 'group3', + description: null, + usersToGroups: [{ + user: { + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + }, + }], + }); +}); + +test('Force optional on where on non-optional relation query', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex', invitedBy: 1 }, + { id: 4, name: 'John', invitedBy: 2 }, + ]); + + const usersWithInvitee = await db.query.usersTable.findMany({ + with: { + inviteeRequired: { + where: { + id: 1, + }, + }, + }, + }); + + expectTypeOf(usersWithInvitee).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + inviteeRequired: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[] + >(); + + usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithInvitee.length).eq(4); + expect(usersWithInvitee[0]?.inviteeRequired).toBeNull(); + expect(usersWithInvitee[1]?.inviteeRequired).toBeNull(); + expect(usersWithInvitee[2]?.inviteeRequired).not.toBeNull(); + expect(usersWithInvitee[3]?.inviteeRequired).toBeNull(); + + expect(usersWithInvitee[0]).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + inviteeRequired: null, + }); + expect(usersWithInvitee[1]).toEqual({ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + inviteeRequired: null, + }); + expect(usersWithInvitee[2]).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: 1, + inviteeRequired: { id: 1, name: 'Dan', verified: false, invitedBy: null }, + }); + expect(usersWithInvitee[3]).toEqual({ + id: 4, + name: 'John', + verified: false, + invitedBy: 2, + inviteeRequired: null, + }); +}); + +test('[Find Many .through] Get users with groups', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + groups: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [ + { + id: 2, + name: 'Group2', + description: null, + }, + { + id: 3, + name: 'Group3', + description: null, + }, + ], + }]); +}); + +test('[Find Many .through] Get groups with users', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + with: { + users: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.users.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + description: null, + users: [{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }], + }, { + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }], + }, { + id: 3, + name: 'Group3', + description: null, + users: [{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }], + }]); +}); + +test('[Find Many .through] Get users with groups + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 2, + with: { + groups: { + limit: 1, + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }]); +}); + +test('[Find Many .through] Get groups with users + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + limit: 2, + with: { + users: { + limit: 1, + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + description: null, + users: [{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }], + }, { + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }], + }]); +}); + +test('[Find Many .through] Get users with groups + limit + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + limit: 1, + where: { + id: { + OR: [1, 2], + }, + }, + with: { + groups: { + where: { + id: 1, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }]); +}); + +test('[Find Many .through] Get groups with users + limit + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + limit: 1, + where: { + id: { gt: 1 }, + }, + with: { + users: { + where: { + id: 2, + }, + limit: 1, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }], + }]); +}); + +test('[Find Many .through] Get users with groups + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + where: { + id: { + OR: [1, 2], + }, + }, + with: { + groups: { + where: { + id: 2, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }]); +}); + +test('[Find Many .through] Get groups with users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + where: { + id: { gt: 1 }, + }, + with: { + users: { + where: { + id: 2, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response).toStrictEqual([{ + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }], + }, { + id: 3, + name: 'Group3', + description: null, + users: [], + }]); +}); + +test('[Find Many .through] Get users with groups + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: { + id: 'desc', + }, + with: { + groups: { + orderBy: { + id: 'desc', + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [{ + id: 3, + name: 'Group3', + description: null, + }, { + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }]); +}); + +test('[Find Many .through] Get groups with users + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + orderBy: { + id: 'desc', + }, + with: { + users: { + orderBy: { + id: 'desc', + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 3, + name: 'Group3', + description: null, + users: [{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }], + }, { + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }], + }, { + id: 1, + name: 'Group1', + description: null, + users: [{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }], + }]); +}); + +test('[Find Many .through] Get users with groups + orderBy + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + orderBy: { + id: 'desc', + }, + limit: 2, + with: { + groups: { + limit: 1, + orderBy: { + id: 'desc', + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [{ + id: 3, + name: 'Group3', + description: null, + }], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }]); +}); + +test('[Find One .through] Get users with groups', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + with: { + groups: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + } | undefined + >(); + + expect(response).toStrictEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }); +}); + +test('[Find One .through] Get groups with users', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + with: { + users: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + } | undefined + >(); + + expect(response).toStrictEqual({ + id: 1, + name: 'Group1', + description: null, + users: [{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }], + }); +}); + +test('[Find One .through] Get users with groups + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + with: { + groups: { + limit: 1, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + } | undefined + >(); + + expect(response).toStrictEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }); +}); + +test('[Find One .through] Get groups with users + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + with: { + users: { + limit: 1, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + } | undefined + >(); + + expect(response).toEqual({ + id: 1, + name: 'Group1', + description: null, + users: [{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }], + }); +}); + +test('[Find One .through] Get users with groups + limit + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + where: { + id: { + OR: [1, 2], + }, + }, + with: { + groups: { + where: { + id: 1, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + } | undefined + >(); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }); +}); + +test('[Find One .through] Get groups with users + limit + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + where: { + id: { gt: 1 }, + }, + with: { + users: { + where: { + id: 2, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + } | undefined + >(); + + expect(response).toStrictEqual({ + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }], + }); +}); + +test('[Find One .through] Get users with groups + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + where: { + id: { + OR: [1, 2], + }, + }, + with: { + groups: { + where: { + id: 2, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + } | undefined + >(); + + expect(response).toEqual({ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [], + }); +}); + +test('[Find One .through] Get groups with users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + where: { + id: { gt: 1 }, + }, + with: { + users: { + where: { + id: 2, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + } | undefined + >(); + + expect(response).toEqual({ + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }], + }); +}); + +test('[Find One .through] Get users with groups + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + orderBy: { + id: 'desc', + }, + with: { + groups: { + orderBy: { + id: 'desc', + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + } | undefined + >(); + + expect(response).toStrictEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [{ + id: 3, + name: 'Group3', + description: null, + }, { + id: 2, + name: 'Group2', + description: null, + }], + }); +}); + +test('[Find One .through] Get groups with users + orderBy', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findFirst({ + orderBy: { + id: 'desc', + }, + with: { + users: { + orderBy: { + id: 'desc', + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + } | undefined + >(); + + expect(response).toEqual({ + id: 3, + name: 'Group3', + description: null, + users: [{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }], + }); +}); + +test('[Find One .through] Get users with groups + orderBy + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findFirst({ + orderBy: { + id: 'desc', + }, + with: { + groups: { + limit: 1, + orderBy: { + id: 'desc', + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + } | undefined + >(); + + expect(response).toEqual({ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [{ + id: 3, + name: 'Group3', + description: null, + }], + }); +}); + +test('[Find Many .through] Get groups with users + orderBy + limit', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + orderBy: { + id: 'desc', + }, + limit: 2, + with: { + users: { + limit: 1, + orderBy: { + id: 'desc', + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[] + >(); + + expect(response).toStrictEqual([{ + id: 3, + name: 'Group3', + description: null, + users: [{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }], + }, { + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }], + }]); +}); + +test('[Find Many .through] Get users with groups + custom', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + extras: ({ + lower: ({ name }) => sql`lower(${name})`.as('lower_name'), + }), + with: { + groups: { + orderBy: { + id: 'asc', + }, + extras: ({ + lower: ({ name }) => sql`lower(${name})`.as('lower_name'), + }), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lower: string; + groups: { + id: number; + name: string; + description: string | null; + lower: string; + }[]; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + lower: 'group1', + description: null, + }], + }, { + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + }], + }, { + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + groups: [ + { + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + }, + { + id: 3, + name: 'Group3', + lower: 'group3', + description: null, + }, + ], + }]); +}); + +test('[Find Many .through] Get groups with users + custom', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + extras: ({ + lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), + }), + with: { + users: { + extras: ({ + lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), + }), + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf< + { + id: number; + name: string; + description: string | null; + lower: string; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + lower: string; + }[]; + }[] + >(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.users.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + lower: 'group1', + description: null, + users: [{ + id: 1, + name: 'Dan', + lower: 'dan', + verified: false, + invitedBy: null, + }], + }, { + id: 2, + name: 'Group2', + lower: 'group2', + description: null, + users: [{ + id: 2, + name: 'Andrew', + lower: 'andrew', + verified: false, + invitedBy: null, + }, { + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + }], + }, { + id: 3, + name: 'Group3', + lower: 'group3', + description: null, + users: [{ + id: 3, + name: 'Alex', + lower: 'alex', + verified: false, + invitedBy: null, + }], + }]); +}); + +test('[Find Many .through] Get users with first group', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 3, name: 'Group3' }, + { id: 2, name: 'Group2' }, + { id: 1, name: 'Group1' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 3, groupId: 2 }, + { userId: 2, groupId: 3 }, + { userId: 2, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + group: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + group: { + id: number; + name: string; + description: string | null; + } | null; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + group: null, + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + group: { + id: 3, + name: 'Group3', + description: null, + }, + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + group: { + id: 2, + name: 'Group2', + description: null, + }, + }]); +}); + +test('[Find Many .through] Get groups with first user', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + with: { + user: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + user: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + } | null; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + description: null, + user: null, + }, { + id: 2, + name: 'Group2', + description: null, + user: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + }, { + id: 3, + name: 'Group3', + description: null, + user: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + }]); +}); + +test('[Find Many .through] Get users with filtered groups', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + groupsFiltered: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groupsFiltered: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.groupsFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groupsFiltered: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groupsFiltered: [{ + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groupsFiltered: [ + { + id: 2, + name: 'Group2', + description: null, + }, + { + id: 3, + name: 'Group3', + description: null, + }, + ], + }]); +}); + +test('[Find Many .through] Get groups with filtered users', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + with: { + usersFiltered: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersFiltered: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.usersFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + description: null, + usersFiltered: [], + }, { + id: 2, + name: 'Group2', + description: null, + usersFiltered: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }], + }, { + id: 3, + name: 'Group3', + description: null, + usersFiltered: [{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }], + }]); +}); + +test('[Find Many .through] Get users with filtered groups + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + groupsFiltered: { + where: { + id: { + lt: 3, + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groupsFiltered: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.groupsFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groupsFiltered: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groupsFiltered: [{ + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groupsFiltered: [ + { + id: 2, + name: 'Group2', + description: null, + }, + ], + }]); +}); + +test('[Find Many .through] Get groups with filtered users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + with: { + usersFiltered: { + where: { id: { lt: 3 } }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersFiltered: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.usersFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + description: null, + usersFiltered: [], + }, { + id: 2, + name: 'Group2', + description: null, + usersFiltered: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }], + }, { + id: 3, + name: 'Group3', + description: null, + usersFiltered: [], + }]); +}); + +test('[Find Many] Get users with filtered posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1.1' }, + { id: 2, ownerId: 2, content: 'Post2.1' }, + { id: 3, ownerId: 3, content: 'Post3.1' }, + { id: 4, ownerId: 1, content: 'Post1.2' }, + { id: 5, ownerId: 2, content: 'Post2.2' }, + { id: 6, ownerId: 3, content: 'Post3.2' }, + { id: 7, ownerId: 1, content: 'Post1.3' }, + { id: 8, ownerId: 2, content: 'Post2.3' }, + { id: 9, ownerId: 3, content: 'Post3.3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + postsFiltered: { + columns: { + ownerId: true, + content: true, + }, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + postsFiltered: { + ownerId: number | null; + content: string; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithPosts).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + postsFiltered: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + postsFiltered: [ + { ownerId: 2, content: 'Post2.1' }, + { ownerId: 2, content: 'Post2.2' }, + { ownerId: 2, content: 'Post2.3' }, + ], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + postsFiltered: [], + }]); +}); + +test('[Find Many] Get posts with filtered authors', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1.1' }, + { id: 2, ownerId: 2, content: 'Post2.1' }, + { id: 3, ownerId: 3, content: 'Post3.1' }, + { id: 4, ownerId: 1, content: 'Post1.2' }, + { id: 5, ownerId: 2, content: 'Post2.2' }, + { id: 6, ownerId: 3, content: 'Post3.2' }, + ]); + + const posts = await db.query.postsTable.findMany({ + columns: { + id: true, + content: true, + }, + with: { + authorFiltered: { + columns: { + name: true, + id: true, + }, + }, + }, + }); + + expectTypeOf(posts).toEqualTypeOf<{ + id: number; + content: string; + authorFiltered: { + id: number; + name: string; + }; + }[]>(); + + posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(posts).toStrictEqual([ + { id: 1, content: 'Post1.1', authorFiltered: null }, + { + id: 2, + content: 'Post2.1', + authorFiltered: { + id: 2, + name: 'Andrew', + }, + }, + { id: 3, content: 'Post3.1', authorFiltered: null }, + { id: 4, content: 'Post1.2', authorFiltered: null }, + { + id: 5, + content: 'Post2.2', + authorFiltered: { + id: 2, + name: 'Andrew', + }, + }, + { id: 6, content: 'Post3.2', authorFiltered: null }, + ]); +}); + +test('[Find Many] Get users with filtered posts + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1.1' }, + { id: 2, ownerId: 2, content: 'Post2.1' }, + { id: 3, ownerId: 3, content: 'Post3.1' }, + { id: 4, ownerId: 1, content: 'Post1.2' }, + { id: 5, ownerId: 2, content: 'Post2.2' }, + { id: 6, ownerId: 3, content: 'Post3.2' }, + { id: 7, ownerId: 1, content: 'Post1.3' }, + { id: 8, ownerId: 2, content: 'Post2.3' }, + { id: 9, ownerId: 3, content: 'Post3.3' }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + postsFiltered: { + columns: { + ownerId: true, + content: true, + }, + where: { + content: { + like: '%.2', + }, + }, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + postsFiltered: { + ownerId: number | null; + content: string; + }[]; + }[]>(); + + usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(usersWithPosts).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + postsFiltered: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + postsFiltered: [ + { ownerId: 2, content: 'Post2.2' }, + ], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + postsFiltered: [], + }]); +}); + +test('[Find Many] Get posts with filtered authors + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1.1' }, + { id: 2, ownerId: 2, content: 'Post2.1' }, + { id: 3, ownerId: 3, content: 'Post3.1' }, + { id: 4, ownerId: 1, content: 'Post1.2' }, + { id: 5, ownerId: 2, content: 'Post2.2' }, + { id: 6, ownerId: 3, content: 'Post3.2' }, + ]); + + const posts = await db.query.postsTable.findMany({ + columns: { + id: true, + content: true, + }, + with: { + authorAltFiltered: { + columns: { + name: true, + id: true, + }, + where: { + id: 2, + }, + }, + }, + }); + + expectTypeOf(posts).toEqualTypeOf<{ + id: number; + content: string; + authorAltFiltered: { + id: number; + name: string; + } | null; + }[]>(); + + posts.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(posts).toStrictEqual([ + { id: 1, content: 'Post1.1', authorAltFiltered: null }, + { + id: 2, + content: 'Post2.1', + authorAltFiltered: { + id: 2, + name: 'Andrew', + }, + }, + { id: 3, content: 'Post3.1', authorAltFiltered: null }, + { id: 4, content: 'Post1.2', authorAltFiltered: null }, + { id: 5, content: 'Post2.2', authorAltFiltered: null }, + { id: 6, content: 'Post3.2', authorAltFiltered: null }, + ]); +}); + +test('[Find Many] Get custom schema users with filtered posts + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(schemaPosts).values([ + { id: 1, ownerId: 1, content: 'Message1.1' }, + { id: 2, ownerId: 2, content: 'Message2.1' }, + { id: 3, ownerId: 3, content: 'Message3.1' }, + { id: 4, ownerId: 1, content: 'Message1.2' }, + { id: 5, ownerId: 2, content: 'Message2.2' }, + { id: 6, ownerId: 3, content: 'Message3.2' }, + { id: 7, ownerId: 1, content: 'Message1.3' }, + { id: 8, ownerId: 2, content: 'Message2.3' }, + { id: 9, ownerId: 3, content: 'Message3.3' }, + { id: 10, ownerId: 1, content: 'Post1.1' }, + { id: 11, ownerId: 2, content: 'Post2.1' }, + { id: 12, ownerId: 3, content: 'Post3.1' }, + { id: 13, ownerId: 1, content: 'Post1.2' }, + { id: 14, ownerId: 2, content: 'Post2.2' }, + { id: 15, ownerId: 3, content: 'Post3.2' }, + { id: 16, ownerId: 1, content: 'Post1.3' }, + { id: 17, ownerId: 2, content: 'Post2.3' }, + { id: 18, ownerId: 3, content: 'Post3.3' }, + ]); + + const usersWithPosts = await db.query.schemaUsers.findMany({ + with: { + posts: { + columns: { + ownerId: true, + content: true, + }, + where: { + content: { + like: '%2.%', + }, + }, + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'desc', + }, + where: { + id: { + gte: 2, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + ownerId: number | null; + content: string; + }[]; + }[]>(); + + expect(usersWithPosts).toStrictEqual([{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [ + { ownerId: 2, content: 'Message2.1' }, + { ownerId: 2, content: 'Message2.2' }, + { ownerId: 2, content: 'Message2.3' }, + ], + }]); +}); + +test('[Find Many] Get custom schema posts with filtered authors + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(schemaPosts).values([ + { id: 1, ownerId: 1, content: 'Message1.1' }, + { id: 2, ownerId: 2, content: 'Message2.1' }, + { id: 3, ownerId: 3, content: 'Message3.1' }, + { id: 4, ownerId: 1, content: 'Message1.2' }, + { id: 5, ownerId: 2, content: 'Message2.2' }, + { id: 6, ownerId: 3, content: 'Message3.2' }, + { id: 7, ownerId: 1, content: 'Message1.3' }, + { id: 8, ownerId: 2, content: 'Message2.3' }, + { id: 9, ownerId: 3, content: 'Message3.3' }, + { id: 10, ownerId: 1, content: 'Post1.1' }, + { id: 11, ownerId: 2, content: 'Post2.1' }, + { id: 12, ownerId: 3, content: 'Post3.1' }, + { id: 13, ownerId: 1, content: 'Post1.2' }, + { id: 14, ownerId: 2, content: 'Post2.2' }, + { id: 15, ownerId: 3, content: 'Post3.2' }, + { id: 16, ownerId: 1, content: 'Post1.3' }, + { id: 17, ownerId: 2, content: 'Post2.3' }, + { id: 18, ownerId: 3, content: 'Post3.3' }, + ]); + + const posts = await db.query.schemaPosts.findMany({ + columns: { + content: true, + }, + with: { + author: { + columns: { + name: true, + id: true, + }, + where: { + id: 2, + }, + }, + }, + orderBy: { + id: 'desc', + }, + }); + + expectTypeOf(posts).toEqualTypeOf<{ + content: string; + author: { + id: number; + name: string; + } | null; + }[]>(); + + expect(posts).toStrictEqual([ + { content: 'Post3.3', author: null }, + { content: 'Post2.3', author: null }, + { content: 'Post1.3', author: null }, + { content: 'Post3.2', author: null }, + { content: 'Post2.2', author: null }, + { content: 'Post1.2', author: null }, + { content: 'Post3.1', author: null }, + { content: 'Post2.1', author: null }, + { content: 'Post1.1', author: null }, + { content: 'Message3.3', author: null }, + { content: 'Message2.3', author: { id: 2, name: 'Andrew' } }, + { content: 'Message1.3', author: null }, + { content: 'Message3.2', author: null }, + { content: 'Message2.2', author: { id: 2, name: 'Andrew' } }, + { content: 'Message1.2', author: null }, + { content: 'Message3.1', author: null }, + { content: 'Message2.1', author: { id: 2, name: 'Andrew' } }, + { content: 'Message1.1', author: null }, + ]); +}); + +test('[Find Many .through] Get custom schema users with filtered groups + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(schemaGroups).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(schemaUsersToGroups).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.schemaUsers.findMany({ + with: { + groups: { + where: { + id: { + lt: 3, + }, + }, + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [ + { + id: 2, + name: 'Group2', + description: null, + }, + ], + }]); +}); + +test('[Find Many .through] Get custom schema groups with filtered users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(schemaGroups).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(schemaUsersToGroups).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.schemaGroups.findMany({ + with: { + users: { + where: { id: { lt: 3 } }, + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + users: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + description: null, + users: [], + }, { + id: 2, + name: 'Group2', + description: null, + users: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }], + }, { + id: 3, + name: 'Group3', + description: null, + users: [], + }]); +}); + +test('[Find Many] Get view users with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersView.findMany({ + with: { + posts: true, + }, + orderBy: { + id: 'asc', + }, + where: { + id: { + lt: 3, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + counter: number | null; + createdAt: Date | null; + postContent: string | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post1', + createdAt: date1, + posts: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + counter: null, + postContent: 'Post2', + createdAt: date2, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], + }]); +}); + +test('[Find Many] Get view users with posts + filter by SQL.Aliased field', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersView.findMany({ + columns: { + id: true, + name: true, + verified: true, + invitedBy: true, + counter: true, + }, + with: { + posts: true, + }, + orderBy: { + id: 'desc', + }, + where: { + counter: { + ne: '0', + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + counter: 3, + posts: [], + }]); +}); + +test('[Find Many] Get view users with posts + filter by joined field', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersView.findMany({ + with: { + posts: true, + }, + orderBy: { + id: 'asc', + }, + where: { + postContent: 'Post2', + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + counter: null, + postContent: 'Post2', + createdAt: date2, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], + }]); +}); + +test('[Find Many] Get posts with view users with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const result = await db.query.postsTable.findMany({ + with: { + viewAuthor: { + with: { + posts: true, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + viewAuthor: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }; + }[]>(); + + expect(result).toEqual([ + { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: date1, + viewAuthor: null, + }, + { + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: date2, + viewAuthor: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + counter: null, + postContent: 'Post2', + createdAt: date2, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], + }, + }, + { + id: 3, + ownerId: 3, + content: 'Post3', + createdAt: date3, + viewAuthor: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post3', + createdAt: date3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, + }, + ]); +}); + +test('[Find Many] Get posts with view users + filter with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const result = await db.query.postsTable.findMany({ + with: { + viewAuthor: { + with: { + posts: true, + }, + where: { + id: { + ne: 2, + }, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + viewAuthor: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | null; + }[]>(); + + expect(result).toEqual([ + { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: date1, + viewAuthor: null, + }, + { + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: date2, + viewAuthor: null, + }, + { + id: 3, + ownerId: 3, + content: 'Post3', + createdAt: date3, + viewAuthor: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post3', + createdAt: date3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, + }, + ]); +}); + +test('[Find Many] Get posts with view users + filter by joined column with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const result = await db.query.postsTable.findMany({ + with: { + viewAuthor: { + with: { + posts: true, + }, + where: { + postContent: { + notLike: '%2', + }, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + viewAuthor: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | null; + }[]>(); + + expect(result).toEqual([ + { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: date1, + viewAuthor: null, + }, + { + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: date2, + viewAuthor: null, + }, + { + id: 3, + ownerId: 3, + content: 'Post3', + createdAt: date3, + viewAuthor: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post3', + createdAt: date3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, + }, + ]); +}); +test('[Find Many] Get posts with view users + filter by SQL.Aliased with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const result = await db.query.postsTable.findMany({ + with: { + viewAuthor: { + with: { + posts: true, + }, + where: { + counter: { + ne: '0', + }, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + viewAuthor: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | null; + }[]>(); + + expect(result).toEqual([ + { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: date1, + viewAuthor: null, + }, + { + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: date2, + viewAuthor: null, + }, + { + id: 3, + ownerId: 3, + content: 'Post3', + createdAt: date3, + viewAuthor: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post3', + createdAt: date3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, + }, + ]); +}); + +test('[Find Many .through] Get view users with filtered groups + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.usersView.findMany({ + with: { + groups: { + where: { + id: { + lt: 3, + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + createdAt: null, + postContent: null, + counter: 3, + groups: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + createdAt: null, + postContent: null, + counter: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + createdAt: null, + postContent: null, + counter: 3, + groups: [ + { + id: 2, + name: 'Group2', + description: null, + }, + ], + }]); +}); + +test('[Find Many .through] Get groups with filtered view users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.groupsTable.findMany({ + with: { + usersView: { + columns: { + createdAt: false, + postContent: false, + }, + where: { id: { lt: 3 } }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersView: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + counter: number | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.usersView.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + description: null, + usersView: [], + }, { + id: 2, + name: 'Group2', + description: null, + usersView: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + counter: null, + }], + }, { + id: 3, + name: 'Group3', + description: null, + usersView: [], + }]); +}); + +test('[Find Many] Get schema view users with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(schemaPosts).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.schemaUsersView.findMany({ + with: { + posts: true, + }, + orderBy: { + id: 'asc', + }, + where: { + id: { + lt: 3, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + counter: number | null; + createdAt: Date | null; + postContent: string | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post1', + createdAt: date1, + posts: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + counter: null, + postContent: 'Post2', + createdAt: date2, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], + }]); +}); + +test('[Find Many] Get schema view users with posts + filter by SQL.Aliased field', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(schemaPosts).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.schemaUsersView.findMany({ + columns: { + id: true, + name: true, + verified: true, + invitedBy: true, + counter: true, + }, + with: { + posts: true, + }, + orderBy: { + id: 'desc', + }, + where: { + counter: { + ne: '0', + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + counter: 3, + posts: [], + }]); +}); + +test('[Find Many] Get schema view users with posts + filter by joined field', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(schemaPosts).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.schemaUsersView.findMany({ + with: { + posts: true, + }, + orderBy: { + id: 'asc', + }, + where: { + postContent: 'Post2', + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + counter: null, + postContent: 'Post2', + createdAt: date2, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], + }]); +}); + +test('[Find Many] Get schema posts with view users with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(schemaPosts).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const result = await db.query.schemaPosts.findMany({ + with: { + viewAuthor: { + with: { + posts: true, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + viewAuthor: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }; + }[]>(); + + expect(result).toEqual([ + { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: date1, + viewAuthor: null, + }, + { + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: date2, + viewAuthor: { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + counter: null, + postContent: 'Post2', + createdAt: date2, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], + }, + }, + { + id: 3, + ownerId: 3, + content: 'Post3', + createdAt: date3, + viewAuthor: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post3', + createdAt: date3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, + }, + ]); +}); + +test('[Find Many] Get schema posts with view users + filter with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(schemaPosts).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const result = await db.query.schemaPosts.findMany({ + with: { + viewAuthor: { + with: { + posts: true, + }, + where: { + id: { + ne: 2, + }, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + viewAuthor: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | null; + }[]>(); + + expect(result).toEqual([ + { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: date1, + viewAuthor: null, + }, + { + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: date2, + viewAuthor: null, + }, + { + id: 3, + ownerId: 3, + content: 'Post3', + createdAt: date3, + viewAuthor: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post3', + createdAt: date3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, + }, + ]); +}); + +test('[Find Many] Get schema posts with view users + filter by joined column with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(schemaPosts).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const result = await db.query.schemaPosts.findMany({ + with: { + viewAuthor: { + with: { + posts: true, + }, + where: { + postContent: { + notLike: '%2', + }, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + viewAuthor: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | null; + }[]>(); + + expect(result).toEqual([ + { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: date1, + viewAuthor: null, + }, + { + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: date2, + viewAuthor: null, + }, + { + id: 3, + ownerId: 3, + content: 'Post3', + createdAt: date3, + viewAuthor: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post3', + createdAt: date3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, + }, + ]); +}); +test('[Find Many] Get schema posts with view users + filter by SQL.Aliased with posts', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(50000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(schemaPosts).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const result = await db.query.schemaPosts.findMany({ + with: { + viewAuthor: { + with: { + posts: true, + }, + where: { + counter: { + ne: '0', + }, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + viewAuthor: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + } | null; + }[]>(); + + expect(result).toEqual([ + { + id: 1, + ownerId: 1, + content: 'Post1', + createdAt: date1, + viewAuthor: null, + }, + { + id: 2, + ownerId: 2, + content: 'Post2', + createdAt: date2, + viewAuthor: null, + }, + { + id: 3, + ownerId: 3, + content: 'Post3', + createdAt: date3, + viewAuthor: { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + counter: 3, + postContent: 'Post3', + createdAt: date3, + posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], + }, + }, + ]); +}); + +test('[Find Many .through] Get schema view users with filtered groups + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(schemaGroups).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(schemaUsersToGroups).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.schemaUsersView.findMany({ + with: { + groups: { + where: { + id: { + lt: 3, + }, + }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + createdAt: Date | null; + postContent: string | null; + counter: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + createdAt: null, + postContent: null, + counter: 3, + groups: [], + }, { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + createdAt: null, + postContent: null, + counter: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + createdAt: null, + postContent: null, + counter: 3, + groups: [ + { + id: 2, + name: 'Group2', + description: null, + }, + ], + }]); +}); + +test('[Find Many .through] Get schema groups with filtered view users + where', async (t) => { + const { mysqlDbV2: db } = t; + + await db.insert(schemaUsers).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(schemaGroups).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(schemaUsersToGroups).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 2 }, + ]); + + const response = await db.query.schemaGroups.findMany({ + with: { + usersView: { + columns: { + createdAt: false, + postContent: false, + }, + where: { id: { lt: 3 } }, + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + description: string | null; + usersView: { + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + counter: number | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.usersView.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Group1', + description: null, + usersView: [], + }, { + id: 2, + name: 'Group2', + description: null, + usersView: [{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + counter: null, + }], + }, { + id: 3, + name: 'Group3', + description: null, + usersView: [], + }]); +}); + +test('[Find Many] Get users + filter users by posts', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(5000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + orderBy: { + id: 'asc', + }, + where: { + posts: { + content: { + like: '%2', + }, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }]); +}); + +test('[Find Many] Get users with posts + filter users by posts', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(5000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1', createdAt: date1 }, + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: true, + }, + orderBy: { + id: 'asc', + }, + where: { + posts: { + content: { + like: '%2', + }, + }, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], + }]); +}); + +test('[Find Many] Get users filtered by existing posts', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + orderBy: { + id: 'asc', + }, + where: { + posts: true, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }]); +}); + +test('[Find Many] Get users with posts + filter users by existing posts', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: true, + }, + orderBy: { + id: 'asc', + }, + where: { + posts: true, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ id: 1, ownerId: 2, content: 'Post2', createdAt: date2 }], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [{ id: 2, ownerId: 3, content: 'Post3', createdAt: date3 }], + }]); +}); + +test('[Find Many] Get users filtered by nonexisting posts', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + orderBy: { + id: 'asc', + }, + where: { + posts: false, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }]); +}); + +test('[Find Many] Get users with posts + filter users by existing posts', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 2, content: 'Post2', createdAt: date2 }, + { ownerId: 3, content: 'Post3', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: true, + }, + orderBy: { + id: 'asc', + }, + where: { + posts: false, + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [], + }]); +}); + +test('[Find Many] Get users with posts + filter posts by author', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(5000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: { + where: { + author: { + id: 2, + }, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([ + { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + posts: [], + }, + { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ + id: 3, + ownerId: 2, + content: 'Post2U.1', + createdAt: date2, + }, { + id: 4, + ownerId: 2, + content: 'Post2U.2', + createdAt: date2, + }], + }, + { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + posts: [], + }, + ]); +}); + +test('[Find Many] Get users filtered by own columns and posts with filtered posts by own columns and author', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(5000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const usersWithPosts = await db.query.usersTable.findMany({ + with: { + posts: { + where: { + content: { + like: '%2', + }, + author: { + id: 2, + }, + }, + }, + }, + where: { + id: { + gt: 1, + }, + posts: { + content: { + like: 'M%', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(usersWithPosts).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + posts: { + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]; + }[]>(); + + expect(usersWithPosts).toEqual([ + { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + posts: [{ + id: 6, + ownerId: 2, + content: 'Post2U.2', + createdAt: date2, + }, { + id: 8, + ownerId: 2, + content: 'MessageU.2', + createdAt: date2, + }], + }, + ]); +}); + +test('[Find Many .through] Get users filtered by groups', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 1 }, + ]); + + const response = await db.query.usersTable.findMany({ + where: { + groups: { + name: 'Group2', + }, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response).toStrictEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }]); +}); + +test('[Find Many .through] Get users filtered by existing groups', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 1 }, + ]); + + const response = await db.query.usersTable.findMany({ + where: { + groups: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response).toStrictEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }]); +}); + +test('[Find Many .through] Get users with existing groups', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 1 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + groups: true, + }, + where: { + groups: true, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }, { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }, { + id: 3, + name: 'Group3', + description: null, + }], + }]); +}); + +test('[Find Many .through] Get users filtered by nonexisting groups', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 1 }, + ]); + + const response = await db.query.usersTable.findMany({ + where: { + groups: false, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }]); +}); + +test('[Find Many .through] Get users with nonexisting groups', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 1 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + groups: true, + }, + where: { + groups: false, + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + response.sort((a, b) => (a.id > b.id) ? 1 : -1); + for (const e of response) { + e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); + } + + expect(response).toStrictEqual([{ + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [], + }]); +}); + +test('[Find Many .through] Get users filtered by groups with groups', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 1 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + groups: { + orderBy: { + id: 'asc', + }, + }, + }, + where: { + groups: { + name: 'Group2', + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + groups: [{ + id: 2, + name: 'Group2', + description: null, + }], + }]); +}); + +test('[Find Many .through] Get users filtered by groups with groups filtered by users', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 1 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + groups: { + orderBy: { + id: 'asc', + }, + where: { + users: { + id: 1, + }, + }, + }, + }, + where: { + groups: { + name: 'Group3', + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([{ + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }]); +}); + +test('[Find Many .through] Get users filtered by users of groups with groups', async (ctx) => { + const { mysqlDbV2: db } = ctx; + + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + await db.insert(groupsTable).values([ + { id: 1, name: 'Group1' }, + { id: 2, name: 'Group2' }, + { id: 3, name: 'Group3' }, + ]); + + await db.insert(usersToGroupsTable).values([ + { userId: 1, groupId: 1 }, + { userId: 2, groupId: 2 }, + { userId: 3, groupId: 3 }, + { userId: 3, groupId: 1 }, + ]); + + const response = await db.query.usersTable.findMany({ + with: { + groups: { + orderBy: { + id: 'asc', + }, + }, + }, + where: { + groups: { + users: { + id: 3, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(response).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + groups: { + id: number; + name: string; + description: string | null; + }[]; + }[]>(); + + expect(response).toStrictEqual([ + { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }], + }, + { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + groups: [{ + id: 1, + name: 'Group1', + description: null, + }, { + id: 3, + name: 'Group3', + description: null, + }], + }, + ]); +}); + +test('[Find Many] Shortcut form placeholders in filters - eq', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(45000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + { id: 9, ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { id: 10, ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const query = await db.query.postsTable.findMany({ + where: { + ownerId: sql.placeholder('id'), + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const posts = await query.execute({ + id: 1, + }); + + expectTypeOf(posts).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]>(); + + expect(posts).toEqual([ + { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + ]); +}); + +test('[Find Many] Shortcut form placeholders in filters - or', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(45000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + { id: 9, ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { id: 10, ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const query = await db.query.postsTable.findMany({ + where: { + OR: [{ + ownerId: sql.placeholder('id1'), + }, { + ownerId: sql.placeholder('id2'), + }], + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const posts = await query.execute({ + id1: 1, + id2: 2, + }); + + expectTypeOf(posts).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]>(); + + expect(posts).toEqual([ + { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + ]); +}); + +test('[Find Many] Shortcut form placeholders in filters - column or', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(45000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + { id: 9, ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { id: 10, ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const query = await db.query.postsTable.findMany({ + where: { + ownerId: { + OR: [sql.placeholder('id1'), sql.placeholder('id2')], + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const posts = await query.execute({ + id1: 1, + id2: 2, + }); + + expectTypeOf(posts).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]>(); + + expect(posts).toEqual([ + { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + ]); +}); + +test('[Find Many] Shortcut form placeholders in filters - column not', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(45000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + { id: 9, ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { id: 10, ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const query = await db.query.postsTable.findMany({ + where: { + ownerId: { + NOT: sql.placeholder('id'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const posts = await query.execute({ + id: 3, + }); + + expectTypeOf(posts).toEqualTypeOf<{ + id: number; + content: string; + ownerId: number | null; + createdAt: Date; + }[]>(); + + expect(posts).toEqual([ + { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + ]); +}); + +test('[Find Many] Get users filtered by posts with AND', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(45000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.3', createdAt: date1 }, + { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const users = await db.query.usersTable.findMany({ + where: { + AND: [{ + posts: { + content: { + like: 'M%', + }, + }, + }, { + posts: { + ownerId: { + ne: 2, + }, + }, + }], + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(users).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + expect(users).toEqual([ + { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + ]); +}); + +test('[Find Many] Get users filtered by posts with OR', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(45000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.3', createdAt: date1 }, + { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const users = await db.query.usersTable.findMany({ + where: { + OR: [{ + posts: { + content: { + like: 'M%', + }, + }, + }, { + posts: { + ownerId: { + eq: 3, + }, + }, + }], + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(users).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + expect(users).toEqual([ + { + id: 1, + name: 'Dan', + verified: false, + invitedBy: null, + }, + { + id: 2, + name: 'Andrew', + verified: false, + invitedBy: null, + }, + { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + ]); +}); + +test('[Find Many] Get users filtered by posts with NOT', async () => { + await db.insert(usersTable).values([ + { id: 1, name: 'Dan' }, + { id: 2, name: 'Andrew' }, + { id: 3, name: 'Alex' }, + ]); + + const date1 = new Date(45000); + const date2 = new Date(1000); + const date3 = new Date(10000); + + await db.insert(postsTable).values([ + { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.1', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.2', createdAt: date1 }, + { ownerId: 1, content: 'Message1U.3', createdAt: date1 }, + { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, + { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, + { ownerId: 2, content: 'MessageU.1', createdAt: date2 }, + { ownerId: 2, content: 'MessageU.2', createdAt: date2 }, + { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, + { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, + ]); + + const users = await db.query.usersTable.findMany({ + where: { + NOT: { + posts: { + content: { + like: 'M%', + }, + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expectTypeOf(users).toEqualTypeOf<{ + id: number; + name: string; + verified: boolean; + invitedBy: number | null; + }[]>(); + + expect(users).toEqual([ + { + id: 3, + name: 'Alex', + verified: false, + invitedBy: null, + }, + ]); +}); + +test('[Find Many .through] Through with uneven relation column count', async () => { + await db.insert(students).values([{ + studentId: 1, + name: 'First', + }, { + studentId: 2, + name: 'Second', + }, { + studentId: 3, + name: 'Third', + }, { + studentId: 4, + name: 'Fourth', + }]); + + await db.insert(studentGrades).values([ + { + studentId: 1, + courseId: 1, + semester: 's1', + grade: '44', + }, + { + studentId: 1, + courseId: 2, + semester: 's2', + grade: '35', + }, + { + studentId: 2, + courseId: 1, + semester: 's1', + grade: '58', + }, + { + studentId: 2, + courseId: 3, + semester: 's2', + grade: '72', + }, + { + studentId: 3, + courseId: 4, + semester: 's4', + grade: '99', + }, + { + studentId: 3, + courseId: 2, + semester: 's3', + grade: '85', + }, + { + studentId: 3, + courseId: 1, + semester: 's2', + grade: '48', + }, + { + studentId: 4, + courseId: 3, + semester: 's1', + grade: '63', + }, + { + studentId: 4, + courseId: 4, + semester: 's3', + grade: '51', + }, + ]); + + await db.insert(courseOfferings).values([{ + courseId: 1, + semester: 's3', + }, { + courseId: 2, + semester: 's4', + }, { + courseId: 4, + semester: 's1', + }, { + courseId: 4, + semester: 's3', + }, { + courseId: 1, + semester: 's1', + }, { + courseId: 1, + semester: 's2', + }, { + courseId: 2, + semester: 's1', + }, { + courseId: 2, + semester: 's2', + }, { + courseId: 2, + semester: 's3', + }, { + courseId: 3, + semester: 's3', + }, { + courseId: 3, + semester: 's4', + }, { + courseId: 4, + semester: 's4', + }, { + courseId: 3, + semester: 's1', + }]); + + const res = await db.query.students.findMany({ + with: { + courseOfferings: { + orderBy: { + courseId: 'asc', + semester: 'asc', + }, + }, + }, + orderBy: { + studentId: 'asc', + }, + }); + + expectTypeOf(res).toEqualTypeOf<{ + studentId: number; + name: string; + courseOfferings: { + courseId: number; + semester: string; + }[]; + }[]>(); + + expect(res).toStrictEqual([ + { + name: 'First', + studentId: 1, + courseOfferings: [ + { + courseId: 1, + semester: 's1', + }, + { + courseId: 2, + semester: 's2', + }, + ], + }, + { + name: 'Second', + studentId: 2, + courseOfferings: [ + { + courseId: 1, + semester: 's1', + }, + ], + }, + { + name: 'Third', + studentId: 3, + courseOfferings: [ + { + courseId: 1, + semester: 's2', + }, + { + courseId: 2, + semester: 's3', + }, + { + courseId: 4, + semester: 's4', + }, + ], + }, + { + name: 'Fourth', + studentId: 4, + courseOfferings: [ + { + courseId: 3, + semester: 's1', + }, + { + courseId: 4, + semester: 's3', + }, + ], + }, + ]); +}); + +test('[Find Many .through] Through with uneven relation column count - reverse', async () => { + await db.insert(students).values([{ + studentId: 1, + name: 'First', + }, { + studentId: 2, + name: 'Second', + }, { + studentId: 3, + name: 'Third', + }, { + studentId: 4, + name: 'Fourth', + }]); + + await db.insert(studentGrades).values([ + { + studentId: 1, + courseId: 1, + semester: 's1', + grade: '44', + }, + { + studentId: 1, + courseId: 2, + semester: 's2', + grade: '35', + }, + { + studentId: 2, + courseId: 1, + semester: 's1', + grade: '58', + }, + { + studentId: 2, + courseId: 3, + semester: 's2', + grade: '72', + }, + { + studentId: 3, + courseId: 4, + semester: 's4', + grade: '99', + }, + { + studentId: 3, + courseId: 2, + semester: 's3', + grade: '85', + }, + { + studentId: 3, + courseId: 1, + semester: 's2', + grade: '48', + }, + { + studentId: 4, + courseId: 3, + semester: 's1', + grade: '63', + }, + { + studentId: 4, + courseId: 4, + semester: 's3', + grade: '51', + }, + ]); + + await db.insert(courseOfferings).values([{ + courseId: 1, + semester: 's3', + }, { + courseId: 2, + semester: 's4', + }, { + courseId: 4, + semester: 's1', + }, { + courseId: 4, + semester: 's3', + }, { + courseId: 1, + semester: 's1', + }, { + courseId: 1, + semester: 's2', + }, { + courseId: 2, + semester: 's1', + }, { + courseId: 2, + semester: 's2', + }, { + courseId: 2, + semester: 's3', + }, { + courseId: 3, + semester: 's3', + }, { + courseId: 3, + semester: 's4', + }, { + courseId: 4, + semester: 's4', + }, { + courseId: 3, + semester: 's1', + }]); + + const res = await db.query.courseOfferings.findMany({ + with: { + students: { + orderBy: { + studentId: 'asc', + }, + }, + }, + orderBy: { + courseId: 'asc', + semester: 'asc', + }, + }); + + expectTypeOf(res).toEqualTypeOf<{ + courseId: number; + semester: string; + students: { + studentId: number; + name: string; + }[]; + }[]>(); + + expect(res).toStrictEqual([ + { + courseId: 1, + semester: 's1', + students: [ + { + name: 'First', + studentId: 1, + }, + { + name: 'Second', + studentId: 2, + }, + ], + }, + { + courseId: 1, + semester: 's2', + students: [ + { + name: 'Third', + studentId: 3, + }, + ], + }, + { + courseId: 1, + semester: 's3', + students: [], + }, + { + courseId: 2, + semester: 's1', + students: [], + }, + { + courseId: 2, + semester: 's2', + students: [ + { + name: 'First', + studentId: 1, + }, + ], + }, + { + courseId: 2, + semester: 's3', + students: [ + { + name: 'Third', + studentId: 3, + }, + ], + }, + { + courseId: 2, + semester: 's4', + students: [], + }, + { + courseId: 3, + semester: 's1', + students: [ + { + name: 'Fourth', + studentId: 4, + }, + ], + }, + { + courseId: 3, + semester: 's3', + students: [], + }, + { + courseId: 3, + semester: 's4', + students: [], + }, + { + courseId: 4, + semester: 's1', + students: [], + }, + { + courseId: 4, + semester: 's3', + students: [ + { + name: 'Fourth', + studentId: 4, + }, + ], + }, + { + courseId: 4, + semester: 's4', + students: [ + { + name: 'Third', + studentId: 3, + }, + ], + }, + ]); +}); + +test('alltypes', async () => { + await db.execute(sql` + CREATE TABLE \`all_types\` ( + \`serial\` serial AUTO_INCREMENT, + \`bigint53\` bigint, + \`bigint64\` bigint, + \`bigint_string\` bigint, + \`binary\` binary, + \`boolean\` boolean, + \`char\` char, + \`date\` date, + \`date_str\` date, + \`datetime\` datetime, + \`datetime_str\` datetime, + \`decimal\` decimal, + \`decimal_num\` decimal(30), + \`decimal_big\` decimal(30), + \`double\` double, + \`float\` float, + \`int\` int, + \`json\` json, + \`med_int\` mediumint, + \`small_int\` smallint, + \`real\` real, + \`text\` text, + \`time\` time, + \`timestamp\` timestamp, + \`timestamp_str\` timestamp, + \`tiny_int\` tinyint, + \`varbin\` varbinary(16), + \`varchar\` varchar(255), + \`year\` year, + \`enum\` enum('enV1','enV2') + ); + `); + + await db.insert(usersTable).values({ + id: 1, + name: 'First', + }); + + await db.insert(allTypesTable).values({ + serial: 1, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + binary: '1', + boolean: true, + char: 'c', + date: new Date(1741743161623), + dateStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + datetime: new Date(1741743161623), + datetimeStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + decimal: '47521', + decimalNum: 9007199254740991, + decimalBig: 5044565289845416380n, + double: 15.35325689124218, + enum: 'enV1', + float: 1.048596, + real: 1.048596, + text: 'C4-', + int: 621, + json: { + str: 'strval', + arr: ['str', 10], + }, + medInt: 560, + smallInt: 14, + time: '04:13:22', + timestamp: new Date(1741743161623), + timestampStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + tinyInt: 7, + varbin: '1010110101001101', + varchar: 'VCHAR', + year: 2025, + }); + + const rawRes = await db.select().from(allTypesTable); + const relationRootRes = await db.query.allTypesTable.findMany(); + const { alltypes: nestedRelationRes } = (await db.query.usersTable.findFirst({ + with: { + alltypes: true, + }, + }))!; + + expectTypeOf(relationRootRes).toEqualTypeOf(rawRes); + expectTypeOf(nestedRelationRes).toEqualTypeOf(rawRes); + + expect(nestedRelationRes).toStrictEqual(rawRes); + expect(relationRootRes).toStrictEqual(rawRes); + + const expectedRes = [ + { + serial: 1, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + binary: '1', + boolean: true, + char: 'c', + date: new Date('2025-03-12T00:00:00.000Z'), + dateStr: '2025-03-12', + datetime: new Date('2025-03-12T01:32:42.000Z'), + datetimeStr: '2025-03-12 01:32:41', + decimal: '47521', + decimalNum: 9007199254740991, + decimalBig: 5044565289845416380n, + double: 15.35325689124218, + float: 1.0486, + int: 621, + json: { arr: ['str', 10], str: 'strval' }, + medInt: 560, + smallInt: 14, + real: 1.048596, + text: 'C4-', + time: '04:13:22', + timestamp: new Date('2025-03-12T01:32:42.000Z'), + timestampStr: '2025-03-12 01:32:41', + tinyInt: 7, + varbin: '1010110101001101', + varchar: 'VCHAR', + year: 2025, + enum: 'enV1', + }, + ]; + + expect(rawRes).toStrictEqual(expectedRes); +}); + +test('custom types', async () => { + await db.execute(sql` + CREATE TABLE \`custom_types\` ( + \`id\` int, + \`big\` bigint, + \`bytes\` blob, + \`time\` timestamp, + \`int\` int + ); + `); + + await db.insert(customTypesTable).values({ + id: 1, + big: 5044565289845416380n, + bytes: Buffer.from('BYTES'), + time: new Date(1741743161000), + int: 250, + }); + + const rawRes = await db.select().from(customTypesTable); + const relationRootRes = await db.query.customTypesTable.findMany(); + const { self: nestedRelationRes } = (await db.query.customTypesTable.findFirst({ + with: { + self: true, + }, + }))!; + + type ExpectedType = { + id: number | null; + big: bigint | null; + bytes: Buffer | null; + time: Date | null; + int: number | null; + }[]; + + expectTypeOf().toEqualTypeOf(rawRes); + expectTypeOf(relationRootRes).toEqualTypeOf(rawRes); + expectTypeOf(nestedRelationRes).toEqualTypeOf(rawRes); + + expect(nestedRelationRes).toStrictEqual(rawRes); + expect(relationRootRes).toStrictEqual(rawRes); + + const expectedRes: ExpectedType = [ + { + id: 1, + big: 5044565289845416380n, + bytes: Buffer.from('BYTES'), + time: new Date(1741743161000), + int: 250, + }, + ]; + + expect(rawRes).toStrictEqual(expectedRes); +}); + +test('.toSQL()', () => { + const query = db.query.usersTable.findFirst().toSQL(); + + expect(query).toHaveProperty('sql', expect.any(String)); + expect(query).toHaveProperty('params', expect.any(Array)); +}); diff --git a/integration-tests/tests/mysql/planetscale.test.ts b/integration-tests/tests/mysql/planetscale.test.ts new file mode 100644 index 0000000000..8223e7acb7 --- /dev/null +++ b/integration-tests/tests/mysql/planetscale.test.ts @@ -0,0 +1,183 @@ +import { sql } from 'drizzle-orm'; +import { getTableConfig } from 'drizzle-orm/mysql-core'; +import { migrate } from 'drizzle-orm/planetscale-serverless/migrator'; +import { describe, expect } from 'vitest'; +import { planetscaleTest as test } from './instrumentation'; +import { tests } from './mysql-common'; +import { runTests as cacheTests } from './mysql-common-cache'; +import { usersMigratorTable } from './schema2'; + +const omit = new Set([ + 'mySchema :: view', + 'mySchema :: select from tables with same name from different schema using alias', + 'mySchema :: prepared statement with placeholder in .where', + 'mySchema :: insert with spaces', + 'mySchema :: select with group by as column + sql', + 'mySchema :: select with group by as field', + 'mySchema :: insert many', + 'mySchema :: insert with overridden default values', + 'mySchema :: insert + select', + 'mySchema :: delete with returning all fields', + 'mySchema :: update with returning partial', + 'mySchema :: delete returning sql', + 'mySchema :: insert returning sql', + 'mySchema :: select typed sql', + 'mySchema :: select sql', + 'mySchema :: select all fields', + 'mySchema :: select distinct', + 'mySchema :: build query', + 'test $onUpdateFn and $onUpdate works updating', + 'test $onUpdateFn and $onUpdate works as $default', + 'set operations (mixed all) as function with subquery', + 'set operations (mixed) from query builder', + 'set operations (except all) as function', + 'set operations (except all) from query builder', + 'set operations (except) as function', + 'set operations (except) from query builder', + 'set operations (intersect all) as function', + 'set operations (intersect all) from query builder', + 'set operations (intersect) as function', + 'set operations (intersect) from query builder', + 'select iterator w/ prepared statement', + 'select iterator', + 'subquery with view', + 'join on aliased sql from with clause', + 'with ... delete', + 'with ... update', + 'with ... select', + + // to redefine in this file + 'utc config for datetime', + 'transaction', + 'transaction with options (set isolationLevel)', + 'having', + 'select count()', + 'insert via db.execute w/ query builder', + 'insert via db.execute + select via db.execute', + 'insert many with returning', + 'delete with returning partial', + 'delete with returning all fields', + 'update with returning partial', + 'update with returning all fields', + 'update returning sql', + 'delete returning sql', + 'insert returning sql', +]); + +tests(test, omit); +cacheTests('planetscale', test); + +describe('migrator', () => { + test('migrator', async ({ db }) => { + await db.execute(sql`drop table if exists ${sql.identifier('__drizzle_migrations')}`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, { migrationsFolder: './drizzle2/mysql' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + }); + + test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: 'string' }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!(Number(res.rows[0]?.tableExists ?? 0))).toStrictEqual(false); + }); + + test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql-init', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: 'string' }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!(Number(res.rows[0]?.tableExists ?? 0))).toStrictEqual(false); + }); + + test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, { + migrationsFolder: './drizzle2/mysql', + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql-init', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: 'string' }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!(Number(res.rows[0]?.tableExists ?? 0))).toStrictEqual(true); + }); +}); diff --git a/integration-tests/tests/mysql/ps.ts b/integration-tests/tests/mysql/ps.ts new file mode 100644 index 0000000000..105a5924f6 --- /dev/null +++ b/integration-tests/tests/mysql/ps.ts @@ -0,0 +1,104 @@ +import { setTimeout as delay } from 'node:timers/promises'; + +const PS_API = 'https://api.planetscale.com/v1'; +const ORG = 'drizzle-team'; +const DB = 'pathtrami'; +const TOKEN = process.env['PS_TOKEN']!; // service token or OAuth bearer + +console.log(TOKEN); + +type BranchOpts = { + name?: string; + parentBranch?: string; // usually "main" + region?: string; // optional; defaults to DB's default region + // For Data Branching® (clone schema+data) use one of: + backupId?: string; // create from a specific backup + restorePoint?: string; // RFC3339 timestamp +}; + +async function ps(path: string, init?: RequestInit): Promise { + const res = await fetch(`${PS_API}${path}`, { + ...init, + headers: { + Authorization: TOKEN, + 'Content-Type': 'application/json', + ...init?.headers, + }, + }); + if (!res.ok) { + const body = await res.text(); + throw new Error(`[${res.status}] ${res.statusText} — ${body}`); + } + return res.json() as Promise; +} + +export async function createBranch(opts: BranchOpts = {}) { + const name = opts.name ?? `test_${Date.now().toString(36)}`; + const result = await ps( + `/organizations/${ORG}/databases/${DB}/branches`, + { + method: 'POST', + body: JSON.stringify({ + name, + parent_branch: opts.parentBranch ?? 'main', + region: opts.region, + backup_id: opts.backupId, + restore_point: opts.restorePoint, + }), + }, + ); + + console.time(); + for (let i = 0; i < 60; i++) { + const b = await ps<{ state: string }>( + `/organizations/${ORG}/databases/${DB}/branches/${name}`, + ); + if (b.state?.toLowerCase() === 'ready') break; + await delay(1000); // ~1–7s total typical + } + console.timeEnd(); + + return result; +} + +export async function createEphemeralPassword(branch: string, ttlSeconds = 3600) { + // role: "writer" for tests that need DDL/DML; "reader" routes to replicas + const p = await ps<{ + username: string; + plaintext: string; + access_host_url: string; + }>( + `/organizations/${ORG}/databases/${DB}/branches/${branch}/passwords`, + { + method: 'POST', + body: JSON.stringify({ + name: `pw_${branch}`, + role: 'writer', + ttl: ttlSeconds, // auto-expires to reduce cleanup needs + replica: false, + }), + }, + ); + + // Build a standard MySQL connection URL + const url = `mysql://${encodeURIComponent(p.username)}:${ + encodeURIComponent( + p.plaintext, + ) + }@${p.access_host_url}/?ssl={"rejectUnauthorized":true}`; + return { ...p, url }; +} + +export async function deleteBranch(branch: string) { + await ps( + `/organizations/${ORG}/databases/${DB}/branches/${branch}`, + { method: 'DELETE' }, + ); +} + +export async function listBranches() { + await ps( + `/organizations/${ORG}/databases/${DB}/branches/`, + { method: 'GET' }, + ); +} diff --git a/integration-tests/tests/replicas/mysql.test.ts b/integration-tests/tests/mysql/replicas.test.ts similarity index 100% rename from integration-tests/tests/replicas/mysql.test.ts rename to integration-tests/tests/mysql/replicas.test.ts diff --git a/integration-tests/tests/mysql/schema.ts b/integration-tests/tests/mysql/schema.ts index f1e485d0c4..f1b744ba01 100644 --- a/integration-tests/tests/mysql/schema.ts +++ b/integration-tests/tests/mysql/schema.ts @@ -1,5 +1,5 @@ -import { sql } from 'drizzle-orm'; -import { bigint, type MySqlDatabase, mysqlTable, serial, text, timestamp } from 'drizzle-orm/mysql-core'; +import { defineRelations } from 'drizzle-orm'; +import { bigint, int, mysqlTable, serial, text, timestamp } from 'drizzle-orm/mysql-core'; export const rqbUser = mysqlTable('user_rqb_test', { id: serial().primaryKey().notNull(), @@ -20,25 +20,16 @@ export const rqbPost = mysqlTable('post_rqb_test', { }).notNull(), }); -export const init = async (db: MySqlDatabase) => { - await db.execute(sql` - CREATE TABLE ${rqbUser} ( - \`id\` SERIAL PRIMARY KEY NOT NULL, - \`name\` TEXT NOT NULL, - \`created_at\` TIMESTAMP NOT NULL - ) - `); - await db.execute(sql` - CREATE TABLE ${rqbPost} ( - \`id\` SERIAL PRIMARY KEY NOT NULL, - \`user_id\` BIGINT(20) UNSIGNED NOT NULL, - \`content\` TEXT, - \`created_at\` TIMESTAMP NOT NULL - ) - `); -}; +export const empty = mysqlTable('empty', { id: int() }); -export const clear = async (db: MySqlDatabase) => { - await db.execute(sql`DROP TABLE IF EXISTS ${rqbUser} CASCADE;`).catch(() => null); - await db.execute(sql`DROP TABLE IF EXISTS ${rqbPost} CASCADE;`).catch(() => null); -}; +export const relations = defineRelations({ rqbUser, rqbPost, empty }, (r) => ({ + rqbUser: { + posts: r.many.rqbPost(), + }, + rqbPost: { + author: r.one.rqbUser({ + from: r.rqbPost.userId, + to: r.rqbUser.id, + }), + }, +})); diff --git a/integration-tests/tests/mysql/schema2.ts b/integration-tests/tests/mysql/schema2.ts new file mode 100644 index 0000000000..0bab1c3b6b --- /dev/null +++ b/integration-tests/tests/mysql/schema2.ts @@ -0,0 +1,221 @@ +import { sql } from 'drizzle-orm'; +import { + bigint, + binary, + blob, + boolean, + char, + date, + datetime, + decimal, + double, + float, + int, + json, + longblob, + mediumblob, + mediumint, + type MySqlColumn, + mysqlEnum, + mysqlSchema, + mysqlTable, + type MySqlTableWithColumns, + real, + serial, + smallint, + text, + time, + timestamp, + tinyblob, + tinyint, + uniqueIndex, + varbinary, + varchar, + year, +} from 'drizzle-orm/mysql-core'; + +export const allTypesTable = mysqlTable('all_types', { + serial: serial('serial'), + bigint53: bigint('bigint53', { + mode: 'number', + }), + bigint64: bigint('bigint64', { + mode: 'bigint', + }), + bigintString: bigint('bigint_string', { + mode: 'string', + }), + binary: binary('binary'), + boolean: boolean('boolean'), + char: char('char'), + date: date('date', { + mode: 'date', + }), + dateStr: date('date_str', { + mode: 'string', + }), + datetime: datetime('datetime', { + mode: 'date', + }), + datetimeStr: datetime('datetime_str', { + mode: 'string', + }), + decimal: decimal('decimal'), + decimalNum: decimal('decimal_num', { + precision: 30, + mode: 'number', + }), + decimalBig: decimal('decimal_big', { + precision: 30, + mode: 'bigint', + }), + double: double('double'), + float: float('float'), + int: int('int'), + json: json('json'), + medInt: mediumint('med_int'), + smallInt: smallint('small_int'), + real: real('real'), + text: text('text'), + time: time('time'), + timestamp: timestamp('timestamp', { + mode: 'date', + }), + timestampStr: timestamp('timestamp_str', { + mode: 'string', + }), + tinyInt: tinyint('tiny_int'), + varbin: varbinary('varbin', { + length: 16, + }), + varchar: varchar('varchar', { + length: 255, + }), + year: year('year'), + enum: mysqlEnum('enum', ['enV1', 'enV2']), + blob: blob('blob'), + tinyblob: tinyblob('tinyblob'), + mediumblob: mediumblob('mediumblob'), + longblob: longblob('longblob'), + stringblob: blob('stringblob', { mode: 'string' }), + stringtinyblob: tinyblob('stringtinyblob', { mode: 'string' }), + stringmediumblob: mediumblob('stringmediumblob', { mode: 'string' }), + stringlongblob: longblob('stringlongblob', { mode: 'string' }), +}); + +export const createUserTable = (name: string) => { + return mysqlTable(name, { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), + }); +}; + +export const createCitiesTable = (name: string) => + mysqlTable(name, { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + +export const createUsers2Table = ( + name: string, + citiesTable: MySqlTableWithColumns<{ + name: string; + schema: undefined; + dialect: 'mysql'; + columns: { id: MySqlColumn }; + }>, +) => + mysqlTable(name, { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').references(() => citiesTable.id), + }); + +export const createUsersOnUpdateTable = (name: string) => + mysqlTable(name, { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: datetime('updated_at', { mode: 'date', fsp: 3 }).$onUpdate(() => new Date()), + uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), + alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value + }); + +export const createCountTestTable = (name: string) => + mysqlTable(name, { + id: int('id').notNull(), + name: text('name').notNull(), + }); + +export const datesTable = mysqlTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time', { fsp: 1 }), + datetime: datetime('datetime', { fsp: 2 }), + datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), + timestamp: timestamp('timestamp', { fsp: 3 }), + timestampAsString: timestamp('timestamp_as_string', { fsp: 3, mode: 'string' }), + year: year('year'), +}); + +export const coursesTable = mysqlTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: int('category_id').references(() => courseCategoriesTable.id), +}); + +export const courseCategoriesTable = mysqlTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +export const createOrdersTable = (name: string) => + mysqlTable(name, { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), + }); + +export const usersMigratorTable = mysqlTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}, (table) => [uniqueIndex('').on(table.name).using('btree')]); + +// To test aggregate functions +export const createAggregateTable = (name: string) => + mysqlTable(name, { + id: serial('id').notNull(), + name: text('name').notNull(), + a: int('a'), + b: int('b'), + c: int('c'), + nullOnly: int('null_only'), + }); + +// To test another schema and multischema +export const mySchema = mysqlSchema(`mySchema`); + +export const usersMySchemaTable = mySchema.table('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), +}); + +export const users2MySchemaTable = mySchema.table('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').references(() => citiesMySchemaTable.id), +}); + +export const citiesMySchemaTable = mySchema.table('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); diff --git a/integration-tests/tests/mysql/tidb-serverless.test.ts b/integration-tests/tests/mysql/tidb-serverless.test.ts index 0fbd35fc42..2de5ac64e0 100644 --- a/integration-tests/tests/mysql/tidb-serverless.test.ts +++ b/integration-tests/tests/mysql/tidb-serverless.test.ts @@ -1,42 +1,19 @@ -import 'dotenv/config'; +import { sql } from 'drizzle-orm'; +import { getTableConfig } from 'drizzle-orm/mysql-core'; +import { migrate } from 'drizzle-orm/tidb-serverless/migrator'; +import { describe, expect } from 'vitest'; +import { tidbTest as test } from './instrumentation'; +import { tests } from './mysql-common'; +import { runTests as cacheTests } from './mysql-common-cache'; +import { usersMigratorTable } from './schema2'; -import { connect } from '@tidbcloud/serverless'; -import type { TiDBServerlessDatabase } from 'drizzle-orm/tidb-serverless'; -import { drizzle } from 'drizzle-orm/tidb-serverless'; -import { beforeAll, beforeEach } from 'vitest'; -import { skipTests } from '~/common.ts'; -import { tests } from './mysql-common.ts'; -import relations from './relations.ts'; - -const ENABLE_LOGGING = false; - -let db: TiDBServerlessDatabase; - -beforeAll(async () => { - const connectionString = process.env['TIDB_CONNECTION_STRING']; - if (!connectionString) { - throw new Error('TIDB_CONNECTION_STRING is not set'); - } - - const client = connect({ url: connectionString }); - db = drizzle(client!, { logger: ENABLE_LOGGING, relations }); -}); - -beforeEach((ctx) => { - ctx.mysql = { - db, - }; -}); - -skipTests([ +const skip = new Set([ 'mySchema :: select with group by as field', 'mySchema :: delete with returning all fields', 'mySchema :: update with returning partial', 'mySchema :: delete returning sql', 'mySchema :: insert returning sql', 'test $onUpdateFn and $onUpdate works updating', - 'set operations (mixed all) as function with subquery', - 'set operations (union) from query builder with subquery', 'join on aliased sql from with clause', 'join on aliased sql from select', 'select from raw sql with joins', @@ -56,13 +33,19 @@ skipTests([ 'update returning sql', 'delete returning sql', 'insert returning sql', + 'test $onUpdateFn and $onUpdate works as $default', + 'MySqlTable :: select with join `use index` + `force index` incompatible hints', + 'MySqlTable :: select with `use index` + `force index` incompatible hints', // not supported + 'set operations (mixed all) as function with subquery', + 'set operations (union) from query builder with subquery', 'set operations (except all) as function', 'set operations (except all) from query builder', 'set operations (intersect all) as function', 'set operations (intersect all) from query builder', 'set operations (union all) as function', + 'set operations (union) as function', 'tc config for datetime', 'select iterator w/ prepared statement', 'select iterator', @@ -72,6 +55,133 @@ skipTests([ 'Insert all defaults in 1 row', '$default with empty array', 'utc config for datetime', + 'insert into ... select', + 'RQB v2 transaction find many - with relation', + 'RQB v2 transaction find first - with relation', + 'RQB v2 simple find many - with relation', + 'RQB v2 simple find first - with relation', + 'cross join (lateral)', + 'inner join (lateral)', + 'left join (lateral)', + 'update with returning all fields + partial', + 'insert+update+delete returning sql', + 'all types', ]); -tests(); +tests(test, skip); +cacheTests('mysql', test); + +describe('migrator', () => { + test('migrator', async ({ db }) => { + await db.execute(sql`drop table if exists ${sql.identifier('__drizzle_migrations')}`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, { migrationsFolder: './drizzle2/mysql' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + }); + + test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql-init', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop table if exists ${sql.identifier(migrationsTable)} cascade;`); + await db.execute(sql`drop table if exists ${usersMigratorTable}`); + await db.execute(sql`drop table if exists ${sql.identifier('cities_migration')}`); + await db.execute(sql`drop table if exists ${sql.identifier('users_migration')}`); + + await migrate(db, { + migrationsFolder: './drizzle2/mysql', + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/mysql-init', + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean | number }>(sql`SELECT EXISTS ( + SELECT 1 + FROM INFORMATION_SCHEMA.TABLES + WHERE TABLE_NAME = ${getTableConfig(usersMigratorTable).name} + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res[0]?.[0]?.tableExists).toStrictEqual(true); + }); +}); diff --git a/integration-tests/tests/pg/awsdatapi.test.ts b/integration-tests/tests/pg/awsdatapi.test.ts index 3c59bf7360..ce8e1ea663 100644 --- a/integration-tests/tests/pg/awsdatapi.test.ts +++ b/integration-tests/tests/pg/awsdatapi.test.ts @@ -1,2237 +1,2241 @@ -import 'dotenv/config'; - -import { RDSDataClient } from '@aws-sdk/client-rds-data'; -import * as dotenv from 'dotenv'; -import { asc, eq, inArray, notInArray, sql, TransactionRollbackError } from 'drizzle-orm'; -import { relations } from 'drizzle-orm/_relations'; -import type { AwsDataApiPgDatabase } from 'drizzle-orm/aws-data-api/pg'; -import { drizzle } from 'drizzle-orm/aws-data-api/pg'; -import { migrate } from 'drizzle-orm/aws-data-api/pg/migrator'; -import { - alias, - boolean, - date, - integer, - jsonb, - pgTable, - pgTableCreator, - serial, - text, - time, - timestamp, - uuid, -} from 'drizzle-orm/pg-core'; -import { Resource } from 'sst'; -import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; - -import type { Equal } from '../utils.ts'; -import { Expect, randomString } from '../utils.ts'; -import relationsV2 from './relations.ts'; -import { clear, init, rqbPost, rqbUser } from './schema.ts'; - -dotenv.config(); - -const ENABLE_LOGGING = false; - -const usersTable = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: jsonb('jsonb').$type(), - bestTexts: text('best_texts') - .array() - .default(sql`'{}'`) - .notNull(), - createdAt: timestamp('created_at', { withTimezone: true }) - .notNull() - .defaultNow(), -}); - -const usersMigratorTable = pgTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}); - -const todo = pgTable('todo', { - id: uuid('id').primaryKey(), - title: text('title').notNull(), - description: text('description'), -}); - -const todoRelations = relations(todo, (ctx) => ({ - user: ctx.many(todoUser), -})); - -const user = pgTable('user', { - id: uuid('id').primaryKey(), - email: text('email').notNull(), -}); - -const userRelations = relations(user, (ctx) => ({ - todos: ctx.many(todoUser), -})); - -const todoUser = pgTable('todo_user', { - todoId: uuid('todo_id').references(() => todo.id), - userId: uuid('user_id').references(() => user.id), -}); - -const todoToGroupRelations = relations(todoUser, (ctx) => ({ - todo: ctx.one(todo, { - fields: [todoUser.todoId], - references: [todo.id], - }), - user: ctx.one(user, { - fields: [todoUser.userId], - references: [user.id], - }), -})); - -const schema = { - todo, - todoRelations, - user, - userRelations, - todoUser, - todoToGroupRelations, -}; - -let db: AwsDataApiPgDatabase; - -beforeAll(async () => { - const rdsClient = new RDSDataClient(); - - db = drizzle(rdsClient, { - // @ts-ignore - database: Resource.Postgres.database, - // @ts-ignore - secretArn: Resource.Postgres.secretArn, - // @ts-ignore - resourceArn: Resource.Postgres.clusterArn, - logger: ENABLE_LOGGING, - schema, - relations: relationsV2, - }); -}); - -beforeEach(async () => { - await db.execute(sql`drop schema public cascade`); - await db.execute(sql`create schema public`); - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - best_texts text[] not null default '{}', - created_at timestamptz not null default now() - ) - `, - ); - - await db.execute( - sql` - create table todo ( - id uuid primary key, - title text not null, - description text - ) - `, - ); - - await db.execute( - sql` - create table "user" ( - id uuid primary key, - email text not null - ) - - `, - ); - - await db.execute( - sql` - create table todo_user ( - todo_id uuid references todo(id), - user_id uuid references "user"(id) - ) - `, - ); -}); - -test('select all fields', async () => { - const insertResult = await db.insert(usersTable).values({ name: 'John' }); - - expect(insertResult.numberOfRecordsUpdated).toBe(1); - - const result = await db.select().from(usersTable); - - expect(result[0]!.createdAt).toBeInstanceOf(Date); - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); - expect(result).toEqual([ - { - bestTexts: [], - id: 1, - name: 'John', - verified: false, - jsonb: null, - createdAt: result[0]!.createdAt, - }, - ]); -}); - -test('select sql', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - expect(users).toEqual([{ name: 'JOHN' }]); -}); - -test('select with empty array in inArray', async () => { - await db - .insert(usersTable) - .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable) - .where(inArray(usersTable.id, [])); - - expect(users).toEqual([]); -}); - -test('select with empty array in notInArray', async () => { - await db - .insert(usersTable) - .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - const result = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable) - .where(notInArray(usersTable.id, [])); - - expect(result).toEqual([ - { name: 'JOHN' }, - { name: 'JANE' }, - { name: 'JANE' }, - ]); -}); - -test('select typed sql', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable); - - expect(users).toEqual([{ name: 'JOHN' }]); -}); - -test('select distinct', async () => { - const usersDistinctTable = pgTable('users_distinct', { - id: integer('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute( - sql`create table ${usersDistinctTable} (id integer, name text)`, - ); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users1 = await db - .selectDistinct() - .from(usersDistinctTable) - .orderBy(usersDistinctTable.id, usersDistinctTable.name); - const users2 = await db - .selectDistinctOn([usersDistinctTable.id]) - .from(usersDistinctTable) - .orderBy(usersDistinctTable.id); - const users3 = await db - .selectDistinctOn([usersDistinctTable.name], { - name: usersDistinctTable.name, - }) - .from(usersDistinctTable) - .orderBy(usersDistinctTable.name); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - expect(users1).toEqual([ - { id: 1, name: 'Jane' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - ]); - - expect(users2.length).toEqual(2); - expect(users2[0]?.id).toEqual(1); - expect(users2[1]?.id).toEqual(2); - - expect(users3.length).toEqual(2); - expect(users3[0]?.name).toEqual('Jane'); - expect(users3[1]?.name).toEqual('John'); -}); - -test('insert returning sql', async () => { - const users = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - expect(users).toEqual([{ name: 'JOHN' }]); -}); - -test('delete returning sql', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - expect(users).toEqual([{ name: 'JOHN' }]); -}); - -test('update returning sql', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - name: sql`upper(${usersTable.name})`, - }); - - expect(users).toEqual([{ name: 'JANE' }]); -}); - -test('update with returning all fields', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning(); - - expect(users[0]!.createdAt).toBeInstanceOf(Date); - // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - expect(users).toEqual([ - { - id: 1, - bestTexts: [], - name: 'Jane', - verified: false, - jsonb: null, - createdAt: users[0]!.createdAt, - }, - ]); -}); - -test('update with returning partial', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning({ - id: usersTable.id, - name: usersTable.name, - }); - - expect(users).toEqual([{ id: 1, name: 'Jane' }]); -}); - -test('delete with returning all fields', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning(); - - expect(users[0]!.createdAt).toBeInstanceOf(Date); - // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); - expect(users).toEqual([ - { - bestTexts: [], - id: 1, - name: 'John', - verified: false, - jsonb: null, - createdAt: users[0]!.createdAt, - }, - ]); -}); - -test('delete with returning partial', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .delete(usersTable) - .where(eq(usersTable.name, 'John')) - .returning({ - id: usersTable.id, - name: usersTable.name, - }); - - expect(users).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert + select', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - expect(result).toEqual([ - { - bestTexts: [], - id: 1, - name: 'John', - verified: false, - jsonb: null, - createdAt: result[0]!.createdAt, - }, - ]); - - await db.insert(usersTable).values({ name: 'Jane' }); - const result2 = await db.select().from(usersTable); - expect(result2).toEqual([ - { - bestTexts: [], - id: 1, - name: 'John', - verified: false, - jsonb: null, - createdAt: result2[0]!.createdAt, - }, - { - bestTexts: [], - id: 2, - name: 'Jane', - verified: false, - jsonb: null, - createdAt: result2[1]!.createdAt, - }, - ]); -}); - -test('json insert', async () => { - await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }) - .from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); -}); - -test('insert with overridden default values', async () => { - await db.insert(usersTable).values({ name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - expect(result).toEqual([ - { - bestTexts: [], - id: 1, - name: 'John', - verified: true, - jsonb: null, - createdAt: result[0]!.createdAt, - }, - ]); -}); - -test('insert many', async () => { - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }) - .from(usersTable); - - expect(result).toEqual([ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test('insert many with returning', async () => { - const result = await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]) - .returning({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }); - - expect(result).toEqual([ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); -}); - -test('select with group by as field', async () => { - await db - .insert(usersTable) - .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.name); - - expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); -}); - -test('select with group by as sql', async () => { - await db - .insert(usersTable) - .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`); - - expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); -}); - -test('select with group by as sql + column', async () => { - await db - .insert(usersTable) - .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id); - - expect(result).toEqual([ - { name: 'Jane' }, - { name: 'Jane' }, - { name: 'John' }, - ]); -}); - -test('select with group by as column + sql', async () => { - await db - .insert(usersTable) - .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`); - - expect(result).toEqual([ - { name: 'Jane' }, - { name: 'Jane' }, - { name: 'John' }, - ]); -}); - -test('select with group by complex query', async () => { - await db - .insert(usersTable) - .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - - const result = await db - .select({ name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - expect(result).toEqual([{ name: 'Jane' }]); -}); - -test('build query', async () => { - const query = db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - expect(query).toEqual({ - sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', - params: [], - // typings: [] - }); -}); - -test('insert sql', async () => { - await db.insert(usersTable).values({ name: sql`${'John'}` }); - const result = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable); - expect(result).toEqual([{ id: 1, name: 'John' }]); -}); - -test('partial join with alias', async () => { - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([ - { id: 10, name: 'Ivan' }, - { id: 11, name: 'Hans' }, - ]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }) - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - expect(result).toEqual([ - { - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }, - ]); -}); - -test('full join with alias', async () => { - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([ - { id: 10, name: 'Ivan' }, - { id: 11, name: 'Hans' }, - ]); - - const result = await db - .select() - .from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)); - - expect(result).toEqual([ - { - users: { - id: 10, - bestTexts: [], - name: 'Ivan', - verified: false, - jsonb: null, - createdAt: result[0]!.users.createdAt, - }, - customer: { - bestTexts: [], - id: 11, - name: 'Hans', - verified: false, - jsonb: null, - createdAt: result[0]!.customer!.createdAt, - }, - }, - ]); -}); - -test('select from alias', async () => { - const pgTable = pgTableCreator((name) => `prefixed_${name}`); - - const users = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute( - sql`create table ${users} (id serial primary key, name text not null)`, - ); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([ - { id: 10, name: 'Ivan' }, - { id: 11, name: 'Hans' }, - ]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)); - - expect(result).toEqual([ - { - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }, - ]); - - await db.execute(sql`drop table ${users}`); -}); - -test('insert with spaces', async () => { - await db.insert(usersTable).values({ name: sql`'Jo h n'` }); - const result = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); -}); - -test('prepared statement', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const statement = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .prepare('statement1'); - const result = await statement.execute(); - - expect(result).toEqual([{ id: 1, name: 'John' }]); -}); - -test('prepared statement reuse', async () => { - const stmt = db - .insert(usersTable) - .values({ - verified: true, - name: sql.placeholder('name'), - }) - .prepare('stmt2'); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ name: `John ${i}` }); - } - - const result = await db - .select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }) - .from(usersTable); - - expect(result).toEqual([ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); -}); - -test('prepared statement with placeholder in .where', async () => { - await db.insert(usersTable).values({ name: 'John' }); - const stmt = db - .select({ - id: usersTable.id, - name: usersTable.name, - }) - .from(usersTable) - .where(eq(usersTable.id, sql.placeholder('id'))) - .prepare('stmt3'); - const result = await stmt.execute({ id: 1 }); - - expect(result).toEqual([{ id: 1, name: 'John' }]); -}); - -test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); - -test('migrator : migrate with custom schema', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsSchema: 'custom_migrations', - }); - - // test if the custom migrations table was created - const { rows } = await db.execute( - sql`select * from custom_migrations."__drizzle_migrations";`, - ); - expect(rows).toBeTruthy(); - expect(rows!.length).toBeGreaterThan(0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute( - sql`drop table custom_migrations."__drizzle_migrations"`, - ); -}); - -test('migrator : migrate with custom table', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - }); - - // test if the custom migrations table was created - const { rows } = await db.execute( - sql`select * from "drizzle".${sql.identifier(customTable)};`, - ); - expect(rows).toBeTruthy(); - expect(rows!.length).toBeGreaterThan(0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); - -test('migrator : migrate with custom table and custom schema', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: 'custom_migrations', - }); - - // test if the custom migrations table was created - const { rows } = await db.execute( - sql`select * from custom_migrations.${ - sql.identifier( - customTable, - ) - };`, - ); - expect(rows).toBeTruthy(); - expect(rows!.length).toBeGreaterThan(0); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute( - sql`drop table custom_migrations.${ - sql.identifier( - customTable, - ) - }`, - ); -}); - -test('insert via db.execute + select via db.execute', async () => { - await db.execute( - sql`insert into ${usersTable} (${ - sql.identifier( - usersTable.name.name, - ) - }) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - expectTypeOf(result.rows).toEqualTypeOf<{ id: number; name: string }[]>(); - expect(result.rows).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert via db.execute + returning', async () => { - const inserted = await db.execute( - sql`insert into ${usersTable} (${ - sql.identifier( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert via db.execute w/ query builder', async () => { - const inserted = await db.execute( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); -}); - -test('build query insert with onConflict do update', async () => { - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id") do update set "name" = :3', - params: ['John', '["foo","bar"]', 'John1'], - // typings: ['none', 'json', 'none'] - }); -}); - -test('build query insert with onConflict do update / multiple columns', async () => { - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoUpdate({ - target: [usersTable.id, usersTable.name], - set: { name: 'John1' }, - }) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id","name") do update set "name" = :3', - params: ['John', '["foo","bar"]', 'John1'], - // typings: ['none', 'json', 'none'] - }); -}); - -test('build query insert with onConflict do nothing', async () => { - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing() - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict do nothing', - params: ['John', '["foo","bar"]'], - // typings: ['none', 'json'] - }); -}); - -test('build query insert with onConflict do nothing + target', async () => { - const query = db - .insert(usersTable) - .values({ name: 'John', jsonb: ['foo', 'bar'] }) - .onConflictDoNothing({ target: usersTable.id }) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id") do nothing', - params: ['John', '["foo","bar"]'], - // typings: ['none', 'json'] - }); -}); - -test('insert with onConflict do update', async () => { - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - expect(res).toEqual([{ id: 1, name: 'John1' }]); -}); - -test('insert with onConflict do nothing', async () => { - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing(); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - expect(res).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert with onConflict do nothing + target', async () => { - await db.insert(usersTable).values({ name: 'John' }); - - await db - .insert(usersTable) - .values({ id: 1, name: 'John' }) - .onConflictDoNothing({ target: usersTable.id }); - - const res = await db - .select({ id: usersTable.id, name: usersTable.name }) - .from(usersTable) - .where(eq(usersTable.id, 1)); - - expect(res).toEqual([{ id: 1, name: 'John' }]); -}); - -test('transaction', async () => { - const users = pgTable('users_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - const products = pgTable('products_transactions', { - id: serial('id').primaryKey(), - price: integer('price').notNull(), - stock: integer('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute( - sql`create table users_transactions (id serial not null primary key, balance integer not null)`, - ); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, - ); - - const user = await db - .insert(users) - .values({ balance: 100 }) - .returning() - .then((rows) => rows[0]!); - const product = await db - .insert(products) - .values({ price: 10, stock: 10 }) - .returning() - .then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx - .update(users) - .set({ balance: user.balance - product.price }) - .where(eq(users.id, user.id)); - await tx - .update(products) - .set({ stock: product.stock - 1 }) - .where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); -}); - -test('transaction rollback', async () => { - const users = pgTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await expect( - db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }), - ).rejects.toThrowError(TransactionRollbackError); - - const result = await db.select().from(users); - - expect(result).toEqual([]); - - await db.execute(sql`drop table ${users}`); -}); - -test('nested transaction', async () => { - const users = pgTable('users_nested_transactions', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await tx.transaction(async (tx) => { - await tx.update(users).set({ balance: 200 }); - }); - }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 200 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test('nested transaction rollback', async () => { - const users = pgTable('users_nested_transactions_rollback', { - id: serial('id').primaryKey(), - balance: integer('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, - ); - - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - - await expect( - tx.transaction(async (tx2) => { - await tx2.update(users).set({ balance: 200 }); - tx2.rollback(); - }), - ).rejects.toThrowError(TransactionRollbackError); - }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 100 }]); - - await db.execute(sql`drop table ${users}`); -}); - -test('select from raw sql', async () => { - const result = await db.execute(sql`select 1 as id, 'John' as name`); - - expect(result.rows).toEqual([{ id: 1, name: 'John' }]); -}); - -test('select from raw sql with mapped values', async () => { - const result = await db - .select({ - id: sql`id`, - name: sql`name`, - }) - .from(sql`(select 1 as id, 'John' as name) as users`); - - expect(result).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert with array values works', async () => { - const bestTexts = ['text1', 'text2', 'text3']; - const [insertResult] = await db - .insert(usersTable) - .values({ - name: 'John', - bestTexts, - }) - .returning(); - - expect(insertResult?.bestTexts).toEqual(bestTexts); -}); - -test('update with array values works', async () => { - const [newUser] = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning(); - - const bestTexts = ['text4', 'text5', 'text6']; - const [insertResult] = await db - .update(usersTable) - .set({ - bestTexts, - }) - .where(eq(usersTable.id, newUser!.id)) - .returning(); - - expect(insertResult?.bestTexts).toEqual(bestTexts); -}); - -test('insert with array values works', async () => { - const bestTexts = ['text1', 'text2', 'text3']; - const [insertResult] = await db - .insert(usersTable) - .values({ - name: 'John', - bestTexts, - }) - .returning(); - - expect(insertResult?.bestTexts).toEqual(bestTexts); -}); - -test('update with array values works', async () => { - const [newUser] = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning(); - - const bestTexts = ['text4', 'text5', 'text6']; - const [insertResult] = await db - .update(usersTable) - .set({ - bestTexts, - }) - .where(eq(usersTable.id, newUser!.id)) - .returning(); - - expect(insertResult?.bestTexts).toEqual(bestTexts); -}); - -test('insert with array values works', async () => { - const bestTexts = ['text1', 'text2', 'text3']; - const [insertResult] = await db - .insert(usersTable) - .values({ - name: 'John', - bestTexts, - }) - .returning(); - - expect(insertResult?.bestTexts).toEqual(bestTexts); -}); - -test('update with array values works', async () => { - const [newUser] = await db - .insert(usersTable) - .values({ name: 'John' }) - .returning(); - - const bestTexts = ['text4', 'text5', 'text6']; - const [insertResult] = await db - .update(usersTable) - .set({ - bestTexts, - }) - .where(eq(usersTable.id, newUser!.id)) - .returning(); - - expect(insertResult?.bestTexts).toEqual(bestTexts); -}); - -test('all date and time columns', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - dateString: date('date_string', { mode: 'string' }).notNull(), - time: time('time', { precision: 3 }).notNull(), - datetime: timestamp('datetime').notNull(), - // datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), - datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), - datetimeFullPrecision: timestamp('datetime_full_precision', { - precision: 6, - mode: 'string', - }).notNull(), - // datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - date_string date not null, - time time(3) not null, - datetime timestamp not null, - -- datetime_wtz timestamp with time zone not null, - datetime_string timestamp not null, - datetime_full_precision timestamp(6) not null - -- datetime_wtz_string timestamp with time zone not null - ) - `); - - const someDatetime = new Date('2022-01-01T00:00:00.123Z'); - const fullPrecision = '2022-01-01T00:00:00.123456'; - const someTime = '23:23:12.432'; - - await db.insert(table).values({ - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - // datetimeWTZ: someDatetime, - datetimeString: '2022-01-01T00:00:00.123Z', - datetimeFullPrecision: fullPrecision, - // datetimeWTZString: '2022-01-01T00:00:00.123Z', - }); - - const result = await db.select().from(table); - - Expect< - Equal< - { - id: number; - dateString: string; - time: string; - datetime: Date; - // datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - // datetimeWTZString: string; - }[], - typeof result - > - >; - - Expect< - Equal< - { - dateString: string; - time: string; - datetime: Date; - // datetimeWTZ: Date; - datetimeString: string; - datetimeFullPrecision: string; - // datetimeWTZString: string; - id?: number | undefined; - }, - typeof table.$inferInsert - > - >; - - expect(result).toEqual([ - { - id: 1, - dateString: '2022-01-01', - time: someTime, - datetime: someDatetime, - // datetimeWTZ: someDatetime, - datetimeString: '2022-01-01 00:00:00.123', - datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), - // datetimeWTZString: '2022-01-01 00:00:00.123+00', - }, - ]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test.skip('all date and time columns with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { - mode: 'string', - withTimezone: true, - precision: 6, - }).notNull(), - timestampAsDate: timestamp('timestamp_date', { - withTimezone: true, - precision: 3, - }).notNull(), - timestampTimeZones: timestamp('timestamp_date_2', { - withTimezone: true, - precision: 3, - }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null, - timestamp_date timestamp(3) with time zone not null, - timestamp_date_2 timestamp(3) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - const timestampDate = new Date(); - const timestampDateWTZ = new Date('2022-01-01 00:00:00.123 +0500'); - - const timestampString2 = '2022-01-01 00:00:00.123456-0400'; - const timestampDate2 = new Date(); - const timestampDateWTZ2 = new Date('2022-01-01 00:00:00.123 +0200'); - - await db.insert(table).values([ - { - timestamp: timestampString, - timestampAsDate: timestampDate, - timestampTimeZones: timestampDateWTZ, - }, - { - timestamp: timestampString2, - timestampAsDate: timestampDate2, - timestampTimeZones: timestampDateWTZ2, - }, - ]); - - const result = await db.select().from(table); - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - timestamp_date: string; - timestamp_date_2: string; - }>(sql`select * from ${table}`); - - // Whatever you put in, you get back when you're using the date mode - // But when using the string mode, postgres returns a string transformed into UTC - expect(result).toEqual([ - { - id: 1, - timestamp: '2022-01-01 02:00:00.123456+00', - timestampAsDate: timestampDate, - timestampTimeZones: timestampDateWTZ, - }, - { - id: 2, - timestamp: '2022-01-01 04:00:00.123456+00', - timestampAsDate: timestampDate2, - timestampTimeZones: timestampDateWTZ2, - }, - ]); - - expect(result2.rows).toEqual([ - { - id: 1, - timestamp_string: '2022-01-01 02:00:00.123456+00', - timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', '') + '+00', - timestamp_date_2: timestampDateWTZ.toISOString().replace('T', ' ').replace('Z', '') - + '+00', - }, - { - id: 2, - timestamp_string: '2022-01-01 04:00:00.123456+00', - timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', '') + '+00', - timestamp_date_2: timestampDateWTZ2.toISOString().replace('T', ' ').replace('Z', '') - + '+00', - }, - ]); - - expect(result[0]?.timestampTimeZones.getTime()).toEqual( - new Date((result2.rows?.[0] as any).timestamp_date_2 as any).getTime(), - ); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test('all date and time columns without timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestampString: timestamp('timestamp_string', { - mode: 'string', - precision: 6, - }).notNull(), - timestampString2: timestamp('timestamp_string2', { - precision: 3, - mode: 'string', - }).notNull(), - timestampDate: timestamp('timestamp_date', { precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null, - timestamp_string2 timestamp(3) not null, - timestamp_date timestamp(3) not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456'; - // const timestampString2 = '2022-01-02 00:00:00.123 -0300'; - const timestampString2 = '2022-01-02 00:00:00.123'; - const timestampDate = new Date('2022-01-01 00:00:00.123Z'); - - const timestampString_2 = '2022-01-01 00:00:00.123456'; - // const timestampString2_2 = '2022-01-01 00:00:00.123 -0300'; - const timestampString2_2 = '2022-01-01 00:00:00.123'; - // const timestampDate2 = new Date('2022-01-01 00:00:00.123 +0200'); - const timestampDate2 = new Date('2022-01-01 00:00:00.123'); - - await db.insert(table).values([ - { timestampString, timestampString2, timestampDate }, - { - timestampString: timestampString_2, - timestampString2: timestampString2_2, - timestampDate: timestampDate2, - }, - ]); - - const result = await db.select().from(table); - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - timestamp_string2: string; - timestamp_date: string; - }>(sql`select * from ${table}`); - - // Whatever you put in, you get back when you're using the date mode - // But when using the string mode, postgres returns a string transformed into UTC - expect(result).toEqual([ - { - id: 1, - timestampString: timestampString, - timestampString2: '2022-01-02 00:00:00.123', - timestampDate: timestampDate, - }, - { - id: 2, - timestampString: timestampString_2, - timestampString2: '2022-01-01 00:00:00.123', - timestampDate: timestampDate2, - }, - ]); - - expect(result2.rows).toEqual([ - { - id: 1, - timestamp_string: timestampString, - timestamp_string2: '2022-01-02 00:00:00.123', - timestamp_date: timestampDate - .toISOString() - .replace('T', ' ') - .replace('Z', ''), - }, - { - id: 2, - timestamp_string: timestampString_2, - timestamp_string2: '2022-01-01 00:00:00.123', - timestamp_date: timestampDate2 - .toISOString() - .replace('T', ' ') - .replace('Z', ''), - }, - ]); - - expect((result2.rows?.[0] as any).timestamp_string).toEqual( - '2022-01-01 00:00:00.123456', - ); - // need to add the 'Z', otherwise javascript assumes it's in local time - expect( - new Date(((result2.rows?.[0] as any).timestamp_date + 'Z') as any).getTime(), - ).toEqual(timestampDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test('Typehints mix for RQB', async () => { - const uuid = 'd997d46d-5769-4c78-9a35-93acadbe6076'; - - const res = await db._query.user.findMany({ - where: eq(user.id, uuid), - with: { - todos: { - with: { - todo: true, - }, - }, - }, - }); - - expect(res).toStrictEqual([]); -}); - -test('Typehints mix for findFirst', async () => { - const uuid = 'd997d46d-5769-4c78-9a35-93acadbe6076'; - - await db.insert(user).values({ id: uuid, email: 'd' }); - - const res = await db._query.user.findFirst({ - where: eq(user.id, uuid), - }); - - expect(res).toStrictEqual({ id: 'd997d46d-5769-4c78-9a35-93acadbe6076', email: 'd' }); -}); - -test('RQB v2 simple find first - no rows', async () => { - try { - await init(db); - - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - } finally { - await clear(db); - } -}); - -test('RQB v2 simple find first - multiple rows', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 simple find first - with relation', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - id: 1, - createdAt: date, - name: 'First', - posts: [{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 simple find first - placeholders', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_first_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 simple find many - no rows', async () => { - try { - await init(db); - - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - } finally { - await clear(db); - } -}); - -test('RQB v2 simple find many - multiple rows', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }, { - id: 1, - createdAt: date, - name: 'First', - }]); - } finally { - await clear(db); - } -}); - -test('RQB v2 simple find many - with relation', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }]); - } finally { - await clear(db); - } -}); - -test('RQB v2 simple find many - placeholders', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_many_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }]); - } finally { - await clear(db); - } -}); - -test('RQB v2 transaction find first - no rows', async () => { - try { - await init(db); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 transaction find first - multiple rows', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 transaction find first - with relation', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - id: 1, - createdAt: date, - name: 'First', - posts: [{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 transaction find first - placeholders', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_first_tx_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 transaction find many - no rows', async () => { - try { - await init(db); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 transaction find many - multiple rows', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }, { - id: 1, - createdAt: date, - name: 'First', - }]); - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 transaction find many - with relation', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }]); - }); - } finally { - await clear(db); - } -}); - -test('RQB v2 transaction find many - placeholders', async () => { - try { - await init(db); - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare('rqb_v2_find_many_placeholders'); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }]); - }); - } finally { - await clear(db); - } -}); - -afterAll(async () => { - await db.execute(sql`drop table if exists "users"`); - await db.execute(sql`drop table if exists "todo_user"`); - await db.execute(sql`drop table if exists "user"`); - await db.execute(sql`drop table if exists "todo"`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); -}); +// import 'dotenv/config'; + +import { test } from 'vitest'; + +// import { RDSDataClient } from '@aws-sdk/client-rds-data'; +// import * as dotenv from 'dotenv'; +// import { asc, eq, inArray, notInArray, sql, TransactionRollbackError } from 'drizzle-orm'; +// import { relations } from 'drizzle-orm/_relations'; +// import type { AwsDataApiPgDatabase } from 'drizzle-orm/aws-data-api/pg'; +// import { drizzle } from 'drizzle-orm/aws-data-api/pg'; +// import { migrate } from 'drizzle-orm/aws-data-api/pg/migrator'; +// import { +// alias, +// boolean, +// date, +// integer, +// jsonb, +// pgTable, +// pgTableCreator, +// serial, +// text, +// time, +// timestamp, +// uuid, +// } from 'drizzle-orm/pg-core'; +// import { Resource } from 'sst'; +// import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; + +// import type { Equal } from '../utils'; +// import { Expect, randomString } from '../utils'; +// import { clear, init, rqbPost, rqbUser } from './schema'; + +// dotenv.config(); + +// const ENABLE_LOGGING = false; + +test('mock', () => {}); + +// const usersTable = pgTable('users', { +// id: serial('id').primaryKey(), +// name: text('name').notNull(), +// verified: boolean('verified').notNull().default(false), +// jsonb: jsonb('jsonb').$type(), +// bestTexts: text('best_texts') +// .array() +// .default(sql`'{}'`) +// .notNull(), +// createdAt: timestamp('created_at', { withTimezone: true }) +// .notNull() +// .defaultNow(), +// }); + +// const usersMigratorTable = pgTable('users12', { +// id: serial('id').primaryKey(), +// name: text('name').notNull(), +// email: text('email').notNull(), +// }); + +// const todo = pgTable('todo', { +// id: uuid('id').primaryKey(), +// title: text('title').notNull(), +// description: text('description'), +// }); + +// const todoRelations = relations(todo, (ctx) => ({ +// user: ctx.many(todoUser), +// })); + +// const user = pgTable('user', { +// id: uuid('id').primaryKey(), +// email: text('email').notNull(), +// }); + +// const userRelations = relations(user, (ctx) => ({ +// todos: ctx.many(todoUser), +// })); + +// const todoUser = pgTable('todo_user', { +// todoId: uuid('todo_id').references(() => todo.id), +// userId: uuid('user_id').references(() => user.id), +// }); + +// const todoToGroupRelations = relations(todoUser, (ctx) => ({ +// todo: ctx.one(todo, { +// fields: [todoUser.todoId], +// references: [todo.id], +// }), +// user: ctx.one(user, { +// fields: [todoUser.userId], +// references: [user.id], +// }), +// })); + +// const schema = { +// todo, +// todoRelations, +// user, +// userRelations, +// todoUser, +// todoToGroupRelations, +// }; + +// let db: AwsDataApiPgDatabase; + +// beforeAll(async () => { +// const rdsClient = new RDSDataClient(); + +// db = drizzle({ +// client: rdsClient, +// // @ts-ignore +// database: Resource.Postgres.database, +// // @ts-ignore +// secretArn: Resource.Postgres.secretArn, +// // @ts-ignore +// resourceArn: Resource.Postgres.clusterArn, +// logger: ENABLE_LOGGING, +// schema, +// relations: relationsV2, +// }); +// }); + +// beforeEach(async () => { +// await db.execute(sql`drop schema public cascade`); +// await db.execute(sql`create schema public`); +// await db.execute( +// sql` +// create table users ( +// id serial primary key, +// name text not null, +// verified boolean not null default false, +// jsonb jsonb, +// best_texts text[] not null default '{}', +// created_at timestamptz not null default now() +// ) +// `, +// ); + +// await db.execute( +// sql` +// create table todo ( +// id uuid primary key, +// title text not null, +// description text +// ) +// `, +// ); + +// await db.execute( +// sql` +// create table "user" ( +// id uuid primary key, +// email text not null +// ) + +// `, +// ); + +// await db.execute( +// sql` +// create table todo_user ( +// todo_id uuid references todo(id), +// user_id uuid references "user"(id) +// ) +// `, +// ); +// }); + +// test('select all fields', async () => { +// const insertResult = await db.insert(usersTable).values({ name: 'John' }); + +// expect(insertResult.numberOfRecordsUpdated).toBe(1); + +// const result = await db.select().from(usersTable); + +// expect(result[0]!.createdAt).toBeInstanceOf(Date); +// // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); +// expect(result).toEqual([ +// { +// bestTexts: [], +// id: 1, +// name: 'John', +// verified: false, +// jsonb: null, +// createdAt: result[0]!.createdAt, +// }, +// ]); +// }); + +// test('select sql', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const users = await db +// .select({ +// name: sql`upper(${usersTable.name})`, +// }) +// .from(usersTable); + +// expect(users).toEqual([{ name: 'JOHN' }]); +// }); + +// test('select with empty array in inArray', async () => { +// await db +// .insert(usersTable) +// .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +// const users = await db +// .select({ +// name: sql`upper(${usersTable.name})`, +// }) +// .from(usersTable) +// .where(inArray(usersTable.id, [])); + +// expect(users).toEqual([]); +// }); + +// test('select with empty array in notInArray', async () => { +// await db +// .insert(usersTable) +// .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); +// const result = await db +// .select({ +// name: sql`upper(${usersTable.name})`, +// }) +// .from(usersTable) +// .where(notInArray(usersTable.id, [])); + +// expect(result).toEqual([ +// { name: 'JOHN' }, +// { name: 'JANE' }, +// { name: 'JANE' }, +// ]); +// }); + +// test('select typed sql', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const users = await db +// .select({ +// name: sql`upper(${usersTable.name})`, +// }) +// .from(usersTable); + +// expect(users).toEqual([{ name: 'JOHN' }]); +// }); + +// test('select distinct', async () => { +// const usersDistinctTable = pgTable('users_distinct', { +// id: integer('id').notNull(), +// name: text('name').notNull(), +// }); + +// await db.execute(sql`drop table if exists ${usersDistinctTable}`); +// await db.execute( +// sql`create table ${usersDistinctTable} (id integer, name text)`, +// ); + +// await db.insert(usersDistinctTable).values([ +// { id: 1, name: 'John' }, +// { id: 1, name: 'John' }, +// { id: 2, name: 'John' }, +// { id: 1, name: 'Jane' }, +// ]); +// const users1 = await db +// .selectDistinct() +// .from(usersDistinctTable) +// .orderBy(usersDistinctTable.id, usersDistinctTable.name); +// const users2 = await db +// .selectDistinctOn([usersDistinctTable.id]) +// .from(usersDistinctTable) +// .orderBy(usersDistinctTable.id); +// const users3 = await db +// .selectDistinctOn([usersDistinctTable.name], { +// name: usersDistinctTable.name, +// }) +// .from(usersDistinctTable) +// .orderBy(usersDistinctTable.name); + +// await db.execute(sql`drop table ${usersDistinctTable}`); + +// expect(users1).toEqual([ +// { id: 1, name: 'Jane' }, +// { id: 1, name: 'John' }, +// { id: 2, name: 'John' }, +// ]); + +// expect(users2.length).toEqual(2); +// expect(users2[0]?.id).toEqual(1); +// expect(users2[1]?.id).toEqual(2); + +// expect(users3.length).toEqual(2); +// expect(users3[0]?.name).toEqual('Jane'); +// expect(users3[1]?.name).toEqual('John'); +// }); + +// test('insert returning sql', async () => { +// const users = await db +// .insert(usersTable) +// .values({ name: 'John' }) +// .returning({ +// name: sql`upper(${usersTable.name})`, +// }); + +// expect(users).toEqual([{ name: 'JOHN' }]); +// }); + +// test('delete returning sql', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const users = await db +// .delete(usersTable) +// .where(eq(usersTable.name, 'John')) +// .returning({ +// name: sql`upper(${usersTable.name})`, +// }); + +// expect(users).toEqual([{ name: 'JOHN' }]); +// }); + +// test('update returning sql', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const users = await db +// .update(usersTable) +// .set({ name: 'Jane' }) +// .where(eq(usersTable.name, 'John')) +// .returning({ +// name: sql`upper(${usersTable.name})`, +// }); + +// expect(users).toEqual([{ name: 'JANE' }]); +// }); + +// test('update with returning all fields', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const users = await db +// .update(usersTable) +// .set({ name: 'Jane' }) +// .where(eq(usersTable.name, 'John')) +// .returning(); + +// expect(users[0]!.createdAt).toBeInstanceOf(Date); +// // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); +// expect(users).toEqual([ +// { +// id: 1, +// bestTexts: [], +// name: 'Jane', +// verified: false, +// jsonb: null, +// createdAt: users[0]!.createdAt, +// }, +// ]); +// }); + +// test('update with returning partial', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const users = await db +// .update(usersTable) +// .set({ name: 'Jane' }) +// .where(eq(usersTable.name, 'John')) +// .returning({ +// id: usersTable.id, +// name: usersTable.name, +// }); + +// expect(users).toEqual([{ id: 1, name: 'Jane' }]); +// }); + +// test('delete with returning all fields', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const users = await db +// .delete(usersTable) +// .where(eq(usersTable.name, 'John')) +// .returning(); + +// expect(users[0]!.createdAt).toBeInstanceOf(Date); +// // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); +// expect(users).toEqual([ +// { +// bestTexts: [], +// id: 1, +// name: 'John', +// verified: false, +// jsonb: null, +// createdAt: users[0]!.createdAt, +// }, +// ]); +// }); + +// test('delete with returning partial', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const users = await db +// .delete(usersTable) +// .where(eq(usersTable.name, 'John')) +// .returning({ +// id: usersTable.id, +// name: usersTable.name, +// }); + +// expect(users).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('insert + select', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const result = await db.select().from(usersTable); +// expect(result).toEqual([ +// { +// bestTexts: [], +// id: 1, +// name: 'John', +// verified: false, +// jsonb: null, +// createdAt: result[0]!.createdAt, +// }, +// ]); + +// await db.insert(usersTable).values({ name: 'Jane' }); +// const result2 = await db.select().from(usersTable); +// expect(result2).toEqual([ +// { +// bestTexts: [], +// id: 1, +// name: 'John', +// verified: false, +// jsonb: null, +// createdAt: result2[0]!.createdAt, +// }, +// { +// bestTexts: [], +// id: 2, +// name: 'Jane', +// verified: false, +// jsonb: null, +// createdAt: result2[1]!.createdAt, +// }, +// ]); +// }); + +// test('json insert', async () => { +// await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); +// const result = await db +// .select({ +// id: usersTable.id, +// name: usersTable.name, +// jsonb: usersTable.jsonb, +// }) +// .from(usersTable); + +// expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +// }); + +// test('insert with overridden default values', async () => { +// await db.insert(usersTable).values({ name: 'John', verified: true }); +// const result = await db.select().from(usersTable); + +// expect(result).toEqual([ +// { +// bestTexts: [], +// id: 1, +// name: 'John', +// verified: true, +// jsonb: null, +// createdAt: result[0]!.createdAt, +// }, +// ]); +// }); + +// test('insert many', async () => { +// await db +// .insert(usersTable) +// .values([ +// { name: 'John' }, +// { name: 'Bruce', jsonb: ['foo', 'bar'] }, +// { name: 'Jane' }, +// { name: 'Austin', verified: true }, +// ]); +// const result = await db +// .select({ +// id: usersTable.id, +// name: usersTable.name, +// jsonb: usersTable.jsonb, +// verified: usersTable.verified, +// }) +// .from(usersTable); + +// expect(result).toEqual([ +// { id: 1, name: 'John', jsonb: null, verified: false }, +// { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, +// { id: 3, name: 'Jane', jsonb: null, verified: false }, +// { id: 4, name: 'Austin', jsonb: null, verified: true }, +// ]); +// }); + +// test('insert many with returning', async () => { +// const result = await db +// .insert(usersTable) +// .values([ +// { name: 'John' }, +// { name: 'Bruce', jsonb: ['foo', 'bar'] }, +// { name: 'Jane' }, +// { name: 'Austin', verified: true }, +// ]) +// .returning({ +// id: usersTable.id, +// name: usersTable.name, +// jsonb: usersTable.jsonb, +// verified: usersTable.verified, +// }); + +// expect(result).toEqual([ +// { id: 1, name: 'John', jsonb: null, verified: false }, +// { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, +// { id: 3, name: 'Jane', jsonb: null, verified: false }, +// { id: 4, name: 'Austin', jsonb: null, verified: true }, +// ]); +// }); + +// test('select with group by as field', async () => { +// await db +// .insert(usersTable) +// .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + +// const result = await db +// .select({ name: usersTable.name }) +// .from(usersTable) +// .groupBy(usersTable.name); + +// expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +// }); + +// test('select with group by as sql', async () => { +// await db +// .insert(usersTable) +// .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + +// const result = await db +// .select({ name: usersTable.name }) +// .from(usersTable) +// .groupBy(sql`${usersTable.name}`); + +// expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +// }); + +// test('select with group by as sql + column', async () => { +// await db +// .insert(usersTable) +// .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + +// const result = await db +// .select({ name: usersTable.name }) +// .from(usersTable) +// .groupBy(sql`${usersTable.name}`, usersTable.id); + +// expect(result).toEqual([ +// { name: 'Jane' }, +// { name: 'Jane' }, +// { name: 'John' }, +// ]); +// }); + +// test('select with group by as column + sql', async () => { +// await db +// .insert(usersTable) +// .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + +// const result = await db +// .select({ name: usersTable.name }) +// .from(usersTable) +// .groupBy(usersTable.id, sql`${usersTable.name}`); + +// expect(result).toEqual([ +// { name: 'Jane' }, +// { name: 'Jane' }, +// { name: 'John' }, +// ]); +// }); + +// test('select with group by complex query', async () => { +// await db +// .insert(usersTable) +// .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + +// const result = await db +// .select({ name: usersTable.name }) +// .from(usersTable) +// .groupBy(usersTable.id, sql`${usersTable.name}`) +// .orderBy(asc(usersTable.name)) +// .limit(1); + +// expect(result).toEqual([{ name: 'Jane' }]); +// }); + +// test('build query', async () => { +// const query = db +// .select({ id: usersTable.id, name: usersTable.name }) +// .from(usersTable) +// .groupBy(usersTable.id, usersTable.name) +// .toSQL(); + +// expect(query).toEqual({ +// sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', +// params: [], +// // typings: [] +// }); +// }); + +// test('insert sql', async () => { +// await db.insert(usersTable).values({ name: sql`${'John'}` }); +// const result = await db +// .select({ id: usersTable.id, name: usersTable.name }) +// .from(usersTable); +// expect(result).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('partial join with alias', async () => { +// const customerAlias = alias(usersTable, 'customer'); + +// await db.insert(usersTable).values([ +// { id: 10, name: 'Ivan' }, +// { id: 11, name: 'Hans' }, +// ]); +// const result = await db +// .select({ +// user: { +// id: usersTable.id, +// name: usersTable.name, +// }, +// customer: { +// id: customerAlias.id, +// name: customerAlias.name, +// }, +// }) +// .from(usersTable) +// .leftJoin(customerAlias, eq(customerAlias.id, 11)) +// .where(eq(usersTable.id, 10)); + +// expect(result).toEqual([ +// { +// user: { id: 10, name: 'Ivan' }, +// customer: { id: 11, name: 'Hans' }, +// }, +// ]); +// }); + +// test('full join with alias', async () => { +// const customerAlias = alias(usersTable, 'customer'); + +// await db.insert(usersTable).values([ +// { id: 10, name: 'Ivan' }, +// { id: 11, name: 'Hans' }, +// ]); + +// const result = await db +// .select() +// .from(usersTable) +// .leftJoin(customerAlias, eq(customerAlias.id, 11)) +// .where(eq(usersTable.id, 10)); + +// expect(result).toEqual([ +// { +// users: { +// id: 10, +// bestTexts: [], +// name: 'Ivan', +// verified: false, +// jsonb: null, +// createdAt: result[0]!.users.createdAt, +// }, +// customer: { +// bestTexts: [], +// id: 11, +// name: 'Hans', +// verified: false, +// jsonb: null, +// createdAt: result[0]!.customer!.createdAt, +// }, +// }, +// ]); +// }); + +// test('select from alias', async () => { +// const pgTable = pgTableCreator((name) => `prefixed_${name}`); + +// const users = pgTable('users', { +// id: serial('id').primaryKey(), +// name: text('name').notNull(), +// }); + +// await db.execute(sql`drop table if exists ${users}`); +// await db.execute( +// sql`create table ${users} (id serial primary key, name text not null)`, +// ); + +// const user = alias(users, 'user'); +// const customers = alias(users, 'customer'); + +// await db.insert(users).values([ +// { id: 10, name: 'Ivan' }, +// { id: 11, name: 'Hans' }, +// ]); +// const result = await db +// .select() +// .from(user) +// .leftJoin(customers, eq(customers.id, 11)) +// .where(eq(user.id, 10)); + +// expect(result).toEqual([ +// { +// user: { +// id: 10, +// name: 'Ivan', +// }, +// customer: { +// id: 11, +// name: 'Hans', +// }, +// }, +// ]); + +// await db.execute(sql`drop table ${users}`); +// }); + +// test('insert with spaces', async () => { +// await db.insert(usersTable).values({ name: sql`'Jo h n'` }); +// const result = await db +// .select({ id: usersTable.id, name: usersTable.name }) +// .from(usersTable); + +// expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +// }); + +// test('prepared statement', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const statement = db +// .select({ +// id: usersTable.id, +// name: usersTable.name, +// }) +// .from(usersTable) +// .prepare('statement1'); +// const result = await statement.execute(); + +// expect(result).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('prepared statement reuse', async () => { +// const stmt = db +// .insert(usersTable) +// .values({ +// verified: true, +// name: sql.placeholder('name'), +// }) +// .prepare('stmt2'); + +// for (let i = 0; i < 10; i++) { +// await stmt.execute({ name: `John ${i}` }); +// } + +// const result = await db +// .select({ +// id: usersTable.id, +// name: usersTable.name, +// verified: usersTable.verified, +// }) +// .from(usersTable); + +// expect(result).toEqual([ +// { id: 1, name: 'John 0', verified: true }, +// { id: 2, name: 'John 1', verified: true }, +// { id: 3, name: 'John 2', verified: true }, +// { id: 4, name: 'John 3', verified: true }, +// { id: 5, name: 'John 4', verified: true }, +// { id: 6, name: 'John 5', verified: true }, +// { id: 7, name: 'John 6', verified: true }, +// { id: 8, name: 'John 7', verified: true }, +// { id: 9, name: 'John 8', verified: true }, +// { id: 10, name: 'John 9', verified: true }, +// ]); +// }); + +// test('prepared statement with placeholder in .where', async () => { +// await db.insert(usersTable).values({ name: 'John' }); +// const stmt = db +// .select({ +// id: usersTable.id, +// name: usersTable.name, +// }) +// .from(usersTable) +// .where(eq(usersTable.id, sql.placeholder('id'))) +// .prepare('stmt3'); +// const result = await stmt.execute({ id: 1 }); + +// expect(result).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('migrator : default migration strategy', async () => { +// await db.execute(sql`drop table if exists all_columns`); +// await db.execute(sql`drop table if exists users12`); +// await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + +// await migrate(db, { migrationsFolder: './drizzle2/pg' }); + +// await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + +// const result = await db.select().from(usersMigratorTable); + +// expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + +// await db.execute(sql`drop table all_columns`); +// await db.execute(sql`drop table users12`); +// await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +// }); + +// test('migrator : migrate with custom schema', async () => { +// await db.execute(sql`drop table if exists all_columns`); +// await db.execute(sql`drop table if exists users12`); +// await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + +// await migrate(db, { +// migrationsFolder: './drizzle2/pg', +// migrationsSchema: 'custom_migrations', +// }); + +// // test if the custom migrations table was created +// const { rows } = await db.execute( +// sql`select * from custom_migrations."__drizzle_migrations";`, +// ); +// expect(rows).toBeTruthy(); +// expect(rows!.length).toBeGreaterThan(0); + +// // test if the migrated table are working as expected +// await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); +// const result = await db.select().from(usersMigratorTable); +// expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + +// await db.execute(sql`drop table all_columns`); +// await db.execute(sql`drop table users12`); +// await db.execute( +// sql`drop table custom_migrations."__drizzle_migrations"`, +// ); +// }); + +// test('migrator : migrate with custom table', async () => { +// const customTable = randomString(); +// await db.execute(sql`drop table if exists all_columns`); +// await db.execute(sql`drop table if exists users12`); +// await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + +// await migrate(db, { +// migrationsFolder: './drizzle2/pg', +// migrationsTable: customTable, +// }); + +// // test if the custom migrations table was created +// const { rows } = await db.execute( +// sql`select * from "drizzle".${sql.identifier(customTable)};`, +// ); +// expect(rows).toBeTruthy(); +// expect(rows!.length).toBeGreaterThan(0); + +// // test if the migrated table are working as expected +// await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); +// const result = await db.select().from(usersMigratorTable); +// expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + +// await db.execute(sql`drop table all_columns`); +// await db.execute(sql`drop table users12`); +// await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +// }); + +// test('migrator : migrate with custom table and custom schema', async () => { +// const customTable = randomString(); +// await db.execute(sql`drop table if exists all_columns`); +// await db.execute(sql`drop table if exists users12`); +// await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + +// await migrate(db, { +// migrationsFolder: './drizzle2/pg', +// migrationsTable: customTable, +// migrationsSchema: 'custom_migrations', +// }); + +// // test if the custom migrations table was created +// const { rows } = await db.execute( +// sql`select * from custom_migrations.${ +// sql.identifier( +// customTable, +// ) +// };`, +// ); +// expect(rows).toBeTruthy(); +// expect(rows!.length).toBeGreaterThan(0); + +// // test if the migrated table are working as expected +// await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); +// const result = await db.select().from(usersMigratorTable); +// expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + +// await db.execute(sql`drop table all_columns`); +// await db.execute(sql`drop table users12`); +// await db.execute( +// sql`drop table custom_migrations.${ +// sql.identifier( +// customTable, +// ) +// }`, +// ); +// }); + +// test('insert via db.execute + select via db.execute', async () => { +// await db.execute( +// sql`insert into ${usersTable} (${ +// sql.identifier( +// usersTable.name.name, +// ) +// }) values (${'John'})`, +// ); + +// const result = await db.execute<{ id: number; name: string }>( +// sql`select id, name from "users"`, +// ); +// expectTypeOf(result.rows).toEqualTypeOf<{ id: number; name: string }[]>(); +// expect(result.rows).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('insert via db.execute + returning', async () => { +// const inserted = await db.execute( +// sql`insert into ${usersTable} (${ +// sql.identifier( +// usersTable.name.name, +// ) +// }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, +// ); +// expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('insert via db.execute w/ query builder', async () => { +// const inserted = await db.execute( +// db +// .insert(usersTable) +// .values({ name: 'John' }) +// .returning({ id: usersTable.id, name: usersTable.name }), +// ); +// expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('build query insert with onConflict do update', async () => { +// const query = db +// .insert(usersTable) +// .values({ name: 'John', jsonb: ['foo', 'bar'] }) +// .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) +// .toSQL(); + +// expect(query).toEqual({ +// sql: +// 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id") do update set "name" = :3', +// params: ['John', '["foo","bar"]', 'John1'], +// // typings: ['none', 'json', 'none'] +// }); +// }); + +// test('build query insert with onConflict do update / multiple columns', async () => { +// const query = db +// .insert(usersTable) +// .values({ name: 'John', jsonb: ['foo', 'bar'] }) +// .onConflictDoUpdate({ +// target: [usersTable.id, usersTable.name], +// set: { name: 'John1' }, +// }) +// .toSQL(); + +// expect(query).toEqual({ +// sql: +// 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id","name") do update set "name" = :3', +// params: ['John', '["foo","bar"]', 'John1'], +// // typings: ['none', 'json', 'none'] +// }); +// }); + +// test('build query insert with onConflict do nothing', async () => { +// const query = db +// .insert(usersTable) +// .values({ name: 'John', jsonb: ['foo', 'bar'] }) +// .onConflictDoNothing() +// .toSQL(); + +// expect(query).toEqual({ +// sql: +// 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict do nothing', +// params: ['John', '["foo","bar"]'], +// // typings: ['none', 'json'] +// }); +// }); + +// test('build query insert with onConflict do nothing + target', async () => { +// const query = db +// .insert(usersTable) +// .values({ name: 'John', jsonb: ['foo', 'bar'] }) +// .onConflictDoNothing({ target: usersTable.id }) +// .toSQL(); + +// expect(query).toEqual({ +// sql: +// 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id") do nothing', +// params: ['John', '["foo","bar"]'], +// // typings: ['none', 'json'] +// }); +// }); + +// test('insert with onConflict do update', async () => { +// await db.insert(usersTable).values({ name: 'John' }); + +// await db +// .insert(usersTable) +// .values({ id: 1, name: 'John' }) +// .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); + +// const res = await db +// .select({ id: usersTable.id, name: usersTable.name }) +// .from(usersTable) +// .where(eq(usersTable.id, 1)); + +// expect(res).toEqual([{ id: 1, name: 'John1' }]); +// }); + +// test('insert with onConflict do nothing', async () => { +// await db.insert(usersTable).values({ name: 'John' }); + +// await db +// .insert(usersTable) +// .values({ id: 1, name: 'John' }) +// .onConflictDoNothing(); + +// const res = await db +// .select({ id: usersTable.id, name: usersTable.name }) +// .from(usersTable) +// .where(eq(usersTable.id, 1)); + +// expect(res).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('insert with onConflict do nothing + target', async () => { +// await db.insert(usersTable).values({ name: 'John' }); + +// await db +// .insert(usersTable) +// .values({ id: 1, name: 'John' }) +// .onConflictDoNothing({ target: usersTable.id }); + +// const res = await db +// .select({ id: usersTable.id, name: usersTable.name }) +// .from(usersTable) +// .where(eq(usersTable.id, 1)); + +// expect(res).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('transaction', async () => { +// const users = pgTable('users_transactions', { +// id: serial('id').primaryKey(), +// balance: integer('balance').notNull(), +// }); +// const products = pgTable('products_transactions', { +// id: serial('id').primaryKey(), +// price: integer('price').notNull(), +// stock: integer('stock').notNull(), +// }); + +// await db.execute(sql`drop table if exists ${users}`); +// await db.execute(sql`drop table if exists ${products}`); + +// await db.execute( +// sql`create table users_transactions (id serial not null primary key, balance integer not null)`, +// ); +// await db.execute( +// sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, +// ); + +// const user = await db +// .insert(users) +// .values({ balance: 100 }) +// .returning() +// .then((rows) => rows[0]!); +// const product = await db +// .insert(products) +// .values({ price: 10, stock: 10 }) +// .returning() +// .then((rows) => rows[0]!); + +// await db.transaction(async (tx) => { +// await tx +// .update(users) +// .set({ balance: user.balance - product.price }) +// .where(eq(users.id, user.id)); +// await tx +// .update(products) +// .set({ stock: product.stock - 1 }) +// .where(eq(products.id, product.id)); +// }); + +// const result = await db.select().from(users); + +// expect(result).toEqual([{ id: 1, balance: 90 }]); + +// await db.execute(sql`drop table ${users}`); +// await db.execute(sql`drop table ${products}`); +// }); + +// test('transaction rollback', async () => { +// const users = pgTable('users_transactions_rollback', { +// id: serial('id').primaryKey(), +// balance: integer('balance').notNull(), +// }); + +// await db.execute(sql`drop table if exists ${users}`); + +// await db.execute( +// sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, +// ); + +// await expect( +// db.transaction(async (tx) => { +// await tx.insert(users).values({ balance: 100 }); +// tx.rollback(); +// }), +// ).rejects.toThrowError(TransactionRollbackError); + +// const result = await db.select().from(users); + +// expect(result).toEqual([]); + +// await db.execute(sql`drop table ${users}`); +// }); + +// test('nested transaction', async () => { +// const users = pgTable('users_nested_transactions', { +// id: serial('id').primaryKey(), +// balance: integer('balance').notNull(), +// }); + +// await db.execute(sql`drop table if exists ${users}`); + +// await db.execute( +// sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, +// ); + +// await db.transaction(async (tx) => { +// await tx.insert(users).values({ balance: 100 }); + +// await tx.transaction(async (tx) => { +// await tx.update(users).set({ balance: 200 }); +// }); +// }); + +// const result = await db.select().from(users); + +// expect(result).toEqual([{ id: 1, balance: 200 }]); + +// await db.execute(sql`drop table ${users}`); +// }); + +// test('nested transaction rollback', async () => { +// const users = pgTable('users_nested_transactions_rollback', { +// id: serial('id').primaryKey(), +// balance: integer('balance').notNull(), +// }); + +// await db.execute(sql`drop table if exists ${users}`); + +// await db.execute( +// sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, +// ); + +// await db.transaction(async (tx) => { +// await tx.insert(users).values({ balance: 100 }); + +// await expect( +// tx.transaction(async (tx2) => { +// await tx2.update(users).set({ balance: 200 }); +// tx2.rollback(); +// }), +// ).rejects.toThrowError(TransactionRollbackError); +// }); + +// const result = await db.select().from(users); + +// expect(result).toEqual([{ id: 1, balance: 100 }]); + +// await db.execute(sql`drop table ${users}`); +// }); + +// test('select from raw sql', async () => { +// const result = await db.execute(sql`select 1 as id, 'John' as name`); + +// expect(result.rows).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('select from raw sql with mapped values', async () => { +// const result = await db +// .select({ +// id: sql`id`, +// name: sql`name`, +// }) +// .from(sql`(select 1 as id, 'John' as name) as users`); + +// expect(result).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('insert with array values works', async () => { +// const bestTexts = ['text1', 'text2', 'text3']; +// const [insertResult] = await db +// .insert(usersTable) +// .values({ +// name: 'John', +// bestTexts, +// }) +// .returning(); + +// expect(insertResult?.bestTexts).toEqual(bestTexts); +// }); + +// test('update with array values works', async () => { +// const [newUser] = await db +// .insert(usersTable) +// .values({ name: 'John' }) +// .returning(); + +// const bestTexts = ['text4', 'text5', 'text6']; +// const [insertResult] = await db +// .update(usersTable) +// .set({ +// bestTexts, +// }) +// .where(eq(usersTable.id, newUser!.id)) +// .returning(); + +// expect(insertResult?.bestTexts).toEqual(bestTexts); +// }); + +// test('insert with array values works', async () => { +// const bestTexts = ['text1', 'text2', 'text3']; +// const [insertResult] = await db +// .insert(usersTable) +// .values({ +// name: 'John', +// bestTexts, +// }) +// .returning(); + +// expect(insertResult?.bestTexts).toEqual(bestTexts); +// }); + +// test('update with array values works', async () => { +// const [newUser] = await db +// .insert(usersTable) +// .values({ name: 'John' }) +// .returning(); + +// const bestTexts = ['text4', 'text5', 'text6']; +// const [insertResult] = await db +// .update(usersTable) +// .set({ +// bestTexts, +// }) +// .where(eq(usersTable.id, newUser!.id)) +// .returning(); + +// expect(insertResult?.bestTexts).toEqual(bestTexts); +// }); + +// test('insert with array values works', async () => { +// const bestTexts = ['text1', 'text2', 'text3']; +// const [insertResult] = await db +// .insert(usersTable) +// .values({ +// name: 'John', +// bestTexts, +// }) +// .returning(); + +// expect(insertResult?.bestTexts).toEqual(bestTexts); +// }); + +// test('update with array values works', async () => { +// const [newUser] = await db +// .insert(usersTable) +// .values({ name: 'John' }) +// .returning(); + +// const bestTexts = ['text4', 'text5', 'text6']; +// const [insertResult] = await db +// .update(usersTable) +// .set({ +// bestTexts, +// }) +// .where(eq(usersTable.id, newUser!.id)) +// .returning(); + +// expect(insertResult?.bestTexts).toEqual(bestTexts); +// }); + +// test('all date and time columns', async () => { +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// dateString: date('date_string', { mode: 'string' }).notNull(), +// time: time('time', { precision: 3 }).notNull(), +// datetime: timestamp('datetime').notNull(), +// // datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), +// datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), +// datetimeFullPrecision: timestamp('datetime_full_precision', { +// precision: 6, +// mode: 'string', +// }).notNull(), +// // datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// date_string date not null, +// time time(3) not null, +// datetime timestamp not null, +// -- datetime_wtz timestamp with time zone not null, +// datetime_string timestamp not null, +// datetime_full_precision timestamp(6) not null +// -- datetime_wtz_string timestamp with time zone not null +// ) +// `); + +// const someDatetime = new Date('2022-01-01T00:00:00.123Z'); +// const fullPrecision = '2022-01-01T00:00:00.123456'; +// const someTime = '23:23:12.432'; + +// await db.insert(table).values({ +// dateString: '2022-01-01', +// time: someTime, +// datetime: someDatetime, +// // datetimeWTZ: someDatetime, +// datetimeString: '2022-01-01T00:00:00.123Z', +// datetimeFullPrecision: fullPrecision, +// // datetimeWTZString: '2022-01-01T00:00:00.123Z', +// }); + +// const result = await db.select().from(table); + +// Expect< +// Equal< +// { +// id: number; +// dateString: string; +// time: string; +// datetime: Date; +// // datetimeWTZ: Date; +// datetimeString: string; +// datetimeFullPrecision: string; +// // datetimeWTZString: string; +// }[], +// typeof result +// > +// >; + +// Expect< +// Equal< +// { +// dateString: string; +// time: string; +// datetime: Date; +// // datetimeWTZ: Date; +// datetimeString: string; +// datetimeFullPrecision: string; +// // datetimeWTZString: string; +// id?: number | undefined; +// }, +// typeof table.$inferInsert +// > +// >; + +// expect(result).toEqual([ +// { +// id: 1, +// dateString: '2022-01-01', +// time: someTime, +// datetime: someDatetime, +// // datetimeWTZ: someDatetime, +// datetimeString: '2022-01-01 00:00:00.123', +// datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), +// // datetimeWTZString: '2022-01-01 00:00:00.123+00', +// }, +// ]); + +// await db.execute(sql`drop table if exists ${table}`); +// }); + +// test.skip('all date and time columns with timezone', async () => { +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestamp: timestamp('timestamp_string', { +// mode: 'string', +// withTimezone: true, +// precision: 6, +// }).notNull(), +// timestampAsDate: timestamp('timestamp_date', { +// withTimezone: true, +// precision: 3, +// }).notNull(), +// timestampTimeZones: timestamp('timestamp_date_2', { +// withTimezone: true, +// precision: 3, +// }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(6) with time zone not null, +// timestamp_date timestamp(3) with time zone not null, +// timestamp_date_2 timestamp(3) with time zone not null +// ) +// `); + +// const timestampString = '2022-01-01 00:00:00.123456-0200'; +// const timestampDate = new Date(); +// const timestampDateWTZ = new Date('2022-01-01 00:00:00.123 +0500'); + +// const timestampString2 = '2022-01-01 00:00:00.123456-0400'; +// const timestampDate2 = new Date(); +// const timestampDateWTZ2 = new Date('2022-01-01 00:00:00.123 +0200'); + +// await db.insert(table).values([ +// { +// timestamp: timestampString, +// timestampAsDate: timestampDate, +// timestampTimeZones: timestampDateWTZ, +// }, +// { +// timestamp: timestampString2, +// timestampAsDate: timestampDate2, +// timestampTimeZones: timestampDateWTZ2, +// }, +// ]); + +// const result = await db.select().from(table); +// const result2 = await db.execute<{ +// id: number; +// timestamp_string: string; +// timestamp_date: string; +// timestamp_date_2: string; +// }>(sql`select * from ${table}`); + +// // Whatever you put in, you get back when you're using the date mode +// // But when using the string mode, postgres returns a string transformed into UTC +// expect(result).toEqual([ +// { +// id: 1, +// timestamp: '2022-01-01 02:00:00.123456+00', +// timestampAsDate: timestampDate, +// timestampTimeZones: timestampDateWTZ, +// }, +// { +// id: 2, +// timestamp: '2022-01-01 04:00:00.123456+00', +// timestampAsDate: timestampDate2, +// timestampTimeZones: timestampDateWTZ2, +// }, +// ]); + +// expect(result2.rows).toEqual([ +// { +// id: 1, +// timestamp_string: '2022-01-01 02:00:00.123456+00', +// timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', '') + '+00', +// timestamp_date_2: timestampDateWTZ.toISOString().replace('T', ' ').replace('Z', '') +// + '+00', +// }, +// { +// id: 2, +// timestamp_string: '2022-01-01 04:00:00.123456+00', +// timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', '') + '+00', +// timestamp_date_2: timestampDateWTZ2.toISOString().replace('T', ' ').replace('Z', '') +// + '+00', +// }, +// ]); + +// expect(result[0]?.timestampTimeZones.getTime()).toEqual( +// new Date((result2.rows?.[0]?.timestamp_date_2) as any).getTime(), +// ); + +// await db.execute(sql`drop table if exists ${table}`); +// }); + +// test('all date and time columns without timezone', async () => { +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestampString: timestamp('timestamp_string', { +// mode: 'string', +// precision: 6, +// }).notNull(), +// timestampString2: timestamp('timestamp_string2', { +// precision: 3, +// mode: 'string', +// }).notNull(), +// timestampDate: timestamp('timestamp_date', { precision: 3 }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(6) not null, +// timestamp_string2 timestamp(3) not null, +// timestamp_date timestamp(3) not null +// ) +// `); + +// const timestampString = '2022-01-01 00:00:00.123456'; +// // const timestampString2 = '2022-01-02 00:00:00.123 -0300'; +// const timestampString2 = '2022-01-02 00:00:00.123'; +// const timestampDate = new Date('2022-01-01 00:00:00.123Z'); + +// const timestampString_2 = '2022-01-01 00:00:00.123456'; +// // const timestampString2_2 = '2022-01-01 00:00:00.123 -0300'; +// const timestampString2_2 = '2022-01-01 00:00:00.123'; +// // const timestampDate2 = new Date('2022-01-01 00:00:00.123 +0200'); +// const timestampDate2 = new Date('2022-01-01 00:00:00.123'); + +// await db.insert(table).values([ +// { timestampString, timestampString2, timestampDate }, +// { +// timestampString: timestampString_2, +// timestampString2: timestampString2_2, +// timestampDate: timestampDate2, +// }, +// ]); + +// const result = await db.select().from(table); +// const result2 = await db.execute<{ +// id: number; +// timestamp_string: string; +// timestamp_string2: string; +// timestamp_date: string; +// }>(sql`select * from ${table}`); + +// // Whatever you put in, you get back when you're using the date mode +// // But when using the string mode, postgres returns a string transformed into UTC +// expect(result).toEqual([ +// { +// id: 1, +// timestampString: timestampString, +// timestampString2: '2022-01-02 00:00:00.123', +// timestampDate: timestampDate, +// }, +// { +// id: 2, +// timestampString: timestampString_2, +// timestampString2: '2022-01-01 00:00:00.123', +// timestampDate: timestampDate2, +// }, +// ]); + +// expect(result2.rows).toEqual([ +// { +// id: 1, +// timestamp_string: timestampString, +// timestamp_string2: '2022-01-02 00:00:00.123', +// timestamp_date: timestampDate +// .toISOString() +// .replace('T', ' ') +// .replace('Z', ''), +// }, +// { +// id: 2, +// timestamp_string: timestampString_2, +// timestamp_string2: '2022-01-01 00:00:00.123', +// timestamp_date: timestampDate2 +// .toISOString() +// .replace('T', ' ') +// .replace('Z', ''), +// }, +// ]); + +// expect(result2.rows?.[0]?.timestamp_string).toEqual( +// '2022-01-01 00:00:00.123456', +// ); +// // need to add the 'Z', otherwise javascript assumes it's in local time +// expect( +// new Date((result2.rows?.[0]?.timestamp_date + 'Z') as any).getTime(), +// ).toEqual(timestampDate.getTime()); + +// await db.execute(sql`drop table if exists ${table}`); +// }); + +// test('Typehints mix for RQB', async () => { +// const uuid = 'd997d46d-5769-4c78-9a35-93acadbe6076'; + +// const res = await db._query.user.findMany({ +// where: eq(user.id, uuid), +// with: { +// todos: { +// with: { +// todo: true, +// }, +// }, +// }, +// }); + +// expect(res).toStrictEqual([]); +// }); + +// test('Typehints mix for findFirst', async () => { +// const uuid = 'd997d46d-5769-4c78-9a35-93acadbe6076'; + +// await db.insert(user).values({ id: uuid, email: 'd' }); + +// const res = await db._query.user.findFirst({ +// where: eq(user.id, uuid), +// }); + +// expect(res).toStrictEqual({ id: 'd997d46d-5769-4c78-9a35-93acadbe6076', email: 'd' }); +// }); + +// test('RQB v2 simple find first - no rows', async () => { +// try { +// await init(db); + +// const result = await db.query.rqbUser.findFirst(); + +// expect(result).toStrictEqual(undefined); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 simple find first - multiple rows', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// const result = await db.query.rqbUser.findFirst({ +// orderBy: { +// id: 'desc', +// }, +// }); + +// expect(result).toStrictEqual({ +// id: 2, +// createdAt: date, +// name: 'Second', +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 simple find first - with relation', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// await db.insert(rqbPost).values([{ +// id: 1, +// userId: 1, +// createdAt: date, +// content: null, +// }, { +// id: 2, +// userId: 1, +// createdAt: date, +// content: 'Has message this time', +// }]); + +// const result = await db.query.rqbUser.findFirst({ +// with: { +// posts: { +// orderBy: { +// id: 'asc', +// }, +// }, +// }, +// orderBy: { +// id: 'asc', +// }, +// }); + +// expect(result).toStrictEqual({ +// id: 1, +// createdAt: date, +// name: 'First', +// posts: [{ +// id: 1, +// userId: 1, +// createdAt: date, +// content: null, +// }, { +// id: 2, +// userId: 1, +// createdAt: date, +// content: 'Has message this time', +// }], +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 simple find first - placeholders', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// const query = db.query.rqbUser.findFirst({ +// where: { +// id: { +// eq: sql.placeholder('filter'), +// }, +// }, +// orderBy: { +// id: 'asc', +// }, +// }).prepare('rqb_v2_find_first_placeholders'); + +// const result = await query.execute({ +// filter: 2, +// }); + +// expect(result).toStrictEqual({ +// id: 2, +// createdAt: date, +// name: 'Second', +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 simple find many - no rows', async () => { +// try { +// await init(db); + +// const result = await db.query.rqbUser.findMany(); + +// expect(result).toStrictEqual([]); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 simple find many - multiple rows', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// const result = await db.query.rqbUser.findMany({ +// orderBy: { +// id: 'desc', +// }, +// }); + +// expect(result).toStrictEqual([{ +// id: 2, +// createdAt: date, +// name: 'Second', +// }, { +// id: 1, +// createdAt: date, +// name: 'First', +// }]); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 simple find many - with relation', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// await db.insert(rqbPost).values([{ +// id: 1, +// userId: 1, +// createdAt: date, +// content: null, +// }, { +// id: 2, +// userId: 1, +// createdAt: date, +// content: 'Has message this time', +// }]); + +// const result = await db.query.rqbPost.findMany({ +// with: { +// author: true, +// }, +// orderBy: { +// id: 'asc', +// }, +// }); + +// expect(result).toStrictEqual([{ +// id: 1, +// userId: 1, +// createdAt: date, +// content: null, +// author: { +// id: 1, +// createdAt: date, +// name: 'First', +// }, +// }, { +// id: 2, +// userId: 1, +// createdAt: date, +// content: 'Has message this time', +// author: { +// id: 1, +// createdAt: date, +// name: 'First', +// }, +// }]); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 simple find many - placeholders', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// const query = db.query.rqbUser.findMany({ +// where: { +// id: { +// eq: sql.placeholder('filter'), +// }, +// }, +// orderBy: { +// id: 'asc', +// }, +// }).prepare('rqb_v2_find_many_placeholders'); + +// const result = await query.execute({ +// filter: 2, +// }); + +// expect(result).toStrictEqual([{ +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 transaction find first - no rows', async () => { +// try { +// await init(db); + +// await db.transaction(async (db) => { +// const result = await db.query.rqbUser.findFirst(); + +// expect(result).toStrictEqual(undefined); +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 transaction find first - multiple rows', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// await db.transaction(async (db) => { +// const result = await db.query.rqbUser.findFirst({ +// orderBy: { +// id: 'desc', +// }, +// }); + +// expect(result).toStrictEqual({ +// id: 2, +// createdAt: date, +// name: 'Second', +// }); +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 transaction find first - with relation', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// await db.insert(rqbPost).values([{ +// id: 1, +// userId: 1, +// createdAt: date, +// content: null, +// }, { +// id: 2, +// userId: 1, +// createdAt: date, +// content: 'Has message this time', +// }]); + +// await db.transaction(async (db) => { +// const result = await db.query.rqbUser.findFirst({ +// with: { +// posts: { +// orderBy: { +// id: 'asc', +// }, +// }, +// }, +// orderBy: { +// id: 'asc', +// }, +// }); + +// expect(result).toStrictEqual({ +// id: 1, +// createdAt: date, +// name: 'First', +// posts: [{ +// id: 1, +// userId: 1, +// createdAt: date, +// content: null, +// }, { +// id: 2, +// userId: 1, +// createdAt: date, +// content: 'Has message this time', +// }], +// }); +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 transaction find first - placeholders', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// await db.transaction(async (db) => { +// const query = db.query.rqbUser.findFirst({ +// where: { +// id: { +// eq: sql.placeholder('filter'), +// }, +// }, +// orderBy: { +// id: 'asc', +// }, +// }).prepare('rqb_v2_find_first_tx_placeholders'); + +// const result = await query.execute({ +// filter: 2, +// }); + +// expect(result).toStrictEqual({ +// id: 2, +// createdAt: date, +// name: 'Second', +// }); +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 transaction find many - no rows', async () => { +// try { +// await init(db); + +// await db.transaction(async (db) => { +// const result = await db.query.rqbUser.findMany(); + +// expect(result).toStrictEqual([]); +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 transaction find many - multiple rows', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// await db.transaction(async (db) => { +// const result = await db.query.rqbUser.findMany({ +// orderBy: { +// id: 'desc', +// }, +// }); + +// expect(result).toStrictEqual([{ +// id: 2, +// createdAt: date, +// name: 'Second', +// }, { +// id: 1, +// createdAt: date, +// name: 'First', +// }]); +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 transaction find many - with relation', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// await db.insert(rqbPost).values([{ +// id: 1, +// userId: 1, +// createdAt: date, +// content: null, +// }, { +// id: 2, +// userId: 1, +// createdAt: date, +// content: 'Has message this time', +// }]); + +// await db.transaction(async (db) => { +// const result = await db.query.rqbPost.findMany({ +// with: { +// author: true, +// }, +// orderBy: { +// id: 'asc', +// }, +// }); + +// expect(result).toStrictEqual([{ +// id: 1, +// userId: 1, +// createdAt: date, +// content: null, +// author: { +// id: 1, +// createdAt: date, +// name: 'First', +// }, +// }, { +// id: 2, +// userId: 1, +// createdAt: date, +// content: 'Has message this time', +// author: { +// id: 1, +// createdAt: date, +// name: 'First', +// }, +// }]); +// }); +// } finally { +// await clear(db); +// } +// }); + +// test('RQB v2 transaction find many - placeholders', async () => { +// try { +// await init(db); + +// const date = new Date(120000); + +// await db.insert(rqbUser).values([{ +// id: 1, +// createdAt: date, +// name: 'First', +// }, { +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); + +// await db.transaction(async (db) => { +// const query = db.query.rqbUser.findMany({ +// where: { +// id: { +// eq: sql.placeholder('filter'), +// }, +// }, +// orderBy: { +// id: 'asc', +// }, +// }).prepare('rqb_v2_find_many_placeholders'); + +// const result = await query.execute({ +// filter: 2, +// }); + +// expect(result).toStrictEqual([{ +// id: 2, +// createdAt: date, +// name: 'Second', +// }]); +// }); +// } finally { +// await clear(db); +// } +// }); + +// afterAll(async () => { +// await db.execute(sql`drop table if exists "users"`); +// await db.execute(sql`drop table if exists "todo_user"`); +// await db.execute(sql`drop table if exists "user"`); +// await db.execute(sql`drop table if exists "todo"`); +// await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); +// }); diff --git a/integration-tests/tests/pg/common-cache.ts b/integration-tests/tests/pg/common-cache.ts new file mode 100644 index 0000000000..b045ea9715 --- /dev/null +++ b/integration-tests/tests/pg/common-cache.ts @@ -0,0 +1,300 @@ +import { eq, sql } from 'drizzle-orm'; +import { alias } from 'drizzle-orm/pg-core'; +import { describe, expect, vi } from 'vitest'; +import type { Test } from './instrumentation'; +import { postsTable, usersTable } from './schema'; + +export function tests(test: Test) { + describe('caches', () => { + test.beforeEach(async ({ caches }) => { + const { all, explicit } = caches; + + await explicit.execute(sql`drop schema if exists public cascade`); + await explicit.$cache?.invalidate({ tables: 'users' }); + await all.$cache?.invalidate({ tables: 'users' }); + await explicit.execute(sql`create schema public`); + // public users + await explicit.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); + }); + + test('test force invalidate', async ({ caches }) => { + const { explicit: db } = caches; + + using spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); + await db.$cache?.invalidate({ tables: 'users' }); + expect(spyInvalidate).toHaveBeenCalledTimes(1); + }); + + test('default global config - no cache should be hit', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test('default global config + enable cache on select: get, put', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache(); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test('default global config + enable cache on select + write: get, put, onMutate', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + + spyPut.mockClear(); + spyGet.mockClear(); + spyInvalidate.mockClear(); + + await db.insert(usersTable).values({ name: 'John' }); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(1); + }); + + test('default global config + enable cache on select + disable invalidate: get, put', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + + await db.insert(usersTable).values({ name: 'John' }); + + // invalidate force + await db.$cache?.invalidate({ tags: ['custom'] }); + }); + + test('global: true + disable cache', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache(false); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test('global: true - cache should be hit', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test('global: true - cache: false on select - no cache hit', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache(false); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test('global: true - disable invalidate - cache hit + no invalidate', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ autoInvalidate: false }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + + spyPut.mockClear(); + spyGet.mockClear(); + spyInvalidate.mockClear(); + + await db.insert(usersTable).values({ name: 'John' }); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(1); + }); + + test('global: true - with custom tag', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + + await db.insert(usersTable).values({ name: 'John' }); + + // invalidate force + await db.$cache?.invalidate({ tags: ['custom'] }); + }); + + test('global: true - with custom tag + with autoinvalidate', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ tag: 'custom' }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + + await db.insert(usersTable).values({ name: 'John' }); + + expect(spyInvalidate).toHaveBeenCalledTimes(1); + + // invalidate force + await db.$cache?.invalidate({ tags: ['custom'] }); + }); + + // check select used tables + test('check simple select used tables', ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); + // @ts-expect-error + expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); + }); + // check select+join used tables + test('select+join', ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) + .toStrictEqual(['users', 'posts']); + expect( + // @ts-expect-error + db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), + ).toStrictEqual(['users', 'posts']); + }); + // check select+2join used tables + test('select+2joins', ({ caches }) => { + const { explicit: db } = caches; + + expect( + db.select().from(usersTable).leftJoin( + postsTable, + eq(usersTable.id, postsTable.userId), + ).leftJoin( + alias(postsTable, 'post2'), + eq(usersTable.id, postsTable.userId), + ) + // @ts-expect-error + .getUsedTables(), + ) + .toStrictEqual(['users', 'posts']); + expect( + db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( + alias(postsTable, 'post2'), + eq(usersTable.id, postsTable.userId), + // @ts-expect-error + ).getUsedTables(), + ).toStrictEqual(['users', 'posts']); + }); + // select subquery used tables + test('select+join', ({ caches }) => { + const { explicit: db } = caches; + + const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); + db.select().from(sq); + + // @ts-expect-error + expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); + }); + }); +} diff --git a/integration-tests/tests/pg/common-pt1.ts b/integration-tests/tests/pg/common-pt1.ts new file mode 100644 index 0000000000..8d6eca0fa2 --- /dev/null +++ b/integration-tests/tests/pg/common-pt1.ts @@ -0,0 +1,1707 @@ +// eslint-disable-next-line @typescript-eslint/consistent-type-imports +import { and, asc, eq, exists, gt, inArray, lt, notInArray, sql } from 'drizzle-orm'; +import { + alias, + boolean, + char, + cidr, + inet, + integer, + jsonb, + macaddr, + macaddr8, + numeric, + pgTable, + pgTableCreator, + serial, + text, + timestamp, +} from 'drizzle-orm/pg-core'; +import { describe, expect } from 'vitest'; +import type { Test } from './instrumentation'; + +export function tests(test: Test) { + describe('common', () => { + test.concurrent('select all fields', async ({ db, push }) => { + const users = pgTable('users_1', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + const now = Date.now(); + await db.insert(users).values({ name: 'John' }); + const result = await db.select().from(users); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(5000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test.concurrent('select sql', async ({ db, push }) => { + const users = pgTable('users_2', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const res = await db.select({ name: sql`upper(${users.name})` }).from(users); + + expect(res).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('select typed sql', async ({ db, push }) => { + const users = pgTable('users_3', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + + const usersResult = await db.select({ + name: sql`upper(${users.name})`, + }).from(users); + + expect(usersResult).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('select with empty array in inArray', async ({ db, push }) => { + const users = pgTable('users_4', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${users.name})`, + }) + .from(users) + .where(inArray(users.id, [])); + + expect(result).toEqual([]); + }); + + test.concurrent('select with empty array in notInArray', async ({ db, push }) => { + const users = pgTable('users_5', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${users.name})`, + }) + .from(users) + .where(notInArray(users.id, [])); + + expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); + }); + + test.concurrent('$default function', async ({ db, push }) => { + const orders = pgTable('orders_1', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: integer('amount').notNull(), + quantity: integer('quantity').notNull(), + }); + + await push({ orders }); + + const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) + .returning(); + const selectedOrder = await db.select().from(orders); + + expect(insertedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test.concurrent('select distinct', async ({ db, push }) => { + const usersDistinctTable = pgTable('users_distinct_101', { + id: integer('id').notNull(), + name: text('name').notNull(), + age: integer('age').notNull(), + }); + + await push({ usersDistinctTable }); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John', age: 24 }, + { id: 1, name: 'John', age: 24 }, + { id: 2, name: 'John', age: 25 }, + { id: 1, name: 'Jane', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + ]); + const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( + usersDistinctTable.id, + ); + const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.name); + const users4 = await db.selectDistinctOn([usersDistinctTable.id, usersDistinctTable.age]).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.id, usersDistinctTable.age); + + expect(users1).toEqual([ + { id: 1, name: 'Jane', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + { id: 1, name: 'John', age: 24 }, + { id: 2, name: 'John', age: 25 }, + ]); + + expect(users2).toHaveLength(2); + expect(users2[0]?.id).toBe(1); + expect(users2[1]?.id).toBe(2); + + expect(users3).toHaveLength(2); + expect(users3[0]?.name).toBe('Jane'); + expect(users3[1]?.name).toBe('John'); + + expect(users4).toEqual([ + { id: 1, name: 'John', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + { id: 2, name: 'John', age: 25 }, + ]); + }); + + test.concurrent('insert returning sql', async ({ db, push }) => { + const users = pgTable('users_6', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const usersResult = await db + .insert(users) + .values({ name: 'John' }) + .returning({ + name: sql`upper(${users.name})`, + }); + + expect(usersResult).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('delete returning sql', async ({ db, push }) => { + const users = pgTable('users_7', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const usersResult = await db + .delete(users) + .where(eq(users.name, 'John')) + .returning({ + name: sql`upper(${users.name})`, + }); + + expect(usersResult).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('update returning sql', async ({ db, push }) => { + const users = pgTable('users_8', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const usersResult = await db + .update(users) + .set({ name: 'Jane' }) + .where(eq(users.name, 'John')) + .returning({ + name: sql`upper(${users.name})`, + }); + + expect(usersResult).toEqual([{ name: 'JANE' }]); + }); + + test.concurrent('update with returning all fields', async ({ db, push }) => { + const users = pgTable('users_9', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + const now = Date.now(); + + await db.insert(users).values({ name: 'John' }); + const usersResult = await db + .update(users) + .set({ name: 'Jane' }) + .where(eq(users.name, 'John')) + .returning(); + + expect(usersResult[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(usersResult[0]!.createdAt.getTime() - now)).toBeLessThan(300); + expect(usersResult).toEqual([ + { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: usersResult[0]!.createdAt }, + ]); + }); + + test.concurrent('update with returning partial', async ({ db, push }) => { + const users = pgTable('users_10', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const usersResult = await db + .update(users) + .set({ name: 'Jane' }) + .where(eq(users.name, 'John')) + .returning({ + id: users.id, + name: users.name, + }); + + expect(usersResult).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test.concurrent('delete with returning all fields', async ({ db, push }) => { + const users = pgTable('users_11', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + const now = Date.now(); + + await db.insert(users).values({ name: 'John' }); + const usersResult = await db.delete(users).where(eq(users.name, 'John')).returning(); + + expect(usersResult[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(usersResult[0]!.createdAt.getTime() - now)).toBeLessThan(300); + expect(usersResult).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: usersResult[0]!.createdAt }, + ]); + }); + + test.concurrent('delete with returning partial', async ({ db, push }) => { + const users = pgTable('users_12', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const usersResult = await db.delete(users).where(eq(users.name, 'John')).returning({ + id: users.id, + name: users.name, + }); + + expect(usersResult).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('insert + select', async ({ db, push }) => { + const users = pgTable('users_13', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const result = await db.select().from(users); + expect(result).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, + ]); + + await db.insert(users).values({ name: 'Jane' }); + const result2 = await db.select().from(users); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test.concurrent('json insert', async ({ db, push }) => { + const users = pgTable('users_14', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + jsonb: jsonb('jsonb').$type(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db + .select({ + id: users.id, + name: users.name, + jsonb: users.jsonb, + }) + .from(users); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + }); + + test.concurrent('char insert', async ({ db, push }) => { + const cities = pgTable('cities_15', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }); + + await push({ cities }); + + await db.insert(cities).values({ name: 'Austin', state: 'TX' }); + const result = await db + .select({ id: cities.id, name: cities.name, state: cities.state }) + .from(cities); + + expect(result).toEqual([{ id: 1, name: 'Austin', state: 'TX' }]); + }); + + test.concurrent('char update', async ({ db, push }) => { + const cities = pgTable('cities_16', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }); + + await push({ cities }); + + await db.insert(cities).values({ name: 'Austin', state: 'TX' }); + await db.update(cities).set({ name: 'Atlanta', state: 'GA' }).where(eq(cities.id, 1)); + const result = await db + .select({ id: cities.id, name: cities.name, state: cities.state }) + .from(cities); + + expect(result).toEqual([{ id: 1, name: 'Atlanta', state: 'GA' }]); + }); + + test.concurrent('char delete', async ({ db, push }) => { + const cities = pgTable('cities_17', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }); + + await push({ cities }); + + await db.insert(cities).values({ name: 'Austin', state: 'TX' }); + await db.delete(cities).where(eq(cities.state, 'TX')); + const result = await db + .select({ id: cities.id, name: cities.name, state: cities.state }) + .from(cities); + + expect(result).toEqual([]); + }); + + test.concurrent('insert with overridden default values', async ({ db, push }) => { + const users = pgTable('users_18', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John', verified: true }); + const result = await db.select().from(users); + + expect(result).toEqual([ + { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, + ]); + }); + + test.concurrent('insert many', async ({ db, push }) => { + const users = pgTable('users_19', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + }); + + await push({ users }); + + await db + .insert(users) + .values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db + .select({ + id: users.id, + name: users.name, + jsonb: users.jsonb, + verified: users.verified, + }) + .from(users); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test.concurrent('insert many with returning', async ({ db, push }) => { + const users = pgTable('users_20', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + }); + + await push({ users }); + + const result = await db + .insert(users) + .values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]) + .returning({ + id: users.id, + name: users.name, + jsonb: users.jsonb, + verified: users.verified, + }); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test.concurrent('select with group by as field', async ({ db, push }) => { + const users = pgTable('users_121', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: users.name }) + .from(users) + .groupBy(users.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test.concurrent('select with exists', async ({ db, push }) => { + const users = pgTable('users_122', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const user = alias(users, 'user'); + const result = await db.select({ name: users.name }).from(users).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(users.name, 'John'), eq(user.id, users.id))), + ), + ); + + expect(result).toEqual([{ name: 'John' }]); + }); + + test.concurrent('select with group by as sql', async ({ db, push }) => { + const users = pgTable('users_23', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: users.name }) + .from(users) + .groupBy(sql`${users.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test.concurrent('select with group by as sql + column', async ({ db, push }) => { + const users = pgTable('users_24', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: users.name }) + .from(users) + .groupBy(sql`${users.name}`, users.id) + .orderBy(users.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + }); + + test.concurrent('select with group by as column + sql', async ({ db, push }) => { + const users = pgTable('users_25', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: users.name }) + .from(users) + .groupBy(users.id, sql`${users.name}`) + .orderBy(users.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + }); + + test.concurrent('select with group by complex query', async ({ db, push }) => { + const users = pgTable('users_26', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: users.name }) + .from(users) + .groupBy(users.id, sql`${users.name}`) + .orderBy(asc(users.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); + }); + + test.concurrent('build query', async ({ db, push }) => { + const users = pgTable('users_27', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const query = db + .select({ id: users.id, name: users.name }) + .from(users) + .groupBy(users.id, users.name) + .toSQL(); + + expect(query).toEqual({ + sql: 'select "id", "name" from "users_27" group by "users_27"."id", "users_27"."name"', + params: [], + }); + }); + + test.concurrent('insert sql', async ({ db, push }) => { + const users = pgTable('users_128', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: sql`${'John'}` }); + const result = await db.select({ id: users.id, name: users.name }).from(users); + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('partial join with alias', async ({ db, push }) => { + const users = pgTable('users_29', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const customerAlias = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: users.id, + name: users.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }) + .from(users) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([ + { + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }, + ]); + }); + + test.concurrent('full join with alias', async ({ db, push }) => { + const pgTable = pgTableCreator((name) => `prefixed_${name}`); + + const users = pgTable('users_30', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users_30: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + }); + + test.concurrent('select from alias', async ({ db, push }) => { + const pgTable = pgTableCreator((name) => `prefixed_${name}`); + + const users = pgTable('users_31', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + }); + + test.concurrent('insert with spaces', async ({ db, push }) => { + const usersTable = pgTable('users_32', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test.concurrent('prepared statement', async ({ db, push }) => { + const usersTable = pgTable('users_33', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values({ name: 'John' }); + const statement = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .prepare('statement1'); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('insert: placeholders on columns with encoder', async ({ db, push }) => { + const usersTable = pgTable('users_34', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + jsonb: jsonb('jsonb').$type(), + }); + + await push({ usersTable }); + + const statement = db.insert(usersTable).values({ + name: 'John', + jsonb: sql.placeholder('jsonb'), + }).prepare('encoder_statement'); + + await statement.execute({ jsonb: ['foo', 'bar'] }); + + const result = await db + .select({ + id: usersTable.id, + jsonb: usersTable.jsonb, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, jsonb: ['foo', 'bar'] }, + ]); + }); + + test.concurrent('prepared statement reuse', async ({ db, push }) => { + const usersTable = pgTable('users_35', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + }); + + await push({ usersTable }); + + const stmt = db + .insert(usersTable) + .values({ + verified: true, + name: sql.placeholder('name'), + }) + .prepare('stmt2'); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); + }); + + test.concurrent('prepared statement with placeholder in .where', async ({ db, push }) => { + const usersTable = pgTable('users_36', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare('stmt3'); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('prepared statement with placeholder in .limit', async ({ db, push }) => { + const usersTable = pgTable('users_37', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) + .prepare('stmt_limit'); + + const result = await stmt.execute({ id: 1, limit: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + expect(result).toHaveLength(1); + }); + + test.concurrent('prepared statement with placeholder in .offset', async ({ db, push }) => { + const usersTable = pgTable('users_38', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .offset(sql.placeholder('offset')) + .prepare('stmt_offset'); + + const result = await stmt.execute({ offset: 1 }); + + expect(result).toEqual([{ id: 2, name: 'John1' }]); + }); + + test.concurrent('prepared statement built using $dynamic', async ({ db, push }) => { + const usersTable = pgTable('users_39', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + function withLimitOffset(qb: any) { + return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); + } + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .$dynamic(); + withLimitOffset(stmt).prepare('stmt_limit'); + + const result = await stmt.execute({ limit: 1, offset: 1 }); + + expect(result).toEqual([{ id: 2, name: 'John1' }]); + expect(result).toHaveLength(1); + }); + + // https://github.com/drizzle-team/drizzle-orm/issues/4468 + test.concurrent('prepared statement with placeholder in .where', async ({ db, push }) => { + const usersTable = pgTable('users_391', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().default(sql`now()`), + }); + + await push({ usersTable }); + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db + .select() + .from(usersTable) + .where(lt(usersTable.createdAt, sql`now() - ${sql.placeholder('timeWindow')}::interval`)) + .prepare('get_old_users'); + + const result = await stmt.execute({ timeWindow: '40 days' }); + + expect(result).toEqual([]); + }); + + test.concurrent('Insert all defaults in 1 row', async ({ db, push }) => { + const users = pgTable('users_42', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await push({ users }); + + await db.insert(users).values({}); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); + }); + + test.concurrent('Insert all defaults in multiple rows', async ({ db, push }) => { + const users = pgTable('users_43', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await push({ users }); + + await db.insert(users).values([{}, {}]); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); + }); + + test.concurrent('insert with onConflict do update', async ({ db, push }) => { + const usersTable = pgTable('users_48', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values({ name: 'John' }); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); + }); + + test.concurrent('insert with onConflict do nothing', async ({ db, push }) => { + const usersTable = pgTable('users_49', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values({ name: 'John' }); + + await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('insert with onConflict do nothing + target', async ({ db, push }) => { + const usersTable = pgTable('users_50', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values({ name: 'John' }); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoNothing({ target: usersTable.id }); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('left join (flat object fields)', async ({ db, push }) => { + const citiesTable = pgTable('cities_51', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const users2Table = pgTable('users2_51', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), + }); + + await push({ citiesTable, users2Table }); + + const { id: cityId } = await db + .insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }) + .from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); + }); + + test.concurrent('left join (grouped fields)', async ({ db, push }) => { + const citiesTable = pgTable('cities_52', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const users2Table = pgTable('users2_52', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), + }); + + await push({ citiesTable, users2Table }); + + const { id: cityId } = await db + .insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }) + .from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); + }); + + test.concurrent('left join (all fields)', async ({ db, push }) => { + const citiesTable = pgTable('cities_53', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }); + + const users2Table = pgTable('users2_53', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), + }); + + await push({ citiesTable, users2Table }); + + const { id: cityId } = await db + .insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select() + .from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + users2_53: { + id: 1, + name: 'John', + cityId, + }, + cities_53: { + id: cityId, + name: 'Paris', + state: null, + }, + }, + { + users2_53: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities_53: null, + }, + ]); + }); + + test.concurrent('join subquery', async ({ db, push }) => { + const courseCategoriesTable = pgTable('course_categories_54', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const coursesTable = pgTable('courses_54', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: integer('category_id').references(() => courseCategoriesTable.id), + }); + + await push({ courseCategoriesTable, coursesTable }); + + await db + .insert(courseCategoriesTable) + .values([ + { name: 'Category 1' }, + { name: 'Category 2' }, + { name: 'Category 3' }, + { name: 'Category 4' }, + ]); + + await db + .insert(coursesTable) + .values([ + { name: 'Development', categoryId: 2 }, + { name: 'IT & Software', categoryId: 3 }, + { name: 'Marketing', categoryId: 4 }, + { name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + }); + + test.concurrent('with ... select', async ({ db, push }) => { + const orders = pgTable('orders_55', { + region: text('region').notNull(), + product: text('product').notNull(), + amount: integer('amount').notNull(), + quantity: integer('quantity').notNull(), + }); + + await push({ orders }); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result1 = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + const result2 = await db + .with(regionalSales, topRegions) + .selectDistinct({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + const result3 = await db + .with(regionalSales, topRegions) + .selectDistinctOn([orders.region], { + region: orders.region, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region) + .orderBy(orders.region); + + expect(result1).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + expect(result2).toEqual(result1); + expect(result3).toEqual([ + { + region: 'Europe', + productUnits: 8, + productSales: 80, + }, + { + region: 'US', + productUnits: 16, + productSales: 160, + }, + ]); + }); + + test.concurrent('with ... update', async ({ db, push }) => { + const products = pgTable('products_56', { + id: serial('id').primaryKey(), + price: numeric('price').notNull(), + cheap: boolean('cheap').notNull().default(false), + }); + + await push({ products }); + + await db.insert(products).values([ + { price: '10.99' }, + { price: '25.85' }, + { price: '32.99' }, + { price: '2.50' }, + { price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + const result = await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)) + .returning({ + id: products.id, + }); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test.concurrent('with ... insert', async ({ db, push }) => { + const users = pgTable('users_57', { + username: text('username').notNull(), + admin: boolean('admin').notNull().default(false), + }); + + await push({ users }); + + const userCount = db + .$with('user_count') + .as( + db + .select({ + value: sql`count(*)`.as('value'), + }) + .from(users), + ); + + const result = await db + .with(userCount) + .insert(users) + .values([ + { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, + ]) + .returning({ + admin: users.admin, + }); + + expect(result).toEqual([{ admin: true }]); + }); + + test.concurrent('with ... delete', async ({ db, push }) => { + const orders = pgTable('orders_58', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: integer('amount').notNull(), + quantity: integer('quantity').notNull(), + }); + + await push({ orders }); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + const result = await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) + .returning({ + id: orders.id, + }); + + expect(result).toEqual([ + { id: 6 }, + { id: 7 }, + { id: 8 }, + ]); + }); + + test.concurrent('select from subquery sql', async ({ db, push }) => { + const users2Table = pgTable('users2_59', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users2Table }); + + await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); + + const sq = db + .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) + .from(users2Table) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); + }); + + test.concurrent('select count()', async ({ db, push }) => { + const usersTable = pgTable('users_62', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)::int` }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); + }); + + test.concurrent('select count w/ custom mapper', async ({ db, push }) => { + const usersTable = pgTable('users_63', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable }); + + function count(value: any): any; + function count(value: any, alias: string): any; + function count(value: any, alias?: string): any { + const result = sql`count(${value})`.mapWith(Number); + if (!alias) { + return result; + } + return result.as(alias); + } + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: count(sql`*`) }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); + }); + + test.concurrent('network types', async ({ db, push }) => { + const network = pgTable('network_64', { + inet: inet('inet').notNull(), + cidr: cidr('cidr').notNull(), + macaddr: macaddr('macaddr').notNull(), + macaddr8: macaddr8('macaddr8').notNull(), + }); + + await push({ network }); + + const value = { + inet: '127.0.0.1', + cidr: '192.168.100.128/25', + macaddr: '08:00:2b:01:02:03', + macaddr8: '08:00:2b:01:02:03:04:05', + }; + + await db.insert(network).values(value); + + const res = await db.select().from(network); + + expect(res).toEqual([value]); + }); + + test.concurrent('array types', async ({ db, push }) => { + const salEmp = pgTable('sal_emp_65', { + name: text('name').notNull(), + payByQuarter: integer('pay_by_quarter').array().notNull(), + schedule: text('schedule').array().array().notNull(), + }); + + await push({ salEmp }); + + const values = [ + { + name: 'John', + payByQuarter: [10000, 10000, 10000, 10000], + schedule: [['meeting', 'lunch'], ['training', 'presentation']], + }, + { + name: 'Carol', + payByQuarter: [20000, 25000, 25000, 25000], + schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], + }, + ]; + + await db.insert(salEmp).values(values); + + const res = await db.select().from(salEmp); + + expect(res).toEqual(values); + }); + + test.concurrent('having', async ({ db, push }) => { + const citiesTable = pgTable('cities_85', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const users2Table = pgTable('users2_85', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + await push({ citiesTable, users2Table }); + + await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})::int`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); + }); + }); +} diff --git a/integration-tests/tests/pg/common-pt2.ts b/integration-tests/tests/pg/common-pt2.ts new file mode 100644 index 0000000000..0b5da92b69 --- /dev/null +++ b/integration-tests/tests/pg/common-pt2.ts @@ -0,0 +1,3008 @@ +import { + and, + asc, + avg, + avgDistinct, + count, + countDistinct, + eq, + getTableColumns, + gt, + gte, + ilike, + inArray, + like, + lt, + max, + min, + not, + or, + sql, + sum, + sumDistinct, +} from 'drizzle-orm'; +import { + alias, + bigint, + bigserial, + boolean, + bytea, + char, + cidr, + date, + doublePrecision, + except, + getMaterializedViewConfig, + getViewConfig, + inet, + integer, + interval, + json, + jsonb, + line, + macaddr, + macaddr8, + numeric, + pgEnum, + pgSchema, + pgTable, + pgView, + point, + primaryKey, + real, + serial, + smallint, + smallserial, + text, + time, + timestamp, + union, + uuid, + varchar, +} from 'drizzle-orm/pg-core'; +import { describe, expect, expectTypeOf } from 'vitest'; +import type { Test } from './instrumentation'; + +const msDelay = 15000; + +export function tests(test: Test) { + describe('common', () => { + test.concurrent('set operations (mixed) from query builder with subquery', async ({ db, push }) => { + const cities2Table = pgTable('cities_1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const users2Table = pgTable('users2_1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => cities2Table.id), + }); + + await push({ cities2Table, users2Table }); + + await db.insert(cities2Table).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + + const sq = db + .select() + .from(cities2Table).where(gt(cities2Table.id, 1)).as('sq'); + + const result = await db + .select() + .from(cities2Table).except( + ({ unionAll }) => + unionAll( + db.select().from(sq), + db.select().from(cities2Table).where(eq(cities2Table.id, 2)), + ), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(cities2Table).except( + ({ unionAll }) => + unionAll( + db + .select({ name: cities2Table.name, id: cities2Table.id }) + .from(cities2Table).where(gt(cities2Table.id, 1)), + db.select().from(cities2Table).where(eq(cities2Table.id, 2)), + ), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (mixed all) as function', async ({ db, push }) => { + const cities2Table = pgTable('cities_2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const users2Table = pgTable('users2_2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => cities2Table.id), + }); + + await push({ cities2Table, users2Table }); + + await db.insert(cities2Table).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); + + const result = await union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select().from(cities2Table).where(gt(cities2Table.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(6); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 8, name: 'Sally' }, + ]); + + await expect((async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select().from(cities2Table).where(gt(cities2Table.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test.concurrent('aggregate function: count', async ({ db, push }) => { + const aggregateTable = pgTable('aggregate_table_3', { + id: serial('id').notNull(), + name: text('name').notNull(), + value: integer('value'), + nullOnly: integer('null_only'), + }); + + await push({ aggregateTable }); + + await db.insert(aggregateTable).values([ + { name: 'value 1', value: 10 }, + { name: 'value 1', value: 20 }, + { name: 'value 2', value: 50 }, + { name: 'value 3', value: 20 }, + { name: 'value 4', value: 90 }, + { name: 'value 5', value: 10 }, + { name: 'value 6', nullOnly: null }, + ]); + + const result1 = await db.select({ value: count() }).from(aggregateTable); + const result2 = await db.select({ value: count(aggregateTable.value) }).from(aggregateTable); + const result3 = await db.select({ value: countDistinct(aggregateTable.name) }).from(aggregateTable); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(6); + expect(result3[0]?.value).toBe(6); + }); + + test.concurrent('aggregate function: avg', async ({ db, push }) => { + const aggregateTable = pgTable('aggregate_table_4', { + id: serial('id').notNull(), + name: text('name').notNull(), + value: integer('value'), + nullOnly: integer('null_only'), + }); + + await push({ aggregateTable }); + + await db.insert(aggregateTable).values([ + { name: 'value 1', value: 10 }, + { name: 'value 1', value: 20 }, + { name: 'value 2', value: 50 }, + { name: 'value 3', value: 20 }, + { name: 'value 4', value: 90 }, + { name: 'value 5', value: 10 }, + { name: 'value 6', nullOnly: null }, + ]); + + const result1 = await db.select({ value: avg(aggregateTable.value) }).from(aggregateTable); + const result2 = await db.select({ value: avg(aggregateTable.nullOnly) }).from(aggregateTable); + const result3 = await db.select({ value: avgDistinct(aggregateTable.value) }).from(aggregateTable); + + expect(result1[0]?.value).toBe('33.3333333333333333'); + expect(result2[0]?.value).toBeNull(); + expect(result3[0]?.value).toBe('42.5000000000000000'); + }); + + test.concurrent('aggregate function: sum', async ({ db, push }) => { + const aggregateTable = pgTable('aggregate_table_5', { + id: serial('id').notNull(), + name: text('name').notNull(), + value: integer('value'), + nullOnly: integer('null_only'), + }); + + await push({ aggregateTable }); + + await db.insert(aggregateTable).values([ + { name: 'value 1', value: 10 }, + { name: 'value 1', value: 20 }, + { name: 'value 2', value: 50 }, + { name: 'value 3', value: 20 }, + { name: 'value 4', value: 90 }, + { name: 'value 5', value: 10 }, + { name: 'value 6', nullOnly: null }, + ]); + + const result1 = await db.select({ value: sum(aggregateTable.value) }).from(aggregateTable); + const result2 = await db.select({ value: sum(aggregateTable.nullOnly) }).from(aggregateTable); + const result3 = await db.select({ value: sumDistinct(aggregateTable.value) }).from(aggregateTable); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBeNull(); + expect(result3[0]?.value).toBe('170'); + }); + + test.concurrent('aggregate function: max', async ({ db, push }) => { + const aggregateTable = pgTable('aggregate_table_6', { + id: serial('id').notNull(), + name: text('name').notNull(), + value: integer('value'), + nullOnly: integer('null_only'), + }); + + await push({ aggregateTable }); + + await db.insert(aggregateTable).values([ + { name: 'value 1', value: 10 }, + { name: 'value 1', value: 20 }, + { name: 'value 2', value: 50 }, + { name: 'value 3', value: 20 }, + { name: 'value 4', value: 90 }, + { name: 'value 5', value: 10 }, + { name: 'value 6', nullOnly: null }, + ]); + + const result1 = await db.select({ value: max(aggregateTable.value) }).from(aggregateTable); + const result2 = await db.select({ value: max(aggregateTable.nullOnly) }).from(aggregateTable); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBeNull(); + }); + + test.concurrent('aggregate function: min', async ({ db, push }) => { + const aggregateTable = pgTable('aggregate_table_7', { + id: serial('id').notNull(), + name: text('name').notNull(), + value: integer('value'), + nullOnly: integer('null_only'), + }); + + await push({ aggregateTable }); + + await db.insert(aggregateTable).values([ + { name: 'value 1', value: 10 }, + { name: 'value 1', value: 20 }, + { name: 'value 2', value: 50 }, + { name: 'value 3', value: 20 }, + { name: 'value 4', value: 90 }, + { name: 'value 5', value: 10 }, + { name: 'value 6', nullOnly: null }, + ]); + + const result1 = await db.select({ value: min(aggregateTable.value) }).from(aggregateTable); + const result2 = await db.select({ value: min(aggregateTable.nullOnly) }).from(aggregateTable); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBeNull(); + }); + + test.concurrent('array mapping and parsing', async ({ db, push }) => { + const arrays = pgTable('arrays_tests_7', { + id: serial('id').primaryKey(), + tags: text('tags').array(), + nested: text('nested').array().array(), + numbers: integer('numbers').notNull().array(), + }); + + await push({ arrays }); + + await db.insert(arrays).values({ + tags: ['', 'b', 'c'], + nested: [['1', ''], ['3', '\\a']], + numbers: [1, 2, 3], + }); + + const result = await db.select().from(arrays); + + expect(result).toEqual([{ + id: 1, + tags: ['', 'b', 'c'], + nested: [['1', ''], ['3', '\\a']], + numbers: [1, 2, 3], + }]); + }); + + test.concurrent('test $onUpdateFn and $onUpdate works as $default', async ({ db, push }) => { + const usersOnUpdate = pgTable('users_on_update_8', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), + alwaysNull: text('always_null').$type().$onUpdate(() => null), + }); + + await push({ usersOnUpdate }); + + await db.insert(usersOnUpdate).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test.concurrent('test $onUpdateFn and $onUpdate works updating', async ({ db, push }) => { + const usersOnUpdate = pgTable('users_on_update_9', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), + alwaysNull: text('always_null').$type().$onUpdate(() => null), + }); + + await push({ usersOnUpdate }); + + await db.insert(usersOnUpdate).values([ + { name: 'John', alwaysNull: 'this will be null after updating' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); + await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + + // expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test.concurrent('test if method with sql operators', async ({ db, push }) => { + const users = pgTable('users_106', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + age: integer('age').notNull(), + city: text('city').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition1 = true; + + const [result1] = await db.select().from(users).where(eq(users.id, 1).if(condition1)); + + expect(result1).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); + + const condition2 = 1; + + const [result2] = await db.select().from(users).where(sql`${users.id} = 1`.if(condition2)); + + expect(result2).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); + + const condition3 = 'non-empty string'; + + const result3 = await db.select().from(users).where( + or(eq(users.id, 1).if(condition3), eq(users.id, 2).if(condition3)), + ); + + expect(result3).toEqual([{ id: 1, name: 'John', age: 20, city: 'New York' }, { + id: 2, + name: 'Alice', + age: 21, + city: 'New York', + }]); + + const condtition4 = false; + + const result4 = await db.select().from(users).where(eq(users.id, 1).if(condtition4)); + + expect(result4).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition5 = undefined; + + const result5 = await db.select().from(users).where(sql`${users.id} = 1`.if(condition5)); + + expect(result5).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition6 = null; + + const result6 = await db.select().from(users).where( + or(eq(users.id, 1).if(condition6), eq(users.id, 2).if(condition6)), + ); + + expect(result6).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition7 = { + term1: 0, + term2: 1, + }; + + const result7 = await db.select().from(users).where( + and(gt(users.age, 20).if(condition7.term1), eq(users.city, 'New York').if(condition7.term2)), + ); + + expect(result7).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + ]); + + const condition8 = { + term1: '', + term2: 'non-empty string', + }; + + const result8 = await db.select().from(users).where( + or(lt(users.age, 21).if(condition8.term1), eq(users.city, 'London').if(condition8.term2)), + ); + + expect(result8).toEqual([ + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition9 = { + term1: 1, + term2: true, + }; + + const result9 = await db.select().from(users).where( + and( + inArray(users.city, ['New York', 'London']).if(condition9.term1), + ilike(users.name, 'a%').if(condition9.term2), + ), + ); + + expect(result9).toEqual([ + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + ]); + + const condition10 = { + term1: 4, + term2: 19, + }; + + const result10 = await db.select().from(users).where( + and( + sql`length(${users.name}) <= ${condition10.term1}`.if(condition10.term1), + gt(users.age, condition10.term2).if(condition10.term2 > 20), + ), + ); + + expect(result10).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition11 = true; + + const result11 = await db.select().from(users).where( + or(eq(users.city, 'New York'), gte(users.age, 22))!.if(condition11), + ); + + expect(result11).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition12 = false; + + const result12 = await db.select().from(users).where( + and(eq(users.city, 'London'), gte(users.age, 23))!.if(condition12), + ); + + expect(result12).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition13 = true; + + const result13 = await db.select().from(users).where(sql`(city = 'New York' or age >= 22)`.if(condition13)); + + expect(result13).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition14 = false; + + const result14 = await db.select().from(users).where(sql`(city = 'London' and age >= 23)`.if(condition14)); + + expect(result14).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + }); + + // MySchema tests + test.concurrent('mySchema :: select all fields', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + const now = Date.now(); + + await db.insert(users).values({ name: 'John' }); + const result = await db.select().from(users); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(300); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test.concurrent('mySchema :: select sql', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_10', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const usersResult = await db.select({ + name: sql`upper(${users.name})`, + }).from(users); + + expect(usersResult).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('mySchema :: select typed sql', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_111', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const usersResult = await db.select({ + name: sql`upper(${users.name})`, + }).from(users); + + expect(usersResult).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('mySchema :: select distinct', async ({ db, push }) => { + const usersDistinctTable = pgTable('users_distinct_1', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await push({ usersDistinctTable }); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( + usersDistinctTable.id, + ); + const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.name); + + expect(users1).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + + expect(users2).toHaveLength(2); + expect(users2[0]?.id).toBe(1); + expect(users2[1]?.id).toBe(2); + + expect(users3).toHaveLength(2); + expect(users3[0]?.name).toBe('Jane'); + expect(users3[1]?.name).toBe('John'); + }); + + test.concurrent('mySchema :: insert returning sql', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const result = await db.insert(users).values({ name: 'John' }).returning({ + name: sql`upper(${users.name})`, + }); + + expect(result).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('mySchema :: delete returning sql', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_3', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const result = await db.delete(users).where(eq(users.name, 'John')).returning({ + name: sql`upper(${users.name})`, + }); + + expect(result).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('mySchema :: update with returning partial', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_4', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const result = await db.update(users).set({ name: 'Jane' }).where(eq(users.name, 'John')) + .returning({ + id: users.id, + name: users.name, + }); + + expect(result).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test.concurrent('mySchema :: delete with returning all fields', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_5', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + const now = Date.now(); + + await db.insert(users).values({ name: 'John' }); + const result = await db.delete(users).where(eq(users.name, 'John')).returning(); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(300); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test.concurrent('mySchema :: insert + select', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_6', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const result = await db.select().from(users); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(users).values({ name: 'Jane' }); + const result2 = await db.select().from(users); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test.concurrent('mySchema :: insert with overridden default values', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_7', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John', verified: true }); + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test.concurrent('mySchema :: insert many', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_8', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users }); + + await db.insert(users).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: users.id, + name: users.name, + jsonb: users.jsonb, + verified: users.verified, + }).from(users); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test.concurrent('mySchema :: select with group by as field', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_9', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(users.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); + }); + + test.concurrent('mySchema :: select with group by as column + sql', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_101', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: users.name }).from(users) + .groupBy(users.id, sql`${users.name}`) + .orderBy(users.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); + }); + + test.concurrent('mySchema :: build query', async ({ db }) => { + const mySchema = pgSchema('mySchema_11'); + const users = mySchema.table('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const query = db.select({ id: users.id, name: users.name }).from(users) + .groupBy(users.id, users.name) + .toSQL(); + + expect(query).toEqual({ + sql: + 'select "id", "name" from "mySchema_11"."users" group by "mySchema_11"."users"."id", "mySchema_11"."users"."name"', + params: [], + }); + }); + + test.concurrent('mySchema :: partial join with alias', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_105', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const customerAlias = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: users.id, + name: users.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(users) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); + }); + + test.concurrent('mySchema :: insert with spaces', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_104', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: users.id, name: users.name }).from( + users, + ); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test.concurrent('mySchema :: prepared statement with placeholder in .limit', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_103', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const stmt = db + .select({ + id: users.id, + name: users.name, + }) + .from(users) + .where(eq(users.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) + .prepare('mySchema_stmt_limit'); + + const result = await stmt.execute({ id: 1, limit: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + expect(result).toHaveLength(1); + }); + + test + .concurrent( + 'mySchema :: build query insert with onConflict do update / multiple columns', + async ({ db }) => { + const mySchema = pgSchema('mySchema_15'); + const users = mySchema.table('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + const query = db.insert(users) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: [users.id, users.name], set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "mySchema_15"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); + }, + ); + + test.concurrent('mySchema :: build query insert with onConflict do nothing + target', async ({ db }) => { + const mySchema = pgSchema('mySchema_16'); + const users = mySchema.table('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + const query = db.insert(users) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing({ target: users.id }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "mySchema_16"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', + params: ['John', '["foo","bar"]'], + }); + }); + + test + .concurrent( + 'mySchema :: select from tables with same name from different schema using alias', + async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + const users = mySchema.table('users_99', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + const usersDefault = pgTable('users_17', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + await push({ users, usersDefault }); + + await db.insert(users).values({ id: 10, name: 'Ivan' }); + await db.insert(usersDefault).values({ id: 11, name: 'Hans' }); + + const customerAlias = alias(usersDefault, 'customer'); + + const result = await db + .select().from(users) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(customerAlias.id, 11)); + + expect(result).toEqual([{ + users_99: { + id: 10, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.users_99.createdAt, + }, + customer: { + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]!.customer!.createdAt, + }, + }]); + }, + ); + + test.concurrent('mySchema :: view', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + + const users = mySchema.table('users_102', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + const cities = mySchema.table('cities_101', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users, cities }); + + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users} where ${eq(users.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(cities).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); + + test.concurrent('mySchema :: materialized view', async ({ db, push }) => { + const mySchema = pgSchema('mySchema'); + + const users = mySchema.table('users_100', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + const cities = mySchema.table('cities_100', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users, cities }); + + const newYorkers1 = mySchema.materializedView('new_yorkers') + .as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + const newYorkers2 = mySchema.materializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users} where ${eq(users.cityId, 1)}`); + + const newYorkers3 = mySchema.materializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); + + await db.insert(cities).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([]); + } + + await db.refreshMaterializedView(newYorkers1); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop materialized view ${newYorkers1}`); + }); + + test.concurrent('limit 0', async ({ db, push }) => { + const users = pgTable('users_120', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const result = await db + .select() + .from(users) + .limit(0); + + expect(result).toEqual([]); + }); + + test.concurrent('limit -1', async ({ db, push }) => { + const users = pgTable('users_21', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values({ name: 'John' }); + const result = await db + .select() + .from(users) + .limit(-1); + + expect(result.length).toBeGreaterThan(0); + }); + + test.concurrent('Object keys as column names', async ({ db, push }) => { + // Tests the following: + // Column with required config + // Column with optional config without providing a value + // Column with optional config providing a value + // Column without config + const users = pgTable('users_22', { + id: bigserial({ mode: 'number' }).primaryKey(), + firstName: varchar(), + lastName: varchar({ length: 50 }), + admin: boolean(), + }); + + await push({ users }); + + await db.insert(users).values([ + { firstName: 'John', lastName: 'Doe', admin: true }, + { firstName: 'Jane', lastName: 'Smith', admin: false }, + ]); + const result = await db + .select({ id: users.id, firstName: users.firstName, lastName: users.lastName }) + .from(users) + .where(eq(users.admin, true)); + + expect(result).toEqual([ + { id: 1, firstName: 'John', lastName: 'Doe' }, + ]); + }); + + test.concurrent('proper json and jsonb handling', async ({ db, push }) => { + const jsonTable = pgTable('json_table_23', { + json: json('json').$type<{ name: string; age: number }>(), + jsonb: jsonb('jsonb').$type<{ name: string; age: number }>(), + }); + + await push({ jsonTable }); + + await db.insert(jsonTable).values({ json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 } }); + + const result = await db.select().from(jsonTable); + + const justNames = await db.select({ + name1: sql`${jsonTable.json}->>'name'`.as('name1'), + name2: sql`${jsonTable.jsonb}->>'name'`.as('name2'), + }).from(jsonTable); + + expect(result).toStrictEqual([ + { + json: { name: 'Tom', age: 75 }, + jsonb: { name: 'Pete', age: 23 }, + }, + ]); + + expect(justNames).toStrictEqual([ + { + name1: 'Tom', + name2: 'Pete', + }, + ]); + }); + + test + .concurrent( + 'set json/jsonb fields with objects and retrieve with the ->> operator', + async ({ db, push }) => { + const jsonTestTable_13 = pgTable('json_test_24', { + id: serial('id').primaryKey(), + json: json('json').notNull(), + jsonb: jsonb('jsonb').notNull(), + }); + + await push({ jsonTestTable_13 }); + + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable_13).values({ + json: obj, + jsonb: obj, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable_13.json}->>'string'`, + jsonNumberField: sql`${jsonTestTable_13.json}->>'number'`, + jsonbStringField: sql`${jsonTestTable_13.jsonb}->>'string'`, + jsonbNumberField: sql`${jsonTestTable_13.jsonb}->>'number'`, + }).from(jsonTestTable_13); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: String(testNumber), + jsonbStringField: testString, + jsonbNumberField: String(testNumber), + }]); + }, + ); + + test + .concurrent( + 'set json/jsonb fields with strings and retrieve with the ->> operator', + async ({ db, push }) => { + const jsonTestTable = pgTable('json_test_25', { + id: serial('id').primaryKey(), + json: json('json').notNull(), + jsonb: jsonb('jsonb').notNull(), + }); + + await push({ jsonTestTable }); + + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: sql`${JSON.stringify(obj)}`, + jsonb: sql`${JSON.stringify(obj)}`, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->>'string'`, + jsonNumberField: sql`${jsonTestTable.json}->>'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: String(testNumber), + jsonbStringField: testString, + jsonbNumberField: String(testNumber), + }]); + }, + ); + + test + .concurrent('set json/jsonb fields with objects and retrieve with the -> operator', async ({ db, push }) => { + const jsonTestTable = pgTable('json_test_26', { + id: serial('id').primaryKey(), + json: json('json').notNull(), + jsonb: jsonb('jsonb').notNull(), + }); + + await push({ jsonTestTable }); + + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: obj, + jsonb: obj, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->'string'`, + jsonNumberField: sql`${jsonTestTable.json}->'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: testNumber, + jsonbStringField: testString, + jsonbNumberField: testNumber, + }]); + }); + + test + .concurrent('set json/jsonb fields with strings and retrieve with the -> operator', async ({ db, push }) => { + const jsonTestTable = pgTable('json_test_27', { + id: serial('id').primaryKey(), + json: json('json').notNull(), + jsonb: jsonb('jsonb').notNull(), + }); + + await push({ jsonTestTable }); + + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: sql`${JSON.stringify(obj)}`, + jsonb: sql`${JSON.stringify(obj)}`, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->'string'`, + jsonNumberField: sql`${jsonTestTable.json}->'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: testNumber, + jsonbStringField: testString, + jsonbNumberField: testNumber, + }]); + }); + + test.concurrent('update ... from', async ({ db, push }) => { + const cities2Table = pgTable('cities_28', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2Table = pgTable('users_28', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + await push({ cities2Table, users2Table }); + + await db.insert(cities2Table).values([ + { name: 'New York City' }, + { name: 'Seattle' }, + ]); + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + ]); + + const result = await db + .update(users2Table) + .set({ + cityId: cities2Table.id, + }) + .from(cities2Table) + .where(and(eq(cities2Table.name, 'Seattle'), eq(users2Table.name, 'John'))) + .returning(); + + expect(result).toStrictEqual([{ + id: 1, + name: 'John', + cityId: 2, + cities_28: { + id: 2, + name: 'Seattle', + }, + }]); + }); + + test.concurrent('update ... from with alias', async ({ db, push }) => { + const cities2Table = pgTable('cities_29', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2Table = pgTable('users_108', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + await push({ cities2Table, users2Table }); + + await db.insert(cities2Table).values([ + { name: 'New York City' }, + { name: 'Seattle' }, + ]); + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + ]); + + const users = alias(users2Table, 'u'); + const cities = alias(cities2Table, 'c'); + const result = await db + .update(users) + .set({ + cityId: cities.id, + }) + .from(cities) + .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) + .returning(); + + expect(result).toStrictEqual([{ + id: 1, + name: 'John', + cityId: 2, + c: { + id: 2, + name: 'Seattle', + }, + }]); + }); + + test.concurrent('update ... from with join', async ({ db, push }) => { + const states = pgTable('states_30', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const cities = pgTable('cities_30', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + stateId: integer('state_id').references(() => states.id), + }); + const users = pgTable('users_30', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull().references(() => cities.id), + }); + + await push({ states, cities, users }); + + await db.insert(states).values([ + { name: 'New York' }, + { name: 'Washington' }, + ]); + await db.insert(cities).values([ + { name: 'New York City', stateId: 1 }, + { name: 'Seattle', stateId: 2 }, + { name: 'London' }, + ]); + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 3 }, + ]); + + const result1 = await db + .update(users) + .set({ + cityId: cities.id, + }) + .from(cities) + .leftJoin(states, eq(cities.stateId, states.id)) + .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) + .returning(); + const result2 = await db + .update(users) + .set({ + cityId: cities.id, + }) + .from(cities) + .leftJoin(states, eq(cities.stateId, states.id)) + .where(and(eq(cities.name, 'London'), eq(users.name, 'Jack'))) + .returning(); + + expect(result1).toStrictEqual([{ + id: 1, + name: 'John', + cityId: 2, + cities_30: { + id: 2, + name: 'Seattle', + stateId: 2, + }, + states_30: { + id: 2, + name: 'Washington', + }, + }]); + expect(result2).toStrictEqual([{ + id: 3, + name: 'Jack', + cityId: 3, + cities_30: { + id: 3, + name: 'London', + stateId: null, + }, + states_30: null, + }]); + }); + + test.concurrent('insert into ... select', async ({ db, push }) => { + const notifications = pgTable('notifications_31', { + id: serial('id').primaryKey(), + sentAt: timestamp('sent_at').notNull().defaultNow(), + message: text('message').notNull(), + }); + const users = pgTable('users_31', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const userNotications = pgTable('user_notifications_31', { + userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), + notificationId: integer('notification_id').notNull().references(() => notifications.id, { + onDelete: 'cascade', + }), + }, (t) => [primaryKey({ columns: [t.userId, t.notificationId] })]); + + await push({ notifications, users, userNotications }); + + const newNotification = await db + .insert(notifications) + .values({ message: 'You are one of the 3 lucky winners!' }) + .returning({ id: notifications.id }) + .then((result) => result[0]); + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + const sentNotifications = await db + .insert(userNotications) + .select( + db + .select({ + userId: users.id, + notificationId: sql`${newNotification!.id}`.as('notification_id'), + }) + .from(users) + .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) + .orderBy(asc(users.id)), + ) + .returning(); + + expect(sentNotifications).toStrictEqual([ + { userId: 1, notificationId: newNotification!.id }, + { userId: 3, notificationId: newNotification!.id }, + { userId: 5, notificationId: newNotification!.id }, + ]); + }); + + test.concurrent('insert into ... select with keys in different order', async ({ db, push }) => { + const users1 = pgTable('users1_32', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2 = pgTable('users2_32', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users1, users2 }); + + expect( + () => + db + .insert(users1) + .select( + db + .select({ + name: users2.name, + id: users2.id, + }) + .from(users2), + ), + ).toThrowError(); + }); + + test.concurrent('$count separate', async ({ db, push }) => { + const countTestTable = pgTable('count_test_33', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.$count(countTestTable); + + expect(count).toStrictEqual(4); + }); + + test.concurrent('$count embedded', async ({ db, push }) => { + const countTestTable = pgTable('count_test_34', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.select({ + count: db.$count(countTestTable), + }).from(countTestTable); + + expect(count).toStrictEqual([ + { count: 4 }, + { count: 4 }, + { count: 4 }, + { count: 4 }, + ]); + }); + + test.concurrent('$count separate reuse', async ({ db, push }) => { + const countTestTable = pgTable('count_test_35', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = db.$count(countTestTable); + + const count1 = await count; + + await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); + + const count2 = await count; + + await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); + + const count3 = await count; + + expect(count1).toStrictEqual(4); + expect(count2).toStrictEqual(5); + expect(count3).toStrictEqual(6); + }); + + test.concurrent('$count embedded reuse', async ({ db, push }) => { + const countTestTable = pgTable('count_test_36', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = db.select({ + count: db.$count(countTestTable), + }).from(countTestTable); + + const count1 = await count; + + await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); + + const count2 = await count; + + await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); + + const count3 = await count; + + expect(count1).toStrictEqual([ + { count: 4 }, + { count: 4 }, + { count: 4 }, + { count: 4 }, + ]); + expect(count2).toStrictEqual([ + { count: 5 }, + { count: 5 }, + { count: 5 }, + { count: 5 }, + { count: 5 }, + ]); + expect(count3).toStrictEqual([ + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + ]); + }); + + test.concurrent('$count separate with filters', async ({ db, push }) => { + const countTestTable = pgTable('count_test_37', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.$count(countTestTable, gt(countTestTable.id, 1)); + expect(count).toStrictEqual(3); + }); + + test.concurrent('$count embedded with filters', async ({ db, push }) => { + const countTestTable = pgTable('count_test_38', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await push({ countTestTable }); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.select({ + count: db.$count(countTestTable, gt(countTestTable.id, 1)), + }).from(countTestTable); + + expect(count).toStrictEqual([ + { count: 3 }, + { count: 3 }, + { count: 3 }, + { count: 3 }, + ]); + }); + + test.concurrent('insert multiple rows into table with generated identity column', async ({ db, push }) => { + const identityColumnsTable = pgTable('identity_columns_table_39', { + id: integer('id').generatedAlwaysAsIdentity(), + id1: integer('id1').generatedByDefaultAsIdentity(), + name: text('name').notNull(), + }); + + // not passing identity columns + await db.execute(sql`drop table if exists ${identityColumnsTable}`); + await push({ identityColumnsTable }); + + let result = await db.insert(identityColumnsTable).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Bob' }, + ]).returning(); + + expect(result).toEqual([ + { id: 1, id1: 1, name: 'John' }, + { id: 2, id1: 2, name: 'Jane' }, + { id: 3, id1: 3, name: 'Bob' }, + ]); + + // passing generated by default as identity column + await db.execute(sql`drop table if exists ${identityColumnsTable}`); + await push({ identityColumnsTable }); + + result = await db.insert(identityColumnsTable).values([ + { name: 'John', id1: 3 }, + { name: 'Jane', id1: 5 }, + { name: 'Bob', id1: 5 }, + ]).returning(); + + expect(result).toEqual([ + { id: 1, id1: 3, name: 'John' }, + { id: 2, id1: 5, name: 'Jane' }, + { id: 3, id1: 5, name: 'Bob' }, + ]); + + // passing all identity columns + await db.execute(sql`drop table if exists ${identityColumnsTable}`); + await push({ identityColumnsTable }); + + result = await db.insert(identityColumnsTable).overridingSystemValue().values([ + { name: 'John', id: 2, id1: 3 }, + { name: 'Jane', id: 4, id1: 5 }, + { name: 'Bob', id: 4, id1: 5 }, + ]).returning(); + + expect(result).toEqual([ + { id: 2, id1: 3, name: 'John' }, + { id: 4, id1: 5, name: 'Jane' }, + { id: 4, id1: 5, name: 'Bob' }, + ]); + }); + + test.concurrent('insert as cte', async ({ db, push }) => { + const users = pgTable('users_40', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + const sq1 = db.$with('sq').as( + db.insert(users).values({ name: 'John' }).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + const result2 = await db.with(sq1).select({ id: sq1.id }).from(sq1); + + const sq2 = db.$with('sq').as( + db.insert(users).values({ name: 'Jane' }).returning({ id: users.id, name: users.name }), + ); + const result3 = await db.with(sq2).select().from(sq2); + const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John' }]); + expect(result2).toEqual([{ id: 2 }]); + expect(result3).toEqual([{ id: 3, name: 'Jane' }]); + expect(result4).toEqual([{ name: 'Jane' }]); + }); + + test.concurrent('update as cte', async ({ db, push }) => { + const users = pgTable('users_41', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + age: integer('age').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([ + { name: 'John', age: 30 }, + { name: 'Jane', age: 30 }, + ]); + + const sq1 = db.$with('sq').as( + db.update(users).set({ age: 25 }).where(eq(users.name, 'John')).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + await db.update(users).set({ age: 30 }); + const result2 = await db.with(sq1).select({ age: sq1.age }).from(sq1); + + const sq2 = db.$with('sq').as( + db.update(users).set({ age: 20 }).where(eq(users.name, 'Jane')).returning({ name: users.name, age: users.age }), + ); + const result3 = await db.with(sq2).select().from(sq2); + await db.update(users).set({ age: 30 }); + const result4 = await db.with(sq2).select({ age: sq2.age }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John', age: 25 }]); + expect(result2).toEqual([{ age: 25 }]); + expect(result3).toEqual([{ name: 'Jane', age: 20 }]); + expect(result4).toEqual([{ age: 20 }]); + }); + + test.concurrent('delete as cte', async ({ db, push }) => { + const users = pgTable('users_107', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq').as( + db.delete(users).where(eq(users.name, 'John')).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + await db.insert(users).values({ name: 'John' }); + const result2 = await db.with(sq1).select({ name: sq1.name }).from(sq1); + + const sq2 = db.$with('sq').as( + db.delete(users).where(eq(users.name, 'Jane')).returning({ id: users.id, name: users.name }), + ); + const result3 = await db.with(sq2).select().from(sq2); + await db.insert(users).values({ name: 'Jane' }); + const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John' }]); + expect(result2).toEqual([{ name: 'John' }]); + expect(result3).toEqual([{ id: 2, name: 'Jane' }]); + expect(result4).toEqual([{ name: 'Jane' }]); + }); + + test.concurrent('sql operator as cte', async ({ db, push }) => { + const users = pgTable('users_109', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users }); + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(sql`select * from ${users} where ${users.name} = 'John'`); + const result1 = await db.with(sq1).select().from(sq1); + + const sq2 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); + const result2 = await db.with(sq2).select().from(sq1); + + expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); + expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); + }); + + test.concurrent('cross join', async ({ db, push }) => { + const usersTable = pgTable('users_44', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const citiesTable = pgTable('cities_44', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ usersTable, citiesTable }); + + await db + .insert(usersTable) + .values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + await db + .insert(citiesTable) + .values([ + { name: 'Seattle' }, + { name: 'New York City' }, + ]); + + const result = await db + .select({ + user: usersTable.name, + city: citiesTable.name, + }) + .from(usersTable) + .crossJoin(citiesTable) + .orderBy(usersTable.name, citiesTable.name); + + expect(result).toStrictEqual([ + { city: 'New York City', user: 'Jane' }, + { city: 'Seattle', user: 'Jane' }, + { city: 'New York City', user: 'John' }, + { city: 'Seattle', user: 'John' }, + ]); + }); + + test.concurrent('left join (lateral)', async ({ db, push }) => { + const citiesTable = pgTable('cities_45', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2Table = pgTable('users2_45', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id'), + }); + + await push({ citiesTable, users2Table }); + + await db + .insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const sq = db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: users2Table.cityId, + }) + .from(users2Table) + .where(eq(users2Table.cityId, citiesTable.id)) + .as('sq'); + + const res = await db + .select({ + cityId: citiesTable.id, + cityName: citiesTable.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(citiesTable) + .leftJoinLateral(sq, sql`true`); + + expect(res).toStrictEqual([ + { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, + { cityId: 2, cityName: 'London', userId: null, userName: null }, + ]); + }); + + test.concurrent('inner join (lateral)', async ({ db, push }) => { + const citiesTable = pgTable('cities_46', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2Table = pgTable('users2_46', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), + }); + + await push({ citiesTable, users2Table }); + + await db + .insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const sq = db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: users2Table.cityId, + }) + .from(users2Table) + .where(eq(users2Table.cityId, citiesTable.id)) + .as('sq'); + + const res = await db + .select({ + cityId: citiesTable.id, + cityName: citiesTable.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(citiesTable) + .innerJoinLateral(sq, sql`true`); + + expect(res).toStrictEqual([ + { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, + ]); + }); + + test.concurrent('cross join (lateral)', async ({ db, push }) => { + const citiesTable = pgTable('cities_47', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2Table = pgTable('users2_47', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), + }); + + await push({ citiesTable, users2Table }); + + await db + .insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }, { id: 3, name: 'Berlin' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }, { + name: 'Patrick', + cityId: 2, + }]); + + const sq = db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: users2Table.cityId, + }) + .from(users2Table) + .where(not(like(citiesTable.name, 'L%'))) + .as('sq'); + + const res = await db + .select({ + cityId: citiesTable.id, + cityName: citiesTable.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(citiesTable) + .crossJoinLateral(sq) + .orderBy(citiesTable.id, sq.userId); + + expect(res).toStrictEqual([ + { + cityId: 1, + cityName: 'Paris', + userId: 1, + userName: 'John', + }, + { + cityId: 1, + cityName: 'Paris', + userId: 2, + userName: 'Jane', + }, + { + cityId: 1, + cityName: 'Paris', + userId: 3, + userName: 'Patrick', + }, + { + cityId: 3, + cityName: 'Berlin', + userId: 1, + userName: 'John', + }, + { + cityId: 3, + cityName: 'Berlin', + userId: 2, + userName: 'Jane', + }, + { + cityId: 3, + cityName: 'Berlin', + userId: 3, + userName: 'Patrick', + }, + ]); + }); + + test.concurrent('column.as', async ({ db, push }) => { + const users = pgTable('users_column_as', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => cities.id), + }); + + const cities = pgTable('cities_column_as', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const ucView = pgView('cities_users_column_as_view').as((qb) => + qb.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)) + ); + + await push({ users, cities, ucView }); + + const citiesInsRet = await db.insert(cities).values([{ + id: 1, + name: 'Firstistan', + }, { + id: 2, + name: 'Secondaria', + }]).returning({ + cityId: cities.id.as('city_id'), + cityName: cities.name.as('city_name'), + }); + + expect(citiesInsRet).toStrictEqual(expect.arrayContaining([{ + cityId: 1, + cityName: 'Firstistan', + }, { + cityId: 2, + cityName: 'Secondaria', + }])); + + const usersInsRet = await db.insert(users).values([{ id: 1, name: 'First', cityId: 1 }, { + id: 2, + name: 'Second', + cityId: 2, + }, { + id: 3, + name: 'Third', + }]).returning({ + userId: users.id.as('user_id'), + userName: users.name.as('users_name'), + userCityId: users.cityId, + }); + + expect(usersInsRet).toStrictEqual(expect.arrayContaining([{ userId: 1, userName: 'First', userCityId: 1 }, { + userId: 2, + userName: 'Second', + userCityId: 2, + }, { + userId: 3, + userName: 'Third', + userCityId: null, + }])); + + const joinSelectReturn = await db.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)); + + expect(joinSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewSelectReturn = await db.select().from(ucView); + + expect(viewSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewJoinReturn = await db.select({ + userId: ucView.userId.as('user_id_ucv'), + cityId: cities.id.as('city_id'), + userName: ucView.userName.as('user_name_ucv'), + cityName: cities.name.as('city_name'), + }).from(ucView).leftJoin(cities, eq(cities.id, ucView.cityId)); + + expect(viewJoinReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + }); + + test.concurrent('select from a many subquery', async ({ db, push }) => { + const citiesTable = pgTable('cities_many_subquery', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }); + + const users2Table = pgTable('users2_many_subquery', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), + }); + + await push({ citiesTable, users2Table }); + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 2 }, + ]); + + const res = await db.select({ + population: db.select({ count: count().as('count') }).from(users2Table).where( + eq(users2Table.cityId, citiesTable.id), + ).as( + 'population', + ), + name: citiesTable.name, + }).from(citiesTable); + + expectTypeOf(res).toEqualTypeOf<{ + population: number; + name: string; + }[]>(); + + expect(res).toStrictEqual([{ + population: 1, + name: 'Paris', + }, { + population: 2, + name: 'London', + }]); + }); + + test.concurrent('select from a one subquery', async ({ db, push }) => { + const citiesTable = pgTable('cities_one_subquery', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }); + + const users2Table = pgTable('users2_one_subquery', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), + }); + + await push({ citiesTable, users2Table }); + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 2 }, + ]); + + const res = await db.select({ + cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) + .as( + 'cityName', + ), + name: users2Table.name, + }).from(users2Table); + + expectTypeOf(res).toEqualTypeOf<{ + cityName: string; + name: string; + }[]>(); + + expect(res).toStrictEqual([{ + cityName: 'Paris', + name: 'John', + }, { + cityName: 'London', + name: 'Jane', + }, { + cityName: 'London', + name: 'Jack', + }]); + }); + + test.concurrent('test $onUpdateFn and $onUpdate works with sql value', async ({ db, push }) => { + const users = pgTable('users_on_update', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updatedAt: timestamp('updated_at', { mode: 'date' }).notNull().$onUpdate(() => sql`now()`), + }); + + await push({ users }); + + const insertResp = await db.insert(users).values({ + name: 'John', + }).returning({ + updatedAt: users.updatedAt, + }); + await new Promise((resolve) => setTimeout(resolve, 1000)); + + const now = Date.now(); + await new Promise((resolve) => setTimeout(resolve, 1000)); + const updateResp = await db.update(users).set({ + name: 'John', + }).returning({ + updatedAt: users.updatedAt, + }); + + expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); + expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); + }); + + test.concurrent('all types', async ({ db, push }) => { + const en = pgEnum('en_48', ['enVal1', 'enVal2']); + const allTypesTable = pgTable('all_types_48', { + serial: serial('serial'), + bigserial53: bigserial('bigserial53', { + mode: 'number', + }), + bigserial64: bigserial('bigserial64', { + mode: 'bigint', + }), + int: integer('int'), + bigint53: bigint('bigint53', { + mode: 'number', + }), + bigint64: bigint('bigint64', { + mode: 'bigint', + }), + bigintString: bigint('bigint_string', { + mode: 'string', + }), + bool: boolean('bool'), + bytea: bytea('bytea'), + char: char('char'), + cidr: cidr('cidr'), + date: date('date', { + mode: 'date', + }), + dateStr: date('date_str', { + mode: 'string', + }), + double: doublePrecision('double'), + enum: en('enum'), + inet: inet('inet'), + interval: interval('interval'), + json: json('json'), + jsonb: jsonb('jsonb'), + line: line('line', { + mode: 'abc', + }), + lineTuple: line('line_tuple', { + mode: 'tuple', + }), + macaddr: macaddr('macaddr'), + macaddr8: macaddr8('macaddr8'), + numeric: numeric('numeric'), + numericNum: numeric('numeric_num', { + mode: 'number', + }), + numericBig: numeric('numeric_big', { + mode: 'bigint', + }), + point: point('point', { + mode: 'xy', + }), + pointTuple: point('point_tuple', { + mode: 'tuple', + }), + real: real('real'), + smallint: smallint('smallint'), + smallserial: smallserial('smallserial'), + text: text('text'), + time: time('time'), + timestamp: timestamp('timestamp', { + mode: 'date', + }), + timestampTz: timestamp('timestamp_tz', { + mode: 'date', + withTimezone: true, + }), + timestampStr: timestamp('timestamp_str', { + mode: 'string', + }), + timestampTzStr: timestamp('timestamp_tz_str', { + mode: 'string', + withTimezone: true, + }), + uuid: uuid('uuid'), + varchar: varchar('varchar'), + arrint: integer('arrint').array(), + arrbigint53: bigint('arrbigint53', { + mode: 'number', + }).array(), + arrbigint64: bigint('arrbigint64', { + mode: 'bigint', + }).array(), + arrbigintString: bigint('arrbigint_string', { + mode: 'string', + }).array(), + arrbool: boolean('arrbool').array(), + arrbytea: bytea('arrbytea').array(), + arrchar: char('arrchar').array(), + arrcidr: cidr('arrcidr').array(), + arrdate: date('arrdate', { + mode: 'date', + }).array(), + arrdateStr: date('arrdate_str', { + mode: 'string', + }).array(), + arrdouble: doublePrecision('arrdouble').array(), + arrenum: en('arrenum').array(), + arrinet: inet('arrinet').array(), + arrinterval: interval('arrinterval').array(), + arrjson: json('arrjson').array(), + arrjsonb: jsonb('arrjsonb').array(), + arrline: line('arrline', { + mode: 'abc', + }).array(), + arrlineTuple: line('arrline_tuple', { + mode: 'tuple', + }).array(), + arrmacaddr: macaddr('arrmacaddr').array(), + arrmacaddr8: macaddr8('arrmacaddr8').array(), + arrnumeric: numeric('arrnumeric').array(), + arrnumericNum: numeric('arrnumeric_num', { + mode: 'number', + }).array(), + arrnumericBig: numeric('arrnumeric_big', { + mode: 'bigint', + }).array(), + arrpoint: point('arrpoint', { + mode: 'xy', + }).array(), + arrpointTuple: point('arrpoint_tuple', { + mode: 'tuple', + }).array(), + arrreal: real('arrreal').array(), + arrsmallint: smallint('arrsmallint').array(), + arrtext: text('arrtext').array(), + arrtime: time('arrtime').array(), + arrtimestamp: timestamp('arrtimestamp', { + mode: 'date', + }).array(), + arrtimestampTz: timestamp('arrtimestamp_tz', { + mode: 'date', + withTimezone: true, + }).array(), + arrtimestampStr: timestamp('arrtimestamp_str', { + mode: 'string', + }).array(), + arrtimestampTzStr: timestamp('arrtimestamp_tz_str', { + mode: 'string', + withTimezone: true, + }).array(), + arruuid: uuid('arruuid').array(), + arrvarchar: varchar('arrvarchar').array(), + }); + + await push({ en, allTypesTable }); + + await db.insert(allTypesTable).values({ + serial: 1, + smallserial: 15, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + bigserial53: 9007199254740991, + bigserial64: 5044565289845416380n, + bool: true, + bytea: Buffer.from('BYTES'), + char: 'c', + cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', + inet: '192.168.0.1/24', + macaddr: '08:00:2b:01:02:03', + macaddr8: '08:00:2b:01:02:03:04:05', + date: new Date(1741743161623), + dateStr: new Date(1741743161623).toISOString(), + double: 15.35325689124218, + enum: 'enVal1', + int: 621, + interval: '2 months ago', + json: { + str: 'strval', + arr: ['str', 10], + }, + jsonb: { + str: 'strvalb', + arr: ['strb', 11], + }, + line: { + a: 1, + b: 2, + c: 3, + }, + lineTuple: [1, 2, 3], + numeric: '475452353476', + numericNum: 9007199254740991, + numericBig: 5044565289845416380n, + point: { + x: 24.5, + y: 49.6, + }, + pointTuple: [57.2, 94.3], + real: 1.048596, + smallint: 10, + text: 'TEXT STRING', + time: '13:59:28', + timestamp: new Date(1741743161623), + timestampTz: new Date(1741743161623), + timestampStr: new Date(1741743161623).toISOString(), + timestampTzStr: new Date(1741743161623).toISOString(), + uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', + varchar: 'C4-', + arrbigint53: [9007199254740991], + arrbigint64: [5044565289845416380n], + arrbigintString: ['5044565289845416380'], + arrbool: [true], + arrbytea: [Buffer.from('BYTES')], + arrchar: ['c'], + arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], + arrinet: ['192.168.0.1/24'], + arrmacaddr: ['08:00:2b:01:02:03'], + arrmacaddr8: ['08:00:2b:01:02:03:04:05'], + arrdate: [new Date(1741743161623)], + arrdateStr: [new Date(1741743161623).toISOString()], + arrdouble: [15.35325689124218], + arrenum: ['enVal1'], + arrint: [621], + arrinterval: ['2 months ago'], + arrjson: [{ + str: 'strval', + arr: ['str', 10], + }], + arrjsonb: [{ + str: 'strvalb', + arr: ['strb', 11], + }], + arrline: [{ + a: 1, + b: 2, + c: 3, + }], + arrlineTuple: [[1, 2, 3]], + arrnumeric: ['475452353476'], + arrnumericNum: [9007199254740991], + arrnumericBig: [5044565289845416380n], + arrpoint: [{ + x: 24.5, + y: 49.6, + }], + arrpointTuple: [[57.2, 94.3]], + arrreal: [1.048596], + arrsmallint: [10], + arrtext: ['TEXT STRING'], + arrtime: ['13:59:28'], + arrtimestamp: [new Date(1741743161623)], + arrtimestampTz: [new Date(1741743161623)], + arrtimestampStr: [new Date(1741743161623).toISOString()], + arrtimestampTzStr: [new Date(1741743161623).toISOString()], + arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], + arrvarchar: ['C4-'], + }); + + const rawRes = await db.select().from(allTypesTable); + + type ExpectedType = { + serial: number; + bigserial53: number; + bigserial64: bigint; + int: number | null; + bigint53: number | null; + bigint64: bigint | null; + bigintString: string | null; + bool: boolean | null; + bytea: Buffer | null; + char: string | null; + cidr: string | null; + date: Date | null; + dateStr: string | null; + double: number | null; + enum: 'enVal1' | 'enVal2' | null; + inet: string | null; + interval: string | null; + json: unknown; + jsonb: unknown; + line: { + a: number; + b: number; + c: number; + } | null; + lineTuple: [number, number, number] | null; + macaddr: string | null; + macaddr8: string | null; + numeric: string | null; + numericNum: number | null; + numericBig: bigint | null; + point: { + x: number; + y: number; + } | null; + pointTuple: [number, number] | null; + real: number | null; + smallint: number | null; + smallserial: number; + text: string | null; + time: string | null; + timestamp: Date | null; + timestampTz: Date | null; + timestampStr: string | null; + timestampTzStr: string | null; + uuid: string | null; + varchar: string | null; + arrint: number[] | null; + arrbigint53: number[] | null; + arrbigint64: bigint[] | null; + arrbigintString: string[] | null; + arrbool: boolean[] | null; + arrbytea: Buffer[] | null; + arrchar: string[] | null; + arrcidr: string[] | null; + arrdate: Date[] | null; + arrdateStr: string[] | null; + arrdouble: number[] | null; + arrenum: ('enVal1' | 'enVal2')[] | null; + arrinet: string[] | null; + arrinterval: string[] | null; + arrjson: unknown[] | null; + arrjsonb: unknown[] | null; + arrline: { + a: number; + b: number; + c: number; + }[] | null; + arrlineTuple: [number, number, number][] | null; + arrmacaddr: string[] | null; + arrmacaddr8: string[] | null; + arrnumeric: string[] | null; + arrnumericNum: number[] | null; + arrnumericBig: bigint[] | null; + arrpoint: { x: number; y: number }[] | null; + arrpointTuple: [number, number][] | null; + arrreal: number[] | null; + arrsmallint: number[] | null; + arrtext: string[] | null; + arrtime: string[] | null; + arrtimestamp: Date[] | null; + arrtimestampTz: Date[] | null; + arrtimestampStr: string[] | null; + arrtimestampTzStr: string[] | null; + arruuid: string[] | null; + arrvarchar: string[] | null; + }[]; + + const expectedRes: ExpectedType = [ + { + serial: 1, + bigserial53: 9007199254740991, + bigserial64: 5044565289845416380n, + int: 621, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + bool: true, + bytea: Buffer.from('BYTES'), + char: 'c', + cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', + date: new Date('2025-03-12T00:00:00.000Z'), + dateStr: '2025-03-12', + double: 15.35325689124218, + enum: 'enVal1', + inet: '192.168.0.1/24', + interval: '-2 mons', + json: { str: 'strval', arr: ['str', 10] }, + jsonb: { arr: ['strb', 11], str: 'strvalb' }, + line: { a: 1, b: 2, c: 3 }, + lineTuple: [1, 2, 3], + macaddr: '08:00:2b:01:02:03', + macaddr8: '08:00:2b:01:02:03:04:05', + numeric: '475452353476', + numericNum: 9007199254740991, + numericBig: 5044565289845416380n, + point: { x: 24.5, y: 49.6 }, + pointTuple: [57.2, 94.3], + real: 1.048596, + smallint: 10, + smallserial: 15, + text: 'TEXT STRING', + time: '13:59:28', + timestamp: new Date('2025-03-12T01:32:41.623Z'), + timestampTz: new Date('2025-03-12T01:32:41.623Z'), + timestampStr: '2025-03-12 01:32:41.623', + timestampTzStr: '2025-03-12 01:32:41.623+00', + uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', + varchar: 'C4-', + arrint: [621], + arrbigint53: [9007199254740991], + arrbigint64: [5044565289845416380n], + arrbigintString: ['5044565289845416380'], + arrbool: [true], + arrbytea: [Buffer.from('BYTES')], + arrchar: ['c'], + arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], + arrdate: [new Date('2025-03-12T00:00:00.000Z')], + arrdateStr: ['2025-03-12'], + arrdouble: [15.35325689124218], + arrenum: ['enVal1'], + arrinet: ['192.168.0.1/24'], + arrinterval: ['-2 mons'], + arrjson: [{ str: 'strval', arr: ['str', 10] }], + arrjsonb: [{ arr: ['strb', 11], str: 'strvalb' }], + arrline: [{ a: 1, b: 2, c: 3 }], + arrlineTuple: [[1, 2, 3]], + arrmacaddr: ['08:00:2b:01:02:03'], + arrmacaddr8: ['08:00:2b:01:02:03:04:05'], + arrnumeric: ['475452353476'], + arrnumericNum: [9007199254740991], + arrnumericBig: [5044565289845416380n], + arrpoint: [{ x: 24.5, y: 49.6 }], + arrpointTuple: [[57.2, 94.3]], + arrreal: [1.048596], + arrsmallint: [10], + arrtext: ['TEXT STRING'], + arrtime: ['13:59:28'], + arrtimestamp: [new Date('2025-03-12T01:32:41.623Z')], + arrtimestampTz: [new Date('2025-03-12T01:32:41.623Z')], + arrtimestampStr: ['2025-03-12 01:32:41.623'], + arrtimestampTzStr: ['2025-03-12 01:32:41.623+00'], + arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], + arrvarchar: ['C4-'], + }, + ]; + + expectTypeOf(rawRes).toEqualTypeOf(); + expect(rawRes).toStrictEqual(expectedRes); + }); + }); +} diff --git a/integration-tests/tests/pg/common-rqb.ts b/integration-tests/tests/pg/common-rqb.ts new file mode 100644 index 0000000000..7a56a0cac0 --- /dev/null +++ b/integration-tests/tests/pg/common-rqb.ts @@ -0,0 +1,793 @@ +// eslint-disable-next-line @typescript-eslint/consistent-type-imports +import { sql } from 'drizzle-orm'; +import { integer, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; +import { describe, expect } from 'vitest'; +import type { Test } from './instrumentation'; + +export function tests(test: Test) { + describe('common', () => { + test.concurrent('RQB v2 simple find first - no rows', async ({ push, createDB }) => { + const users = pgTable('rqb_users_1', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const result = await db.query.users.findFirst(); + + expect(result).toStrictEqual(undefined); + }); + + test.concurrent('RQB v2 simple find first - multiple rows', async ({ push, createDB }) => { + const users = pgTable('rqb_users_2', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const result = await db.query.users.findFirst({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: date, + name: 'Second', + }); + }); + + test.concurrent('RQB v2 simple find first - with relation', async ({ push, createDB }) => { + const users = pgTable('rqb_users_3', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + const posts = pgTable('rqb_posts_3', { + id: serial().primaryKey().notNull(), + userId: integer('user_id').notNull(), + content: text(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users, posts }); + const db = createDB({ users, posts }, (r) => ({ + users: { + posts: r.many.posts({ + from: r.users.id, + to: r.posts.userId, + }), + }, + posts: { + author: r.one.users({ + from: r.posts.userId, + to: r.users.id, + }), + }, + })); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(posts).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + const result = await db.query.users.findFirst({ + with: { + posts: { + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual({ + id: 1, + createdAt: date, + name: 'First', + posts: [{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }], + }); + }); + + test.concurrent('RQB v2 simple find first - placeholders', async ({ push, createDB }) => { + const users = pgTable('rqb_users_4', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const query = db.query.users.findFirst({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare('rqb_v2_find_first_placeholders'); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: date, + name: 'Second', + }); + }); + + test.concurrent('RQB v2 simple find many - no rows', async ({ push, createDB }) => { + const users = pgTable('rqb_users_5', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const result = await db.query.users.findMany(); + + expect(result).toStrictEqual([]); + }); + + test.concurrent('RQB v2 simple find many - multiple rows', async ({ push, createDB }) => { + const users = pgTable('rqb_users_6', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const result = await db.query.users.findMany({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: date, + name: 'Second', + }, { + id: 1, + createdAt: date, + name: 'First', + }]); + }); + + test.concurrent('RQB v2 simple find many - with relation', async ({ push, createDB }) => { + const users = pgTable('rqb_users_7', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + const posts = pgTable('rqb_posts_7', { + id: serial().primaryKey().notNull(), + userId: integer('user_id').notNull(), + content: text(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users, posts }); + const db = createDB({ users, posts }, (r) => ({ + posts: { + author: r.one.users({ + from: r.posts.userId, + to: r.users.id, + }), + }, + })); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(posts).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + const result = await db.query.posts.findMany({ + with: { + author: true, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + author: { + id: 1, + createdAt: date, + name: 'First', + }, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + author: { + id: 1, + createdAt: date, + name: 'First', + }, + }]); + }); + + test.concurrent('RQB v2 simple find many - placeholders', async ({ push, createDB }) => { + const users = pgTable('rqb_users_8', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const query = db.query.users.findMany({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare('rqb_v2_find_many_placeholders'); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: date, + name: 'Second', + }]); + }); + + test.concurrent('RQB v2 transaction find first - no rows', async ({ push, createDB }) => { + const users = pgTable('rqb_users_9', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + await db.transaction(async (db) => { + const result = await db.query.users.findFirst(); + + expect(result).toStrictEqual(undefined); + }); + }); + + test.concurrent('RQB v2 transaction find first - multiple rows', async ({ push, createDB }) => { + const users = pgTable('rqb_users_10', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { + const result = await db.query.users.findFirst({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: date, + name: 'Second', + }); + }); + }); + + test.concurrent('RQB v2 transaction find first - with relation', async ({ push, createDB }) => { + const users = pgTable('rqb_users_11', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + const posts = pgTable('rqb_posts_11', { + id: serial().primaryKey().notNull(), + userId: integer('user_id').notNull(), + content: text(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users, posts }); + const db = createDB({ users, posts }, (r) => ({ + users: { + posts: r.many.posts({ + from: r.users.id, + to: r.posts.userId, + }), + }, + })); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(posts).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + await db.transaction(async (db) => { + const result = await db.query.users.findFirst({ + with: { + posts: { + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual({ + id: 1, + createdAt: date, + name: 'First', + posts: [{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }], + }); + }); + }); + + test.concurrent('RQB v2 transaction find first - placeholders', async ({ push, createDB }) => { + const users = pgTable('rqb_users_12', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { + const query = db.query.users.findFirst({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare('rqb_v2_find_first_tx_placeholders'); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: date, + name: 'Second', + }); + }); + }); + + test.concurrent('RQB v2 transaction find many - no rows', async ({ push, createDB }) => { + const users = pgTable('rqb_users_13', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + await db.transaction(async (db) => { + const result = await db.query.users.findMany(); + + expect(result).toStrictEqual([]); + }); + }); + + test.concurrent('RQB v2 transaction find many - multiple rows', async ({ push, createDB }) => { + const users = pgTable('rqb_users_14', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { + const result = await db.query.users.findMany({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: date, + name: 'Second', + }, { + id: 1, + createdAt: date, + name: 'First', + }]); + }); + }); + + test.concurrent('RQB v2 transaction find many - with relation', async ({ push, createDB }) => { + const users = pgTable('rqb_users_15', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + const posts = pgTable('rqb_posts_15', { + id: serial().primaryKey().notNull(), + userId: integer('user_id').notNull(), + content: text(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users, posts }); + const db = createDB({ users, posts }, (r) => ({ + posts: { + author: r.one.users({ + from: r.posts.userId, + to: r.users.id, + }), + }, + })); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(posts).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + await db.transaction(async (db) => { + const result = await db.query.posts.findMany({ + with: { + author: true, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + author: { + id: 1, + createdAt: date, + name: 'First', + }, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + author: { + id: 1, + createdAt: date, + name: 'First', + }, + }]); + }); + }); + + test.concurrent('RQB v2 transaction find many - placeholders', async ({ push, createDB }) => { + const users = pgTable('rqb_users_16', { + id: serial().primaryKey().notNull(), + name: text().notNull(), + createdAt: timestamp('created_at', { + mode: 'date', + precision: 3, + }).notNull(), + }); + + await push({ users }); + const db = createDB({ users }); + + const date = new Date(120000); + + await db.insert(users).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { + const query = db.query.users.findMany({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare('rqb_v2_find_many_placeholders_10'); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: date, + name: 'Second', + }]); + }); + }); + }); +} diff --git a/integration-tests/tests/pg/common.ts b/integration-tests/tests/pg/common.ts new file mode 100644 index 0000000000..e489ee9102 --- /dev/null +++ b/integration-tests/tests/pg/common.ts @@ -0,0 +1,19 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; + +import { tests as tests4 } from './common-cache'; +import { tests as tests1 } from './common-pt1'; +import { tests as tests2 } from './common-pt2'; +import { tests as tests3 } from './common-rqb'; +import type { Test } from './instrumentation'; + +export function tests(test: Test, exclude: string[]) { + test.beforeEach(async ({ task, skip }) => { + if (exclude.includes(task.name)) skip(); + }); + + tests1(test); + tests2(test); + tests3(test); + tests4(test); +} diff --git a/integration-tests/tests/pg/instrumentation.ts b/integration-tests/tests/pg/instrumentation.ts new file mode 100644 index 0000000000..92260e0b08 --- /dev/null +++ b/integration-tests/tests/pg/instrumentation.ts @@ -0,0 +1,697 @@ +import { neon, neonConfig, type NeonQueryFunction, Pool as NeonPool } from '@neondatabase/serverless'; + +import { PGlite } from '@electric-sql/pglite'; +import { + type AnyRelationsBuilderConfig, + defineRelations, + type ExtractTablesFromSchema, + type ExtractTablesWithRelations, + getTableName, + is, + type RelationsBuilder, + type RelationsBuilderConfig, + Table, +} from 'drizzle-orm'; +import { Cache, type MutationOption } from 'drizzle-orm/cache/core'; +import type { CacheConfig } from 'drizzle-orm/cache/core/types'; +import { drizzle as drizzleNeonHttp, type NeonHttpDatabase } from 'drizzle-orm/neon-http'; +import { drizzle as drizzleNeonWs } from 'drizzle-orm/neon-serverless'; +import { drizzle as drizzleNodePostgres } from 'drizzle-orm/node-postgres'; +import type { + PgDatabase, + PgEnum, + PgEnumObject, + PgMaterializedView, + PgPolicy, + PgRole, + PgSchema, + PgSequence, + PgTable, + PgView, +} from 'drizzle-orm/pg-core'; +import { drizzle as drizzleProxy } from 'drizzle-orm/pg-proxy'; +import { drizzle as drizzlePglite } from 'drizzle-orm/pglite'; +import { drizzle as drizzlePostgresjs } from 'drizzle-orm/postgres-js'; +import Keyv from 'keyv'; +import { Client as ClientNodePostgres, types as typesNodePostgres } from 'pg'; +import postgres from 'postgres'; +import { test as base } from 'vitest'; +import ws from 'ws'; +import { relations } from './relations'; + +export type PostgresSchema = Record< + string, + | PgTable + | PgEnum + | PgEnumObject + | PgSchema + | PgSequence + | PgView + | PgMaterializedView + | PgRole + | PgPolicy + | unknown +>; + +neonConfig.webSocketConstructor = ws; + +// TODO: @L-Mario564 we need this rule only for drizzle-orm package +// oxlint-disable-next-line drizzle-internal/require-entity-kind +export class TestCache extends Cache { + private globalTtl: number = 1000; + private usedTablesPerKey: Record = {}; + + constructor(private readonly strat: 'explicit' | 'all', private kv: Keyv = new Keyv()) { + super(); + } + + override strategy() { + return this.strat; + } + + override async get(key: string, _tables: string[], _isTag: boolean): Promise { + const res = await this.kv.get(key) ?? undefined; + return res; + } + + override async put( + key: string, + response: any, + tables: string[], + isTag: boolean, + config?: CacheConfig, + ): Promise { + await this.kv.set(key, response, config ? config.ex : this.globalTtl); + for (const table of tables) { + const keys = this.usedTablesPerKey[table]; + if (keys === undefined) { + this.usedTablesPerKey[table] = [key]; + } else { + keys.push(key); + } + } + } + + override async onMutate(params: MutationOption): Promise { + const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; + const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; + + const keysToDelete = new Set(); + + for (const table of tablesArray) { + const tableName = is(table, Table) ? getTableName(table) : table as string; + const keys = this.usedTablesPerKey[tableName] ?? []; + for (const key of keys) keysToDelete.add(key); + } + + if (keysToDelete.size > 0 || tagsArray.length > 0) { + for (const tag of tagsArray) { + await this.kv.delete(tag); + } + + for (const key of keysToDelete) { + await this.kv.delete(key); + for (const table of tablesArray) { + const tableName = is(table, Table) ? getTableName(table) : table as string; + this.usedTablesPerKey[tableName] = []; + } + } + } + } +} + +// eslint-disable-next-line drizzle-internal/require-entity-kind +class ServerSimulator { + constructor(private db: ClientNodePostgres) { + const types = typesNodePostgres; + + types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); + types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); + types.setTypeParser(types.builtins.DATE, (val) => val); + types.setTypeParser(types.builtins.INTERVAL, (val) => val); + types.setTypeParser(1231 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); + types.setTypeParser(1115 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); + types.setTypeParser(1185 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); + types.setTypeParser(1187 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); + types.setTypeParser(1182 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); + } + + async query(sql: string, params: any[], method: 'all' | 'execute') { + if (method === 'all') { + try { + const result = await this.db.query({ + text: sql, + values: params, + rowMode: 'array', + }); + + return { data: result.rows as any }; + } catch (e: any) { + return { error: e }; + } + } else if (method === 'execute') { + try { + const result = await this.db.query({ + text: sql, + values: params, + }); + + return { data: result.rows as any }; + } catch (e: any) { + return { error: e }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + async migrations(queries: string[]) { + await this.db.query('BEGIN'); + try { + for (const query of queries) { + await this.db.query(query); + } + await this.db.query('COMMIT'); + } catch (e) { + await this.db.query('ROLLBACK'); + throw e; + } + + return {}; + } +} + +export const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, +) => { + const { diff } = await import('../../../drizzle-kit/tests/postgres/mocks' as string); + + const res = await diff({}, schema, []); + + for (const s of res.sqlStatements) { + await query(s, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +export const prepareNeonHttpClient = async (db: string) => { + const url = new URL(process.env['NEON_CONNECTION_STRING']!); + url.pathname = `/${db}`; + const client = neon(url.toString()); + + await client('drop schema if exists public, "mySchema" cascade;'); + await client('create schema public'); + await client('create schema "mySchema";'); + await client(`SET TIME ZONE 'UTC';`); + + const query = async (sql: string, params: any[] = []) => { + const res = await client(sql, params); + return res as any[]; + }; + + const batch = async (statements: string[]) => { + return Promise.all(statements.map((x) => client(x))).then((results) => [results] as any); + }; + + return { client, query, batch }; +}; + +export const prepareNeonWsClient = async (db: string) => { + const url = new URL(process.env['NEON_CONNECTION_STRING']!); + url.pathname = `/${db}`; + const client = new NeonPool({ connectionString: url.toString(), max: 1 }); + + await client.query('drop schema if exists public, "mySchema" cascade;'); + await client.query('create schema public'); + await client.query('create schema "mySchema";'); + await client.query(`SET TIME ZONE 'UTC';`); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all(statements.map((x) => client.query(x))).then((results) => [results] as any); + }; + + return { client, query, batch }; +}; + +export const preparePglite = async () => { + const client = new PGlite(); + await client.query('create schema "mySchema";'); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all(statements.map((x) => client.query(x))).then((results) => [results] as any); + }; + + return { client, query, batch }; +}; + +export const prepareNodePostgres = async (db: string) => { + const url = new URL(process.env['PG_CONNECTION_STRING']!); + url.pathname = `/${db}`; + if (!url) throw new Error(); + + const client = new ClientNodePostgres(url.toString()); + client.connect(); + + await client.query('drop schema if exists public, "mySchema" cascade;'); + await client.query('create schema public'); + await client.query('create schema "mySchema";'); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all(statements.map((x) => client.query(x))).then((results) => [results] as any); + }; + + return { client, query, batch }; +}; + +export const preparePostgresjs = async (db: string) => { + const url = new URL(process.env['PG_CONNECTION_STRING']!); + url.pathname = `/${db}`; + if (!url) throw new Error(); + + const client = postgres(url.toString(), { max: 1, onnotice: () => {} }); + await client`drop schema if exists public, "mySchema" cascade;`; + await client`create schema public`; + await client`create schema "mySchema";`; + + const query = async (sql: string, params: any[] = []) => { + const res = await client.unsafe(sql, params); + return res; + }; + + const batch = async (statements: string[]) => { + return Promise.all(statements.map((x) => client.unsafe(x))).then((results) => [results] as any); + }; + + return { client, query, batch }; +}; + +export const prepareProxy = async (db: string) => { + const url = new URL(process.env['PG_CONNECTION_STRING']!); + url.pathname = `/${db}`; + if (!url) throw new Error(); + + const client = new ClientNodePostgres(url.toString()); + client.connect(); + + await client.query('drop schema if exists public, "mySchema" cascade;'); + await client.query('create schema public'); + await client.query('create schema "mySchema";'); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all(statements.map((x) => client.query(x))).then((results) => [results] as any); + }; + + return { client, query, batch }; +}; + +const providerClosure = async (items: T[]) => { + return async () => { + while (true) { + const c = items.shift(); + if (!c) { + await new Promise((resolve) => setTimeout(resolve, 50)); + continue; + } + return { + ...c, + release: () => { + items.push(c); + }, + }; + } + }; +}; + +export const providerForNeonHttp = async () => { + const clients = [ + await prepareNeonHttpClient('db0'), + await prepareNeonHttpClient('db1'), + await prepareNeonHttpClient('db2'), + await prepareNeonHttpClient('db3'), + await prepareNeonHttpClient('db4'), + ]; + + return providerClosure(clients); +}; + +export const providerForNeonWs = async () => { + const clients = [ + await prepareNeonWsClient('db5'), + await prepareNeonWsClient('db6'), + await prepareNeonWsClient('db7'), + await prepareNeonWsClient('db8'), + await prepareNeonWsClient('db9'), + ]; + + return providerClosure(clients); +}; + +export const provideForPglite = async () => { + const clients = [ + await preparePglite(), + await preparePglite(), + await preparePglite(), + await preparePglite(), + await preparePglite(), + ]; + + return providerClosure(clients); +}; + +export const provideForNodePostgres = async () => { + const url = process.env['PG_CONNECTION_STRING']; + if (!url) throw new Error(); + const client = new ClientNodePostgres({ connectionString: url }); + client.connect(); + + await client.query(`drop database if exists db0`); + await client.query(`drop database if exists db1`); + await client.query(`drop database if exists db2`); + await client.query(`drop database if exists db3`); + await client.query(`drop database if exists db4`); + await client.query('create database db0;'); + await client.query('create database db1;'); + await client.query('create database db2;'); + await client.query('create database db3;'); + await client.query('create database db4;'); + + const clients = [ + await prepareNodePostgres('db0'), + await prepareNodePostgres('db1'), + await prepareNodePostgres('db2'), + await prepareNodePostgres('db3'), + await prepareNodePostgres('db4'), + ]; + + return providerClosure(clients); +}; + +export const provideForPostgresjs = async () => { + const url = process.env['PG_CONNECTION_STRING']; + if (!url) throw new Error(); + const client = postgres(url, { max: 1, onnotice: () => {} }); + + await client`drop database if exists db0`; + await client`drop database if exists db1`; + await client`drop database if exists db2`; + await client`drop database if exists db3`; + await client`drop database if exists db4`; + await client`create database db0;`; + await client`create database db1;`; + await client`create database db2;`; + await client`create database db3;`; + await client`create database db4;`; + + const clients = [ + await preparePostgresjs('db0'), + await preparePostgresjs('db1'), + await preparePostgresjs('db2'), + await preparePostgresjs('db3'), + await preparePostgresjs('db4'), + ]; + + return providerClosure(clients); +}; + +export const provideForProxy = async () => { + const url = process.env['PG_CONNECTION_STRING']; + if (!url) throw new Error(); + const client = new ClientNodePostgres({ connectionString: url }); + client.connect(); + + await client.query(`drop database if exists db0`); + await client.query('create database db0;'); + + const clients = [ + await prepareProxy('db0'), + ]; + + return providerClosure(clients); +}; + +type ProviderNeonHttp = Awaited>; +type ProviderNeonWs = Awaited>; +type ProvideForPglite = Awaited>; +type ProvideForNodePostgres = Awaited>; +type ProvideForPostgresjs = Awaited>; +type ProvideForProxy = Awaited>; + +type Provider = + | ProviderNeonHttp + | ProviderNeonWs + | ProvideForPglite + | ProvideForNodePostgres + | ProvideForPostgresjs + | ProvideForProxy; + +const testFor = (vendor: 'neon-http' | 'neon-serverless' | 'pglite' | 'node-postgres' | 'postgresjs' | 'proxy') => { + return base.extend<{ + provider: Provider; + kit: { + client: any; + query: (sql: string, params?: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + client: any; + db: PgDatabase; + push: (schema: any) => Promise; + createDB: { + (schema: S): PgDatabase>>; + ( + schema: S, + cb: (helpers: RelationsBuilder>) => TConfig, + ): PgDatabase>>; + }; + caches: { all: PgDatabase; explicit: PgDatabase }; + }>({ + provider: [ + // oxlint-disable-next-line no-empty-pattern + async ({}, use) => { + const provider = vendor === 'neon-http' + ? await providerForNeonHttp() + : vendor === 'neon-serverless' + ? await providerForNeonWs() + : vendor === 'pglite' + ? await provideForPglite() + : vendor === 'node-postgres' + ? await provideForNodePostgres() + : vendor === 'postgresjs' + ? await provideForPostgresjs() + : vendor === 'proxy' + ? await provideForProxy() + : '' as never; + + await use(provider); + }, + { scope: 'file' }, + ], + kit: [ + async ({ provider }, use) => { + const { client, batch, query, release } = await provider(); + await use({ client: client as any, query, batch }); + release(); + }, + { scope: 'test' }, + ], + client: [ + async ({ kit }, use) => { + await use(kit.client); + }, + { scope: 'test' }, + ], + db: [ + async ({ kit }, use) => { + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from pg proxy server:', e.message); + throw e; + } + }; + await use(drizzleProxy(proxyHandler, { relations })); + return; + } + + const db = vendor === 'neon-http' + ? drizzleNeonHttp({ client: kit.client as any, relations }) + : vendor === 'neon-serverless' + ? drizzleNeonWs({ client: kit.client as any, relations }) + : vendor === 'pglite' + ? drizzlePglite({ client: kit.client as any, relations }) + : vendor === 'node-postgres' + ? drizzleNodePostgres({ client: kit.client as any, relations }) + : vendor === 'postgresjs' + ? drizzlePostgresjs({ client: kit.client as any, relations }) + : '' as never; + + await use(db); + }, + { scope: 'test' }, + ], + push: [ + async ({ kit }, use) => { + const push = ( + schema: any, + ) => _push(kit.query, schema); + + await use(push); + }, + { scope: 'test' }, + ], + createDB: [ + async ({ kit }, use) => { + const createDB = ( + schema: S, + cb?: ( + helpers: RelationsBuilder>, + ) => RelationsBuilderConfig>, + ) => { + const relations = cb ? defineRelations(schema, cb) : defineRelations(schema); + + if (vendor === 'neon-http') return drizzleNeonHttp({ client: kit.client, relations }); + if (vendor === 'neon-serverless') return drizzleNeonWs({ client: kit.client as any, relations }); + if (vendor === 'pglite') return drizzlePglite({ client: kit.client as any, relations }); + if (vendor === 'node-postgres') return drizzleNodePostgres({ client: kit.client as any, relations }); + if (vendor === 'postgresjs') return drizzlePostgresjs({ client: kit.client as any, relations }); + + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from pg proxy server:', e.message); + throw e; + } + }; + return drizzleProxy(proxyHandler, { relations }); + } + throw new Error(); + }; + + await use(createDB); + }, + { scope: 'test' }, + ], + caches: [ + async ({ kit }, use) => { + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from pg proxy server:', e.message); + throw e; + } + }; + const db1 = drizzleProxy(proxyHandler, { relations, cache: new TestCache('all') }); + const db2 = drizzleProxy(proxyHandler, { relations, cache: new TestCache('explicit') }); + await use({ all: db1, explicit: db2 }); + return; + } + + const config1 = { client: kit.client as any, relations, cache: new TestCache('all') }; + const config2 = { client: kit.client as any, relations, cache: new TestCache('explicit') }; + + const db1 = vendor === 'neon-http' + ? drizzleNeonHttp(config1) + : vendor === 'neon-serverless' + ? drizzleNeonWs(config1) + : vendor === 'pglite' + ? drizzlePglite(config1) + : vendor === 'node-postgres' + ? drizzleNodePostgres(config1) + : vendor === 'postgresjs' + ? drizzlePostgresjs(config1) + : '' as never; + + const db2 = vendor === 'neon-http' + ? drizzleNeonHttp(config2) + : vendor === 'neon-serverless' + ? drizzleNeonWs(config2) + : vendor === 'pglite' + ? drizzlePglite(config2) + : vendor === 'node-postgres' + ? drizzleNodePostgres(config2) + : vendor === 'postgresjs' + ? drizzlePostgresjs(config2) + : '' as never; + + await use({ all: db1, explicit: db2 }); + }, + { scope: 'test' }, + ], + }); +}; + +export const neonHttpTest = testFor('neon-http').extend<{ neonhttp: NeonHttpDatabase }>({ + neonhttp: [ + async ({ kit }, use) => { + const db = drizzleNeonHttp({ client: kit.client as NeonQueryFunction, relations }); + await use(db); + }, + { scope: 'test' }, + ], +}); + +export const neonWsTest = testFor('neon-serverless'); +export const pgliteTest = testFor('pglite'); +export const nodePostgresTest = testFor('node-postgres'); +export const postgresjsTest = testFor('postgresjs'); +export const proxyTest = testFor('proxy').extend<{ simulator: ServerSimulator }>({ + simulator: [ + async ({ client }, use) => { + const simulator = new ServerSimulator(client); + await use(simulator); + }, + { scope: 'test' }, + ], +}); + +export type Test = ReturnType; diff --git a/integration-tests/tests/pg/neon-http-batch.test.ts b/integration-tests/tests/pg/neon-http-batch.test.ts index 746145d663..bf91e50953 100644 --- a/integration-tests/tests/pg/neon-http-batch.test.ts +++ b/integration-tests/tests/pg/neon-http-batch.test.ts @@ -1,29 +1,109 @@ -import { neon, type NeonQueryFunction } from '@neondatabase/serverless'; -import { defineRelations } from 'drizzle-orm'; -import { drizzle, type NeonHttpDatabase } from 'drizzle-orm/neon-http'; -import { beforeAll, beforeEach } from 'vitest'; -import { - commentLikesConfig, - commentsConfig, - commentsTable, - groupsConfig, - groupsTable, - postsConfig, - postsTable, - tests, - usersConfig, - usersTable, - usersToGroupsConfig, - usersToGroupsTable, -} from './neon-http-batch'; -import { TestCache, TestGlobalCache } from './pg-common-cache'; +import { defineRelations, eq, sql } from 'drizzle-orm'; +import { relations as oldRels } from 'drizzle-orm/_relations'; +import { drizzle, type NeonHttpDatabase, type NeonHttpQueryResult } from 'drizzle-orm/neon-http'; +import { type AnyPgColumn, integer, pgTable, primaryKey, serial, text, timestamp } from 'drizzle-orm/pg-core'; +import { describe, expect, expectTypeOf, test as base } from 'vitest'; +import { _push, prepareNeonHttpClient } from './instrumentation'; -const ENABLE_LOGGING = false; +export const usersTable = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: integer('verified').notNull().default(0), + invitedBy: integer('invited_by').references((): AnyPgColumn => usersTable.id), +}); +export const usersConfig = oldRels(usersTable, ({ one, many }) => ({ + invitee: one(usersTable, { + fields: [usersTable.invitedBy], + references: [usersTable.id], + }), + usersToGroups: many(usersToGroupsTable), + posts: many(postsTable), +})); + +export const groupsTable = pgTable('groups', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + description: text('description'), +}); +export const groupsConfig = oldRels(groupsTable, ({ many }) => ({ + usersToGroups: many(usersToGroupsTable), +})); + +export const usersToGroupsTable = pgTable( + 'users_to_groups', + { + id: serial('id'), + userId: integer('user_id').notNull().references(() => usersTable.id), + groupId: integer('group_id').notNull().references(() => groupsTable.id), + }, + (t) => [primaryKey({ columns: [t.userId, t.groupId] })], +); +export const usersToGroupsConfig = oldRels(usersToGroupsTable, ({ one }) => ({ + group: one(groupsTable, { + fields: [usersToGroupsTable.groupId], + references: [groupsTable.id], + }), + user: one(usersTable, { + fields: [usersToGroupsTable.userId], + references: [usersTable.id], + }), +})); + +export const postsTable = pgTable('posts', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + ownerId: integer('owner_id').references(() => usersTable.id), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); +export const postsConfig = oldRels(postsTable, ({ one, many }) => ({ + author: one(usersTable, { + fields: [postsTable.ownerId], + references: [usersTable.id], + }), + comments: many(commentsTable), +})); + +export const commentsTable = pgTable('comments', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + creator: integer('creator').references(() => usersTable.id), + postId: integer('post_id').references(() => postsTable.id), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); +export const commentsConfig = oldRels(commentsTable, ({ one, many }) => ({ + post: one(postsTable, { + fields: [commentsTable.postId], + references: [postsTable.id], + }), + author: one(usersTable, { + fields: [commentsTable.creator], + references: [usersTable.id], + }), + likes: many(commentLikesTable), +})); + +export const commentLikesTable = pgTable('comment_likes', { + id: serial('id').primaryKey(), + creator: integer('creator').references(() => usersTable.id), + commentId: integer('comment_id').references(() => commentsTable.id), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); +export const commentLikesConfig = oldRels(commentLikesTable, ({ one }) => ({ + comment: one(commentsTable, { + fields: [commentLikesTable.commentId], + references: [commentsTable.id], + }), + author: one(usersTable, { + fields: [commentLikesTable.creator], + references: [usersTable.id], + }), +})); export const schema = { usersTable, postsTable, commentsTable, + commentLikesTable, usersToGroupsTable, groupsTable, commentLikesConfig, @@ -34,38 +114,440 @@ export const schema = { usersConfig, }; -export const neonRelations = defineRelations(schema); - -let db: NeonHttpDatabase; -let client: NeonQueryFunction; -let dbGlobalCached: NeonHttpDatabase; -let cachedDb: NeonHttpDatabase; - -beforeAll(async () => { - const connectionString = process.env['NEON_HTTP_CONNECTION_STRING']; - if (!connectionString) { - throw new Error('NEON_HTTP_CONNECTION_STRING is not defined'); - } - client = neon(connectionString); - db = drizzle(client, { schema, logger: ENABLE_LOGGING, relations: neonRelations }); - cachedDb = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestCache(), +export const relations = defineRelations(schema); + +const test = base.extend<{ db: NeonHttpDatabase }>({ + db: [ + // oxlint-disable-next-line no-empty-pattern + async ({}, use) => { + const { client, query } = await prepareNeonHttpClient('db6'); + await _push(query, schema); + + const db = drizzle({ client: client, relations: relations, schema }); + await use(db); + }, + { scope: 'file' }, + ], +}); + +describe('batch', () => { + test.beforeEach(async ({ db }) => { + await db.execute( + `truncate table users, groups, users_to_groups, posts, comments, comment_likes RESTART IDENTITY CASCADE;`, + ); }); - dbGlobalCached = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestGlobalCache(), + + test('batch api example', async ({ db }) => { + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ + id: usersTable.id, + invitedBy: usersTable.invitedBy, + }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db.select().from(usersTable), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + invitedBy: number | null; + }[], + NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + ]>(); + + expect(batchResponse.length).eq(3); + + expect(batchResponse[0]).toEqual([{ + id: 1, + invitedBy: null, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); }); -}); -beforeEach((ctx) => { - ctx.neonPg = { - db, - }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; -}); + // batch api only relational many + test('insert + findMany', async ({ db }) => { + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db._query.usersTable.findMany({}), + db.query.usersTable.findMany({}), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + ]>(); + + expect(batchResponse.length).eq(4); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[3]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + }); + + // batch api relational many + one + test('insert + findMany + findFirst', async ({ db }) => { + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db._query.usersTable.findMany({}), + db.query.usersTable.findMany({}), + db._query.usersTable.findFirst({}), + db.query.usersTable.findFirst({}), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + } | undefined, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + } | undefined, + ]>(); + + expect(batchResponse.length).eq(6); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); -tests(); + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[3]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[4]).toEqual( + { id: 1, name: 'John', verified: 0, invitedBy: null }, + ); + + expect(batchResponse[5]).toEqual( + { id: 1, name: 'John', verified: 0, invitedBy: null }, + ); + }); + + test('insert + db.execute', async ({ db }) => { + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.execute(sql`insert into users (id, name) values (2, 'Dan')`), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult>, + ]>(); + + expect(batchResponse.length).eq(2); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rowAsArray: false, rows: [], rowCount: 1 }); + }); + + // batch api combined rqb + raw call + test('insert + findManyWith + db.all', async ({ db }) => { + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db._query.usersTable.findMany({}), + db.query.usersTable.findMany({}), + db.execute(sql`select * from users`), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + NeonHttpQueryResult<{ + id: number; + name: string; + verified: number; + invitedBy: number | null; + }>, + ]>(); + + expect(batchResponse.length).eq(5); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rowAsArray: true, rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[3]).toEqual([ + { id: 1, name: 'John', verified: 0, invitedBy: null }, + { id: 2, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[4]).toMatchObject({ + rows: [ + { id: 1, name: 'John', verified: 0, invited_by: null }, + { id: 2, name: 'Dan', verified: 0, invited_by: null }, + ], + }); + }); + + // batch api for insert + update + select + test('insert + update + select + select partial', async ({ db }) => { + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), + db._query.usersTable.findMany({}), + db.query.usersTable.findMany({}), + db.select().from(usersTable).where(eq(usersTable.id, 1)), + db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(usersTable), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult, + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + name: string; + verified: number; + invitedBy: number | null; + }[], + { + id: number; + invitedBy: number | null; + }[], + ]>(); + + expect(batchResponse.length).eq(6); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[3]).toEqual([ + { id: 1, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[4]).toEqual([ + { id: 1, name: 'Dan', verified: 0, invitedBy: null }, + ]); + + expect(batchResponse[5]).toEqual([ + { id: 1, invitedBy: null }, + ]); + }); + + // batch api for insert + delete + select + test('insert + delete + select + select partial', async ({ db }) => { + const batchResponse = await db.batch([ + db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), + db.insert(usersTable).values({ id: 2, name: 'Dan' }), + db.delete(usersTable).where(eq(usersTable.id, 1)).returning({ + id: usersTable.id, + invitedBy: usersTable.invitedBy, + }), + db._query.usersTable.findFirst({ + columns: { + id: true, + invitedBy: true, + }, + }), + db.query.usersTable.findFirst({ + columns: { + id: true, + invitedBy: true, + }, + }), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + { + id: number; + }[], + NeonHttpQueryResult, + { + id: number; + invitedBy: number | null; + }[], + { + id: number; + invitedBy: number | null; + } | undefined, + { + id: number; + invitedBy: number | null; + } | undefined, + ]>(); + + expect(batchResponse.length).eq(5); + + expect(batchResponse[0]).toEqual([{ + id: 1, + }]); + + expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); + + expect(batchResponse[2]).toEqual([ + { id: 1, invitedBy: null }, + ]); + + expect(batchResponse[3]).toEqual( + { id: 2, invitedBy: null }, + ); + + expect(batchResponse[4]).toEqual( + { id: 2, invitedBy: null }, + ); + }); + + test('select raw', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Dan' }]); + const batchResponse = await db.batch([ + db.execute<{ + id: number; + name: string; + verified: number; + invited_by: number | null; + }>(sql`select * from users`), + db.execute<{ + id: number; + name: string; + verified: number; + invited_by: number | null; + }>(sql`select * from users where id = 1`), + ]); + + expectTypeOf(batchResponse).toEqualTypeOf<[ + NeonHttpQueryResult<{ + id: number; + name: string; + verified: number; + invited_by: number | null; + }>, + NeonHttpQueryResult<{ + id: number; + name: string; + verified: number; + invited_by: number | null; + }>, + ]>(); + + expect(batchResponse.length).eq(2); + + expect(batchResponse[0]).toMatchObject({ + rows: [ + { id: 1, name: 'John', verified: 0, invited_by: null }, + { id: 2, name: 'Dan', verified: 0, invited_by: null }, + ], + }); + + expect(batchResponse[1]).toMatchObject({ + rows: [ + { id: 1, name: 'John', verified: 0, invited_by: null }, + ], + }); + }); +}); diff --git a/integration-tests/tests/pg/neon-http-batch.ts b/integration-tests/tests/pg/neon-http-batch.ts deleted file mode 100644 index 7779f1ee47..0000000000 --- a/integration-tests/tests/pg/neon-http-batch.ts +++ /dev/null @@ -1,640 +0,0 @@ -import Docker from 'dockerode'; -import { eq, sql } from 'drizzle-orm'; -import { relations } from 'drizzle-orm/_relations'; -import type { NeonHttpQueryResult } from 'drizzle-orm/neon-http'; -import { integer, pgTable, primaryKey, serial, text, timestamp } from 'drizzle-orm/pg-core'; -import type { AnyPgColumn } from 'drizzle-orm/pg-core'; -import getPort from 'get-port'; -import { v4 as uuidV4 } from 'uuid'; -import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; - -export const usersTable = pgTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: integer('verified').notNull().default(0), - invitedBy: integer('invited_by').references((): AnyPgColumn => usersTable.id), -}); -export const usersConfig = relations(usersTable, ({ one, many }) => ({ - invitee: one(usersTable, { - fields: [usersTable.invitedBy], - references: [usersTable.id], - }), - usersToGroups: many(usersToGroupsTable), - posts: many(postsTable), -})); - -export const groupsTable = pgTable('groups', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - description: text('description'), -}); -export const groupsConfig = relations(groupsTable, ({ many }) => ({ - usersToGroups: many(usersToGroupsTable), -})); - -export const usersToGroupsTable = pgTable( - 'users_to_groups', - { - id: serial('id'), - userId: integer('user_id').notNull().references(() => usersTable.id), - groupId: integer('group_id').notNull().references(() => groupsTable.id), - }, - (t) => ({ - pk: primaryKey({ columns: [t.userId, t.groupId] }), - }), -); -export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ - group: one(groupsTable, { - fields: [usersToGroupsTable.groupId], - references: [groupsTable.id], - }), - user: one(usersTable, { - fields: [usersToGroupsTable.userId], - references: [usersTable.id], - }), -})); - -export const postsTable = pgTable('posts', { - id: serial('id').primaryKey(), - content: text('content').notNull(), - ownerId: integer('owner_id').references(() => usersTable.id), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); -export const postsConfig = relations(postsTable, ({ one, many }) => ({ - author: one(usersTable, { - fields: [postsTable.ownerId], - references: [usersTable.id], - }), - comments: many(commentsTable), -})); - -export const commentsTable = pgTable('comments', { - id: serial('id').primaryKey(), - content: text('content').notNull(), - creator: integer('creator').references(() => usersTable.id), - postId: integer('post_id').references(() => postsTable.id), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); -export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ - post: one(postsTable, { - fields: [commentsTable.postId], - references: [postsTable.id], - }), - author: one(usersTable, { - fields: [commentsTable.creator], - references: [usersTable.id], - }), - likes: many(commentLikesTable), -})); - -export const commentLikesTable = pgTable('comment_likes', { - id: serial('id').primaryKey(), - creator: integer('creator').references(() => usersTable.id), - commentId: integer('comment_id').references(() => commentsTable.id), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); -export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ - comment: one(commentsTable, { - fields: [commentLikesTable.commentId], - references: [commentsTable.id], - }), - author: one(usersTable, { - fields: [commentLikesTable.creator], - references: [usersTable.id], - }), -})); - -let pgContainer: Docker.Container; -export async function createDockerDB(): Promise { - const docker = new Docker(); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - pgContainer = await docker.createContainer({ - Image: image, - Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], - name: `drizzle-integration-tests-${uuidV4()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -afterAll(async () => { - await pgContainer?.stop().catch(console.error); -}); - -export function tests() { - describe('common', () => { - beforeEach(async (ctx) => { - const { db } = ctx.neonPg; - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`drop schema if exists mySchema cascade`); - - await db.execute(sql`create schema public`); - - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified int not null default 0, - invited_by int references users(id) - ) - `, - ); - await db.execute( - sql` - create table groups ( - id serial primary key, - name text not null, - description text - ) - `, - ); - await db.execute( - sql` - create table users_to_groups ( - id serial, - user_id int not null references users(id), - group_id int not null references groups(id), - primary key (user_id, group_id) - ) - `, - ); - await db.execute( - sql` - create table posts ( - id serial primary key, - content text not null, - owner_id int references users(id), - created_at timestamp not null default now() - ) - `, - ); - await db.execute( - sql` - create table comments ( - id serial primary key, - content text not null, - creator int references users(id), - post_id int references posts(id), - created_at timestamp not null default now() - ) - `, - ); - await db.execute( - sql` - create table comment_likes ( - id serial primary key, - creator int references users(id), - comment_id int references comments(id), - created_at timestamp not null default now() - ) - `, - ); - }); - - test('batch api example', async (ctx) => { - const { db } = ctx.neonPg; - - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ - id: usersTable.id, - invitedBy: usersTable.invitedBy, - }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db.select().from(usersTable), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - invitedBy: number | null; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - ]>(); - - expect(batchResponse.length).eq(3); - - expect(batchResponse[0]).toEqual([{ - id: 1, - invitedBy: null, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - }); - - // batch api only relational many - test('insert + findMany', async (ctx) => { - const { db } = ctx.neonPg; - - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db._query.usersTable.findMany({}), - db.query.usersTable.findMany({}), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - ]>(); - - expect(batchResponse.length).eq(4); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[3]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - }); - - // batch api relational many + one - test('insert + findMany + findFirst', async (ctx) => { - const { db } = ctx.neonPg; - - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db._query.usersTable.findMany({}), - db.query.usersTable.findMany({}), - db._query.usersTable.findFirst({}), - db.query.usersTable.findFirst({}), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - } | undefined, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - } | undefined, - ]>(); - - expect(batchResponse.length).eq(6); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[3]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[4]).toEqual( - { id: 1, name: 'John', verified: 0, invitedBy: null }, - ); - - expect(batchResponse[5]).toEqual( - { id: 1, name: 'John', verified: 0, invitedBy: null }, - ); - }); - - test('insert + db.execute', async (ctx) => { - const { db } = ctx.neonPg; - - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.execute(sql`insert into users (id, name) values (2, 'Dan')`), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult>, - ]>(); - - expect(batchResponse.length).eq(2); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rowAsArray: false, rows: [], rowCount: 1 }); - }); - - // batch api combined rqb + raw call - test('insert + findManyWith + db.all', async (ctx) => { - const { db } = ctx.neonPg; - - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db._query.usersTable.findMany({}), - db.query.usersTable.findMany({}), - db.execute(sql`select * from users`), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - NeonHttpQueryResult<{ - id: number; - name: string; - verified: number; - invitedBy: number | null; - }>, - ]>(); - - expect(batchResponse.length).eq(5); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rowAsArray: true, rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[3]).toEqual([ - { id: 1, name: 'John', verified: 0, invitedBy: null }, - { id: 2, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[4]).toMatchObject({ - rows: [ - { id: 1, name: 'John', verified: 0, invited_by: null }, - { id: 2, name: 'Dan', verified: 0, invited_by: null }, - ], - }); - }); - - // batch api for insert + update + select - test('insert + update + select + select partial', async (ctx) => { - const { db } = ctx.neonPg; - - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), - db._query.usersTable.findMany({}), - db.query.usersTable.findMany({}), - db.select().from(usersTable).where(eq(usersTable.id, 1)), - db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(usersTable), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - name: string; - verified: number; - invitedBy: number | null; - }[], - { - id: number; - invitedBy: number | null; - }[], - ]>(); - - expect(batchResponse.length).eq(6); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[3]).toEqual([ - { id: 1, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[4]).toEqual([ - { id: 1, name: 'Dan', verified: 0, invitedBy: null }, - ]); - - expect(batchResponse[5]).toEqual([ - { id: 1, invitedBy: null }, - ]); - }); - - // batch api for insert + delete + select - test('insert + delete + select + select partial', async (ctx) => { - const { db } = ctx.neonPg; - - const batchResponse = await db.batch([ - db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), - db.insert(usersTable).values({ id: 2, name: 'Dan' }), - db.delete(usersTable).where(eq(usersTable.id, 1)).returning({ - id: usersTable.id, - invitedBy: usersTable.invitedBy, - }), - db._query.usersTable.findFirst({ - columns: { - id: true, - invitedBy: true, - }, - }), - db.query.usersTable.findFirst({ - columns: { - id: true, - invitedBy: true, - }, - }), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - { - id: number; - }[], - NeonHttpQueryResult, - { - id: number; - invitedBy: number | null; - }[], - { - id: number; - invitedBy: number | null; - } | undefined, - { - id: number; - invitedBy: number | null; - } | undefined, - ]>(); - - expect(batchResponse.length).eq(5); - - expect(batchResponse[0]).toEqual([{ - id: 1, - }]); - - expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); - - expect(batchResponse[2]).toEqual([ - { id: 1, invitedBy: null }, - ]); - - expect(batchResponse[3]).toEqual( - { id: 2, invitedBy: null }, - ); - - expect(batchResponse[4]).toEqual( - { id: 2, invitedBy: null }, - ); - }); - - test('select raw', async (ctx) => { - const { db } = ctx.neonPg; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Dan' }]); - const batchResponse = await db.batch([ - db.execute<{ - id: number; - name: string; - verified: number; - invited_by: number | null; - }>(sql`select * from users`), - db.execute<{ - id: number; - name: string; - verified: number; - invited_by: number | null; - }>(sql`select * from users where id = 1`), - ]); - - expectTypeOf(batchResponse).toEqualTypeOf<[ - NeonHttpQueryResult<{ - id: number; - name: string; - verified: number; - invited_by: number | null; - }>, - NeonHttpQueryResult<{ - id: number; - name: string; - verified: number; - invited_by: number | null; - }>, - ]>(); - - expect(batchResponse.length).eq(2); - - expect(batchResponse[0]).toMatchObject({ - rows: [ - { id: 1, name: 'John', verified: 0, invited_by: null }, - { id: 2, name: 'Dan', verified: 0, invited_by: null }, - ], - }); - - expect(batchResponse[1]).toMatchObject({ - rows: [ - { id: 1, name: 'John', verified: 0, invited_by: null }, - ], - }); - }); - }); -} diff --git a/integration-tests/tests/pg/neon-http.test.ts b/integration-tests/tests/pg/neon-http.test.ts index 0d417fbbc1..fffe1b936a 100644 --- a/integration-tests/tests/pg/neon-http.test.ts +++ b/integration-tests/tests/pg/neon-http.test.ts @@ -1,816 +1,705 @@ -import { neon, neonConfig, type NeonQueryFunction } from '@neondatabase/serverless'; +import type { NeonQueryFunction } from '@neondatabase/serverless'; import { defineRelations, eq, sql } from 'drizzle-orm'; -import { drizzle, type NeonHttpDatabase } from 'drizzle-orm/neon-http'; +import { drizzle } from 'drizzle-orm/neon-http'; import { migrate } from 'drizzle-orm/neon-http/migrator'; -import { pgMaterializedView, pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; -import { beforeAll, beforeEach, describe, expect, expectTypeOf, test, vi } from 'vitest'; -import { skipTests } from '~/common'; +import { + bigint, + bigserial, + boolean, + bytea, + char, + cidr, + date, + doublePrecision, + inet, + integer, + interval, + json, + jsonb, + line, + macaddr, + macaddr8, + numeric, + pgEnum, + pgMaterializedView, + pgTable, + point, + real, + serial, + smallint, + smallserial, + text, + time, + timestamp, + uuid, + varchar, +} from 'drizzle-orm/pg-core'; +import { getTableConfig } from 'drizzle-orm/pg-core'; +import { describe, expect, expectTypeOf, vi } from 'vitest'; import { randomString } from '~/utils'; -import { allTypesTable, tests, usersMigratorTable, usersTable } from './pg-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -let db: NeonHttpDatabase; -let dbGlobalCached: NeonHttpDatabase; -let cachedDb: NeonHttpDatabase; - -beforeAll(async () => { - const connectionString = process.env['NEON_HTTP_CONNECTION_STRING']; - if (!connectionString) { - throw new Error('NEON_CONNECTION_STRING is not defined'); - } - - neonConfig.fetchEndpoint = (host) => { - const [protocol, port] = host === 'db.localtest.me' ? ['http', 4444] : ['https', 443]; - return `${protocol}://${host}:${port}/sql`; - }; - const client = neon(connectionString); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestCache(), - }); - dbGlobalCached = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestGlobalCache(), - }); -}); - -beforeEach((ctx) => { - ctx.pg = { - db, - }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; -}); - -test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); +import { tests } from './common'; +import { neonHttpTest as test } from './instrumentation'; +import { usersMigratorTable, usersTable } from './schema'; - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); - -test('migrator : migrate with custom schema', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); +const skips = [ + 'RQB v2 transaction find first - no rows', + 'RQB v2 transaction find first - multiple rows', + 'RQB v2 transaction find first - with relation', + 'RQB v2 transaction find first - placeholders', + 'RQB v2 transaction find many - no rows', + 'RQB v2 transaction find many - multiple rows', + 'RQB v2 transaction find many - with relation', + 'RQB v2 transaction find many - placeholders', + // // Disabled until Buffer insertion is fixed + 'all types', +]; - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); +// COMMON +tests(test, skips); - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); - expect(rowCount && rowCount > 0).toBeTruthy(); +describe('migrator', () => { + test.beforeEach(async ({ db }) => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); + }); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + test('migrator : default migration strategy', async ({ neonhttp: db }) => { + await db.execute(sql`drop table if exists all_columns, users12, "drizzle"."__drizzle_migrations"`); + await migrate(db, { migrationsFolder: './drizzle2/pg' }); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table custom_migrations."__drizzle_migrations"`); -}); + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); -test('migrator : migrate with custom table', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + const result = await db.select().from(usersMigratorTable); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - expect(rowCount && rowCount > 0).toBeTruthy(); + await db.execute(sql`drop table all_columns, users12, "drizzle"."__drizzle_migrations"`); + }); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + test('migrator : migrate with custom schema', async ({ neonhttp: db }) => { + await db.execute(sql`drop table if exists all_columns, users12, "drizzle"."__drizzle_migrations"`); + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); + expect(rowCount && rowCount > 0).toBeTruthy(); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await db.execute(sql`drop table all_columns, users12, custom_migrations."__drizzle_migrations"`); + }); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); + test('migrator : migrate with custom table', async ({ neonhttp: db }) => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns, users12, "drizzle"."__drizzle_migrations"`); + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(rowCount && rowCount > 0).toBeTruthy(); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await db.execute(sql`drop table all_columns, users12, "drizzle".${sql.identifier(customTable)}`); + }); -test('migrator : migrate with custom table and custom schema', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + test('migrator : migrate with custom table and custom schema', async ({ neonhttp: db }) => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns, users12, "drizzle"."__drizzle_migrations"`); + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: 'custom_migrations', + }); - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: 'custom_migrations', + // test if the custom migrations table was created + const { rowCount } = await db.execute( + sql`select * from custom_migrations.${sql.identifier(customTable)};`, + ); + expect(rowCount && rowCount > 0).toBeTruthy(); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await db.execute(sql`drop table all_columns, users12, custom_migrations.${sql.identifier(customTable)}`); }); - // test if the custom migrations table was created - const { rowCount } = await db.execute( - sql`select * from custom_migrations.${sql.identifier(customTable)};`, - ); - expect(rowCount && rowCount > 0).toBeTruthy(); + test('migrator : --init', async ({ neonhttp: db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table custom_migrations.${sql.identifier(customTable)}`); -}); + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); -test('all date and time columns without timezone first case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(false); }); - await db.execute(sql`drop table if exists ${table}`); + test('migrator : --init - local migrations error', async ({ neonhttp: db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); - // 1. Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(res.rows[0]?.tableExists).toStrictEqual(false); + }); - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + test('migrator : --init - db migrations error', async ({ neonhttp: db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; - // 3. Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsSchema, + migrationsTable, + }); - await db.execute(sql`drop table if exists ${table}`); -}); + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); -test('all date and time columns without timezone second case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(true); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone first case mode string', async ({ db, push }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); + await push({ table }); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); - expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - await db.execute(sql`drop table if exists ${table}`); -}); + // 3. Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); -test('all date and time columns without timezone third case mode date', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); }); - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); + test('all date and time columns without timezone second case mode string', async ({ db, push }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + await push({ table }); - // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); - await db.execute(sql`drop table if exists ${table}`); -}); + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); -test('test mode string for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone third case mode date', async ({ db, push }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); + await push({ table }); - const timestampString = '2022-01-01 00:00:00.123456-0200'; + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + }); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + test('test mode string for timestamp with timezone', async ({ db, push }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + await push({ table }); - await db.execute(sql`drop table if exists ${table}`); -}); + const timestampString = '2022-01-01 00:00:00.123456-0200'; -test('test mode date for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - await db.execute(sql`drop table if exists ${table}`); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + }); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + test('test mode date for timestamp with timezone', async ({ db, push }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + await push({ table }); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - await db.execute(sql`drop table if exists ${table}`); -}); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); -test('test mode string for timestamp with timezone in UTC timezone', async () => { - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); - // set timezone to UTC - await db.execute(sql`set time zone 'UTC'`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); }); - await db.execute(sql`drop table if exists ${table}`); + test('test mode string for timestamp with timezone in UTC timezone', async ({ db, push }) => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); - const timestampString = '2022-01-01 00:00:00.123456-0200'; + const table = pgTable('all_columns_6', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + await push({ table }); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`drop table if exists ${table}`); -}); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); -test.skip('test mode string for timestamp with timezone in different timezone', async () => { - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + }); - // set timezone to HST (UTC - 10) - await db.execute(sql`set time zone 'HST'`); + test.skip('test mode string for timestamp with timezone in different timezone', async ({ db }) => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); - await db.execute(sql`drop table if exists ${table}`); + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql` + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-1000'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + const timestampString = '2022-01-01 00:00:00.123456-1000'; - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456+00' }]); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456+00' }]); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); -skipTests([ - 'migrator : default migration strategy', - 'migrator : migrate with custom schema', - 'migrator : migrate with custom table', - 'migrator : migrate with custom table and custom schema', - 'insert via db.execute + select via db.execute', - 'insert via db.execute + returning', - 'insert via db.execute w/ query builder', - 'all date and time columns without timezone first case mode string', - 'all date and time columns without timezone third case mode date', - 'test mode string for timestamp with timezone', - 'test mode date for timestamp with timezone', - 'test mode string for timestamp with timezone in UTC timezone', - 'nested transaction rollback', - 'transaction rollback', - 'nested transaction', - 'transaction', - 'timestamp timezone', - 'test $onUpdateFn and $onUpdate works as $default', - 'RQB v2 transaction find first - no rows', - 'RQB v2 transaction find first - multiple rows', - 'RQB v2 transaction find first - with relation', - 'RQB v2 transaction find first - placeholders', - 'RQB v2 transaction find many - no rows', - 'RQB v2 transaction find many - multiple rows', - 'RQB v2 transaction find many - with relation', - 'RQB v2 transaction find many - placeholders', - // Disabled until Buffer insertion is fixed - 'all types', -]); -tests(); -cacheTests(); - -beforeEach(async () => { - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`create schema public`); - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); -}); - -test('insert via db.execute + select via db.execute', async () => { - await db.execute( - sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - expect(result.rows).toEqual([{ id: 1, name: 'John' }]); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -test('insert via db.execute + returning', async () => { - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - sql.identifier( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); -}); + test('insert via db.execute + select via db.execute', async ({ db }) => { + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); -test('insert via db.execute w/ query builder', async () => { - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); -}); + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); + }); -test('all types - neon-http', async (ctx) => { - const { db } = ctx.pg; - - await db.execute(sql`CREATE TYPE "public"."en" AS ENUM('enVal1', 'enVal2');`); - await db.execute(sql` - CREATE TABLE "all_types" ( - "serial" serial NOT NULL, - "bigserial53" bigserial NOT NULL, - "bigserial64" bigserial, - "int" integer, - "bigint53" bigint, - "bigint64" bigint, - "bigint_string" bigint, - "bool" boolean, - "bytea" bytea, - "char" char, - "cidr" "cidr", - "date" date, - "date_str" date, - "double" double precision, - "enum" "en", - "inet" "inet", - "interval" interval, - "json" json, - "jsonb" jsonb, - "line" "line", - "line_tuple" "line", - "macaddr" "macaddr", - "macaddr8" "macaddr8", - "numeric" numeric, - "numeric_num" numeric, - "numeric_big" numeric, - "point" "point", - "point_tuple" "point", - "real" real, - "smallint" smallint, - "smallserial" "smallserial" NOT NULL, - "text" text, - "time" time, - "timestamp" timestamp, - "timestamp_tz" timestamp with time zone, - "timestamp_str" timestamp, - "timestamp_tz_str" timestamp with time zone, - "uuid" uuid, - "varchar" varchar, - "arrint" integer[], - "arrbigint53" bigint[], - "arrbigint64" bigint[], - "arrbigint_string" bigint[], - "arrbool" boolean[], - "arrbytea" bytea[], - "arrchar" char[], - "arrcidr" "cidr"[], - "arrdate" date[], - "arrdate_str" date[], - "arrdouble" double precision[], - "arrenum" "en"[], - "arrinet" "inet"[], - "arrinterval" interval[], - "arrjson" json[], - "arrjsonb" jsonb[], - "arrline" "line"[], - "arrline_tuple" "line"[], - "arrmacaddr" "macaddr"[], - "arrmacaddr8" "macaddr8"[], - "arrnumeric" numeric[], - "arrnumeric_num" numeric[], - "arrnumeric_big" numeric[], - "arrpoint" "point"[], - "arrpoint_tuple" "point"[], - "arrreal" real[], - "arrsmallint" smallint[], - "arrtext" text[], - "arrtime" time[], - "arrtimestamp" timestamp[], - "arrtimestamp_tz" timestamp with time zone[], - "arrtimestamp_str" timestamp[], - "arrtimestamp_tz_str" timestamp with time zone[], - "arruuid" uuid[], - "arrvarchar" varchar[] + test('insert via db.execute + returning', async ({ db }) => { + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); - `); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); + }); - await db.insert(allTypesTable).values({ - serial: 1, - smallserial: 15, - bigint53: 9007199254740991, - bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', - bigserial53: 9007199254740991, - bigserial64: 5044565289845416380n, - bool: true, - bytea: null, - char: 'c', - cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', - inet: '192.168.0.1/24', - macaddr: '08:00:2b:01:02:03', - macaddr8: '08:00:2b:01:02:03:04:05', - date: new Date(1741743161623), - dateStr: new Date(1741743161623).toISOString(), - double: 15.35325689124218, - enum: 'enVal1', - int: 621, - interval: '2 months ago', - json: { - str: 'strval', - arr: ['str', 10], - }, - jsonb: { - str: 'strvalb', - arr: ['strb', 11], - }, - line: { - a: 1, - b: 2, - c: 3, - }, - lineTuple: [1, 2, 3], - numeric: '475452353476', - numericNum: 9007199254740991, - numericBig: 5044565289845416380n, - point: { - x: 24.5, - y: 49.6, - }, - pointTuple: [57.2, 94.3], - real: 1.048596, - smallint: 10, - text: 'TEXT STRING', - time: '13:59:28', - timestamp: new Date(1741743161623), - timestampTz: new Date(1741743161623), - timestampStr: new Date(1741743161623).toISOString(), - timestampTzStr: new Date(1741743161623).toISOString(), - uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', - varchar: 'C4-', - arrbigint53: [9007199254740991], - arrbigint64: [5044565289845416380n], - arrbigintString: ['5044565289845416380'], - arrbool: [true], - arrbytea: [Buffer.from('BYTES')], - arrchar: ['c'], - arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], - arrinet: ['192.168.0.1/24'], - arrmacaddr: ['08:00:2b:01:02:03'], - arrmacaddr8: ['08:00:2b:01:02:03:04:05'], - arrdate: [new Date(1741743161623)], - arrdateStr: [new Date(1741743161623).toISOString()], - arrdouble: [15.35325689124218], - arrenum: ['enVal1'], - arrint: [621], - arrinterval: ['2 months ago'], - arrjson: [{ - str: 'strval', - arr: ['str', 10], - }], - arrjsonb: [{ - str: 'strvalb', - arr: ['strb', 11], - }], - arrline: [{ - a: 1, - b: 2, - c: 3, - }], - arrlineTuple: [[1, 2, 3]], - arrnumeric: ['475452353476'], - arrnumericNum: [9007199254740991], - arrnumericBig: [5044565289845416380n], - arrpoint: [{ - x: 24.5, - y: 49.6, - }], - arrpointTuple: [[57.2, 94.3]], - arrreal: [1.048596], - arrsmallint: [10], - arrtext: ['TEXT STRING'], - arrtime: ['13:59:28'], - arrtimestamp: [new Date(1741743161623)], - arrtimestampTz: [new Date(1741743161623)], - arrtimestampStr: [new Date(1741743161623).toISOString()], - arrtimestampTzStr: [new Date(1741743161623).toISOString()], - arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], - arrvarchar: ['C4-'], + test('insert via db.execute w/ query builder', async ({ db }) => { + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); - const rawRes = await db.select().from(allTypesTable); - - type ExpectedType = { - serial: number; - bigserial53: number; - bigserial64: bigint; - int: number | null; - bigint53: number | null; - bigint64: bigint | null; - bigintString: string | null; - bool: boolean | null; - bytea: Buffer | null; - char: string | null; - cidr: string | null; - date: Date | null; - dateStr: string | null; - double: number | null; - enum: 'enVal1' | 'enVal2' | null; - inet: string | null; - interval: string | null; - json: unknown; - jsonb: unknown; - line: { - a: number; - b: number; - c: number; - } | null; - lineTuple: [number, number, number] | null; - macaddr: string | null; - macaddr8: string | null; - numeric: string | null; - numericNum: number | null; - numericBig: bigint | null; - point: { - x: number; - y: number; - } | null; - pointTuple: [number, number] | null; - real: number | null; - smallint: number | null; - smallserial: number; - text: string | null; - time: string | null; - timestamp: Date | null; - timestampTz: Date | null; - timestampStr: string | null; - timestampTzStr: string | null; - uuid: string | null; - varchar: string | null; - arrint: number[] | null; - arrbigint53: number[] | null; - arrbigint64: bigint[] | null; - arrbigintString: string[] | null; - arrbool: boolean[] | null; - arrbytea: Buffer[] | null; - arrchar: string[] | null; - arrcidr: string[] | null; - arrdate: Date[] | null; - arrdateStr: string[] | null; - arrdouble: number[] | null; - arrenum: ('enVal1' | 'enVal2')[] | null; - arrinet: string[] | null; - arrinterval: string[] | null; - arrjson: unknown[] | null; - arrjsonb: unknown[] | null; - arrline: { - a: number; - b: number; - c: number; - }[] | null; - arrlineTuple: [number, number, number][] | null; - arrmacaddr: string[] | null; - arrmacaddr8: string[] | null; - arrnumeric: string[] | null; - arrnumericNum: number[] | null; - arrnumericBig: bigint[] | null; - arrpoint: { x: number; y: number }[] | null; - arrpointTuple: [number, number][] | null; - arrreal: number[] | null; - arrsmallint: number[] | null; - arrtext: string[] | null; - arrtime: string[] | null; - arrtimestamp: Date[] | null; - arrtimestampTz: Date[] | null; - arrtimestampStr: string[] | null; - arrtimestampTzStr: string[] | null; - arruuid: string[] | null; - arrvarchar: string[] | null; - }[]; - - const expectedRes: ExpectedType = [ - { + test('all types - neon-http', async ({ db, push }) => { + const en = pgEnum('en2', ['enVal1', 'enVal2']); + + const allTypesTable = pgTable('all_types', { + serial: serial('serial'), + bigserial53: bigserial('bigserial53', { + mode: 'number', + }), + bigserial64: bigserial('bigserial64', { + mode: 'bigint', + }), + int: integer('int'), + bigint53: bigint('bigint53', { + mode: 'number', + }), + bigint64: bigint('bigint64', { + mode: 'bigint', + }), + bigintString: bigint('bigint_string', { + mode: 'string', + }), + bool: boolean('bool'), + bytea: bytea('bytea'), + char: char('char'), + cidr: cidr('cidr'), + date: date('date', { + mode: 'date', + }), + dateStr: date('date_str', { + mode: 'string', + }), + double: doublePrecision('double'), + enum: en('enum'), + inet: inet('inet'), + interval: interval('interval'), + json: json('json'), + jsonb: jsonb('jsonb'), + line: line('line', { + mode: 'abc', + }), + lineTuple: line('line_tuple', { + mode: 'tuple', + }), + macaddr: macaddr('macaddr'), + macaddr8: macaddr8('macaddr8'), + numeric: numeric('numeric'), + numericNum: numeric('numeric_num', { + mode: 'number', + }), + numericBig: numeric('numeric_big', { + mode: 'bigint', + }), + point: point('point', { + mode: 'xy', + }), + pointTuple: point('point_tuple', { + mode: 'tuple', + }), + real: real('real'), + smallint: smallint('smallint'), + smallserial: smallserial('smallserial'), + text: text('text'), + time: time('time'), + timestamp: timestamp('timestamp', { + mode: 'date', + }), + timestampTz: timestamp('timestamp_tz', { + mode: 'date', + withTimezone: true, + }), + timestampStr: timestamp('timestamp_str', { + mode: 'string', + }), + timestampTzStr: timestamp('timestamp_tz_str', { + mode: 'string', + withTimezone: true, + }), + uuid: uuid('uuid'), + varchar: varchar('varchar'), + arrint: integer('arrint').array(), + arrbigint53: bigint('arrbigint53', { + mode: 'number', + }).array(), + arrbigint64: bigint('arrbigint64', { + mode: 'bigint', + }).array(), + arrbigintString: bigint('arrbigint_string', { + mode: 'string', + }).array(), + arrbool: boolean('arrbool').array(), + arrbytea: bytea('arrbytea').array(), + arrchar: char('arrchar').array(), + arrcidr: cidr('arrcidr').array(), + arrdate: date('arrdate', { + mode: 'date', + }).array(), + arrdateStr: date('arrdate_str', { + mode: 'string', + }).array(), + arrdouble: doublePrecision('arrdouble').array(), + arrenum: en('arrenum').array(), + arrinet: inet('arrinet').array(), + arrinterval: interval('arrinterval').array(), + arrjson: json('arrjson').array(), + arrjsonb: jsonb('arrjsonb').array(), + arrline: line('arrline', { + mode: 'abc', + }).array(), + arrlineTuple: line('arrline_tuple', { + mode: 'tuple', + }).array(), + arrmacaddr: macaddr('arrmacaddr').array(), + arrmacaddr8: macaddr8('arrmacaddr8').array(), + arrnumeric: numeric('arrnumeric').array(), + arrnumericNum: numeric('arrnumeric_num', { + mode: 'number', + }).array(), + arrnumericBig: numeric('arrnumeric_big', { + mode: 'bigint', + }).array(), + arrpoint: point('arrpoint', { + mode: 'xy', + }).array(), + arrpointTuple: point('arrpoint_tuple', { + mode: 'tuple', + }).array(), + arrreal: real('arrreal').array(), + arrsmallint: smallint('arrsmallint').array(), + arrtext: text('arrtext').array(), + arrtime: time('arrtime').array(), + arrtimestamp: timestamp('arrtimestamp', { + mode: 'date', + }).array(), + arrtimestampTz: timestamp('arrtimestamp_tz', { + mode: 'date', + withTimezone: true, + }).array(), + arrtimestampStr: timestamp('arrtimestamp_str', { + mode: 'string', + }).array(), + arrtimestampTzStr: timestamp('arrtimestamp_tz_str', { + mode: 'string', + withTimezone: true, + }).array(), + arruuid: uuid('arruuid').array(), + arrvarchar: varchar('arrvarchar').array(), + }); + + await push({ en, allTypesTable }); + await db.insert(allTypesTable).values({ serial: 1, - bigserial53: 9007199254740991, - bigserial64: 5044565289845416380n, - int: 621, + smallserial: 15, bigint53: 9007199254740991, bigint64: 5044565289845416380n, bigintString: '5044565289845416380', + bigserial53: 9007199254740991, + bigserial64: 5044565289845416380n, bool: true, bytea: null, char: 'c', cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', - date: new Date('2025-03-12T00:00:00.000Z'), - dateStr: '2025-03-12', - double: 15.35325689124218, - enum: 'enVal1', inet: '192.168.0.1/24', - interval: '-2 mons', - json: { str: 'strval', arr: ['str', 10] }, - jsonb: { arr: ['strb', 11], str: 'strvalb' }, - line: { a: 1, b: 2, c: 3 }, - lineTuple: [1, 2, 3], macaddr: '08:00:2b:01:02:03', macaddr8: '08:00:2b:01:02:03:04:05', + date: new Date(1741743161623), + dateStr: new Date(1741743161623).toISOString(), + double: 15.35325689124218, + enum: 'enVal1', + int: 621, + interval: '2 months ago', + json: { + str: 'strval', + arr: ['str', 10], + }, + jsonb: { + str: 'strvalb', + arr: ['strb', 11], + }, + line: { + a: 1, + b: 2, + c: 3, + }, + lineTuple: [1, 2, 3], numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, - point: { x: 24.5, y: 49.6 }, + point: { + x: 24.5, + y: 49.6, + }, pointTuple: [57.2, 94.3], real: 1.048596, smallint: 10, - smallserial: 15, text: 'TEXT STRING', time: '13:59:28', - timestamp: new Date('2025-03-12T01:32:41.623Z'), - timestampTz: new Date('2025-03-12T01:32:41.623Z'), - timestampStr: '2025-03-12 01:32:41.623', - timestampTzStr: '2025-03-12 01:32:41.623+00', + timestamp: new Date(1741743161623), + timestampTz: new Date(1741743161623), + timestampStr: new Date(1741743161623).toISOString(), + timestampTzStr: new Date(1741743161623).toISOString(), uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', varchar: 'C4-', - arrint: [621], arrbigint53: [9007199254740991], arrbigint64: [5044565289845416380n], arrbigintString: ['5044565289845416380'], @@ -818,41 +707,224 @@ test('all types - neon-http', async (ctx) => { arrbytea: [Buffer.from('BYTES')], arrchar: ['c'], arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], - arrdate: [new Date('2025-03-12T00:00:00.000Z')], - arrdateStr: ['2025-03-12'], - arrdouble: [15.35325689124218], - arrenum: ['enVal1'], arrinet: ['192.168.0.1/24'], - arrinterval: ['-2 mons'], - arrjson: [{ str: 'strval', arr: ['str', 10] }], - arrjsonb: [{ arr: ['strb', 11], str: 'strvalb' }], - arrline: [{ a: 1, b: 2, c: 3 }], - arrlineTuple: [[1, 2, 3]], arrmacaddr: ['08:00:2b:01:02:03'], arrmacaddr8: ['08:00:2b:01:02:03:04:05'], + arrdate: [new Date(1741743161623)], + arrdateStr: [new Date(1741743161623).toISOString()], + arrdouble: [15.35325689124218], + arrenum: ['enVal1'], + arrint: [621], + arrinterval: ['2 months ago'], + arrjson: [{ + str: 'strval', + arr: ['str', 10], + }], + arrjsonb: [{ + str: 'strvalb', + arr: ['strb', 11], + }], + arrline: [{ + a: 1, + b: 2, + c: 3, + }], + arrlineTuple: [[1, 2, 3]], arrnumeric: ['475452353476'], arrnumericNum: [9007199254740991], arrnumericBig: [5044565289845416380n], - arrpoint: [{ x: 24.5, y: 49.6 }], + arrpoint: [{ + x: 24.5, + y: 49.6, + }], arrpointTuple: [[57.2, 94.3]], arrreal: [1.048596], arrsmallint: [10], arrtext: ['TEXT STRING'], arrtime: ['13:59:28'], - arrtimestamp: [new Date('2025-03-12T01:32:41.623Z')], - arrtimestampTz: [new Date('2025-03-12T01:32:41.623Z')], - arrtimestampStr: ['2025-03-12 01:32:41.623'], - arrtimestampTzStr: ['2025-03-12 01:32:41.623+00'], + arrtimestamp: [new Date(1741743161623)], + arrtimestampTz: [new Date(1741743161623)], + arrtimestampStr: [new Date(1741743161623).toISOString()], + arrtimestampTzStr: [new Date(1741743161623).toISOString()], arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], arrvarchar: ['C4-'], - }, - ]; + }); - expectTypeOf(rawRes).toEqualTypeOf(); - expect(rawRes).toStrictEqual(expectedRes); + const rawRes = await db.select().from(allTypesTable); + + type ExpectedType = { + serial: number; + bigserial53: number; + bigserial64: bigint; + int: number | null; + bigint53: number | null; + bigint64: bigint | null; + bigintString: string | null; + bool: boolean | null; + bytea: Buffer | null; + char: string | null; + cidr: string | null; + date: Date | null; + dateStr: string | null; + double: number | null; + enum: 'enVal1' | 'enVal2' | null; + inet: string | null; + interval: string | null; + json: unknown; + jsonb: unknown; + line: { + a: number; + b: number; + c: number; + } | null; + lineTuple: [number, number, number] | null; + macaddr: string | null; + macaddr8: string | null; + numeric: string | null; + numericNum: number | null; + numericBig: bigint | null; + point: { + x: number; + y: number; + } | null; + pointTuple: [number, number] | null; + real: number | null; + smallint: number | null; + smallserial: number; + text: string | null; + time: string | null; + timestamp: Date | null; + timestampTz: Date | null; + timestampStr: string | null; + timestampTzStr: string | null; + uuid: string | null; + varchar: string | null; + arrint: number[] | null; + arrbigint53: number[] | null; + arrbigint64: bigint[] | null; + arrbigintString: string[] | null; + arrbool: boolean[] | null; + arrbytea: Buffer[] | null; + arrchar: string[] | null; + arrcidr: string[] | null; + arrdate: Date[] | null; + arrdateStr: string[] | null; + arrdouble: number[] | null; + arrenum: ('enVal1' | 'enVal2')[] | null; + arrinet: string[] | null; + arrinterval: string[] | null; + arrjson: unknown[] | null; + arrjsonb: unknown[] | null; + arrline: { + a: number; + b: number; + c: number; + }[] | null; + arrlineTuple: [number, number, number][] | null; + arrmacaddr: string[] | null; + arrmacaddr8: string[] | null; + arrnumeric: string[] | null; + arrnumericNum: number[] | null; + arrnumericBig: bigint[] | null; + arrpoint: { x: number; y: number }[] | null; + arrpointTuple: [number, number][] | null; + arrreal: number[] | null; + arrsmallint: number[] | null; + arrtext: string[] | null; + arrtime: string[] | null; + arrtimestamp: Date[] | null; + arrtimestampTz: Date[] | null; + arrtimestampStr: string[] | null; + arrtimestampTzStr: string[] | null; + arruuid: string[] | null; + arrvarchar: string[] | null; + }[]; + + const expectedRes: ExpectedType = [ + { + serial: 1, + bigserial53: 9007199254740991, + bigserial64: 5044565289845416380n, + int: 621, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + bool: true, + bytea: null, + char: 'c', + cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', + date: new Date('2025-03-12T00:00:00.000Z'), + dateStr: '2025-03-12', + double: 15.35325689124218, + enum: 'enVal1', + inet: '192.168.0.1/24', + interval: '-2 mons', + json: { str: 'strval', arr: ['str', 10] }, + jsonb: { arr: ['strb', 11], str: 'strvalb' }, + line: { a: 1, b: 2, c: 3 }, + lineTuple: [1, 2, 3], + macaddr: '08:00:2b:01:02:03', + macaddr8: '08:00:2b:01:02:03:04:05', + numeric: '475452353476', + numericNum: 9007199254740991, + numericBig: 5044565289845416380n, + point: { x: 24.5, y: 49.6 }, + pointTuple: [57.2, 94.3], + real: 1.048596, + smallint: 10, + smallserial: 15, + text: 'TEXT STRING', + time: '13:59:28', + timestamp: new Date('2025-03-12T01:32:41.623Z'), + timestampTz: new Date('2025-03-12T01:32:41.623Z'), + timestampStr: '2025-03-12 01:32:41.623', + timestampTzStr: '2025-03-12 01:32:41.623+00', + uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', + varchar: 'C4-', + arrint: [621], + arrbigint53: [9007199254740991], + arrbigint64: [5044565289845416380n], + arrbigintString: ['5044565289845416380'], + arrbool: [true], + arrbytea: [Buffer.from('BYTES')], + arrchar: ['c'], + arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], + arrdate: [new Date('2025-03-12T00:00:00.000Z')], + arrdateStr: ['2025-03-12'], + arrdouble: [15.35325689124218], + arrenum: ['enVal1'], + arrinet: ['192.168.0.1/24'], + arrinterval: ['-2 mons'], + arrjson: [{ str: 'strval', arr: ['str', 10] }], + arrjsonb: [{ arr: ['strb', 11], str: 'strvalb' }], + arrline: [{ a: 1, b: 2, c: 3 }], + arrlineTuple: [[1, 2, 3]], + arrmacaddr: ['08:00:2b:01:02:03'], + arrmacaddr8: ['08:00:2b:01:02:03:04:05'], + arrnumeric: ['475452353476'], + arrnumericNum: [9007199254740991], + arrnumericBig: [5044565289845416380n], + arrpoint: [{ x: 24.5, y: 49.6 }], + arrpointTuple: [[57.2, 94.3]], + arrreal: [1.048596], + arrsmallint: [10], + arrtext: ['TEXT STRING'], + arrtime: ['13:59:28'], + arrtimestamp: [new Date('2025-03-12T01:32:41.623Z')], + arrtimestampTz: [new Date('2025-03-12T01:32:41.623Z')], + arrtimestampStr: ['2025-03-12 01:32:41.623'], + arrtimestampTzStr: ['2025-03-12 01:32:41.623+00'], + arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], + arrvarchar: ['C4-'], + }, + ]; + + expectTypeOf(rawRes).toEqualTypeOf(); + expect(rawRes).toStrictEqual(expectedRes); + }); }); -describe('$withAuth tests', (it) => { +describe.skip('$withAuth tests', (it) => { const client = vi.fn(); const db = drizzle({ client: client as any as NeonQueryFunction, @@ -862,25 +934,25 @@ describe('$withAuth tests', (it) => { relations: defineRelations({ usersTable }), }); - it('$count', async () => { + it.concurrent('$count', async () => { await db.$withAuth('$count').$count(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: '$count' }); }); - it('delete', async () => { + it.concurrent('delete', async () => { await db.$withAuth('delete').delete(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'delete' }); }); - it('select', async () => { + it.concurrent('select', async () => { await db.$withAuth('select').select().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'select' }); }); - it('selectDistinct', async () => { + it.concurrent('selectDistinct', async () => { await db.$withAuth('selectDistinct').selectDistinct().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ @@ -890,7 +962,7 @@ describe('$withAuth tests', (it) => { }); }); - it('selectDistinctOn', async () => { + it.concurrent('selectDistinctOn', async () => { await db.$withAuth('selectDistinctOn').selectDistinctOn([usersTable.name]).from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ @@ -900,7 +972,7 @@ describe('$withAuth tests', (it) => { }); }); - it('update', async () => { + it.concurrent('update', async () => { await db.$withAuth('update').update(usersTable).set({ name: 'CHANGED', }).where(eq(usersTable.name, 'TARGET')).catch(() => null); @@ -908,7 +980,7 @@ describe('$withAuth tests', (it) => { expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'update' }); }); - it('insert', async () => { + it.concurrent('insert', async () => { await db.$withAuth('insert').insert(usersTable).values({ name: 'WITHAUTHUSER', }).catch(() => null); @@ -916,32 +988,32 @@ describe('$withAuth tests', (it) => { expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'insert' }); }); - it('with', async () => { + it.concurrent('with', async () => { await db.$withAuth('with').with(db.$with('WITH').as((qb) => qb.select().from(usersTable))).select().from(usersTable) .catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'with' }); }); - it('rqb', async () => { + it.concurrent('rqb', async () => { await db.$withAuth('rqb')._query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'rqb' }); }); - it('rqbV2', async () => { + it.concurrent('rqbV2', async () => { await db.$withAuth('rqbV2').query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'rqbV2' }); }); - it('exec', async () => { + it.concurrent('exec', async () => { await db.$withAuth('exec').execute(`SELECT 1`).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'exec' }); }); - it('prepared', async () => { + it.concurrent('prepared', async () => { const prep = db.$withAuth('prepared').select().from(usersTable).prepare('withAuthPrepared'); await prep.execute().catch(() => null); @@ -949,7 +1021,7 @@ describe('$withAuth tests', (it) => { expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'prepared' }); }); - it('refreshMaterializedView', async () => { + it.concurrent('refreshMaterializedView', async () => { const johns = pgMaterializedView('johns') .as((qb) => qb.select().from(usersTable).where(eq(usersTable.name, 'John'))); @@ -963,7 +1035,7 @@ describe('$withAuth tests', (it) => { }); }); -describe('$withAuth callback tests', (it) => { +describe.skip('$withAuth callback tests', (it) => { const client = vi.fn(); const db = drizzle({ client: client as any as NeonQueryFunction, @@ -974,37 +1046,37 @@ describe('$withAuth callback tests', (it) => { }); const auth = (token: string) => () => token; - it('$count', async () => { + it.concurrent('$count', async () => { await db.$withAuth(auth('$count')).$count(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('$count'); }); - it('delete', async () => { + it.concurrent('delete', async () => { await db.$withAuth(auth('delete')).delete(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('delete'); }); - it('select', async () => { + it.concurrent('select', async () => { await db.$withAuth(auth('select')).select().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('select'); }); - it('selectDistinct', async () => { + it.concurrent('selectDistinct', async () => { await db.$withAuth(auth('selectDistinct')).selectDistinct().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('selectDistinct'); }); - it('selectDistinctOn', async () => { + it.concurrent('selectDistinctOn', async () => { await db.$withAuth(auth('selectDistinctOn')).selectDistinctOn([usersTable.name]).from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('selectDistinctOn'); }); - it('update', async () => { + it.concurrent('update', async () => { await db.$withAuth(auth('update')).update(usersTable).set({ name: 'CHANGED', }).where(eq(usersTable.name, 'TARGET')).catch(() => null); @@ -1012,7 +1084,7 @@ describe('$withAuth callback tests', (it) => { expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('update'); }); - it('insert', async () => { + it.concurrent('insert', async () => { await db.$withAuth(auth('insert')).insert(usersTable).values({ name: 'WITHAUTHUSER', }).catch(() => null); @@ -1020,7 +1092,7 @@ describe('$withAuth callback tests', (it) => { expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('insert'); }); - it('with', async () => { + it.concurrent('with', async () => { await db.$withAuth(auth('with')).with(db.$with('WITH').as((qb) => qb.select().from(usersTable))).select().from( usersTable, ) @@ -1029,25 +1101,25 @@ describe('$withAuth callback tests', (it) => { expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('with'); }); - it('rqb', async () => { + it.concurrent('rqb', async () => { await db.$withAuth(auth('rqb'))._query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('rqb'); }); - it('rqbV2', async () => { + it.concurrent('rqbV2', async () => { await db.$withAuth(auth('rqbV2')).query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('rqbV2'); }); - it('exec', async () => { + it.concurrent('exec', async () => { await db.$withAuth(auth('exec')).execute(`SELECT 1`).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('exec'); }); - it('prepared', async () => { + it.concurrent('prepared', async () => { const prep = db.$withAuth(auth('prepared')).select().from(usersTable).prepare('withAuthPrepared'); await prep.execute().catch(() => null); @@ -1055,7 +1127,7 @@ describe('$withAuth callback tests', (it) => { expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('prepared'); }); - it('refreshMaterializedView', async () => { + it.concurrent('refreshMaterializedView', async () => { const johns = pgMaterializedView('johns') .as((qb) => qb.select().from(usersTable).where(eq(usersTable.name, 'John'))); @@ -1065,7 +1137,7 @@ describe('$withAuth callback tests', (it) => { }); }); -describe('$withAuth async callback tests', (it) => { +describe.skip('$withAuth async callback tests', (it) => { const client = vi.fn(); const db = drizzle({ client: client as any as NeonQueryFunction, @@ -1076,42 +1148,42 @@ describe('$withAuth async callback tests', (it) => { }); const auth = (token: string) => async () => token; - it('$count', async () => { + it.concurrent('$count', async () => { await db.$withAuth(auth('$count')).$count(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('$count'); }); - it('delete', async () => { + it.concurrent('delete', async () => { await db.$withAuth(auth('delete')).delete(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('delete'); }); - it('select', async () => { + it.concurrent('select', async () => { await db.$withAuth(auth('select')).select().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('select'); }); - it('selectDistinct', async () => { + it.concurrent('selectDistinct', async () => { await db.$withAuth(auth('selectDistinct')).selectDistinct().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('selectDistinct'); }); - it('selectDistinctOn', async () => { + it.concurrent('selectDistinctOn', async () => { await db.$withAuth(auth('selectDistinctOn')).selectDistinctOn([usersTable.name]).from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('selectDistinctOn'); }); - it('update', async () => { + it.concurrent('update', async () => { await db.$withAuth(auth('update')).update(usersTable).set({ name: 'CHANGED', }).where(eq(usersTable.name, 'TARGET')).catch(() => null); @@ -1120,7 +1192,7 @@ describe('$withAuth async callback tests', (it) => { expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('update'); }); - it('insert', async () => { + it.concurrent('insert', async () => { await db.$withAuth(auth('insert')).insert(usersTable).values({ name: 'WITHAUTHUSER', }).catch(() => null); @@ -1129,7 +1201,7 @@ describe('$withAuth async callback tests', (it) => { expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('insert'); }); - it('with', async () => { + it.concurrent('with', async () => { await db.$withAuth(auth('with')).with(db.$with('WITH').as((qb) => qb.select().from(usersTable))).select().from( usersTable, ) @@ -1139,28 +1211,28 @@ describe('$withAuth async callback tests', (it) => { expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('with'); }); - it('rqb', async () => { + it.concurrent('rqb', async () => { await db.$withAuth(auth('rqb'))._query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('rqb'); }); - it('rqbV2', async () => { + it.concurrent('rqbV2', async () => { await db.$withAuth(auth('rqbV2')).query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('rqbV2'); }); - it('exec', async () => { + it.concurrent('exec', async () => { await db.$withAuth(auth('exec')).execute(`SELECT 1`).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('exec'); }); - it('prepared', async () => { + it.concurrent('prepared', async () => { const prep = db.$withAuth(auth('prepared')).select().from(usersTable).prepare('withAuthPrepared'); await prep.execute().catch(() => null); @@ -1169,7 +1241,7 @@ describe('$withAuth async callback tests', (it) => { expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('prepared'); }); - it('refreshMaterializedView', async () => { + it.concurrent('refreshMaterializedView', async () => { const johns = pgMaterializedView('johns') .as((qb) => qb.select().from(usersTable).where(eq(usersTable.name, 'John'))); diff --git a/integration-tests/tests/pg/neon-serverless.test.ts b/integration-tests/tests/pg/neon-serverless.test.ts index 25d2b18c6c..c2509c3814 100644 --- a/integration-tests/tests/pg/neon-serverless.test.ts +++ b/integration-tests/tests/pg/neon-serverless.test.ts @@ -1,586 +1,586 @@ -import { neonConfig, Pool } from '@neondatabase/serverless'; import { eq, sql } from 'drizzle-orm'; -import { drizzle, type NeonDatabase } from 'drizzle-orm/neon-serverless'; import { migrate } from 'drizzle-orm/neon-serverless/migrator'; -import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import ws from 'ws'; -import { skipTests } from '~/common'; +import { getTableConfig, type PgDatabase, pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { describe } from 'node:test'; +import { expect } from 'vitest'; import { randomString } from '~/utils'; -import { mySchema, tests, usersMigratorTable, usersMySchemaTable, usersTable } from './pg-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -let db: NeonDatabase; -let dbGlobalCached: NeonDatabase; -let cachedDb: NeonDatabase; -let client: Pool; - -neonConfig.wsProxy = (host) => `${host}:5446/v1`; -neonConfig.useSecureWebSocket = false; -neonConfig.pipelineTLS = false; -neonConfig.pipelineConnect = false; -neonConfig.webSocketConstructor = ws; - -beforeAll(async () => { - const connectionString = process.env['NEON_SERVERLESS_CONNECTION_STRING']; - if (!connectionString) { - throw new Error('NEON_SERVERLESS_CONNECTION_STRING is not defined'); - } - - client = new Pool({ connectionString }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestCache(), +import { tests } from './common'; +import { neonWsTest as test } from './instrumentation'; +import { usersMigratorTable, usersMySchemaTable, usersTable } from './schema'; + +/* + it doesn't work as expected, scope: "file" treats all these tests as 1 file + thus extra execute statements below + */ +tests(test, []); +describe('neon-serverless', () => { + let db: PgDatabase; + test.sequential('_', async ({ db: _db, push }) => { + db = _db; + + await db.execute('drop schema if exists public, "mySchema" cascade;'); + await db.execute('create schema public'); + await db.execute('create schema "mySchema";'); + + await push({ usersTable, usersMySchemaTable }); }); - dbGlobalCached = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestGlobalCache(), - }); -}); - -afterAll(async () => { - await client?.end(); -}); - -beforeEach((ctx) => { - ctx.pg = { - db, - }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; -}); - -test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + test('migrator : default migration strategy', async () => { + await db.execute(sql`drop table if exists all_columns,users12,"drizzle"."__drizzle_migrations"`); - const result = await db.select().from(usersMigratorTable); + await migrate(db, { migrationsFolder: './drizzle2/pg' }); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); + const result = await db.select().from(usersMigratorTable); -test('migrator : migrate with custom schema', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); + await db.execute(sql`drop table all_columns,users12,"drizzle"."__drizzle_migrations"`); + }); - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); - expect(rowCount && rowCount > 0).toBeTruthy(); + test('migrator : migrate with custom schema', async () => { + await db.execute(sql`drop table if exists all_columns,users12,"drizzle"."__drizzle_migrations"`); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table custom_migrations."__drizzle_migrations"`); -}); + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); + expect(rowCount && rowCount > 0).toBeTruthy(); -test('migrator : migrate with custom table', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + await db.execute(sql`drop table all_columns,users12,custom_migrations."__drizzle_migrations"`); + }); - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - expect(rowCount && rowCount > 0).toBeTruthy(); + test('migrator : migrate with custom table', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns,users12,"drizzle"."__drizzle_migrations"`); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(rowCount && rowCount > 0).toBeTruthy(); -test('migrator : migrate with custom table and custom schema', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: 'custom_migrations', + await db.execute(sql`drop table all_columns,users12,"drizzle".${sql.identifier(customTable)}`); }); - // test if the custom migrations table was created - const { rowCount } = await db.execute( - sql`select * from custom_migrations.${sql.identifier(customTable)};`, - ); - expect(rowCount && rowCount > 0).toBeTruthy(); + test('migrator : migrate with custom table and custom schema', async () => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns,users12,"drizzle"."__drizzle_migrations"`); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: 'custom_migrations', + }); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table custom_migrations.${sql.identifier(customTable)}`); -}); + // test if the custom migrations table was created + const { rowCount } = await db.execute( + sql`select * from custom_migrations.${sql.identifier(customTable)};`, + ); + expect(rowCount && rowCount > 0).toBeTruthy(); -test('all date and time columns without timezone first case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns,users12,custom_migrations.${sql.identifier(customTable)}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone first case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); - // 1. Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - // 3. Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - await db.execute(sql`drop table if exists ${table}`); -}); - -test('all date and time columns without timezone second case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone second case mode string', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); - await db.execute(sql` + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); - - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); - expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`drop table if exists ${table}`); -}); + expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); -test('all date and time columns without timezone third case mode date', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); + // 1. Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); + // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); -test('test mode string for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('test mode string for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql` + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`drop table if exists ${table}`); -}); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); -test('test mode date for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('test mode date for timestamp with timezone', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); - const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -test('test mode string for timestamp with timezone in UTC timezone', async () => { - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + test('test mode string for timestamp with timezone in UTC timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - // set timezone to UTC - await db.execute(sql`set time zone 'UTC'`); + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql`drop table if exists ${table}`); + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-0200'; + const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -test.skip('test mode string for timestamp with timezone in different timezone', async () => { - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + test.skip('test mode string for timestamp with timezone in different timezone', async () => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - // set timezone to HST (UTC - 10) - await db.execute(sql`set time zone 'HST'`); + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone 'HST'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql`drop table if exists ${table}`); + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-1000'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + const timestampString = '2022-01-01 00:00:00.123456-1000'; - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456+00' }]); + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`drop table if exists ${table}`); -}); - -test('select all fields', async (ctx) => { - const { db } = ctx.pg; + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456+00' }]); - const now = Date.now(); + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); - await db.insert(usersTable).values({ name: 'John' }); - const result = await db.select().from(usersTable); - - expect(result[0]!.createdAt).toBeInstanceOf(Date); - expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(3000); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -test('update with returning all fields', async (ctx) => { - const { db } = ctx.pg; + test('select all fields', async () => { + await db.execute(`truncate table users restart identity;`); + const now = Date.now(); - const now = Date.now(); + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .update(usersTable) - .set({ name: 'Jane' }) - .where(eq(usersTable.name, 'John')) - .returning(); + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(5000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); - expect(users[0]!.createdAt).toBeInstanceOf(Date); - expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); - expect(users).toEqual([ - { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); + test('update with returning all fields', async () => { + await db.execute(`truncate table users restart identity;`); + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(users).toEqual([ + { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, + ]); + }); -test('delete with returning all fields', async (ctx) => { - const { db } = ctx.pg; + test('delete with returning all fields', async () => { + await db.execute(`truncate table users restart identity;`); + const now = Date.now(); - const now = Date.now(); + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(users).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, + ]); + }); - expect(users[0]!.createdAt).toBeInstanceOf(Date); - expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); - expect(users).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, - ]); -}); + test('mySchema :: select all fields', async () => { + await db.execute(`truncate table users restart identity;`); + const now = Date.now(); -test('mySchema :: select all fields', async (ctx) => { - const { db } = ctx.pg; + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); - const now = Date.now(); + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const result = await db.select().from(usersMySchemaTable); + test('mySchema :: delete with returning all fields', async () => { + await db.execute(`truncate table "mySchema"."users" restart identity;`); + const now = Date.now(); - expect(result[0]!.createdAt).toBeInstanceOf(Date); - expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(3000); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); -}); + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); -test('mySchema :: delete with returning all fields', async (ctx) => { - const { db } = ctx.pg; - - const now = Date.now(); + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); + expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + }); - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); + test('insert via db.execute + select via db.execute', async () => { + await db.execute(`truncate table users restart identity;`); + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); - expect(users[0]!.createdAt).toBeInstanceOf(Date); - expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); - expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); -}); + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); + }); -skipTests([ - 'migrator : default migration strategy', - 'migrator : migrate with custom schema', - 'migrator : migrate with custom table', - 'migrator : migrate with custom table and custom schema', - 'insert via db.execute + select via db.execute', - 'insert via db.execute + returning', - 'insert via db.execute w/ query builder', - 'all date and time columns without timezone first case mode string', - 'all date and time columns without timezone third case mode date', - 'test mode string for timestamp with timezone', - 'test mode date for timestamp with timezone', - 'test mode string for timestamp with timezone in UTC timezone', - 'nested transaction rollback', - 'transaction rollback', - 'nested transaction', - 'transaction', - 'timestamp timezone', - 'test $onUpdateFn and $onUpdate works as $default', - 'select all fields', - 'update with returning all fields', - 'delete with returning all fields', - 'mySchema :: select all fields', - 'mySchema :: delete with returning all fields', -]); -tests(); -cacheTests(); - -beforeEach(async () => { - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`drop schema if exists ${mySchema} cascade`); - - await db.execute(sql`create schema public`); - await db.execute(sql`create schema ${mySchema}`); - - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - - await db.execute( - sql` - create table ${usersMySchemaTable} ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); -}); + test('insert via db.execute + returning', async () => { + await db.execute(`truncate table users restart identity;`); + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); + }); -test('insert via db.execute + select via db.execute', async () => { - await db.execute( - sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, - ); + test('insert via db.execute w/ query builder', async () => { + await db.execute(`truncate table users restart identity;`); + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); + }); - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - expect(result.rows).toEqual([{ id: 1, name: 'John' }]); -}); + test('migrator : --init', async () => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(false); + }); -test('insert via db.execute + returning', async () => { - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - sql.identifier( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); -}); + test('migrator : --init - local migrations error', async () => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(res.rows[0]?.tableExists).toStrictEqual(false); + }); -test('insert via db.execute w/ query builder', async () => { - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); + test('migrator : --init - db migrations error', async () => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsSchema, + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(true); + }); }); diff --git a/integration-tests/tests/pg/node-postgres.test.ts b/integration-tests/tests/pg/node-postgres.test.ts index e80aca66d6..7633a0b62c 100644 --- a/integration-tests/tests/pg/node-postgres.test.ts +++ b/integration-tests/tests/pg/node-postgres.test.ts @@ -1,493 +1,514 @@ -import retry from 'async-retry'; import { sql } from 'drizzle-orm'; -import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; -import { drizzle } from 'drizzle-orm/node-postgres'; import { migrate } from 'drizzle-orm/node-postgres/migrator'; -import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; -import { Client } from 'pg'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; +import { getTableConfig, pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { describe, expect } from 'vitest'; import { randomString } from '~/utils'; -import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -let db: NodePgDatabase; -let client: Client; -let dbGlobalCached: NodePgDatabase; -let cachedDb: NodePgDatabase; - -beforeAll(async () => { - let connectionString; - if (process.env['PG_CONNECTION_STRING']) { - connectionString = process.env['PG_CONNECTION_STRING']; - } else { - const { connectionString: conStr } = await createDockerDB(); - connectionString = conStr; - } - client = await retry(async () => { - client = new Client(connectionString); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); -}); - -afterAll(async () => { - await client?.end(); -}); - -beforeEach((ctx) => { - ctx.pg = { - db, - }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; -}); +import { tests } from './common'; +import { nodePostgresTest as test } from './instrumentation'; +import { usersMigratorTable, usersTable } from './schema'; -test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); +tests(test, []); - await migrate(db, { migrationsFolder: './drizzle2/pg' }); +describe('migrator', () => { + test('migrator : default migration strategy', async ({ db }) => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + await migrate(db, { migrationsFolder: './drizzle2/pg' }); - const result = await db.select().from(usersMigratorTable); + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + const result = await db.select().from(usersMigratorTable); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); -test('migrator : migrate with custom schema', async () => { - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); + }); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); + test('migrator : migrate with custom schema', async ({ db }) => { + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); - expect(rowCount && rowCount > 0).toBeTruthy(); + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); + expect(rowCount && rowCount > 0).toBeTruthy(); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); -}); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); -test('migrator : migrate with custom table', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); + }); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + test('migrator : migrate with custom table', async ({ db }) => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - // test if the custom migrations table was created - const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - expect(rowCount && rowCount > 0).toBeTruthy(); + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + // test if the custom migrations table was created + const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(rowCount && rowCount > 0).toBeTruthy(); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); -test('migrator : migrate with custom table and custom schema', async () => { - const customTable = randomString(); - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: customSchema, + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); - // test if the custom migrations table was created - const { rowCount } = await db.execute( - sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, - ); - expect(rowCount && rowCount > 0).toBeTruthy(); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); -}); - -test('all date and time columns without timezone first case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + test('migrator : migrate with custom table and custom schema', async ({ db }) => { + const customTable = randomString(); + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: customSchema, + }); + + // test if the custom migrations table was created + const { rowCount } = await db.execute( + sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, + ); + expect(rowCount && rowCount > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone first case mode string', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); - await db.execute(sql` + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); - // 1. Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); - - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); - // 3. Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + // 3. Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`drop table if exists ${table}`); -}); + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); -test('all date and time columns without timezone second case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone second case mode string', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); - await db.execute(sql` + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); - - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); - expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`drop table if exists ${table}`); -}); + expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); -test('all date and time columns without timezone third case mode date', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone third case mode date', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); - await db.execute(sql` + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 1. Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); - // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`drop table if exists ${table}`); -}); + // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); -test('test mode string for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('test mode string for timestamp with timezone', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql` + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - await db.execute(sql`drop table if exists ${table}`); -}); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); -test('test mode date for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('test mode date for timestamp with timezone', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); - await db.execute(sql` + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); - const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -test('test mode string for timestamp with timezone in UTC timezone', async () => { - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + test('test mode string for timestamp with timezone in UTC timezone', async ({ db }) => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - // set timezone to UTC - await db.execute(sql`set time zone 'UTC'`); + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql`drop table if exists ${table}`); + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-0200'; + const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -test('test mode string for timestamp with timezone in different timezone', async () => { - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + test('test mode string for timestamp with timezone in different timezone', async ({ db }) => { + // get current timezone from db + const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - // set timezone to HST (UTC - 10) - await db.execute(sql`set time zone '-10'`); + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone '-10'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql`drop table if exists ${table}`); + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-1000'; + const timestampString = '2022-01-01 00:00:00.123456-1000'; - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); + expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); - await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); + await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`drop table if exists ${table}`); + }); + test('insert via db.execute + select via db.execute', async ({ db }) => { + await db.execute( + sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>( + sql`select id, name from "users"`, + ); + expect(result.rows).toEqual([{ id: 1, name: 'John' }]); + }); -skipTests([ - 'migrator : default migration strategy', - 'migrator : migrate with custom schema', - 'migrator : migrate with custom table', - 'migrator : migrate with custom table and custom schema', - 'insert via db.execute + select via db.execute', - 'insert via db.execute + returning', - 'insert via db.execute w/ query builder', - 'all date and time columns without timezone first case mode string', - 'all date and time columns without timezone third case mode date', - 'test mode string for timestamp with timezone', - 'test mode date for timestamp with timezone', - 'test mode string for timestamp with timezone in UTC timezone', - 'test mode string for timestamp with timezone in different timezone', -]); -tests(); -cacheTests(); - -beforeEach(async () => { - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`create schema public`); - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); -}); + test('insert via db.execute + returning', async ({ db }) => { + const inserted = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${ + sql.identifier( + usersTable.name.name, + ) + }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); + }); -test('insert via db.execute + select via db.execute', async () => { - await db.execute( - sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, - ); + test('insert via db.execute w/ query builder', async ({ db }) => { + const inserted = await db.execute>( + db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); + }); - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - expect(result.rows).toEqual([{ id: 1, name: 'John' }]); -}); + test('migrator : --init', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + }) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(false); + }); -test('insert via db.execute + returning', async () => { - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - sql.identifier( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); -}); + test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + }) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(res.rows[0]?.tableExists).toStrictEqual(false); + }); -test('insert via db.execute w/ query builder', async () => { - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); + test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsSchema, + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + }) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(true); + }); }); diff --git a/integration-tests/tests/pg/pg-common-cache.ts b/integration-tests/tests/pg/pg-common-cache.ts deleted file mode 100644 index bcd5b14bea..0000000000 --- a/integration-tests/tests/pg/pg-common-cache.ts +++ /dev/null @@ -1,400 +0,0 @@ -import type Docker from 'dockerode'; -import { eq, getTableName, is, sql, Table } from 'drizzle-orm'; -import type { MutationOption } from 'drizzle-orm/cache/core'; -import { Cache } from 'drizzle-orm/cache/core'; -import type { CacheConfig } from 'drizzle-orm/cache/core/types'; -import type { PgDatabase, PgQueryResultHKT } from 'drizzle-orm/pg-core'; -import { alias, boolean, integer, jsonb, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; -import Keyv from 'keyv'; -import { afterAll, beforeEach, describe, expect, test, vi } from 'vitest'; - -// eslint-disable-next-line drizzle-internal/require-entity-kind -export class TestGlobalCache extends Cache { - private globalTtl: number = 1000; - private usedTablesPerKey: Record = {}; - - constructor(private kv: Keyv = new Keyv()) { - super(); - } - - override strategy(): 'explicit' | 'all' { - return 'all'; - } - override async get(key: string, _tables: string[], _isTag: boolean): Promise { - const res = await this.kv.get(key) ?? undefined; - return res; - } - override async put( - key: string, - response: any, - tables: string[], - isTag: boolean, - config?: CacheConfig, - ): Promise { - await this.kv.set(key, response, config ? config.ex : this.globalTtl); - for (const table of tables) { - const keys = this.usedTablesPerKey[table]; - if (keys === undefined) { - this.usedTablesPerKey[table] = [key]; - } else { - keys.push(key); - } - } - } - override async onMutate(params: MutationOption): Promise { - const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; - const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; - - const keysToDelete = new Set(); - - for (const table of tablesArray) { - const tableName = is(table, Table) ? getTableName(table) : table as string; - const keys = this.usedTablesPerKey[tableName] ?? []; - for (const key of keys) keysToDelete.add(key); - } - - if (keysToDelete.size > 0 || tagsArray.length > 0) { - for (const tag of tagsArray) { - await this.kv.delete(tag); - } - - for (const key of keysToDelete) { - await this.kv.delete(key); - for (const table of tablesArray) { - const tableName = is(table, Table) ? getTableName(table) : table as string; - this.usedTablesPerKey[tableName] = []; - } - } - } - } -} - -// eslint-disable-next-line drizzle-internal/require-entity-kind -export class TestCache extends TestGlobalCache { - override strategy(): 'explicit' | 'all' { - return 'explicit'; - } -} - -declare module 'vitest' { - interface TestContext { - cachedPg: { - db: PgDatabase; - dbGlobalCached: PgDatabase; - }; - } -} - -const usersTable = pgTable('users', { - id: serial().primaryKey(), - name: text().notNull(), - verified: boolean().notNull().default(false), - jsonb: jsonb().$type(), - createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), -}); - -const postsTable = pgTable('posts', { - id: serial().primaryKey(), - description: text().notNull(), - userId: integer('city_id').references(() => usersTable.id), -}); - -let pgContainer: Docker.Container | undefined; - -afterAll(async () => { - await pgContainer?.stop().catch(console.error); -}); - -export function tests() { - describe('common', () => { - beforeEach(async (ctx) => { - const { db, dbGlobalCached } = ctx.cachedPg; - await db.execute(sql`drop schema if exists public cascade`); - await db.$cache?.invalidate({ tables: 'users' }); - await dbGlobalCached.$cache?.invalidate({ tables: 'users' }); - await db.execute(sql`create schema public`); - // public users - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); - }); - - test('test force invalidate', async (ctx) => { - const { db } = ctx.cachedPg; - - const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); - await db.$cache?.invalidate({ tables: 'users' }); - expect(spyInvalidate).toHaveBeenCalledTimes(1); - }); - - test('default global config - no cache should be hit', async (ctx) => { - const { db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('default global config + enable cache on select: get, put', async (ctx) => { - const { db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache(); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { - const { db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - - spyPut.mockClear(); - spyGet.mockClear(); - spyInvalidate.mockClear(); - - await db.insert(usersTable).values({ name: 'John' }); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(1); - }); - - test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { - const { db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - - await db.insert(usersTable).values({ name: 'John' }); - - // invalidate force - await db.$cache?.invalidate({ tags: ['custom'] }); - }); - - test('global: true + disable cache', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache(false); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('global: true - cache should be hit', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('global: true - cache: false on select - no cache hit', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache(false); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ autoInvalidate: false }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - - spyPut.mockClear(); - spyGet.mockClear(); - spyInvalidate.mockClear(); - - await db.insert(usersTable).values({ name: 'John' }); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(1); - }); - - test('global: true - with custom tag', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - - await db.insert(usersTable).values({ name: 'John' }); - - // invalidate force - await db.$cache?.invalidate({ tags: ['custom'] }); - }); - - test('global: true - with custom tag + with autoinvalidate', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedPg; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ tag: 'custom' }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - - await db.insert(usersTable).values({ name: 'John' }); - - expect(spyInvalidate).toHaveBeenCalledTimes(1); - - // invalidate force - await db.$cache?.invalidate({ tags: ['custom'] }); - }); - - // check select used tables - test('check simple select used tables', (ctx) => { - const { db } = ctx.cachedPg; - - // @ts-expect-error - expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); - // @ts-expect-error - expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); - }); - // check select+join used tables - test('select+join', (ctx) => { - const { db } = ctx.cachedPg; - - // @ts-expect-error - expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) - .toStrictEqual(['users', 'posts']); - expect( - // @ts-expect-error - db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), - ).toStrictEqual(['users', 'posts']); - }); - // check select+2join used tables - test('select+2joins', (ctx) => { - const { db } = ctx.cachedPg; - - expect( - db.select().from(usersTable).leftJoin( - postsTable, - eq(usersTable.id, postsTable.userId), - ).leftJoin( - alias(postsTable, 'post2'), - eq(usersTable.id, postsTable.userId), - ) - // @ts-expect-error - .getUsedTables(), - ) - .toStrictEqual(['users', 'posts']); - expect( - db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( - alias(postsTable, 'post2'), - eq(usersTable.id, postsTable.userId), - // @ts-expect-error - ).getUsedTables(), - ).toStrictEqual(['users', 'posts']); - }); - // select subquery used tables - test('select+join', (ctx) => { - const { db } = ctx.cachedPg; - - const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); - db.select().from(sq); - - // @ts-expect-error - expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); - }); - }); -} diff --git a/integration-tests/tests/pg/pg-custom.test.ts b/integration-tests/tests/pg/pg-custom.test.ts index df5e608184..d178f16db2 100644 --- a/integration-tests/tests/pg/pg-custom.test.ts +++ b/integration-tests/tests/pg/pg-custom.test.ts @@ -1,58 +1,9 @@ -import retry from 'async-retry'; -import type Docker from 'dockerode'; import { asc, eq, sql } from 'drizzle-orm'; -import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; -import { drizzle } from 'drizzle-orm/node-postgres'; import { migrate } from 'drizzle-orm/node-postgres/migrator'; import { alias, customType, pgTable, pgTableCreator, serial, text } from 'drizzle-orm/pg-core'; -import { Client } from 'pg'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { expect } from 'vitest'; import { randomString } from '~/utils'; -import { createDockerDB } from './pg-common'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -let db: NodePgDatabase; -let client: Client; -let container: Docker.Container | undefined; - -beforeAll(async () => { - let connectionString; - if (process.env['PG_CONNECTION_STRING']) { - connectionString = process.env['PG_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } - client = await retry(async () => { - client = new Client(connectionString); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - await client?.end(); - await container?.stop().catch(console.error); -}); - -beforeEach((ctx) => { - ctx.pg = { - db, - }; -}); +import { nodePostgresTest as test } from './instrumentation'; const customSerial = customType<{ data: number; notNull: true; default: true }>({ dataType() { @@ -108,8 +59,7 @@ const usersMigratorTable = pgTable('users12', { email: text('email').notNull(), }); -beforeEach(async (ctx) => { - const { db } = ctx.pg; +test.beforeEach(async ({ db }) => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( @@ -125,9 +75,7 @@ beforeEach(async (ctx) => { ); }); -test('select all fields', async (ctx) => { - const { db } = ctx.pg; - +test('select all fields', async ({ db }) => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); @@ -138,9 +86,7 @@ test('select all fields', async (ctx) => { expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test('select sql', async (ctx) => { - const { db } = ctx.pg; - +test('select sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, @@ -149,9 +95,7 @@ test('select sql', async (ctx) => { expect(users).toEqual([{ name: 'JOHN' }]); }); -test('select typed sql', async (ctx) => { - const { db } = ctx.pg; - +test('select typed sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, @@ -160,9 +104,7 @@ test('select typed sql', async (ctx) => { expect(users).toEqual([{ name: 'JOHN' }]); }); -test('insert returning sql', async (ctx) => { - const { db } = ctx.pg; - +test('insert returning sql', async ({ db }) => { const users = await db.insert(usersTable).values({ name: 'John' }).returning({ name: sql`upper(${usersTable.name})`, }); @@ -170,9 +112,7 @@ test('insert returning sql', async (ctx) => { expect(users).toEqual([{ name: 'JOHN' }]); }); -test('delete returning sql', async (ctx) => { - const { db } = ctx.pg; - +test('delete returning sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, @@ -181,9 +121,7 @@ test('delete returning sql', async (ctx) => { expect(users).toEqual([{ name: 'JOHN' }]); }); -test('update returning sql', async (ctx) => { - const { db } = ctx.pg; - +test('update returning sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, @@ -192,9 +130,7 @@ test('update returning sql', async (ctx) => { expect(users).toEqual([{ name: 'JANE' }]); }); -test('update with returning all fields', async (ctx) => { - const { db } = ctx.pg; - +test('update with returning all fields', async ({ db }) => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); @@ -205,9 +141,7 @@ test('update with returning all fields', async (ctx) => { expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); -test('update with returning partial', async (ctx) => { - const { db } = ctx.pg; - +test('update with returning partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, @@ -217,9 +151,7 @@ test('update with returning partial', async (ctx) => { expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); -test('delete with returning all fields', async (ctx) => { - const { db } = ctx.pg; - +test('delete with returning all fields', async ({ db }) => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); @@ -230,9 +162,7 @@ test('delete with returning all fields', async (ctx) => { expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); -test('delete with returning partial', async (ctx) => { - const { db } = ctx.pg; - +test('delete with returning partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, @@ -242,9 +172,7 @@ test('delete with returning partial', async (ctx) => { expect(users).toEqual([{ id: 1, name: 'John' }]); }); -test('insert + select', async (ctx) => { - const { db } = ctx.pg; - +test('insert + select', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); @@ -257,9 +185,7 @@ test('insert + select', async (ctx) => { ]); }); -test('json insert', async (ctx) => { - const { db } = ctx.pg; - +test('json insert', async ({ db }) => { await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, @@ -270,18 +196,14 @@ test('json insert', async (ctx) => { expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); -test('insert with overridden default values', async (ctx) => { - const { db } = ctx.pg; - +test('insert with overridden default values', async ({ db }) => { await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); -test('insert many', async (ctx) => { - const { db } = ctx.pg; - +test('insert many', async ({ db }) => { await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -303,9 +225,7 @@ test('insert many', async (ctx) => { ]); }); -test('insert many with returning', async (ctx) => { - const { db } = ctx.pg; - +test('insert many with returning', async ({ db }) => { const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -327,9 +247,7 @@ test('insert many with returning', async (ctx) => { ]); }); -test('select with group by as field', async (ctx) => { - const { db } = ctx.pg; - +test('select with group by as field', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -338,9 +256,7 @@ test('select with group by as field', async (ctx) => { expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); -test('select with group by as sql', async (ctx) => { - const { db } = ctx.pg; - +test('select with group by as sql', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -349,9 +265,7 @@ test('select with group by as sql', async (ctx) => { expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); -test('select with group by as sql + column', async (ctx) => { - const { db } = ctx.pg; - +test('select with group by as sql + column', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -360,9 +274,7 @@ test('select with group by as sql + column', async (ctx) => { expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); -test('select with group by as column + sql', async (ctx) => { - const { db } = ctx.pg; - +test('select with group by as column + sql', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -371,9 +283,7 @@ test('select with group by as column + sql', async (ctx) => { expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); -test('select with group by complex query', async (ctx) => { - const { db } = ctx.pg; - +test('select with group by complex query', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -384,9 +294,7 @@ test('select with group by complex query', async (ctx) => { expect(result).toEqual([{ name: 'Jane' }]); }); -test('build query', async (ctx) => { - const { db } = ctx.pg; - +test('build query', async ({ db }) => { const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); @@ -397,16 +305,13 @@ test('build query', async (ctx) => { }); }); -test('insert sql', async (ctx) => { - const { db } = ctx.pg; - +test('insert sql', async ({ db }) => { await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('partial join with alias', async (ctx) => { - const { db } = ctx.pg; +test('partial join with alias', async ({ db }) => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); @@ -430,9 +335,7 @@ test('partial join with alias', async (ctx) => { }]); }); -test('full join with alias', async (ctx) => { - const { db } = ctx.pg; - +test('full join with alias', async ({ db }) => { const pgTable = pgTableCreator((name) => `prefixed_${name}`); const users = pgTable('users', { @@ -465,18 +368,14 @@ test('full join with alias', async (ctx) => { await db.execute(sql`drop table ${users}`); }); -test('insert with spaces', async (ctx) => { - const { db } = ctx.pg; - +test('insert with spaces', async ({ db }) => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); -test('prepared statement', async (ctx) => { - const { db } = ctx.pg; - +test('prepared statement', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ id: usersTable.id, @@ -488,9 +387,7 @@ test('prepared statement', async (ctx) => { expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('prepared statement reuse', async (ctx) => { - const { db } = ctx.pg; - +test('prepared statement reuse', async ({ db }) => { const stmt = db.insert(usersTable).values({ verified: true, name: sql.placeholder('name'), @@ -520,9 +417,7 @@ test('prepared statement reuse', async (ctx) => { ]); }); -test('prepared statement with placeholder in .where', async (ctx) => { - const { db } = ctx.pg; - +test('prepared statement with placeholder in .where', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: usersTable.id, @@ -535,9 +430,7 @@ test('prepared statement with placeholder in .where', async (ctx) => { expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('prepared statement with placeholder in .limit', async (ctx) => { - const { db } = ctx.pg; - +test('prepared statement with placeholder in .limit', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db .select({ @@ -555,9 +448,7 @@ test('prepared statement with placeholder in .limit', async (ctx) => { expect(result).toHaveLength(1); }); -test('prepared statement with placeholder in .offset', async (ctx) => { - const { db } = ctx.pg; - +test('prepared statement with placeholder in .offset', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db .select({ @@ -573,7 +464,7 @@ test('prepared statement with placeholder in .offset', async (ctx) => { expect(result).toEqual([{ id: 2, name: 'John1' }]); }); -test('migrator : default migration strategy', async () => { +test('migrator : default migration strategy', async ({ db }) => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); @@ -591,7 +482,7 @@ test('migrator : default migration strategy', async () => { await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); -test('migrator : migrate with custom schema', async () => { +test('migrator : migrate with custom schema', async ({ db }) => { const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); @@ -613,7 +504,7 @@ test('migrator : migrate with custom schema', async () => { await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); }); -test('migrator : migrate with custom table', async () => { +test('migrator : migrate with custom table', async ({ db }) => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); @@ -635,7 +526,7 @@ test('migrator : migrate with custom table', async () => { await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); -test('migrator : migrate with custom table and custom schema', async () => { +test('migrator : migrate with custom table and custom schema', async ({ db }) => { const customTable = randomString(); const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); @@ -664,14 +555,14 @@ test('migrator : migrate with custom table and custom schema', async () => { await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); }); -test('insert via db.execute + select via db.execute', async () => { +test('insert via db.execute + select via db.execute', async ({ db }) => { await db.execute(sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); -test('insert via db.execute + returning', async () => { +test('insert via db.execute + returning', async ({ db }) => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier(usersTable.name.name) @@ -680,16 +571,14 @@ test('insert via db.execute + returning', async () => { expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); -test('insert via db.execute w/ query builder', async () => { +test('insert via db.execute w/ query builder', async ({ db }) => { const inserted = await db.execute>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); -test('build query insert with onConflict do update', async (ctx) => { - const { db } = ctx.pg; - +test('build query insert with onConflict do update', async ({ db }) => { const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) @@ -702,9 +591,7 @@ test('build query insert with onConflict do update', async (ctx) => { }); }); -test('build query insert with onConflict do update / multiple columns', async (ctx) => { - const { db } = ctx.pg; - +test('build query insert with onConflict do update / multiple columns', async ({ db }) => { const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) @@ -717,9 +604,7 @@ test('build query insert with onConflict do update / multiple columns', async (c }); }); -test('build query insert with onConflict do nothing', async (ctx) => { - const { db } = ctx.pg; - +test('build query insert with onConflict do nothing', async ({ db }) => { const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing() @@ -732,9 +617,7 @@ test('build query insert with onConflict do nothing', async (ctx) => { }); }); -test('build query insert with onConflict do nothing + target', async (ctx) => { - const { db } = ctx.pg; - +test('build query insert with onConflict do nothing + target', async ({ db }) => { const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersTable.id }) @@ -747,9 +630,7 @@ test('build query insert with onConflict do nothing + target', async (ctx) => { }); }); -test('insert with onConflict do update', async (ctx) => { - const { db } = ctx.pg; - +test('insert with onConflict do update', async ({ db }) => { await db.insert(usersTable) .values({ name: 'John' }); @@ -764,9 +645,7 @@ test('insert with onConflict do update', async (ctx) => { expect(res).toEqual([{ id: 1, name: 'John1' }]); }); -test('insert with onConflict do nothing', async (ctx) => { - const { db } = ctx.pg; - +test('insert with onConflict do nothing', async ({ db }) => { await db.insert(usersTable) .values({ name: 'John' }); @@ -781,9 +660,7 @@ test('insert with onConflict do nothing', async (ctx) => { expect(res).toEqual([{ id: 1, name: 'John' }]); }); -test('insert with onConflict do nothing + target', async (ctx) => { - const { db } = ctx.pg; - +test('insert with onConflict do nothing + target', async ({ db }) => { await db.insert(usersTable) .values({ name: 'John' }); diff --git a/integration-tests/tests/pg/pg-proxy.test.ts b/integration-tests/tests/pg/pg-proxy.test.ts index a91e973952..145f2bf502 100644 --- a/integration-tests/tests/pg/pg-proxy.test.ts +++ b/integration-tests/tests/pg/pg-proxy.test.ts @@ -1,146 +1,40 @@ -import retry from 'async-retry'; import { sql } from 'drizzle-orm'; -import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; -import type { PgRemoteDatabase } from 'drizzle-orm/pg-proxy'; -import { drizzle as proxyDrizzle } from 'drizzle-orm/pg-proxy'; +import { getTableConfig, pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import { migrate } from 'drizzle-orm/pg-proxy/migrator'; -import * as pg from 'pg'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; -import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; -import relations from './relations'; - -// eslint-disable-next-line drizzle-internal/require-entity-kind -class ServerSimulator { - constructor(private db: pg.Client) { - const { types } = pg; - - types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); - types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); - types.setTypeParser(types.builtins.DATE, (val) => val); - types.setTypeParser(types.builtins.INTERVAL, (val) => val); - types.setTypeParser(1231 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); - types.setTypeParser(1115 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); - types.setTypeParser(1185 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); - types.setTypeParser(1187 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); - types.setTypeParser(1182 as (typeof types.builtins)[keyof typeof types.builtins], (val) => val); - } - - async query(sql: string, params: any[], method: 'all' | 'execute') { - if (method === 'all') { - try { - const result = await this.db.query({ - text: sql, - values: params, - rowMode: 'array', - }); - - return { data: result.rows as any }; - } catch (e: any) { - return { error: e }; - } - } else if (method === 'execute') { - try { - const result = await this.db.query({ - text: sql, - values: params, - }); - - return { data: result.rows as any }; - } catch (e: any) { - return { error: e }; - } - } else { - return { error: 'Unknown method value' }; - } - } - - async migrations(queries: string[]) { - await this.db.query('BEGIN'); - try { - for (const query of queries) { - await this.db.query(query); - } - await this.db.query('COMMIT'); - } catch (e) { - await this.db.query('ROLLBACK'); - throw e; - } - - return {}; - } -} - -const ENABLE_LOGGING = false; - -let db: PgRemoteDatabase; -let dbGlobalCached: PgRemoteDatabase; -let cachedDb: PgRemoteDatabase; -let client: pg.Client; -let serverSimulator: ServerSimulator; - -beforeAll(async () => { - let connectionString; - if (process.env['PG_CONNECTION_STRING']) { - connectionString = process.env['PG_CONNECTION_STRING']; - } else { - const { connectionString: conStr } = await createDockerDB(); - connectionString = conStr; - } - client = await retry(async () => { - client = new pg.Client(connectionString); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - serverSimulator = new ServerSimulator(client); - const proxyHandler = async (sql: string, params: any[], method: any) => { - try { - const response = await serverSimulator.query(sql, params, method); - - if (response.error !== undefined) { - throw response.error; - } - - return { rows: response.data }; - } catch (e: any) { - console.error('Error from pg proxy server:', e.message); - throw e; - } - }; - db = proxyDrizzle(proxyHandler, { - logger: ENABLE_LOGGING, - relations, - }); - - cachedDb = proxyDrizzle(proxyHandler, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = proxyDrizzle(proxyHandler, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); -}); +import { expect } from 'vitest'; +import { tests } from './common'; +import { proxyTest as test } from './instrumentation'; +import { usersMigratorTable, usersTable } from './schema'; -afterAll(async () => { - await client?.end(); -}); +const skips = [ + 'RQB v2 transaction find first - no rows', + 'RQB v2 transaction find first - multiple rows', + 'RQB v2 transaction find first - with relation', + 'RQB v2 transaction find first - placeholders', + 'RQB v2 transaction find many - no rows', + 'RQB v2 transaction find many - multiple rows', + 'RQB v2 transaction find many - with relation', + 'RQB v2 transaction find many - placeholders', +]; +tests(test, skips); -beforeEach((ctx) => { - ctx.pg = { - db, - }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; +test.beforeEach(async ({ db }) => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); }); -test('migrator : default migration strategy', async () => { +test('migrator : default migration strategy', async ({ db, simulator }) => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); @@ -148,7 +42,7 @@ test('migrator : default migration strategy', async () => { // './drizzle2/pg-proxy/first' ?? await migrate(db, async (queries) => { try { - await serverSimulator.migrations(queries); + await simulator.migrations(queries); } catch (e) { console.error(e); throw new Error('Proxy server cannot run migrations'); @@ -166,7 +60,7 @@ test('migrator : default migration strategy', async () => { await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); -test('all date and time columns without timezone first case mode string', async () => { +test('all date and time columns without timezone first case mode string', async ({ db }) => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), @@ -202,7 +96,7 @@ test('all date and time columns without timezone first case mode string', async await db.execute(sql`drop table if exists ${table}`); }); -test('all date and time columns without timezone second case mode string', async () => { +test('all date and time columns without timezone second case mode string', async ({ db }) => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), @@ -233,7 +127,7 @@ test('all date and time columns without timezone second case mode string', async await db.execute(sql`drop table if exists ${table}`); }); -test('all date and time columns without timezone third case mode date', async () => { +test('all date and time columns without timezone third case mode date', async ({ db }) => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), @@ -267,7 +161,7 @@ test('all date and time columns without timezone third case mode date', async () await db.execute(sql`drop table if exists ${table}`); }); -test('test mode string for timestamp with timezone', async () => { +test('test mode string for timestamp with timezone', async ({ db }) => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), @@ -307,7 +201,7 @@ test('test mode string for timestamp with timezone', async () => { await db.execute(sql`drop table if exists ${table}`); }); -test('test mode date for timestamp with timezone', async () => { +test('test mode date for timestamp with timezone', async ({ db }) => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), @@ -347,7 +241,7 @@ test('test mode date for timestamp with timezone', async () => { await db.execute(sql`drop table if exists ${table}`); }); -test('test mode string for timestamp with timezone in UTC timezone', async () => { +test('test mode string for timestamp with timezone in UTC timezone', async ({ db }) => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); @@ -395,7 +289,7 @@ test('test mode string for timestamp with timezone in UTC timezone', async () => await db.execute(sql`drop table if exists ${table}`); }); -test('test mode string for timestamp with timezone in different timezone', async () => { +test('test mode string for timestamp with timezone in different timezone', async ({ db }) => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); @@ -441,52 +335,7 @@ test('test mode string for timestamp with timezone in different timezone', async await db.execute(sql`drop table if exists ${table}`); }); -skipTests([ - 'migrator : default migration strategy', - 'migrator : migrate with custom schema', - 'migrator : migrate with custom table', - 'migrator : migrate with custom table and custom schema', - 'insert via db.execute + select via db.execute', - 'insert via db.execute + returning', - 'insert via db.execute w/ query builder', - 'all date and time columns without timezone first case mode string', - 'all date and time columns without timezone third case mode date', - 'test mode string for timestamp with timezone', - 'test mode date for timestamp with timezone', - 'test mode string for timestamp with timezone in UTC timezone', - 'test mode string for timestamp with timezone in different timezone', - 'transaction', - 'transaction rollback', - 'nested transaction', - 'nested transaction rollback', - 'test $onUpdateFn and $onUpdate works updating', - 'RQB v2 transaction find first - no rows', - 'RQB v2 transaction find first - multiple rows', - 'RQB v2 transaction find first - with relation', - 'RQB v2 transaction find first - placeholders', - 'RQB v2 transaction find many - no rows', - 'RQB v2 transaction find many - multiple rows', - 'RQB v2 transaction find many - with relation', - 'RQB v2 transaction find many - placeholders', -]); - -beforeEach(async () => { - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`create schema public`); - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); -}); - -test('insert via db.execute + select via db.execute', async () => { +test('insert via db.execute + select via db.execute', async ({ db }) => { await db.execute( sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, ); @@ -497,7 +346,7 @@ test('insert via db.execute + select via db.execute', async () => { expect(result).toEqual([{ id: 1, name: 'John' }]); }); -test('insert via db.execute + returning', async () => { +test('insert via db.execute + returning', async ({ db }) => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( @@ -508,7 +357,7 @@ test('insert via db.execute + returning', async () => { expect(inserted).toEqual([{ id: 1, name: 'John' }]); }); -test('insert via db.execute w/ query builder', async () => { +test('insert via db.execute w/ query builder', async ({ db }) => { const inserted = await db.execute>( db .insert(usersTable) @@ -518,5 +367,138 @@ test('insert via db.execute w/ query builder', async () => { expect(inserted).toEqual([{ id: 1, name: 'John' }]); }); -tests(); -cacheTests(); +test('migrator : --init', async ({ db, simulator }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/pg-init', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(res[0]?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db, simulator }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(res[0]?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db, simulator }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/pg-init', + migrationsSchema, + migrationsTable, + }); + + const migratorRes = await migrate(db, async (queries) => { + try { + await simulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(res[0]?.tableExists).toStrictEqual(true); +}); diff --git a/integration-tests/tests/relational/pg-v1.test.ts b/integration-tests/tests/pg/pg-v1.test.ts similarity index 99% rename from integration-tests/tests/relational/pg-v1.test.ts rename to integration-tests/tests/pg/pg-v1.test.ts index c1dfda5e35..ff8d617e42 100644 --- a/integration-tests/tests/relational/pg-v1.test.ts +++ b/integration-tests/tests/pg/pg-v1.test.ts @@ -6,7 +6,7 @@ import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './pg.schema.ts'; +import * as schema from './pg.schema'; const { Client } = pg; @@ -89,7 +89,7 @@ beforeAll(async () => { await pgContainer?.stop().catch(console.error); throw lastError; } - db = drizzle(client, { schema, logger: ENABLE_LOGGING, casing: 'snake_case' }); + db = drizzle({ client, schema, logger: ENABLE_LOGGING, casing: 'snake_case' }); }); afterAll(async () => { diff --git a/integration-tests/tests/relational/issues-schemas/duplicates/pg/pg.duplicates.test.ts b/integration-tests/tests/pg/pg.duplicates.test.ts similarity index 97% rename from integration-tests/tests/relational/issues-schemas/duplicates/pg/pg.duplicates.test.ts rename to integration-tests/tests/pg/pg.duplicates.test.ts index aa65f9bbf9..1c569699ae 100644 --- a/integration-tests/tests/relational/issues-schemas/duplicates/pg/pg.duplicates.test.ts +++ b/integration-tests/tests/pg/pg.duplicates.test.ts @@ -6,7 +6,7 @@ import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './pg.duplicates.ts'; +import * as schema from './pg.duplicates'; const { Client } = pg; @@ -77,7 +77,7 @@ beforeAll(async () => { await pgContainer?.stop().catch(console.error); throw lastError; } - db = drizzle(client, { schema, logger: ENABLE_LOGGING }); + db = drizzle({ client, schema, logger: ENABLE_LOGGING }); }); afterAll(async () => { diff --git a/integration-tests/tests/relational/issues-schemas/duplicates/pg/pg.duplicates.ts b/integration-tests/tests/pg/pg.duplicates.ts similarity index 91% rename from integration-tests/tests/relational/issues-schemas/duplicates/pg/pg.duplicates.ts rename to integration-tests/tests/pg/pg.duplicates.ts index e9f59854f2..fa9891af25 100644 --- a/integration-tests/tests/relational/issues-schemas/duplicates/pg/pg.duplicates.ts +++ b/integration-tests/tests/pg/pg.duplicates.ts @@ -33,12 +33,12 @@ export const artistsToMembers = pgTable( memberId: integer('member_id').notNull(), artistId: integer('artist_id').notNull(), }, - (table) => ({ - memberArtistIndex: index('artist_to_member__artist_id__member_id__idx').on( + (table) => [ + index('artist_to_member__artist_id__member_id__idx').on( table.memberId, table.artistId, ), - }), + ], ); export const albums = pgTable( @@ -53,9 +53,7 @@ export const albums = pgTable( .default(sql`CURRENT_TIMESTAMP`), artistId: integer('artist_id').notNull(), }, - (table) => ({ - artistIndex: index('albums__artist_id__idx').on(table.artistId), - }), + (table) => [index('albums__artist_id__idx').on(table.artistId)], ); // relations diff --git a/integration-tests/tests/relational/issues-schemas/wrong-mapping/pg.schema.ts b/integration-tests/tests/pg/pg.mapping.schema.ts similarity index 91% rename from integration-tests/tests/relational/issues-schemas/wrong-mapping/pg.schema.ts rename to integration-tests/tests/pg/pg.mapping.schema.ts index d7448ff2fd..c514a5b9cf 100644 --- a/integration-tests/tests/relational/issues-schemas/wrong-mapping/pg.schema.ts +++ b/integration-tests/tests/pg/pg.mapping.schema.ts @@ -20,13 +20,13 @@ export const menuItemModifierGroups = pgTable( .references(() => modifierGroups.id), order: integer('order').default(0), }, - (table) => ({ - menuItemIdModifierGroupIdOrderPk: primaryKey( + (table) => [primaryKey({ + columns: [ table.menuItemId, table.modifierGroupId, table.order, - ), - }), + ], + })], ); export const ingredients = pgTable('ingredients', { @@ -54,13 +54,13 @@ export const menuItemIngredients = pgTable( .references(() => ingredients.id), order: integer('order').default(0), }, - (table) => ({ - menuItemIdIngredientIdOrderPk: primaryKey( - table.menuItemId, - table.ingredientId, - table.order, + (table) => [ + primaryKey( + { + columns: [table.menuItemId, table.ingredientId, table.order], + }, ), - }), + ], ); export const modifierGroupModifiers = pgTable( @@ -74,13 +74,15 @@ export const modifierGroupModifiers = pgTable( .references(() => modifiers.id), order: integer('order').default(0), }, - (table) => ({ - modifierGroupIdModifierIdOrderPk: primaryKey( - table.modifierGroupId, - table.modifierId, - table.order, - ), - }), + (table) => [ + primaryKey({ + columns: [ + table.modifierGroupId, + table.modifierId, + table.order, + ], + }), + ], ); export const menuItemRelations = relations(menuItems, ({ many }) => ({ diff --git a/integration-tests/tests/relational/issues-schemas/wrong-mapping/pg.test.ts b/integration-tests/tests/pg/pg.mapping.test.ts similarity index 98% rename from integration-tests/tests/relational/issues-schemas/wrong-mapping/pg.test.ts rename to integration-tests/tests/pg/pg.mapping.test.ts index 803f284207..ceb40898f1 100644 --- a/integration-tests/tests/relational/issues-schemas/wrong-mapping/pg.test.ts +++ b/integration-tests/tests/pg/pg.mapping.test.ts @@ -6,7 +6,7 @@ import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expectTypeOf, test } from 'vitest'; -import * as schema from './pg.schema.ts'; +import * as schema from './pg.mapping.schema'; const { Client } = pg; @@ -77,7 +77,7 @@ beforeAll(async () => { await pgContainer?.stop().catch(console.error); throw lastError; } - db = drizzle(client, { schema, logger: ENABLE_LOGGING }); + db = drizzle({ client, schema, logger: ENABLE_LOGGING }); }); afterAll(async () => { diff --git a/integration-tests/tests/relational/pg.postgresjs-v1.test.ts b/integration-tests/tests/pg/pg.postgresjs-v1.test.ts similarity index 99% rename from integration-tests/tests/relational/pg.postgresjs-v1.test.ts rename to integration-tests/tests/pg/pg.postgresjs-v1.test.ts index 968a5d2c68..de15814266 100644 --- a/integration-tests/tests/relational/pg.postgresjs-v1.test.ts +++ b/integration-tests/tests/pg/pg.postgresjs-v1.test.ts @@ -6,7 +6,7 @@ import getPort from 'get-port'; import postgres from 'postgres'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './pg.schema.ts'; +import * as schema from './pg.schema'; const ENABLE_LOGGING = false; @@ -92,7 +92,7 @@ beforeAll(async () => { await pgContainer?.stop().catch(console.error); throw lastError; } - db = drizzle(client, { schema, logger: ENABLE_LOGGING, casing: 'snake_case' }); + db = drizzle({ client, schema, logger: ENABLE_LOGGING, casing: 'snake_case' }); }); afterAll(async () => { diff --git a/integration-tests/tests/relational/pg.postgresjs.test.ts b/integration-tests/tests/pg/pg.postgresjs.test.ts similarity index 99% rename from integration-tests/tests/relational/pg.postgresjs.test.ts rename to integration-tests/tests/pg/pg.postgresjs.test.ts index f06191605d..1d2db0d525 100644 --- a/integration-tests/tests/relational/pg.postgresjs.test.ts +++ b/integration-tests/tests/pg/pg.postgresjs.test.ts @@ -7,8 +7,8 @@ import getPort from 'get-port'; import postgres from 'postgres'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './pg.relations.ts'; -import * as schema from './pg.schema.ts'; +import relations from './pg.relations'; +import * as schema from './pg.schema'; const ENABLE_LOGGING = false; @@ -104,7 +104,7 @@ beforeAll(async () => { await pgContainer?.stop().catch(console.error); throw lastError; } - db = drizzle(client, { relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); + db = drizzle({ client, relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); }); afterAll(async () => { diff --git a/integration-tests/tests/relational/pg.relations.ts b/integration-tests/tests/pg/pg.relations.ts similarity index 99% rename from integration-tests/tests/relational/pg.relations.ts rename to integration-tests/tests/pg/pg.relations.ts index 2a8f979802..5478a72f27 100644 --- a/integration-tests/tests/relational/pg.relations.ts +++ b/integration-tests/tests/pg/pg.relations.ts @@ -1,5 +1,5 @@ import { defineRelations } from 'drizzle-orm'; -import * as schema from './pg.schema.ts'; +import * as schema from './pg.schema'; export default defineRelations(schema, (r) => ({ usersView: { diff --git a/integration-tests/tests/relational/pg.schema.ts b/integration-tests/tests/pg/pg.schema.ts similarity index 100% rename from integration-tests/tests/relational/pg.schema.ts rename to integration-tests/tests/pg/pg.schema.ts diff --git a/integration-tests/tests/relational/pg.test.ts b/integration-tests/tests/pg/pg.test.ts similarity index 99% rename from integration-tests/tests/relational/pg.test.ts rename to integration-tests/tests/pg/pg.test.ts index 8c66b1767f..38888df1e6 100644 --- a/integration-tests/tests/relational/pg.test.ts +++ b/integration-tests/tests/pg/pg.test.ts @@ -7,7 +7,7 @@ import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './pg.relations.ts'; +import relations from './pg.relations'; import { allTypesTable, commentsTable, @@ -23,7 +23,7 @@ import { students, usersTable, usersToGroupsTable, -} from './pg.schema.ts'; +} from './pg.schema'; const { Client } = pg; @@ -99,7 +99,7 @@ beforeAll(async () => { await pgContainer?.stop().catch(console.error); throw lastError; } - db = drizzle(client, { relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); + db = drizzle({ client, relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); }); afterAll(async () => { diff --git a/integration-tests/tests/pg/pglite.test.ts b/integration-tests/tests/pg/pglite.test.ts index a3570370ea..3d03229872 100644 --- a/integration-tests/tests/pg/pglite.test.ts +++ b/integration-tests/tests/pg/pglite.test.ts @@ -1,130 +1,162 @@ -import { PGlite } from '@electric-sql/pglite'; import { Name, sql } from 'drizzle-orm'; -import { drizzle, type PgliteDatabase } from 'drizzle-orm/pglite'; +import { getTableConfig } from 'drizzle-orm/pg-core'; import { migrate } from 'drizzle-orm/pglite/migrator'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; -import { tests, usersMigratorTable, usersTable } from './pg-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -let db: PgliteDatabase; -let dbGlobalCached: PgliteDatabase; -let cachedDb: PgliteDatabase; -let client: PGlite; - -beforeAll(async () => { - client = new PGlite(); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestCache(), - }); - dbGlobalCached = drizzle(client, { - logger: ENABLE_LOGGING, - cache: new TestGlobalCache(), - }); -}); +import { describe, expect } from 'vitest'; +import { tests } from './common'; +import { pgliteTest as test } from './instrumentation'; +import { usersMigratorTable, usersTable } from './schema'; -afterAll(async () => { - await client?.close(); -}); +tests(test, []); -beforeEach((ctx) => { - ctx.pg = { - db, - }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; -}); +describe('pglite', () => { + test('migrator : default migration strategy', async ({ db }) => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute( + sql`drop table if exists users12`, + ); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); -test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute( - sql`drop table if exists users12`, - ); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + await migrate(db, { migrationsFolder: './drizzle2/pg' }); - await migrate(db, { migrationsFolder: './drizzle2/pg' }); + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); - const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); + }); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); + test('insert via db.execute + select via db.execute', async ({ db }) => { + await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); -test('insert via db.execute + select via db.execute', async () => { - await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); + expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); + }); - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); - expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); -}); + test('insert via db.execute + returning', async ({ db }) => { + const result = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${new Name( + usersTable.name.name, + )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); + }); -test('insert via db.execute + returning', async () => { - const result = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${new Name( - usersTable.name.name, - )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); -}); + test('insert via db.execute w/ query builder', async ({ db }) => { + const result = await db.execute>( + db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); + }); -test('insert via db.execute w/ query builder', async () => { - const result = await db.execute>( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); -}); + test('migrator : --init', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(res.rows[0]?.tableExists).toStrictEqual(false); + }); -skipTests([ - 'migrator : default migration strategy', - 'migrator : migrate with custom schema', - 'migrator : migrate with custom table', - 'migrator : migrate with custom table and custom schema', - 'insert via db.execute + select via db.execute', - 'insert via db.execute + returning', - 'insert via db.execute w/ query builder', - 'all date and time columns without timezone first case mode string', - 'all date and time columns without timezone third case mode date', - 'test mode string for timestamp with timezone', - 'test mode date for timestamp with timezone', - 'test mode string for timestamp with timezone in UTC timezone', - 'test mode string for timestamp with timezone in different timezone', - 'view', - 'materialized view', - 'subquery with view', - 'mySchema :: materialized view', - 'select count()', - // not working in 0.2.12 - 'select with group by as sql + column', - 'select with group by as column + sql', - 'mySchema :: select with group by as column + sql', -]); - -tests(); -cacheTests(); - -beforeEach(async () => { - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`create schema public`); - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); + test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsSchema, + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(res.rows[0]?.tableExists).toStrictEqual(true); + }); }); diff --git a/integration-tests/tests/pg/postgres-js.test.ts b/integration-tests/tests/pg/postgres-js.test.ts index 79e51e50fe..96a51ea412 100644 --- a/integration-tests/tests/pg/postgres-js.test.ts +++ b/integration-tests/tests/pg/postgres-js.test.ts @@ -1,491 +1,510 @@ -import retry from 'async-retry'; -import type { PostgresJsDatabase } from 'drizzle-orm/postgres-js'; -import { drizzle } from 'drizzle-orm/postgres-js'; -import postgres, { type Sql } from 'postgres'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; - import { Name, sql } from 'drizzle-orm'; -import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +import { getTableConfig, pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import { migrate } from 'drizzle-orm/postgres-js/migrator'; -import { skipTests } from '~/common'; +import { describe } from 'node:test'; +import { expect } from 'vitest'; import { randomString } from '~/utils'; -import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; -import relations from './relations'; - -const ENABLE_LOGGING = false; - -let db: PostgresJsDatabase; -let dbGlobalCached: PostgresJsDatabase; -let cachedDb: PostgresJsDatabase; -let client: Sql; - -beforeAll(async () => { - let connectionString; - if (process.env['PG_CONNECTION_STRING']) { - connectionString = process.env['PG_CONNECTION_STRING']; - } else { - const { connectionString: conStr } = await createDockerDB(); - connectionString = conStr; - } - client = await retry(async () => { - client = postgres(connectionString, { - max: 1, - onnotice: () => { - // disable notices - }, - }); - await client`select 1`; - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); -}); +import { tests } from './common'; +import { postgresjsTest as test } from './instrumentation'; +import { usersMigratorTable, usersTable } from './schema'; -afterAll(async () => { - await client?.end(); -}); +tests(test, []); -beforeEach((ctx) => { - ctx.pg = { - db, - }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; -}); - -test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); +describe('postgresjs', () => { + test('migrator : default migration strategy', async ({ db }) => { + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - await migrate(db, { migrationsFolder: './drizzle2/pg' }); + await migrate(db, { migrationsFolder: './drizzle2/pg' }); - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); + const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); -test('migrator : migrate with custom schema', async () => { - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); + }); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); + test('migrator : migrate with custom schema', async ({ db }) => { + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - // test if the custom migrations table was created - const { count } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); - expect(count > 0).toBeTruthy(); + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + // test if the custom migrations table was created + const { count } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); + expect(count > 0).toBeTruthy(); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); -}); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); -test('migrator : migrate with custom table', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); + }); - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + test('migrator : migrate with custom table', async ({ db }) => { + const customTable = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - // test if the custom migrations table was created - const { count } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - expect(count > 0).toBeTruthy(); + await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + // test if the custom migrations table was created + const { count } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); + expect(count > 0).toBeTruthy(); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); -test('migrator : migrate with custom table and custom schema', async () => { - const customTable = randomString(); - const customSchema = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { - migrationsFolder: './drizzle2/pg', - migrationsTable: customTable, - migrationsSchema: customSchema, + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); - // test if the custom migrations table was created - const { count } = await db.execute( - sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, - ); - expect(count > 0).toBeTruthy(); + test('migrator : migrate with custom table and custom schema', async ({ db }) => { + const customTable = randomString(); + const customSchema = randomString(); + await db.execute(sql`drop table if exists all_columns`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable: customTable, + migrationsSchema: customSchema, + }); - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + // test if the custom migrations table was created + const { count } = await db.execute( + sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, + ); + expect(count > 0).toBeTruthy(); - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); -}); + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); -test('all date and time columns without timezone first case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + await db.execute(sql`drop table all_columns`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone first case mode string', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); - // 1. Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); + // 1. Insert date in string format without timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01 02:00:00.123456' }, + ]); - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); + // 2, Select in string format and check that values are the same + const result = await db.select().from(table); - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - // 3. Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and check that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - await db.execute(sql`drop table if exists ${table}`); -}); - -test('all date and time columns without timezone second case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone second case mode string', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: '2022-01-01T02:00:00.123456-02' }, + ]); - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 2, Select as raw query and check that values are the same + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - expect([...result]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); + expect([...result]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); -test('all date and time columns without timezone third case mode date', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('all date and time columns without timezone third case mode date', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); + // 1. Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 2, Select as raw query as string + const result = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - expect(new Date(result[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); + // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC + expect(new Date(result[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); -test('test mode string for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('test mode string for timestamp with timezone', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-0200'; + const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); -test('test mode date for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + await db.execute(sql`drop table if exists ${table}`); }); - await db.execute(sql`drop table if exists ${table}`); + test('test mode date for timestamp with timezone', async ({ db }) => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); - const timestampString = new Date('2022-01-01 00:00:00.456-0200'); + const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: timestampString }]); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: timestampString }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -test('test mode string for timestamp with timezone in UTC timezone', async () => { - // get current timezone from db - const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); + test('test mode string for timestamp with timezone in UTC timezone', async ({ db }) => { + // get current timezone from db + const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); - // set timezone to UTC - await db.execute(sql`set time zone 'UTC'`); + // set timezone to UTC + await db.execute(sql`set time zone 'UTC'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql`drop table if exists ${table}`); + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-0200'; + const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); + await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -test('test mode string for timestamp with timezone in different timezone', async () => { - // get current timezone from db - const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); + test('test mode string for timestamp with timezone in different timezone', async ({ db }) => { + // get current timezone from db + const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); - // set timezone to HST (UTC - 10) - await db.execute(sql`set time zone '-10'`); + // set timezone to HST (UTC - 10) + await db.execute(sql`set time zone '-10'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), + }); - await db.execute(sql`drop table if exists ${table}`); + await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` + await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); - const timestampString = '2022-01-01 00:00:00.123456-1000'; + const timestampString = '2022-01-01 00:00:00.123456-1000'; - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); + // 1. Insert date in string format with timezone in it + await db.insert(table).values([ + { timestamp: timestampString }, + ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); + // 2. Select date in string format and check that the values are the same + const result = await db.select().from(table); - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); + // 3. Select as raw query and checke that values are the same + const result2 = await db.execute<{ + id: number; + timestamp_string: string; + }>(sql`select * from ${table}`); - expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); + expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); - await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); + await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); - await db.execute(sql`drop table if exists ${table}`); -}); + await db.execute(sql`drop table if exists ${table}`); + }); -skipTests([ - 'migrator : default migration strategy', - 'migrator : migrate with custom schema', - 'migrator : migrate with custom table', - 'migrator : migrate with custom table and custom schema', - 'insert via db.execute + select via db.execute', - 'insert via db.execute + returning', - 'insert via db.execute w/ query builder', - 'all date and time columns without timezone first case mode string', - 'all date and time columns without timezone third case mode date', - 'test mode string for timestamp with timezone', - 'test mode date for timestamp with timezone', - 'test mode string for timestamp with timezone in UTC timezone', - 'test mode string for timestamp with timezone in different timezone', -]); - -tests(); -cacheTests(); - -beforeEach(async () => { - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`create schema public`); - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); -}); + test('insert via db.execute + select via db.execute', async ({ db }) => { + await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); -test('insert via db.execute + select via db.execute', async () => { - await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); + expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); + }); - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); - expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); -}); + test('insert via db.execute + returning', async ({ db }) => { + const result = await db.execute<{ id: number; name: string }>( + sql`insert into ${usersTable} (${new Name( + usersTable.name.name, + )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, + ); + expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); + }); -test('insert via db.execute + returning', async () => { - const result = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${new Name( - usersTable.name.name, - )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); -}); + test('insert via db.execute w/ query builder', async ({ db }) => { + const result = await db.execute>( + db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), + ); + expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); + }); + + test('migrator : --init', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); -test('insert via db.execute w/ query builder', async () => { - const result = await db.execute>( - db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), - ); - expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(res[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(res[0]?.tableExists).toStrictEqual(false); + }); + + test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsSchema = 'drzl_migrations_init'; + const migrationsTable = 'drzl_init'; + + await db.execute(sql`drop schema if exists ${sql.identifier(migrationsSchema)} cascade;`); + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`create schema public`); + + await migrate(db, { + migrationsFolder: './drizzle2/pg-init', + migrationsSchema, + migrationsTable, + }); + + const migratorRes = await migrate(db, { + migrationsFolder: './drizzle2/pg', + migrationsTable, + migrationsSchema, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)}`); + + const res = await db.execute<{ tableExists: boolean }>(sql`SELECT EXISTS ( + SELECT 1 + FROM pg_tables + WHERE schemaname = ${getTableConfig(usersMigratorTable).schema ?? 'public'} AND tablename = ${ + getTableConfig(usersMigratorTable).name + } + ) as ${sql.identifier('tableExists')};`); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(res[0]?.tableExists).toStrictEqual(true); + }); }); diff --git a/integration-tests/tests/pg/relations.ts b/integration-tests/tests/pg/relations.ts index 1e7748c064..7457696318 100644 --- a/integration-tests/tests/pg/relations.ts +++ b/integration-tests/tests/pg/relations.ts @@ -1,7 +1,7 @@ import { defineRelations } from 'drizzle-orm'; import * as schema from './schema'; -export default defineRelations(schema, (r) => ({ +export const relations = defineRelations(schema, (r) => ({ rqbUser: { posts: r.many.rqbPost(), }, diff --git a/integration-tests/tests/replicas/postgres.test.ts b/integration-tests/tests/pg/replicas.test.ts similarity index 100% rename from integration-tests/tests/replicas/postgres.test.ts rename to integration-tests/tests/pg/replicas.test.ts diff --git a/integration-tests/tests/pg/schema.ts b/integration-tests/tests/pg/schema.ts index 1ec7d6cacf..bbca906010 100644 --- a/integration-tests/tests/pg/schema.ts +++ b/integration-tests/tests/pg/schema.ts @@ -1,5 +1,15 @@ import { sql } from 'drizzle-orm'; -import { integer, type PgDatabase, type PgQueryResultHKT, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; +import { + boolean, + integer, + jsonb, + type PgDatabase, + pgSchema, + pgTable, + serial, + text, + timestamp, +} from 'drizzle-orm/pg-core'; export const rqbUser = pgTable('user_rqb_test', { id: serial().primaryKey().notNull(), @@ -20,7 +30,37 @@ export const rqbPost = pgTable('post_rqb_test', { }).notNull(), }); -export const init = async (db: PgDatabase) => { +export const postsTable = pgTable('posts', { + id: serial().primaryKey(), + description: text().notNull(), + userId: integer('city_id').references(() => usersTable.id), +}); + +export const usersMigratorTable = pgTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}); + +export const usersTable = pgTable('users', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), +}); + +export const mySchema = pgSchema('mySchema'); + +export const usersMySchemaTable = mySchema.table('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), +}); + +export const init = async (db: PgDatabase) => { await db.execute(sql` CREATE TABLE ${rqbUser} ( "id" SERIAL PRIMARY KEY NOT NULL, @@ -38,7 +78,7 @@ export const init = async (db: PgDatabase) => { `); }; -export const clear = async (db: PgDatabase) => { +export const clear = async (db: PgDatabase) => { await db.execute(sql`DROP TABLE IF EXISTS ${rqbUser} CASCADE;`).catch(() => null); await db.execute(sql`DROP TABLE IF EXISTS ${rqbPost} CASCADE;`).catch(() => null); }; diff --git a/integration-tests/tests/pg/utils.test.ts b/integration-tests/tests/pg/utils.test.ts new file mode 100644 index 0000000000..fb0591fea1 --- /dev/null +++ b/integration-tests/tests/pg/utils.test.ts @@ -0,0 +1,454 @@ +import { is, SQL, sql } from 'drizzle-orm'; +import { usersSync } from 'drizzle-orm/neon'; +import { authenticatedRole, crudPolicy } from 'drizzle-orm/neon/rls'; +import { drizzle } from 'drizzle-orm/node-postgres'; +import { + boolean, + char, + foreignKey, + getTableConfig, + index, + integer, + jsonb, + PgDialect, + PgPolicy, + pgPolicy, + pgTable, + primaryKey, + serial, + text, + timestamp, + unique, +} from 'drizzle-orm/pg-core'; +import { expect, test } from 'vitest'; + +const db = drizzle.mock(); + +test('table configs: unique third param', async () => { + const cities1Table = pgTable( + 'cities1', + { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }, + ( + t, + ) => [unique('custom_name').on(t.name, t.state).nullsNotDistinct(), unique('custom_name1').on(t.name, t.state)], + ); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); + expect(tableConfig.uniqueConstraints[0]?.nullsNotDistinct).toBe(true); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.nullsNotDistinct).toBe(false); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); +}); + +test('table configs: unique in column', async () => { + const cities1Table = pgTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull().unique(), + state: char('state', { length: 2 }).unique('custom'), + field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + + expect(columnName?.uniqueName).toBe(undefined); + expect(columnName?.isUnique).toBe(true); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBe(true); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBe(true); + expect(columnField?.uniqueType).toBe('not distinct'); +}); + +test('table config: foreign keys name', async () => { + const table = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' })]); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.foreignKeys).toHaveLength(1); + expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); +}); + +test('table config: primary keys name', async () => { + const table = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [primaryKey({ columns: [t.id, t.name], name: 'custom_pk' })]); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); +}); + +test('Query check: Insert all defaults in 1 row', async () => { + const users = pgTable('users_40', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into "users_40" ("id", "name", "state") values (default, default, default)', + params: [], + }); +}); + +test('Query check: Insert all defaults in multiple rows', async () => { + const users = pgTable('users_41', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users_41" ("id", "name", "state") values (default, default, default), (default, default, default)', + params: [], + }); +}); + +test.concurrent('build query insert with onConflict do update', async () => { + const usersTable = pgTable('users_44', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users_44" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('build query insert with onConflict do update / multiple columns', async () => { + const usersTable = pgTable('users_45', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users_45" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test.concurrent('build query insert with onConflict do nothing', async () => { + const usersTable = pgTable('users_46', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing() + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users_46" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', + params: ['John', '["foo","bar"]'], + }); +}); + +test.concurrent('build query insert with onConflict do nothing + target', async () => { + const usersTable = pgTable('users_47', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + }); + + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing({ target: usersTable.id }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users_47" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', + params: ['John', '["foo","bar"]'], + }); +}); + +test('select a field without joining its table', () => { + const usersTable = pgTable('users_60', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const users2Table = pgTable('users2_60', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')).toThrowError(); +}); + +test('select all fields from subquery without alias', () => { + const users2Table = pgTable('users2_61', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare('query')).toThrowError(); +}); + +test('select for ...', () => { + const users2Table = pgTable('users2_66', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const coursesTable = pgTable('courses_66', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + { + const query = db + .select() + .from(users2Table) + .for('update') + .toSQL(); + + expect(query.sql).toMatch(/ for update$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('update', { of: [users2Table, coursesTable] }) + .toSQL(); + + expect(query.sql).toMatch(/ for update of "users2_66", "courses_66"$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('no key update', { of: users2Table }) + .toSQL(); + + expect(query.sql).toMatch(/for no key update of "users2_66"$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('no key update', { of: users2Table, skipLocked: true }) + .toSQL(); + + expect(query.sql).toMatch(/ for no key update of "users2_66" skip locked$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('share', { of: users2Table, noWait: true }) + .toSQL(); + + expect(query.sql).toMatch(/for share of "users2_66" nowait$/); + } +}); + +test('orderBy with aliased column', () => { + const users2Table = pgTable('users2_70', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as "test" from "users2_70" order by "test"'); +}); + +test('policy', () => { + { + const policy = pgPolicy('test policy'); + + expect(is(policy, PgPolicy)).toBe(true); + expect(policy.name).toBe('test policy'); + } + + { + const policy = pgPolicy('test policy', { + as: 'permissive', + for: 'all', + to: 'public', + using: sql`1=1`, + withCheck: sql`1=1`, + }); + + expect(is(policy, PgPolicy)).toBe(true); + expect(policy.name).toBe('test policy'); + expect(policy.as).toBe('permissive'); + expect(policy.for).toBe('all'); + expect(policy.to).toBe('public'); + const dialect = new PgDialect(); + expect(is(policy.using, SQL)).toBe(true); + expect(dialect.sqlToQuery(policy.using!).sql).toBe('1=1'); + expect(is(policy.withCheck, SQL)).toBe(true); + expect(dialect.sqlToQuery(policy.withCheck!).sql).toBe('1=1'); + } + + { + const policy = pgPolicy('test policy', { + to: 'custom value', + }); + + expect(policy.to).toBe('custom value'); + } + + { + const p1 = pgPolicy('test policy'); + const p2 = pgPolicy('test policy 2', { + as: 'permissive', + for: 'all', + to: 'public', + using: sql`1=1`, + withCheck: sql`1=1`, + }); + const table = pgTable('table_with_policy', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }, () => [ + p1, + p2, + ]); + const config = getTableConfig(table); + expect(config.policies).toHaveLength(2); + expect(config.policies[0]).toBe(p1); + expect(config.policies[1]).toBe(p2); + } +}); + +test('neon: policy', () => { + { + const policy = crudPolicy({ + read: true, + modify: true, + role: authenticatedRole, + }); + + for (const it of Object.values(policy)) { + expect(is(it, PgPolicy)).toBe(true); + expect(it?.to).toStrictEqual(authenticatedRole); + it?.using ? expect(it.using).toStrictEqual(sql`true`) : ''; + it?.withCheck ? expect(it.withCheck).toStrictEqual(sql`true`) : ''; + } + } + + { + const table = pgTable('name', { + id: integer('id'), + }, (t) => [ + index('name').on(t.id), + crudPolicy({ + read: true, + modify: true, + role: authenticatedRole, + }), + primaryKey({ columns: [t.id], name: 'custom' }), + ]); + + const { policies, indexes, primaryKeys } = getTableConfig(table); + + expect(policies.length).toBe(4); + expect(indexes.length).toBe(1); + expect(primaryKeys.length).toBe(1); + + expect(policies[0]?.name === 'crud-custom-policy-modify'); + expect(policies[1]?.name === 'crud-custom-policy-read'); + } +}); + +test('neon: neon_auth', () => { + const usersSyncTable = usersSync; + + const { columns, schema, name } = getTableConfig(usersSyncTable); + + expect(name).toBe('users_sync'); + expect(schema).toBe('neon_auth'); + expect(columns).toHaveLength(7); +}); + +test('Enable RLS function', () => { + const usersWithRLS = pgTable.withRLS('users', { + id: integer(), + }); + + const config1 = getTableConfig(usersWithRLS); + + const usersNoRLS = pgTable('users', { + id: integer(), + }); + + const config2 = getTableConfig(usersNoRLS); + + expect(config1.enableRLS).toBeTruthy(); + expect(config2.enableRLS).toBeFalsy(); +}); diff --git a/integration-tests/tests/pg/xata-http.test.ts b/integration-tests/tests/pg/xata-http.test.ts index caf136eadf..8f89d715ce 100644 --- a/integration-tests/tests/pg/xata-http.test.ts +++ b/integration-tests/tests/pg/xata-http.test.ts @@ -1,436 +1,436 @@ -import retry from 'async-retry'; -import { sql } from 'drizzle-orm'; -import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; -import { drizzle } from 'drizzle-orm/xata-http'; -import type { XataHttpClient, XataHttpDatabase } from 'drizzle-orm/xata-http'; -import { migrate } from 'drizzle-orm/xata-http/migrator'; -import { beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; -import { randomString } from '~/utils'; -import { getXataClient } from '../xata/xata.ts'; -import { tests, tests as cacheTests, usersMigratorTable, usersTable } from './pg-common'; -import { TestCache, TestGlobalCache } from './pg-common-cache.ts'; -import relations from './relations.ts'; - -const ENABLE_LOGGING = false; - -let db: XataHttpDatabase; -let dbGlobalCached: XataHttpDatabase; -let cachedDb: XataHttpDatabase; -let client: XataHttpClient; - -beforeAll(async () => { - const apiKey = process.env['XATA_API_KEY']; - if (!apiKey) { - throw new Error('XATA_API_KEY is not defined'); - } - - client = await retry(async () => { - client = getXataClient(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); -}); - -beforeEach((ctx) => { - ctx.pg = { - db, - }; - ctx.cachedPg = { - db: cachedDb, - dbGlobalCached, - }; -}); - -test('migrator : default migration strategy', async () => { - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg' }); - - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); -}); - -test('migrator : migrate with custom table', async () => { - const customTable = randomString(); - await db.execute(sql`drop table if exists all_columns`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); - - await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); - - // test if the custom migrations table was created - const { records } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); - expect(records && records.length > 0).toBeTruthy(); - - // test if the migrated table are working as expected - await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); - const result = await db.select().from(usersMigratorTable); - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table all_columns`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); -}); - -test('all date and time columns without timezone first case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. Insert date in string format without timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01 02:00:00.123456' }, - ]); - - // 2, Select in string format and check that values are the same - const result = await db.select().from(table); - - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); - - // 3. Select as raw query and check that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test('all date and time columns without timezone second case mode string', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) not null - ) - `); - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: '2022-01-01T02:00:00.123456-02' }, - ]); - - // 2, Select as raw query and check that values are the same - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - expect(result.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); +// import retry from 'async-retry'; +// import { sql } from 'drizzle-orm'; +// import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; +// import { drizzle } from 'drizzle-orm/xata-http'; +// import type { XataHttpClient, XataHttpDatabase } from 'drizzle-orm/xata-http'; +// import { migrate } from 'drizzle-orm/xata-http/migrator'; +// import { beforeAll, beforeEach, expect, test } from 'vitest'; +// import { skipTests } from '~/common'; +// import { randomString } from '~/utils'; +// import { getXataClient } from '../xata/xata'; +// import { tests, tests as cacheTests, usersMigratorTable, usersTable } from './common-pt1'; +// import { TestCache, TestGlobalCache } from './common-cache'; +// import relations from './relations'; + +// const ENABLE_LOGGING = false; + +// let db: XataHttpDatabase; +// let dbGlobalCached: XataHttpDatabase; +// let cachedDb: XataHttpDatabase; +// let client: XataHttpClient; + +// beforeAll(async () => { +// const apiKey = process.env['XATA_API_KEY']; +// if (!apiKey) { +// throw new Error('XATA_API_KEY is not defined'); +// } + +// client = await retry(async () => { +// client = getXataClient(); +// return client; +// }, { +// retries: 20, +// factor: 1, +// minTimeout: 250, +// maxTimeout: 250, +// randomize: false, +// }); +// db = drizzle(client, { logger: ENABLE_LOGGING, relations }); +// cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); +// dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); +// }); + +// beforeEach((ctx) => { +// ctx.pg = { +// db, +// }; +// ctx.cachedPg = { +// db: cachedDb, +// dbGlobalCached, +// }; +// }); + +// test('migrator : default migration strategy', async () => { +// await db.execute(sql`drop table if exists all_columns`); +// await db.execute(sql`drop table if exists users12`); +// await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + +// await migrate(db, { migrationsFolder: './drizzle2/pg' }); + +// await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + +// const result = await db.select().from(usersMigratorTable); + +// expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + +// await db.execute(sql`drop table all_columns`); +// await db.execute(sql`drop table users12`); +// await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); +// }); + +// test('migrator : migrate with custom table', async () => { +// const customTable = randomString(); +// await db.execute(sql`drop table if exists all_columns`); +// await db.execute(sql`drop table if exists users12`); +// await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); + +// await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); + +// // test if the custom migrations table was created +// const { records } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); +// expect(records && records.length > 0).toBeTruthy(); + +// // test if the migrated table are working as expected +// await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); +// const result = await db.select().from(usersMigratorTable); +// expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + +// await db.execute(sql`drop table all_columns`); +// await db.execute(sql`drop table users12`); +// await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); +// }); + +// test('all date and time columns without timezone first case mode string', async () => { +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(6) not null +// ) +// `); + +// // 1. Insert date in string format without timezone in it +// await db.insert(table).values([ +// { timestamp: '2022-01-01 02:00:00.123456' }, +// ]); + +// // 2, Select in string format and check that values are the same +// const result = await db.select().from(table); + +// expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); + +// // 3. Select as raw query and check that values are the same +// const result2 = await db.execute<{ +// id: number; +// timestamp_string: string; +// }>(sql`select * from ${table}`); + +// expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); + +// await db.execute(sql`drop table if exists ${table}`); +// }); + +// test('all date and time columns without timezone second case mode string', async () => { +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(6) not null +// ) +// `); + +// // 1. Insert date in string format with timezone in it +// await db.insert(table).values([ +// { timestamp: '2022-01-01T02:00:00.123456-02' }, +// ]); + +// // 2, Select as raw query and check that values are the same +// const result = await db.execute<{ +// id: number; +// timestamp_string: string; +// }>(sql`select * from ${table}`); + +// expect(result.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); - await db.execute(sql`drop table if exists ${table}`); -}); +// await db.execute(sql`drop table if exists ${table}`); +// }); -test('all date and time columns without timezone third case mode date', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) not null - ) - `); - - const insertedDate = new Date('2022-01-01 20:00:00.123+04'); - - // 1. Insert date as new date - await db.insert(table).values([ - { timestamp: insertedDate }, - ]); - - // 2, Select as raw query as string - const result = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); +// test('all date and time columns without timezone third case mode date', async () => { +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(3) not null +// ) +// `); + +// const insertedDate = new Date('2022-01-01 20:00:00.123+04'); + +// // 1. Insert date as new date +// await db.insert(table).values([ +// { timestamp: insertedDate }, +// ]); + +// // 2, Select as raw query as string +// const result = await db.execute<{ +// id: number; +// timestamp_string: string; +// }>(sql`select * from ${table}`); - // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC - expect(new Date(result.records[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test('test mode string for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); +// // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC +// expect(new Date(result.records[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); + +// await db.execute(sql`drop table if exists ${table}`); +// }); + +// test('test mode string for timestamp with timezone', async () => { +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(6) with time zone not null +// ) +// `); - const timestampString = '2022-01-01 00:00:00.123456-0200'; +// const timestampString = '2022-01-01 00:00:00.123456-0200'; - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); +// // 1. Insert date in string format with timezone in it +// await db.insert(table).values([ +// { timestamp: timestampString }, +// ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); +// // 2. Select date in string format and check that the values are the same +// const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); +// // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same +// expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); +// // 3. Select as raw query and checke that values are the same +// const result2 = await db.execute<{ +// id: number; +// timestamp_string: string; +// }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); +// // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same +// expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - await db.execute(sql`drop table if exists ${table}`); -}); +// await db.execute(sql`drop table if exists ${table}`); +// }); -test('test mode date for timestamp with timezone', async () => { - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), - }); +// test('test mode date for timestamp with timezone', async () => { +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), +// }); - await db.execute(sql`drop table if exists ${table}`); +// await db.execute(sql`drop table if exists ${table}`); - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(3) with time zone not null - ) - `); +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(3) with time zone not null +// ) +// `); - const timestampString = new Date('2022-01-01 00:00:00.456-0200'); +// const timestampString = new Date('2022-01-01 00:00:00.456-0200'); - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); +// // 1. Insert date in string format with timezone in it +// await db.insert(table).values([ +// { timestamp: timestampString }, +// ]); - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); +// // 2. Select date in string format and check that the values are the same +// const result = await db.select().from(table); - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: timestampString }]); +// // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same +// expect(result).toEqual([{ id: 1, timestamp: timestampString }]); - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); +// // 3. Select as raw query and checke that values are the same +// const result2 = await db.execute<{ +// id: number; +// timestamp_string: string; +// }>(sql`select * from ${table}`); - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); +// // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same +// expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); - await db.execute(sql`drop table if exists ${table}`); -}); +// await db.execute(sql`drop table if exists ${table}`); +// }); -test('test mode string for timestamp with timezone in UTC timezone', async () => { - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); +// test('test mode string for timestamp with timezone in UTC timezone', async () => { +// // get current timezone from db +// const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - // set timezone to UTC - await db.execute(sql`set time zone 'UTC'`); +// // set timezone to UTC +// await db.execute(sql`set time zone 'UTC'`); - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-0200'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); - - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same - expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); - - await db.execute(sql`set time zone '${sql.raw(timezone.records[0]!.TimeZone)}'`); - - await db.execute(sql`drop table if exists ${table}`); -}); - -test('test mode string for timestamp with timezone in different timezone', async () => { - // get current timezone from db - const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); - - // set timezone to HST (UTC - 10) - await db.execute(sql`set time zone 'HST'`); - - const table = pgTable('all_columns', { - id: serial('id').primaryKey(), - timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), - }); - - await db.execute(sql`drop table if exists ${table}`); - - await db.execute(sql` - create table ${table} ( - id serial primary key, - timestamp_string timestamp(6) with time zone not null - ) - `); - - const timestampString = '2022-01-01 00:00:00.123456-1000'; - - // 1. Insert date in string format with timezone in it - await db.insert(table).values([ - { timestamp: timestampString }, - ]); - - // 2. Select date in string format and check that the values are the same - const result = await db.select().from(table); - - expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); - - // 3. Select as raw query and checke that values are the same - const result2 = await db.execute<{ - id: number; - timestamp_string: string; - }>(sql`select * from ${table}`); - - expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); - - await db.execute(sql`set time zone '${sql.raw(timezone.records[0]!.TimeZone)}'`); - - await db.execute(sql`drop table if exists ${table}`); -}); - -skipTests([ - 'migrator : default migration strategy', - 'migrator : migrate with custom schema', - 'migrator : migrate with custom table', - 'migrator : migrate with custom table and custom schema', - 'insert via db.execute + select via db.execute', - 'insert via db.execute + returning', - 'insert via db.execute w/ query builder', - 'all date and time columns without timezone first case mode string', - 'all date and time columns without timezone third case mode date', - 'test mode string for timestamp with timezone', - 'test mode date for timestamp with timezone', - 'test mode string for timestamp with timezone in UTC timezone', - 'test mode string for timestamp with timezone in different timezone', - 'view', - 'materialized view', - 'select from enum', - 'subquery with view', -]); -tests(); -cacheTests(); - -beforeEach(async () => { - await db.execute(sql`drop schema if exists public cascade`); - await db.execute(sql`create schema public`); - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb jsonb, - created_at timestamptz not null default now() - ) - `, - ); -}); - -test('insert via db.execute + select via db.execute', async () => { - await db.execute( - sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>( - sql`select id, name from "users"`, - ); - expect(result.records).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert via db.execute + returning', async () => { - const inserted = await db.execute<{ id: number; name: string }>( - sql`insert into ${usersTable} (${ - sql.identifier( - usersTable.name.name, - ) - }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, - ); - expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); -}); - -test('insert via db.execute w/ query builder', async () => { - const inserted = await db.execute>( - db - .insert(usersTable) - .values({ name: 'John' }) - .returning({ id: usersTable.id, name: usersTable.name }), - ); - expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); -}); +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(6) with time zone not null +// ) +// `); + +// const timestampString = '2022-01-01 00:00:00.123456-0200'; + +// // 1. Insert date in string format with timezone in it +// await db.insert(table).values([ +// { timestamp: timestampString }, +// ]); + +// // 2. Select date in string format and check that the values are the same +// const result = await db.select().from(table); + +// // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same +// expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); + +// // 3. Select as raw query and checke that values are the same +// const result2 = await db.execute<{ +// id: number; +// timestamp_string: string; +// }>(sql`select * from ${table}`); + +// // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same +// expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); + +// await db.execute(sql`set time zone '${sql.raw(timezone.records[0]!.TimeZone)}'`); + +// await db.execute(sql`drop table if exists ${table}`); +// }); + +// test('test mode string for timestamp with timezone in different timezone', async () => { +// // get current timezone from db +// const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); + +// // set timezone to HST (UTC - 10) +// await db.execute(sql`set time zone 'HST'`); + +// const table = pgTable('all_columns', { +// id: serial('id').primaryKey(), +// timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), +// }); + +// await db.execute(sql`drop table if exists ${table}`); + +// await db.execute(sql` +// create table ${table} ( +// id serial primary key, +// timestamp_string timestamp(6) with time zone not null +// ) +// `); + +// const timestampString = '2022-01-01 00:00:00.123456-1000'; + +// // 1. Insert date in string format with timezone in it +// await db.insert(table).values([ +// { timestamp: timestampString }, +// ]); + +// // 2. Select date in string format and check that the values are the same +// const result = await db.select().from(table); + +// expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); + +// // 3. Select as raw query and checke that values are the same +// const result2 = await db.execute<{ +// id: number; +// timestamp_string: string; +// }>(sql`select * from ${table}`); + +// expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); + +// await db.execute(sql`set time zone '${sql.raw(timezone.records[0]!.TimeZone)}'`); + +// await db.execute(sql`drop table if exists ${table}`); +// }); + +// skipTests([ +// 'migrator : default migration strategy', +// 'migrator : migrate with custom schema', +// 'migrator : migrate with custom table', +// 'migrator : migrate with custom table and custom schema', +// 'insert via db.execute + select via db.execute', +// 'insert via db.execute + returning', +// 'insert via db.execute w/ query builder', +// 'all date and time columns without timezone first case mode string', +// 'all date and time columns without timezone third case mode date', +// 'test mode string for timestamp with timezone', +// 'test mode date for timestamp with timezone', +// 'test mode string for timestamp with timezone in UTC timezone', +// 'test mode string for timestamp with timezone in different timezone', +// 'view', +// 'materialized view', +// 'select from enum', +// 'subquery with view', +// ]); +// tests(); +// cacheTests(); + +// beforeEach(async () => { +// await db.execute(sql`drop schema if exists public cascade`); +// await db.execute(sql`create schema public`); +// await db.execute( +// sql` +// create table users ( +// id serial primary key, +// name text not null, +// verified boolean not null default false, +// jsonb jsonb, +// created_at timestamptz not null default now() +// ) +// `, +// ); +// }); + +// test('insert via db.execute + select via db.execute', async () => { +// await db.execute( +// sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, +// ); + +// const result = await db.execute<{ id: number; name: string }>( +// sql`select id, name from "users"`, +// ); +// expect(result.records).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('insert via db.execute + returning', async () => { +// const inserted = await db.execute<{ id: number; name: string }>( +// sql`insert into ${usersTable} (${ +// sql.identifier( +// usersTable.name.name, +// ) +// }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, +// ); +// expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); +// }); + +// test('insert via db.execute w/ query builder', async () => { +// const inserted = await db.execute>( +// db +// .insert(usersTable) +// .values({ name: 'John' }) +// .returning({ id: usersTable.id, name: usersTable.name }), +// ); +// expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); +// }); diff --git a/integration-tests/tests/xata/xata.ts b/integration-tests/tests/pg/xata.ts similarity index 100% rename from integration-tests/tests/xata/xata.ts rename to integration-tests/tests/pg/xata.ts diff --git a/integration-tests/tests/relational/mysql.test.ts b/integration-tests/tests/relational/mysql.test.ts deleted file mode 100644 index 11d64a24d8..0000000000 --- a/integration-tests/tests/relational/mysql.test.ts +++ /dev/null @@ -1,12940 +0,0 @@ -import 'dotenv/config'; -import Docker from 'dockerode'; -import { DrizzleError, sql, TransactionRollbackError } from 'drizzle-orm'; -import { alias } from 'drizzle-orm/mysql-core'; -import { drizzle, type MySql2Database } from 'drizzle-orm/mysql2'; -import getPort from 'get-port'; -import * as mysql from 'mysql2/promise'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './mysql.relations.ts'; -import { - allTypesTable, - commentsTable, - courseOfferings, - customTypesTable, - groupsTable, - postsTable, - schemaGroups, - schemaPosts, - schemaUsers, - schemaUsersToGroups, - studentGrades, - students, - usersTable, - usersToGroupsTable, -} from './mysql.schema.ts'; - -const ENABLE_LOGGING = false; - -declare module 'vitest' { - export interface TestContext { - docker: Docker; - mysqlContainer: Docker.Container; - mysqlDbV2: MySql2Database; - mysqlClient: mysql.Connection; - } -} - -let globalDocker: Docker; -let mysqlContainer: Docker.Container; -let db: MySql2Database; -let client: mysql.Connection; - -async function createDockerDB(): Promise { - const docker = (globalDocker = new Docker()); - const port = await getPort({ port: 3306 }); - const image = 'mysql:8'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - mysqlContainer = await docker.createContainer({ - Image: image, - Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await mysqlContainer.start(); - - return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; -} - -beforeAll(async () => { - const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(); - - const sleep = 1000; - let timeLeft = 30000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = await mysql.createConnection({ uri: connectionString, supportBigNumbers: true, bigNumberStrings: true }); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to MySQL'); - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); - throw lastError; - } - db = drizzle(client, { relations, logger: ENABLE_LOGGING, mode: 'default', casing: 'snake_case' }); -}); - -afterAll(async () => { - await client?.end().catch(console.error); - await mysqlContainer?.stop().catch(console.error); -}); - -beforeEach(async (ctx) => { - ctx.mysqlDbV2 = db; - ctx.mysqlClient = client; - ctx.docker = globalDocker; - ctx.mysqlContainer = mysqlContainer; - - await ctx.mysqlDbV2.execute(sql`drop table if exists \`users\``); - await ctx.mysqlDbV2.execute(sql`drop view if exists \`rqb_users_view\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`rqb_test_schema\`.\`users\``); - await ctx.mysqlDbV2.execute(sql`drop view if exists \`rqb_test_schema\`.\`users_sch_view\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`groups\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`rqb_test_schema\`.\`groups\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`users_to_groups\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`rqb_test_schema\`.\`users_to_groups\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`posts\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`rqb_test_schema\`.\`posts\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`comments\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`comment_likes\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`all_types\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`custom_types\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`course_offerings\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`student_grades\``); - await ctx.mysqlDbV2.execute(sql`drop table if exists \`students\``); - - await ctx.mysqlDbV2.execute(sql`create schema if not exists \`rqb_test_schema\``); - - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`users\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`name\` text NOT NULL, - \`verified\` boolean DEFAULT false NOT NULL, - \`invited_by\` bigint REFERENCES \`users\`(\`id\`) - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`rqb_test_schema\`.\`users\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`name\` text NOT NULL, - \`verified\` boolean DEFAULT false NOT NULL, - \`invited_by\` bigint REFERENCES \`rqb_test_schema\`.\`users\`(\`id\`) - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`groups\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`name\` text NOT NULL, - \`description\` text - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`rqb_test_schema\`.\`groups\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`name\` text NOT NULL, - \`description\` text - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`users_to_groups\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`user_id\` bigint REFERENCES \`users\`(\`id\`), - \`group_id\` bigint REFERENCES \`groups\`(\`id\`) - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`rqb_test_schema\`.\`users_to_groups\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`user_id\` bigint REFERENCES \`rqb_test_schema\`.\`users\`(\`id\`), - \`group_id\` bigint REFERENCES \`rqb_test_schema\`.\`groups\`(\`id\`) - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`posts\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`content\` text NOT NULL, - \`owner_id\` bigint REFERENCES \`users\`(\`id\`), - \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`rqb_test_schema\`.\`posts\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`content\` text NOT NULL, - \`owner_id\` bigint REFERENCES \`rqb_test_schema\`.\`users\`(\`id\`), - \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE ALGORITHM = undefined - SQL SECURITY definer - VIEW \`rqb_test_schema\`.\`users_sch_view\` AS (select \`rqb_test_schema\`.\`users\`.\`id\`, \`rqb_test_schema\`.\`users\`.\`name\`, \`rqb_test_schema\`.\`users\`.\`verified\`, \`rqb_test_schema\`.\`users\`.\`invited_by\`, \`rqb_test_schema\`.\`posts\`.\`content\`, \`rqb_test_schema\`.\`posts\`.\`created_at\`, (select count(*) from \`rqb_test_schema\`.\`users\` as \`count_source\` where \`rqb_test_schema\`.\`users\`.\`id\` <> 2) as \`count\` from \`rqb_test_schema\`.\`users\` left join \`rqb_test_schema\`.\`posts\` on \`rqb_test_schema\`.\`users\`.\`id\` = \`rqb_test_schema\`.\`posts\`.\`owner_id\`); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE ALGORITHM = undefined - SQL SECURITY definer - VIEW \`rqb_users_view\` AS (select \`users\`.\`id\`, \`users\`.\`name\`, \`users\`.\`verified\`, \`users\`.\`invited_by\`, \`posts\`.\`content\`, \`posts\`.\`created_at\`, (select count(*) from \`users\` as \`count_source\` where \`users\`.\`id\` <> 2) as \`count\` from \`users\` left join \`posts\` on \`users\`.\`id\` = \`posts\`.\`owner_id\`); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`comments\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`content\` text NOT NULL, - \`creator\` bigint REFERENCES \`users\`(\`id\`), - \`post_id\` bigint REFERENCES \`posts\`(\`id\`), - \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`comment_likes\` ( - \`id\` serial PRIMARY KEY NOT NULL, - \`creator\` bigint REFERENCES \`users\`(\`id\`), - \`comment_id\` bigint REFERENCES \`comments\`(\`id\`), - \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`course_offerings\` ( - \`course_id\` integer NOT NULL, - \`semester\` varchar(10) NOT NULL, - CONSTRAINT \`course_offerings_pkey\` PRIMARY KEY(\`course_id\`,\`semester\`) - ) - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`student_grades\` ( - \`student_id\` integer NOT NULL, - \`course_id\` integer NOT NULL, - \`semester\` varchar(10) NOT NULL, - \`grade\` char(2), - CONSTRAINT \`student_grades_pkey\` PRIMARY KEY(\`student_id\`,\`course_id\`,\`semester\`) - ); - `, - ); - await ctx.mysqlDbV2.execute( - sql` - CREATE TABLE \`students\` ( - \`student_id\` serial PRIMARY KEY NOT NULL, - \`name\` text NOT NULL - ); - `, - ); -}); - -test('[Find Many] Get users with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: true, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts.length).eq(3); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - expect(usersWithPosts[2]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[2]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + limit posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts.length).eq(3); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - expect(usersWithPosts[2]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[2]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + limit posts and users', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - limit: 2, - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts.length).eq(2); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + custom fields', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: true, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lowerName: string; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts.length).toEqual(3); - expect(usersWithPosts[0]?.posts.length).toEqual(3); - expect(usersWithPosts[1]?.posts.length).toEqual(2); - expect(usersWithPosts[2]?.posts.length).toEqual(2); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - lowerName: 'dan', - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { - id: 2, - ownerId: 1, - content: 'Post1.2', - createdAt: usersWithPosts[0]?.posts[1]?.createdAt, - }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - lowerName: 'andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { - id: 5, - ownerId: 2, - content: 'Post2.1', - createdAt: usersWithPosts[1]?.posts[1]?.createdAt, - }], - }); - expect(usersWithPosts[2]).toEqual({ - id: 3, - name: 'Alex', - lowerName: 'alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { - id: 7, - ownerId: 3, - content: 'Post3.1', - createdAt: usersWithPosts[2]?.posts[1]?.createdAt, - }], - }); -}); - -test('[Find Many] Get users with posts + custom fields + limits', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - limit: 1, - with: { - posts: { - limit: 1, - }, - }, - extras: ({ - lowerName: (usersTable, { sql }) => sql`lower(${usersTable.name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lowerName: string; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(1); - expect(usersWithPosts[0]?.posts.length).toEqual(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - lowerName: 'dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: '1' }, - { ownerId: 1, content: '2' }, - { ownerId: 1, content: '3' }, - { ownerId: 2, content: '4' }, - { ownerId: 2, content: '5' }, - { ownerId: 3, content: '6' }, - { ownerId: 3, content: '7' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: { - orderBy: { - content: 'desc', - }, - }, - }, - orderBy: { - id: 'desc', - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(3); - expect(usersWithPosts[0]?.posts.length).eq(2); - expect(usersWithPosts[1]?.posts.length).eq(2); - expect(usersWithPosts[2]?.posts.length).eq(3); - - expect(usersWithPosts[2]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { - id: 2, - ownerId: 1, - content: '2', - createdAt: usersWithPosts[2]?.posts[1]?.createdAt, - }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ - id: 5, - ownerId: 2, - content: '5', - createdAt: usersWithPosts[1]?.posts[1]?.createdAt, - }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[0]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ - id: 7, - ownerId: 3, - content: '7', - createdAt: usersWithPosts[0]?.posts[1]?.createdAt, - }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - where: { - id: 1, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + where + partial', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: { - id: true, - name: true, - }, - with: { - posts: { - columns: { - id: true, - content: true, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - posts: { - id: number; - content: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - posts: [{ id: 1, content: 'Post1' }], - }); -}); - -test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: { - id: true, - name: true, - }, - with: { - posts: { - columns: { - id: true, - content: true, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - posts: { - id: number; - content: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - posts: [{ id: 1, content: 'Post1' }], - }); -}); - -test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: { - id: true, - name: false, - }, - with: { - posts: { - columns: { - id: true, - content: false, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - posts: { - id: number; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - posts: [{ id: 1 }], - }); -}); - -test('[Find Many] Get users with posts + where + partial(false)', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: { - name: false, - }, - with: { - posts: { - columns: { - content: false, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts in transaction', async (t) => { - const { mysqlDbV2: db } = t; - - let usersWithPosts: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[] = []; - - await db.transaction(async (tx) => { - await tx.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await tx.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - usersWithPosts = await tx.query.usersTable.findMany({ - where: { - id: 1, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }); - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { - const { mysqlDbV2: db } = t; - - let usersWithPosts: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[] = []; - - await expect(db.transaction(async (tx) => { - await tx.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await tx.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - tx.rollback(); - - usersWithPosts = await tx.query.usersTable.findMany({ - where: { - id: 1, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }); - })).rejects.toThrowError(new TransactionRollbackError()); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(0); -}); - -test('[Find Many] Get only custom fields', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1' }, - { id: 2, ownerId: 1, content: 'Post1.2' }, - { id: 3, ownerId: 1, content: 'Post1.3' }, - { id: 4, ownerId: 2, content: 'Post2' }, - { id: 5, ownerId: 2, content: 'Post2.1' }, - { id: 6, ownerId: 3, content: 'Post3' }, - { id: 7, ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - lowerName: string; - posts: { - lowerName: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(3); - expect(usersWithPosts[0]?.posts.length).toEqual(3); - expect(usersWithPosts[1]?.posts.length).toEqual(2); - expect(usersWithPosts[2]?.posts.length).toEqual(2); - - expect(usersWithPosts[0]?.lowerName).toEqual('dan'); - expect(usersWithPosts[1]?.lowerName).toEqual('andrew'); - expect(usersWithPosts[2]?.lowerName).toEqual('alex'); - - expect(usersWithPosts[0]?.posts).toContainEqual({ - lowerName: 'post1', - }); - - expect(usersWithPosts[0]?.posts).toContainEqual({ - lowerName: 'post1.2', - }); - - expect(usersWithPosts[0]?.posts).toContainEqual({ - lowerName: 'post1.3', - }); - - expect(usersWithPosts[1]?.posts).toContainEqual({ - lowerName: 'post2', - }); - - expect(usersWithPosts[1]?.posts).toContainEqual({ - lowerName: 'post2.1', - }); - - expect(usersWithPosts[2]?.posts).toContainEqual({ - lowerName: 'post3', - }); - - expect(usersWithPosts[2]?.posts).toContainEqual({ - lowerName: 'post3.1', - }); -}); - -test('[Find Many] Get only custom fields + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - lowerName: string; - posts: { - lowerName: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(1); - expect(usersWithPosts[0]?.posts.length).toEqual(2); - - expect(usersWithPosts).toContainEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], - }); -}); - -test('[Find Many] Get only custom fields + where + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - limit: 1, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - lowerName: string; - posts: { - lowerName: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(1); - expect(usersWithPosts[0]?.posts.length).toEqual(1); - - expect(usersWithPosts).toContainEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.2' }], - }); -}); - -test('[Find Many] Get only custom fields + where + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - orderBy: { - id: 'desc', - }, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - lowerName: string; - posts: { - lowerName: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(1); - expect(usersWithPosts[0]?.posts.length).toEqual(2); - - expect(usersWithPosts).toContainEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], - }); -}); - -test('[Find One] Get only custom fields', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - lowerName: string; - posts: { - lowerName: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts?.posts.length).toEqual(3); - - expect(usersWithPosts?.lowerName).toEqual('dan'); - - expect(usersWithPosts?.posts).toContainEqual({ - lowerName: 'post1', - }); - - expect(usersWithPosts?.posts).toContainEqual({ - lowerName: 'post1.2', - }); - - expect(usersWithPosts?.posts).toContainEqual({ - lowerName: 'post1.3', - }); -}); - -test('[Find One] Get only custom fields + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - lowerName: string; - posts: { - lowerName: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts?.posts.length).toEqual(2); - - expect(usersWithPosts).toEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], - }); -}); - -test('[Find One] Get only custom fields + where + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - limit: 1, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - lowerName: string; - posts: { - lowerName: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts?.posts.length).toEqual(1); - - expect(usersWithPosts).toEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.2' }], - }); -}); - -test('[Find One] Get only custom fields + where + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - orderBy: { - id: 'desc', - }, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - lowerName: string; - posts: { - lowerName: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts?.posts.length).toEqual(2); - - expect(usersWithPosts).toEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], - }); -}); - -test('[Find Many] Get select {}', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await expect( - async () => - await db.query.usersTable.findMany({ - columns: {}, - }), - ).rejects.toThrow(DrizzleError); -}); - -test('[Find One] Get select {}', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await expect(async () => - await db.query.usersTable.findFirst({ - columns: {}, - }) - ).rejects.toThrow(DrizzleError); -}); - -test('[Find Many] Get deep select {}', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - await expect(async () => - await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - }, - }, - }) - ).rejects.toThrow(DrizzleError); -}); -test('[Find One] Get deep select {}', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - await expect(async () => - await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - }, - }, - }) - ).rejects.toThrow(DrizzleError); -}); - -test('[Find Many] Get users with posts + prepared limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const prepared = db.query.usersTable.findMany({ - with: { - posts: { - limit: sql.placeholder('limit'), - }, - }, - }).prepare(); - - const usersWithPosts = await prepared.execute({ limit: 1 }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(3); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - expect(usersWithPosts[2]?.posts.length).eq(1); - - expect(usersWithPosts).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + prepared limit + offset', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const prepared = db.query.usersTable.findMany({ - limit: sql.placeholder('uLimit'), - offset: sql.placeholder('uOffset'), - with: { - posts: { - limit: sql.placeholder('pLimit'), - }, - }, - }).prepare(); - - const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(2); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - - expect(usersWithPosts).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + prepared where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const prepared = db.query.usersTable.findMany({ - where: { - id: { - eq: sql.placeholder('id'), - }, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }).prepare(); - - const usersWithPosts = await prepared.execute({ id: 1 }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const prepared = db.query.usersTable.findMany({ - limit: sql.placeholder('uLimit'), - offset: sql.placeholder('uOffset'), - where: { - id: { - OR: [ - { - eq: sql.placeholder('id'), - }, - 3, - ], - }, - }, - with: { - posts: { - where: { - id: { - eq: sql.placeholder('pid'), - }, - }, - limit: sql.placeholder('pLimit'), - }, - }, - }).prepare(); - - const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: true, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + limit posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts no results found', async (t) => { - const { mysqlDbV2: db } = t; - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts).toBeUndefined(); -}); - -test('[Find One] Get users with posts + limit posts and users', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + custom fields', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: true, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lowerName: string; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).toEqual(3); - - expect(usersWithPosts?.lowerName).toEqual('dan'); - expect(usersWithPosts?.id).toEqual(1); - expect(usersWithPosts?.verified).toEqual(false); - expect(usersWithPosts?.invitedBy).toEqual(null); - expect(usersWithPosts?.name).toEqual('Dan'); - - expect(usersWithPosts?.posts).toContainEqual({ - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: usersWithPosts?.posts[0]?.createdAt, - }); - - expect(usersWithPosts?.posts).toContainEqual({ - id: 2, - ownerId: 1, - content: 'Post1.2', - createdAt: usersWithPosts?.posts[1]?.createdAt, - }); - - expect(usersWithPosts?.posts).toContainEqual({ - id: 3, - ownerId: 1, - content: 'Post1.3', - createdAt: usersWithPosts?.posts[2]?.createdAt, - }); -}); - -test('[Find One] Get users with posts + custom fields + limits', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - limit: 1, - }, - }, - extras: ({ - lowerName: (usersTable, { sql }) => sql`lower(${usersTable.name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lowerName: string; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).toEqual(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - lowerName: 'dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: '1' }, - { ownerId: 1, content: '2' }, - { ownerId: 1, content: '3' }, - { ownerId: 2, content: '4' }, - { ownerId: 2, content: '5' }, - { ownerId: 3, content: '6' }, - { ownerId: 3, content: '7' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - orderBy: { - content: 'desc', - }, - }, - }, - orderBy: { - id: 'desc', - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(2); - - expect(usersWithPosts).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ - id: 7, - ownerId: 3, - content: '7', - createdAt: usersWithPosts?.posts[1]?.createdAt, - }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - where: { - id: 1, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + where + partial', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: { - id: true, - name: true, - }, - with: { - posts: { - columns: { - id: true, - content: true, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - posts: { - id: number; - content: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - posts: [{ id: 1, content: 'Post1' }], - }); -}); - -test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: { - id: true, - name: true, - }, - with: { - posts: { - columns: { - id: true, - content: true, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - posts: { - id: number; - content: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - posts: [{ id: 1, content: 'Post1' }], - }); -}); - -test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: { - id: true, - name: false, - }, - with: { - posts: { - columns: { - id: true, - content: false, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - posts: { - id: number; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - posts: [{ id: 1 }], - }); -}); - -test('[Find One] Get users with posts + where + partial(false)', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: { - name: false, - }, - with: { - posts: { - columns: { - content: false, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('Get user with invitee', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - with: { - invitee: true, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(4); - expect(usersWithInvitee[0]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).toBeNull(); - expect(usersWithInvitee[2]?.invitee).not.toBeNull(); - expect(usersWithInvitee[3]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee[3]).toEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - }); -}); - -test('Get user + limit with invitee', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew', invitedBy: 1 }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - with: { - invitee: true, - }, - limit: 2, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee and custom fields', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - with: { - invitee: { - extras: ({ lower: (invitee, { sql }) => sql`lower(${invitee.name})`.as('lower_name') }), - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(4); - expect(usersWithInvitee[0]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).toBeNull(); - expect(usersWithInvitee[2]?.invitee).not.toBeNull(); - expect(usersWithInvitee[3]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee[3]).toEqual({ - id: 4, - name: 'John', - lower: 'john', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee and custom fields + limits', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - limit: 3, - with: { - invitee: { - extras: ({ lower: (invitee, { sql }) => sql`lower(${invitee.name})`.as('lower_name') }), - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(3); - expect(usersWithInvitee[0]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).toBeNull(); - expect(usersWithInvitee[2]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee + order by', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - invitee: true, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(4); - expect(usersWithInvitee[3]?.invitee).toBeNull(); - expect(usersWithInvitee[2]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[3]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee[0]).toEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - with: { - invitee: true, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee).toContainEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee + where + partial', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - columns: { - id: true, - name: true, - }, - with: { - invitee: { - columns: { - id: true, - name: true, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - invitee: { - id: number; - name: string; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - id: 3, - name: 'Alex', - invitee: { id: 1, name: 'Dan' }, - }); - expect(usersWithInvitee).toContainEqual({ - id: 4, - name: 'John', - invitee: { id: 2, name: 'Andrew' }, - }); -}); - -test('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - columns: { - name: true, - }, - with: { - invitee: { - columns: { - id: true, - name: true, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - name: string; - invitee: { - id: number; - name: string; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - name: 'Alex', - invitee: { id: 1, name: 'Dan' }, - }); - expect(usersWithInvitee).toContainEqual({ - name: 'John', - invitee: { id: 2, name: 'Andrew' }, - }); -}); - -test('Get user with invitee + where + partial(true+false)', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - columns: { - id: true, - name: true, - verified: false, - }, - with: { - invitee: { - columns: { - id: true, - name: true, - verified: false, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - invitee: { - id: number; - name: string; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - id: 3, - name: 'Alex', - invitee: { id: 1, name: 'Dan' }, - }); - expect(usersWithInvitee).toContainEqual({ - id: 4, - name: 'John', - invitee: { id: 2, name: 'Andrew' }, - }); -}); - -test('Get user with invitee + where + partial(false)', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - columns: { - verified: false, - }, - with: { - invitee: { - columns: { - name: false, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - invitedBy: number | null; - invitee: { - id: number; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - id: 3, - name: 'Alex', - invitedBy: 1, - invitee: { id: 1, verified: false, invitedBy: null }, - }); - expect(usersWithInvitee).toContainEqual({ - id: 4, - name: 'John', - invitedBy: 2, - invitee: { id: 2, verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee and posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - invitee: true, - posts: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(4); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).toBeNull(); - expect(response[2]?.invitee).not.toBeNull(); - expect(response[3]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + limit posts and users', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 3, - with: { - invitee: true, - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(3); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).toBeNull(); - expect(response[2]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], - }); -}); - -test('Get user with invitee and posts + limits + custom fields in each', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 3, - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - with: { - invitee: { - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_invitee_name') }), - }, - posts: { - limit: 1, - extras: ({ lower: (posts, { sql }) => sql`lower(${posts.content})`.as('lower_content') }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - lower: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(3); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).toBeNull(); - expect(response[2]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, - posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], - }); -}); - -test('Get user with invitee and posts + custom fields in each', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const response = await db.query.usersTable.findMany({ - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - with: { - invitee: { - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - }, - posts: { - extras: ({ lower: (posts, { sql }) => sql`lower(${posts.content})`.as('lower_name') }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - lower: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - response[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(4); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).toBeNull(); - expect(response[2]?.invitee).not.toBeNull(); - expect(response[3]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(2); - expect(response[1]?.posts.length).eq(2); - expect(response[2]?.posts.length).eq(2); - expect(response[3]?.posts.length).eq(0); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { - id: 2, - ownerId: 1, - content: 'Post1.1', - lower: 'post1.1', - createdAt: response[0]?.posts[1]?.createdAt, - }], - }); - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { - id: 4, - ownerId: 2, - content: 'Post2.1', - lower: 'post2.1', - createdAt: response[1]?.posts[1]?.createdAt, - }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, - posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { - id: 6, - ownerId: 3, - content: 'Post3.1', - lower: 'post3.1', - createdAt: response[2]?.posts[1]?.createdAt, - }], - }); - expect(response).toContainEqual({ - id: 4, - name: 'John', - lower: 'john', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - invitee: true, - posts: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(response.length).eq(4); - - expect(response[3]?.invitee).toBeNull(); - expect(response[2]?.invitee).toBeNull(); - expect(response[1]?.invitee).not.toBeNull(); - expect(response[0]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(0); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(2); - expect(response[3]?.posts.length).eq(2); - - expect(response[3]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: response[3]?.posts[1]?.createdAt, - }], - }); - expect(response[2]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { - id: 3, - ownerId: 2, - content: 'Post2', - createdAt: response[2]?.posts[1]?.createdAt, - }], - }); - expect(response[1]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ - id: 5, - ownerId: 3, - content: 'Post3', - createdAt: response[3]?.posts[1]?.createdAt, - }], - }); - expect(response[0]).toEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - id: { - OR: [2, 3], - }, - }, - with: { - invitee: true, - posts: { - where: { - ownerId: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(2); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(0); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + limit posts and users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - limit: 1, - with: { - invitee: true, - posts: { - where: { - ownerId: 3, - }, - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(response.length).eq(1); - - expect(response[0]?.invitee).not.toBeNull(); - expect(response[0]?.posts.length).eq(1); - - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], - }); -}); - -test('Get user with invitee and posts + orderBy + where + custom', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - where: { - id: { - OR: [3, 4], - }, - }, - extras: ({ - lower: (usersTable) => sql`lower(${usersTable.name})`.as('lower_name'), - }), - with: { - invitee: true, - posts: { - where: { - ownerId: 3, - }, - orderBy: { - id: 'desc', - }, - extras: ({ - lower: (postsTable) => sql`lower(${postsTable.content})`.as('lower_name'), - }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(response.length).eq(2); - - expect(response[1]?.invitee).not.toBeNull(); - expect(response[0]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(0); - expect(response[1]?.posts.length).eq(1); - - expect(response[1]).toEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ - id: 5, - ownerId: 3, - content: 'Post3', - lower: 'post3', - createdAt: response[1]?.posts[0]?.createdAt, - }], - }); - expect(response[0]).toEqual({ - id: 4, - name: 'John', - lower: 'john', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - where: { - id: { - OR: [3, 4], - }, - }, - extras: ({ - lower: (usersTable) => sql`lower(${usersTable.name})`.as('lower_name'), - }), - columns: { - id: true, - name: true, - }, - with: { - invitee: { - columns: { - id: true, - name: true, - }, - extras: ({ - lower: (usersTable) => sql`lower(${usersTable.name})`.as('lower_name'), - }), - }, - posts: { - columns: { - id: true, - content: true, - }, - where: { - ownerId: 3, - }, - orderBy: { - id: 'desc', - }, - extras: ({ - lower: (postsTable) => sql`lower(${postsTable.content})`.as('lower_name'), - }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - lower: string; - posts: { id: number; lower: string; content: string }[]; - invitee: { - id: number; - name: string; - lower: string; - } | null; - }[] - >(); - - expect(response.length).eq(2); - - expect(response[1]?.invitee).not.toBeNull(); - expect(response[0]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(0); - expect(response[1]?.posts.length).eq(1); - - expect(response[1]).toEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - invitee: { id: 1, name: 'Dan', lower: 'dan' }, - posts: [{ - id: 5, - content: 'Post3', - lower: 'post3', - }], - }); - expect(response[0]).toEqual({ - id: 4, - name: 'John', - lower: 'john', - invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, - posts: [], - }); -}); - -test('Get user with posts and posts with comments', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1' }, - { id: 2, ownerId: 2, content: 'Post2' }, - { id: 3, ownerId: 3, content: 'Post3' }, - ]); - - await db.insert(commentsTable).values([ - { postId: 1, content: 'Comment1', creator: 2 }, - { postId: 2, content: 'Comment2', creator: 2 }, - { postId: 3, content: 'Comment3', creator: 3 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - posts: { - with: { - comments: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - comments: { - id: number; - content: string; - createdAt: Date; - creator: number | null; - postId: number | null; - }[]; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(3); - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response[0]?.posts[0]?.comments.length).eq(1); - expect(response[1]?.posts[0]?.comments.length).eq(1); - expect(response[2]?.posts[0]?.comments.length).eq(1); - - expect(response[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: response[0]?.posts[0]?.createdAt, - comments: [ - { - id: 1, - content: 'Comment1', - creator: 2, - postId: 1, - createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); - expect(response[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: response[1]?.posts[0]?.createdAt, - comments: [ - { - id: 2, - content: 'Comment2', - creator: 2, - postId: 2, - createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); - // expect(response[2]).toEqual({ - // id: 3, - // name: 'Alex', - // verified: false, - // invitedBy: null, - // posts: [{ - // id: 3, - // ownerId: 3, - // content: 'Post3', - // createdAt: response[2]?.posts[0]?.createdAt, - // comments: [ - // { - // id: , - // content: 'Comment3', - // creator: 3, - // postId: 3, - // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, - // }, - // ], - // }], - // }); -}); - -test('Get user with posts and posts with comments and comments with owner', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1' }, - { id: 2, ownerId: 2, content: 'Post2' }, - { id: 3, ownerId: 3, content: 'Post3' }, - ]); - - await db.insert(commentsTable).values([ - { postId: 1, content: 'Comment1', creator: 2 }, - { postId: 2, content: 'Comment2', creator: 2 }, - { postId: 3, content: 'Comment3', creator: 3 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - posts: { - with: { - comments: { - with: { - author: true, - }, - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - comments: { - id: number; - content: string; - createdAt: Date; - creator: number | null; - postId: number | null; - author: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(3); - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response[0]?.posts[0]?.comments.length).eq(1); - expect(response[1]?.posts[0]?.comments.length).eq(1); - expect(response[2]?.posts[0]?.comments.length).eq(1); - - expect(response[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: response[0]?.posts[0]?.createdAt, - comments: [ - { - id: 1, - content: 'Comment1', - creator: 2, - author: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - postId: 1, - createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); - expect(response[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: response[1]?.posts[0]?.createdAt, - comments: [ - { - id: 2, - content: 'Comment2', - creator: 2, - author: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - postId: 2, - createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); -}); - -test('Get user with posts and posts with comments and comments with owner where exists', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1' }, - { id: 2, ownerId: 2, content: 'Post2' }, - { id: 3, ownerId: 3, content: 'Post3' }, - ]); - - await db.insert(commentsTable).values([ - { postId: 1, content: 'Comment1', creator: 2 }, - { postId: 2, content: 'Comment2', creator: 2 }, - { postId: 3, content: 'Comment3', creator: 3 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - posts: { - with: { - comments: { - with: { - author: true, - }, - }, - }, - }, - }, - where: { - RAW: ({ id }, { exists, eq }) => - exists(db.select({ one: sql`1` }).from(alias(usersTable, 'alias')).where(eq(sql`1`, id))), - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - comments: { - id: number; - content: string; - createdAt: Date; - creator: number | null; - postId: number | null; - author: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]; - }[]>(); - - expect(response.length).eq(1); - expect(response[0]?.posts.length).eq(1); - - expect(response[0]?.posts[0]?.comments.length).eq(1); - - expect(response[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: response[0]?.posts[0]?.createdAt, - comments: [ - { - id: 1, - content: 'Comment1', - creator: 2, - author: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - postId: 1, - createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); -}); - -test('[Find Many] Get users with groups', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - usersToGroups: { - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - expect(response[2]?.usersToGroups.length).toEqual(2); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }, { - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - usersToGroups: { - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(2); - expect(response[2]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }, { - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find Many] Get users with groups + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 2, - with: { - usersToGroups: { - limit: 1, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - limit: 2, - with: { - usersToGroups: { - limit: 1, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find Many] Get users with groups + limit + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 1, - where: { - id: { - OR: [1, 2], - }, - }, - with: { - usersToGroups: { - where: { - groupId: 1, - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(1); - - expect(response[0]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users + limit + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - limit: 1, - where: { - id: { - gt: 1, - }, - }, - with: { - usersToGroups: { - where: { - userId: 2, - }, - limit: 1, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(1); - - expect(response[0]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find Many] Get users with groups + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - usersToGroups: { - where: { - groupId: 2, - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(0); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - where: { - id: { - gt: 1, - }, - }, - with: { - usersToGroups: { - where: { - userId: 2, - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(0); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [], - }); -}); - -test('[Find Many] Get users with groups + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - orderBy: { - groupId: 'desc', - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(2); - expect(response[1]?.usersToGroups.length).toEqual(1); - expect(response[2]?.usersToGroups.length).toEqual(1); - - expect(response[2]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); - - expect(response[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); - - expect(response[0]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }, { - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - orderBy: { - userId: 'desc', - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(2); - expect(response[2]?.usersToGroups.length).toEqual(1); - - expect(response[2]).toEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response[1]).toEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }, { - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response[0]).toEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find Many] Get users with groups + orderBy + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - limit: 2, - with: { - usersToGroups: { - limit: 1, - orderBy: { - groupId: 'desc', - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); - - expect(response[0]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - with: { - usersToGroups: { - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); -}); - -test('[Find One] Get groups with users', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - with: { - usersToGroups: { - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - with: { - usersToGroups: { - limit: 1, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); -}); - -test('[Find One] Get groups with users + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - with: { - usersToGroups: { - limit: 1, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + limit + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - usersToGroups: { - where: { - groupId: 1, - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); -}); - -test('[Find One] Get groups with users + limit + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - where: { - id: { - gt: 1, - }, - }, - with: { - usersToGroups: { - where: { - userId: 2, - }, - limit: 1, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - usersToGroups: { - where: { - groupId: 2, - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(0); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [], - }); -}); - -test('[Find One] Get groups with users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - where: { - id: { - gt: 1, - }, - }, - with: { - usersToGroups: { - where: { - userId: 2, - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - orderBy: { - groupId: 'desc', - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(2); - - expect(response).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }, { - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find One] Get groups with users + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - orderBy: { - userId: 'desc', - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + orderBy + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - limit: 1, - orderBy: { - groupId: 'desc', - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }], - }); -}); - -test('Get groups with users + orderBy + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - orderBy: { - id: 'desc', - }, - limit: 2, - with: { - usersToGroups: { - limit: 1, - orderBy: { - userId: 'desc', - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[] - >(); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response[1]).toEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response[0]).toEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('Get users with groups + custom', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - extras: ({ - lower: (usersTable) => sql`lower(${usersTable.name})`.as('lower_name'), - }), - with: { - usersToGroups: { - columns: {}, - with: { - group: { - extras: ({ - lower: (groupsTable) => sql`lower(${groupsTable.name})`.as('lower_name'), - }), - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - lower: string; - }; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - expect(response[2]?.usersToGroups.length).toEqual(2); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - lower: 'group1', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - lower: 'group3', - description: null, - }, - }, { - group: { - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - }, - }], - }); -}); - -test('Get groups with users + custom', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - extras: ({ - lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), - }), - with: { - usersToGroups: { - columns: {}, - with: { - user: { - extras: ({ - lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), - }), - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - lower: string; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - }; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(2); - expect(response[2]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Group1', - lower: 'group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - }, - }, { - user: { - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Group3', - lower: 'group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('Force optional on where on non-optional relation query', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - with: { - inviteeRequired: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - inviteeRequired: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(4); - expect(usersWithInvitee[0]?.inviteeRequired).toBeNull(); - expect(usersWithInvitee[1]?.inviteeRequired).toBeNull(); - expect(usersWithInvitee[2]?.inviteeRequired).not.toBeNull(); - expect(usersWithInvitee[3]?.inviteeRequired).toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - inviteeRequired: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - inviteeRequired: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - inviteeRequired: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee[3]).toEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - inviteeRequired: null, - }); -}); - -test('[Find Many .through] Get users with groups', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groups: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [ - { - id: 2, - name: 'Group2', - description: null, - }, - { - id: 3, - name: 'Group3', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with users', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - users: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.users.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 2, - with: { - groups: { - limit: 1, - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get groups with users + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - limit: 2, - with: { - users: { - limit: 1, - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + limit + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 1, - where: { - id: { - OR: [1, 2], - }, - }, - with: { - groups: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get groups with users + limit + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - limit: 1, - where: { - id: { gt: 1 }, - }, - with: { - users: { - where: { - id: 2, - }, - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - groups: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get groups with users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - where: { - id: { gt: 1 }, - }, - with: { - users: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - users: [], - }]); -}); - -test('[Find Many .through] Get users with groups + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - groups: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 3, - name: 'Group3', - description: null, - }, { - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get groups with users + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - users: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }, { - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + orderBy + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - limit: 2, - with: { - groups: { - limit: 1, - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 3, - name: 'Group3', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }]); -}); - -test('[Find One .through] Get users with groups', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - with: { - groups: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }); -}); - -test('[Find One .through] Get groups with users', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - with: { - users: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - with: { - groups: { - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }); -}); - -test('[Find One .through] Get groups with users + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - with: { - users: { - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + limit + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - groups: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }); -}); - -test('[Find One .through] Get groups with users + limit + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - where: { - id: { gt: 1 }, - }, - with: { - users: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - groups: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [], - }); -}); - -test('[Find One .through] Get groups with users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - where: { - id: { gt: 1 }, - }, - with: { - users: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - groups: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 3, - name: 'Group3', - description: null, - }, { - id: 2, - name: 'Group2', - description: null, - }], - }); -}); - -test('[Find One .through] Get groups with users + orderBy', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - users: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 3, - name: 'Group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + orderBy + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - groups: { - limit: 1, - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 3, - name: 'Group3', - description: null, - }], - }); -}); - -test('[Find Many .through] Get groups with users + orderBy + limit', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - orderBy: { - id: 'desc', - }, - limit: 2, - with: { - users: { - limit: 1, - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[] - >(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + custom', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - with: { - groups: { - orderBy: { - id: 'asc', - }, - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - groups: { - id: number; - name: string; - description: string | null; - lower: string; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - lower: 'group1', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - groups: [ - { - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - }, - { - id: 3, - name: 'Group3', - lower: 'group3', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with users + custom', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - extras: ({ - lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), - }), - with: { - users: { - extras: ({ - lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), - }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - lower: string; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.users.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - lower: 'group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - }, { - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - lower: 'group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with first group', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 3, name: 'Group3' }, - { id: 2, name: 'Group2' }, - { id: 1, name: 'Group1' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 3, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 2, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - group: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - group: { - id: number; - name: string; - description: string | null; - } | null; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - group: null, - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - group: { - id: 3, - name: 'Group3', - description: null, - }, - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - group: { - id: 2, - name: 'Group2', - description: null, - }, - }]); -}); - -test('[Find Many .through] Get groups with first user', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - user: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - user: null, - }, { - id: 2, - name: 'Group2', - description: null, - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }, { - id: 3, - name: 'Group3', - description: null, - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }]); -}); - -test('[Find Many .through] Get users with filtered groups', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groupsFiltered: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groupsFiltered: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groupsFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groupsFiltered: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groupsFiltered: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groupsFiltered: [ - { - id: 2, - name: 'Group2', - description: null, - }, - { - id: 3, - name: 'Group3', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with filtered users', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - usersFiltered: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersFiltered: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.usersFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - usersFiltered: [], - }, { - id: 2, - name: 'Group2', - description: null, - usersFiltered: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - usersFiltered: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with filtered groups + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groupsFiltered: { - where: { - id: { - lt: 3, - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groupsFiltered: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groupsFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groupsFiltered: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groupsFiltered: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groupsFiltered: [ - { - id: 2, - name: 'Group2', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with filtered users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - usersFiltered: { - where: { id: { lt: 3 } }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersFiltered: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.usersFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - usersFiltered: [], - }, { - id: 2, - name: 'Group2', - description: null, - usersFiltered: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - usersFiltered: [], - }]); -}); - -test('[Find Many] Get users with filtered posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1.1' }, - { id: 2, ownerId: 2, content: 'Post2.1' }, - { id: 3, ownerId: 3, content: 'Post3.1' }, - { id: 4, ownerId: 1, content: 'Post1.2' }, - { id: 5, ownerId: 2, content: 'Post2.2' }, - { id: 6, ownerId: 3, content: 'Post3.2' }, - { id: 7, ownerId: 1, content: 'Post1.3' }, - { id: 8, ownerId: 2, content: 'Post2.3' }, - { id: 9, ownerId: 3, content: 'Post3.3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - postsFiltered: { - columns: { - ownerId: true, - content: true, - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - postsFiltered: { - ownerId: number | null; - content: string; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - postsFiltered: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - postsFiltered: [ - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 2, content: 'Post2.2' }, - { ownerId: 2, content: 'Post2.3' }, - ], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - postsFiltered: [], - }]); -}); - -test('[Find Many] Get posts with filtered authors', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1.1' }, - { id: 2, ownerId: 2, content: 'Post2.1' }, - { id: 3, ownerId: 3, content: 'Post3.1' }, - { id: 4, ownerId: 1, content: 'Post1.2' }, - { id: 5, ownerId: 2, content: 'Post2.2' }, - { id: 6, ownerId: 3, content: 'Post3.2' }, - ]); - - const posts = await db.query.postsTable.findMany({ - columns: { - id: true, - content: true, - }, - with: { - authorFiltered: { - columns: { - name: true, - id: true, - }, - }, - }, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - id: number; - content: string; - authorFiltered: { - id: number; - name: string; - }; - }[]>(); - - posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(posts).toStrictEqual([ - { id: 1, content: 'Post1.1', authorFiltered: null }, - { - id: 2, - content: 'Post2.1', - authorFiltered: { - id: 2, - name: 'Andrew', - }, - }, - { id: 3, content: 'Post3.1', authorFiltered: null }, - { id: 4, content: 'Post1.2', authorFiltered: null }, - { - id: 5, - content: 'Post2.2', - authorFiltered: { - id: 2, - name: 'Andrew', - }, - }, - { id: 6, content: 'Post3.2', authorFiltered: null }, - ]); -}); - -test('[Find Many] Get users with filtered posts + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1.1' }, - { id: 2, ownerId: 2, content: 'Post2.1' }, - { id: 3, ownerId: 3, content: 'Post3.1' }, - { id: 4, ownerId: 1, content: 'Post1.2' }, - { id: 5, ownerId: 2, content: 'Post2.2' }, - { id: 6, ownerId: 3, content: 'Post3.2' }, - { id: 7, ownerId: 1, content: 'Post1.3' }, - { id: 8, ownerId: 2, content: 'Post2.3' }, - { id: 9, ownerId: 3, content: 'Post3.3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - postsFiltered: { - columns: { - ownerId: true, - content: true, - }, - where: { - content: { - like: '%.2', - }, - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - postsFiltered: { - ownerId: number | null; - content: string; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - postsFiltered: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - postsFiltered: [ - { ownerId: 2, content: 'Post2.2' }, - ], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - postsFiltered: [], - }]); -}); - -test('[Find Many] Get posts with filtered authors + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1.1' }, - { id: 2, ownerId: 2, content: 'Post2.1' }, - { id: 3, ownerId: 3, content: 'Post3.1' }, - { id: 4, ownerId: 1, content: 'Post1.2' }, - { id: 5, ownerId: 2, content: 'Post2.2' }, - { id: 6, ownerId: 3, content: 'Post3.2' }, - ]); - - const posts = await db.query.postsTable.findMany({ - columns: { - id: true, - content: true, - }, - with: { - authorAltFiltered: { - columns: { - name: true, - id: true, - }, - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - id: number; - content: string; - authorAltFiltered: { - id: number; - name: string; - } | null; - }[]>(); - - posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(posts).toStrictEqual([ - { id: 1, content: 'Post1.1', authorAltFiltered: null }, - { - id: 2, - content: 'Post2.1', - authorAltFiltered: { - id: 2, - name: 'Andrew', - }, - }, - { id: 3, content: 'Post3.1', authorAltFiltered: null }, - { id: 4, content: 'Post1.2', authorAltFiltered: null }, - { id: 5, content: 'Post2.2', authorAltFiltered: null }, - { id: 6, content: 'Post3.2', authorAltFiltered: null }, - ]); -}); - -test('[Find Many] Get custom schema users with filtered posts + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(schemaPosts).values([ - { id: 1, ownerId: 1, content: 'Message1.1' }, - { id: 2, ownerId: 2, content: 'Message2.1' }, - { id: 3, ownerId: 3, content: 'Message3.1' }, - { id: 4, ownerId: 1, content: 'Message1.2' }, - { id: 5, ownerId: 2, content: 'Message2.2' }, - { id: 6, ownerId: 3, content: 'Message3.2' }, - { id: 7, ownerId: 1, content: 'Message1.3' }, - { id: 8, ownerId: 2, content: 'Message2.3' }, - { id: 9, ownerId: 3, content: 'Message3.3' }, - { id: 10, ownerId: 1, content: 'Post1.1' }, - { id: 11, ownerId: 2, content: 'Post2.1' }, - { id: 12, ownerId: 3, content: 'Post3.1' }, - { id: 13, ownerId: 1, content: 'Post1.2' }, - { id: 14, ownerId: 2, content: 'Post2.2' }, - { id: 15, ownerId: 3, content: 'Post3.2' }, - { id: 16, ownerId: 1, content: 'Post1.3' }, - { id: 17, ownerId: 2, content: 'Post2.3' }, - { id: 18, ownerId: 3, content: 'Post3.3' }, - ]); - - const usersWithPosts = await db.query.schemaUsers.findMany({ - with: { - posts: { - columns: { - ownerId: true, - content: true, - }, - where: { - content: { - like: '%2.%', - }, - }, - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'desc', - }, - where: { - id: { - gte: 2, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - ownerId: number | null; - content: string; - }[]; - }[]>(); - - expect(usersWithPosts).toStrictEqual([{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [ - { ownerId: 2, content: 'Message2.1' }, - { ownerId: 2, content: 'Message2.2' }, - { ownerId: 2, content: 'Message2.3' }, - ], - }]); -}); - -test('[Find Many] Get custom schema posts with filtered authors + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(schemaPosts).values([ - { id: 1, ownerId: 1, content: 'Message1.1' }, - { id: 2, ownerId: 2, content: 'Message2.1' }, - { id: 3, ownerId: 3, content: 'Message3.1' }, - { id: 4, ownerId: 1, content: 'Message1.2' }, - { id: 5, ownerId: 2, content: 'Message2.2' }, - { id: 6, ownerId: 3, content: 'Message3.2' }, - { id: 7, ownerId: 1, content: 'Message1.3' }, - { id: 8, ownerId: 2, content: 'Message2.3' }, - { id: 9, ownerId: 3, content: 'Message3.3' }, - { id: 10, ownerId: 1, content: 'Post1.1' }, - { id: 11, ownerId: 2, content: 'Post2.1' }, - { id: 12, ownerId: 3, content: 'Post3.1' }, - { id: 13, ownerId: 1, content: 'Post1.2' }, - { id: 14, ownerId: 2, content: 'Post2.2' }, - { id: 15, ownerId: 3, content: 'Post3.2' }, - { id: 16, ownerId: 1, content: 'Post1.3' }, - { id: 17, ownerId: 2, content: 'Post2.3' }, - { id: 18, ownerId: 3, content: 'Post3.3' }, - ]); - - const posts = await db.query.schemaPosts.findMany({ - columns: { - content: true, - }, - with: { - author: { - columns: { - name: true, - id: true, - }, - where: { - id: 2, - }, - }, - }, - orderBy: { - id: 'desc', - }, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - content: string; - author: { - id: number; - name: string; - } | null; - }[]>(); - - expect(posts).toStrictEqual([ - { content: 'Post3.3', author: null }, - { content: 'Post2.3', author: null }, - { content: 'Post1.3', author: null }, - { content: 'Post3.2', author: null }, - { content: 'Post2.2', author: null }, - { content: 'Post1.2', author: null }, - { content: 'Post3.1', author: null }, - { content: 'Post2.1', author: null }, - { content: 'Post1.1', author: null }, - { content: 'Message3.3', author: null }, - { content: 'Message2.3', author: { id: 2, name: 'Andrew' } }, - { content: 'Message1.3', author: null }, - { content: 'Message3.2', author: null }, - { content: 'Message2.2', author: { id: 2, name: 'Andrew' } }, - { content: 'Message1.2', author: null }, - { content: 'Message3.1', author: null }, - { content: 'Message2.1', author: { id: 2, name: 'Andrew' } }, - { content: 'Message1.1', author: null }, - ]); -}); - -test('[Find Many .through] Get custom schema users with filtered groups + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(schemaGroups).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(schemaUsersToGroups).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.schemaUsers.findMany({ - with: { - groups: { - where: { - id: { - lt: 3, - }, - }, - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [ - { - id: 2, - name: 'Group2', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get custom schema groups with filtered users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(schemaGroups).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(schemaUsersToGroups).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.schemaGroups.findMany({ - with: { - users: { - where: { id: { lt: 3 } }, - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - users: [], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - users: [], - }]); -}); - -test('[Find Many] Get view users with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersView.findMany({ - with: { - posts: true, - }, - orderBy: { - id: 'asc', - }, - where: { - id: { - lt: 3, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - counter: number | null; - createdAt: Date | null; - postContent: string | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post1', - createdAt: date1, - posts: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - counter: null, - postContent: 'Post2', - createdAt: date2, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], - }]); -}); - -test('[Find Many] Get view users with posts + filter by SQL.Aliased field', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersView.findMany({ - columns: { - id: true, - name: true, - verified: true, - invitedBy: true, - counter: true, - }, - with: { - posts: true, - }, - orderBy: { - id: 'desc', - }, - where: { - counter: { - ne: '0', - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - counter: 3, - posts: [], - }]); -}); - -test('[Find Many] Get view users with posts + filter by joined field', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersView.findMany({ - with: { - posts: true, - }, - orderBy: { - id: 'asc', - }, - where: { - postContent: 'Post2', - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - counter: null, - postContent: 'Post2', - createdAt: date2, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], - }]); -}); - -test('[Find Many] Get posts with view users with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const result = await db.query.postsTable.findMany({ - with: { - viewAuthor: { - with: { - posts: true, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - viewAuthor: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }; - }[]>(); - - expect(result).toEqual([ - { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: date1, - viewAuthor: null, - }, - { - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: date2, - viewAuthor: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - counter: null, - postContent: 'Post2', - createdAt: date2, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], - }, - }, - { - id: 3, - ownerId: 3, - content: 'Post3', - createdAt: date3, - viewAuthor: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post3', - createdAt: date3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, - }, - ]); -}); - -test('[Find Many] Get posts with view users + filter with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const result = await db.query.postsTable.findMany({ - with: { - viewAuthor: { - with: { - posts: true, - }, - where: { - id: { - ne: 2, - }, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - viewAuthor: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | null; - }[]>(); - - expect(result).toEqual([ - { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: date1, - viewAuthor: null, - }, - { - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: date2, - viewAuthor: null, - }, - { - id: 3, - ownerId: 3, - content: 'Post3', - createdAt: date3, - viewAuthor: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post3', - createdAt: date3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, - }, - ]); -}); - -test('[Find Many] Get posts with view users + filter by joined column with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const result = await db.query.postsTable.findMany({ - with: { - viewAuthor: { - with: { - posts: true, - }, - where: { - postContent: { - notLike: '%2', - }, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - viewAuthor: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | null; - }[]>(); - - expect(result).toEqual([ - { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: date1, - viewAuthor: null, - }, - { - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: date2, - viewAuthor: null, - }, - { - id: 3, - ownerId: 3, - content: 'Post3', - createdAt: date3, - viewAuthor: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post3', - createdAt: date3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, - }, - ]); -}); -test('[Find Many] Get posts with view users + filter by SQL.Aliased with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const result = await db.query.postsTable.findMany({ - with: { - viewAuthor: { - with: { - posts: true, - }, - where: { - counter: { - ne: '0', - }, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - viewAuthor: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | null; - }[]>(); - - expect(result).toEqual([ - { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: date1, - viewAuthor: null, - }, - { - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: date2, - viewAuthor: null, - }, - { - id: 3, - ownerId: 3, - content: 'Post3', - createdAt: date3, - viewAuthor: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post3', - createdAt: date3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, - }, - ]); -}); - -test('[Find Many .through] Get view users with filtered groups + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersView.findMany({ - with: { - groups: { - where: { - id: { - lt: 3, - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - createdAt: null, - postContent: null, - counter: 3, - groups: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - createdAt: null, - postContent: null, - counter: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - createdAt: null, - postContent: null, - counter: 3, - groups: [ - { - id: 2, - name: 'Group2', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with filtered view users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - usersView: { - columns: { - createdAt: false, - postContent: false, - }, - where: { id: { lt: 3 } }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersView: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - counter: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.usersView.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - usersView: [], - }, { - id: 2, - name: 'Group2', - description: null, - usersView: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - counter: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - usersView: [], - }]); -}); - -test('[Find Many] Get schema view users with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(schemaPosts).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.schemaUsersView.findMany({ - with: { - posts: true, - }, - orderBy: { - id: 'asc', - }, - where: { - id: { - lt: 3, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - counter: number | null; - createdAt: Date | null; - postContent: string | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post1', - createdAt: date1, - posts: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - counter: null, - postContent: 'Post2', - createdAt: date2, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], - }]); -}); - -test('[Find Many] Get schema view users with posts + filter by SQL.Aliased field', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(schemaPosts).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.schemaUsersView.findMany({ - columns: { - id: true, - name: true, - verified: true, - invitedBy: true, - counter: true, - }, - with: { - posts: true, - }, - orderBy: { - id: 'desc', - }, - where: { - counter: { - ne: '0', - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - counter: 3, - posts: [], - }]); -}); - -test('[Find Many] Get schema view users with posts + filter by joined field', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(schemaPosts).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.schemaUsersView.findMany({ - with: { - posts: true, - }, - orderBy: { - id: 'asc', - }, - where: { - postContent: 'Post2', - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - counter: null, - postContent: 'Post2', - createdAt: date2, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], - }]); -}); - -test('[Find Many] Get schema posts with view users with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(schemaPosts).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const result = await db.query.schemaPosts.findMany({ - with: { - viewAuthor: { - with: { - posts: true, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - viewAuthor: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }; - }[]>(); - - expect(result).toEqual([ - { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: date1, - viewAuthor: null, - }, - { - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: date2, - viewAuthor: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - counter: null, - postContent: 'Post2', - createdAt: date2, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], - }, - }, - { - id: 3, - ownerId: 3, - content: 'Post3', - createdAt: date3, - viewAuthor: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post3', - createdAt: date3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, - }, - ]); -}); - -test('[Find Many] Get schema posts with view users + filter with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(schemaPosts).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const result = await db.query.schemaPosts.findMany({ - with: { - viewAuthor: { - with: { - posts: true, - }, - where: { - id: { - ne: 2, - }, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - viewAuthor: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | null; - }[]>(); - - expect(result).toEqual([ - { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: date1, - viewAuthor: null, - }, - { - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: date2, - viewAuthor: null, - }, - { - id: 3, - ownerId: 3, - content: 'Post3', - createdAt: date3, - viewAuthor: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post3', - createdAt: date3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, - }, - ]); -}); - -test('[Find Many] Get schema posts with view users + filter by joined column with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(schemaPosts).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const result = await db.query.schemaPosts.findMany({ - with: { - viewAuthor: { - with: { - posts: true, - }, - where: { - postContent: { - notLike: '%2', - }, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - viewAuthor: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | null; - }[]>(); - - expect(result).toEqual([ - { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: date1, - viewAuthor: null, - }, - { - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: date2, - viewAuthor: null, - }, - { - id: 3, - ownerId: 3, - content: 'Post3', - createdAt: date3, - viewAuthor: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post3', - createdAt: date3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, - }, - ]); -}); -test('[Find Many] Get schema posts with view users + filter by SQL.Aliased with posts', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(50000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(schemaPosts).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const result = await db.query.schemaPosts.findMany({ - with: { - viewAuthor: { - with: { - posts: true, - }, - where: { - counter: { - ne: '0', - }, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - viewAuthor: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | null; - }[]>(); - - expect(result).toEqual([ - { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: date1, - viewAuthor: null, - }, - { - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: date2, - viewAuthor: null, - }, - { - id: 3, - ownerId: 3, - content: 'Post3', - createdAt: date3, - viewAuthor: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - counter: 3, - postContent: 'Post3', - createdAt: date3, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: date3 }], - }, - }, - ]); -}); - -test('[Find Many .through] Get schema view users with filtered groups + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(schemaGroups).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(schemaUsersToGroups).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.schemaUsersView.findMany({ - with: { - groups: { - where: { - id: { - lt: 3, - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - createdAt: Date | null; - postContent: string | null; - counter: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - createdAt: null, - postContent: null, - counter: 3, - groups: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - createdAt: null, - postContent: null, - counter: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - createdAt: null, - postContent: null, - counter: 3, - groups: [ - { - id: 2, - name: 'Group2', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get schema groups with filtered view users + where', async (t) => { - const { mysqlDbV2: db } = t; - - await db.insert(schemaUsers).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(schemaGroups).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(schemaUsersToGroups).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.schemaGroups.findMany({ - with: { - usersView: { - columns: { - createdAt: false, - postContent: false, - }, - where: { id: { lt: 3 } }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersView: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - counter: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.usersView.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - usersView: [], - }, { - id: 2, - name: 'Group2', - description: null, - usersView: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - counter: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - usersView: [], - }]); -}); - -test('[Find Many] Get users + filter users by posts', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(5000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - orderBy: { - id: 'asc', - }, - where: { - posts: { - content: { - like: '%2', - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }]); -}); - -test('[Find Many] Get users with posts + filter users by posts', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(5000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1', createdAt: date1 }, - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: true, - }, - orderBy: { - id: 'asc', - }, - where: { - posts: { - content: { - like: '%2', - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: date2 }], - }]); -}); - -test('[Find Many] Get users filtered by existing posts', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - orderBy: { - id: 'asc', - }, - where: { - posts: true, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }]); -}); - -test('[Find Many] Get users with posts + filter users by existing posts', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: true, - }, - orderBy: { - id: 'asc', - }, - where: { - posts: true, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 2, content: 'Post2', createdAt: date2 }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 2, ownerId: 3, content: 'Post3', createdAt: date3 }], - }]); -}); - -test('[Find Many] Get users filtered by nonexisting posts', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - orderBy: { - id: 'asc', - }, - where: { - posts: false, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }]); -}); - -test('[Find Many] Get users with posts + filter users by existing posts', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 2, content: 'Post2', createdAt: date2 }, - { ownerId: 3, content: 'Post3', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: true, - }, - orderBy: { - id: 'asc', - }, - where: { - posts: false, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [], - }]); -}); - -test('[Find Many] Get users with posts + filter posts by author', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(5000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: { - where: { - author: { - id: 2, - }, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([ - { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [], - }, - { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ - id: 3, - ownerId: 2, - content: 'Post2U.1', - createdAt: date2, - }, { - id: 4, - ownerId: 2, - content: 'Post2U.2', - createdAt: date2, - }], - }, - { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [], - }, - ]); -}); - -test('[Find Many] Get users filtered by own columns and posts with filtered posts by own columns and author', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(5000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: { - where: { - content: { - like: '%2', - }, - author: { - id: 2, - }, - }, - }, - }, - where: { - id: { - gt: 1, - }, - posts: { - content: { - like: 'M%', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts).toEqual([ - { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ - id: 6, - ownerId: 2, - content: 'Post2U.2', - createdAt: date2, - }, { - id: 8, - ownerId: 2, - content: 'MessageU.2', - createdAt: date2, - }], - }, - ]); -}); - -test('[Find Many .through] Get users filtered by groups', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 1 }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - groups: { - name: 'Group2', - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }]); -}); - -test('[Find Many .through] Get users filtered by existing groups', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 1 }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - groups: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }]); -}); - -test('[Find Many .through] Get users with existing groups', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 1 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groups: true, - }, - where: { - groups: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }, { - id: 3, - name: 'Group3', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get users filtered by nonexisting groups', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 1 }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - groups: false, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }]); -}); - -test('[Find Many .through] Get users with nonexisting groups', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 1 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groups: true, - }, - where: { - groups: false, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [], - }]); -}); - -test('[Find Many .through] Get users filtered by groups with groups', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 1 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groups: { - orderBy: { - id: 'asc', - }, - }, - }, - where: { - groups: { - name: 'Group2', - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get users filtered by groups with groups filtered by users', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 1 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groups: { - orderBy: { - id: 'asc', - }, - where: { - users: { - id: 1, - }, - }, - }, - }, - where: { - groups: { - name: 'Group3', - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get users filtered by users of groups with groups', async (ctx) => { - const { mysqlDbV2: db } = ctx; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 1 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groups: { - orderBy: { - id: 'asc', - }, - }, - }, - where: { - groups: { - users: { - id: 3, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([ - { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }, - { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }, { - id: 3, - name: 'Group3', - description: null, - }], - }, - ]); -}); - -test('[Find Many] Shortcut form placeholders in filters - eq', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(45000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - { id: 9, ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { id: 10, ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const query = await db.query.postsTable.findMany({ - where: { - ownerId: sql.placeholder('id'), - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const posts = await query.execute({ - id: 1, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]>(); - - expect(posts).toEqual([ - { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - ]); -}); - -test('[Find Many] Shortcut form placeholders in filters - or', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(45000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - { id: 9, ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { id: 10, ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const query = await db.query.postsTable.findMany({ - where: { - OR: [{ - ownerId: sql.placeholder('id1'), - }, { - ownerId: sql.placeholder('id2'), - }], - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const posts = await query.execute({ - id1: 1, - id2: 2, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]>(); - - expect(posts).toEqual([ - { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - ]); -}); - -test('[Find Many] Shortcut form placeholders in filters - column or', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(45000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - { id: 9, ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { id: 10, ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const query = await db.query.postsTable.findMany({ - where: { - ownerId: { - OR: [sql.placeholder('id1'), sql.placeholder('id2')], - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const posts = await query.execute({ - id1: 1, - id2: 2, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]>(); - - expect(posts).toEqual([ - { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - ]); -}); - -test('[Find Many] Shortcut form placeholders in filters - column not', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(45000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - { id: 9, ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { id: 10, ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const query = await db.query.postsTable.findMany({ - where: { - ownerId: { - NOT: sql.placeholder('id'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const posts = await query.execute({ - id: 3, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]>(); - - expect(posts).toEqual([ - { id: 1, ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { id: 2, ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { id: 3, ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { id: 4, ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { id: 5, ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { id: 6, ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { id: 7, ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { id: 8, ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - ]); -}); - -test('[Find Many] Get users filtered by posts with AND', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(45000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.3', createdAt: date1 }, - { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const users = await db.query.usersTable.findMany({ - where: { - AND: [{ - posts: { - content: { - like: 'M%', - }, - }, - }, { - posts: { - ownerId: { - ne: 2, - }, - }, - }], - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(users).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - expect(users).toEqual([ - { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - ]); -}); - -test('[Find Many] Get users filtered by posts with OR', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(45000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.3', createdAt: date1 }, - { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const users = await db.query.usersTable.findMany({ - where: { - OR: [{ - posts: { - content: { - like: 'M%', - }, - }, - }, { - posts: { - ownerId: { - eq: 3, - }, - }, - }], - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(users).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - expect(users).toEqual([ - { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - ]); -}); - -test('[Find Many] Get users filtered by posts with NOT', async () => { - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - const date1 = new Date(45000); - const date2 = new Date(1000); - const date3 = new Date(10000); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Post1U.2', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.1', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.2', createdAt: date1 }, - { ownerId: 1, content: 'Message1U.3', createdAt: date1 }, - { ownerId: 2, content: 'Post2U.1', createdAt: date2 }, - { ownerId: 2, content: 'Post2U.2', createdAt: date2 }, - { ownerId: 2, content: 'MessageU.1', createdAt: date2 }, - { ownerId: 2, content: 'MessageU.2', createdAt: date2 }, - { ownerId: 3, content: 'Post3U.1', createdAt: date3 }, - { ownerId: 3, content: 'Post3U.2', createdAt: date3 }, - ]); - - const users = await db.query.usersTable.findMany({ - where: { - NOT: { - posts: { - content: { - like: 'M%', - }, - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(users).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]>(); - - expect(users).toEqual([ - { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - ]); -}); - -test('[Find Many .through] Through with uneven relation column count', async () => { - await db.insert(students).values([{ - studentId: 1, - name: 'First', - }, { - studentId: 2, - name: 'Second', - }, { - studentId: 3, - name: 'Third', - }, { - studentId: 4, - name: 'Fourth', - }]); - - await db.insert(studentGrades).values([ - { - studentId: 1, - courseId: 1, - semester: 's1', - grade: '44', - }, - { - studentId: 1, - courseId: 2, - semester: 's2', - grade: '35', - }, - { - studentId: 2, - courseId: 1, - semester: 's1', - grade: '58', - }, - { - studentId: 2, - courseId: 3, - semester: 's2', - grade: '72', - }, - { - studentId: 3, - courseId: 4, - semester: 's4', - grade: '99', - }, - { - studentId: 3, - courseId: 2, - semester: 's3', - grade: '85', - }, - { - studentId: 3, - courseId: 1, - semester: 's2', - grade: '48', - }, - { - studentId: 4, - courseId: 3, - semester: 's1', - grade: '63', - }, - { - studentId: 4, - courseId: 4, - semester: 's3', - grade: '51', - }, - ]); - - await db.insert(courseOfferings).values([{ - courseId: 1, - semester: 's3', - }, { - courseId: 2, - semester: 's4', - }, { - courseId: 4, - semester: 's1', - }, { - courseId: 4, - semester: 's3', - }, { - courseId: 1, - semester: 's1', - }, { - courseId: 1, - semester: 's2', - }, { - courseId: 2, - semester: 's1', - }, { - courseId: 2, - semester: 's2', - }, { - courseId: 2, - semester: 's3', - }, { - courseId: 3, - semester: 's3', - }, { - courseId: 3, - semester: 's4', - }, { - courseId: 4, - semester: 's4', - }, { - courseId: 3, - semester: 's1', - }]); - - const res = await db.query.students.findMany({ - with: { - courseOfferings: { - orderBy: { - courseId: 'asc', - semester: 'asc', - }, - }, - }, - orderBy: { - studentId: 'asc', - }, - }); - - expectTypeOf(res).toEqualTypeOf<{ - studentId: number; - name: string; - courseOfferings: { - courseId: number; - semester: string; - }[]; - }[]>(); - - expect(res).toStrictEqual([ - { - name: 'First', - studentId: 1, - courseOfferings: [ - { - courseId: 1, - semester: 's1', - }, - { - courseId: 2, - semester: 's2', - }, - ], - }, - { - name: 'Second', - studentId: 2, - courseOfferings: [ - { - courseId: 1, - semester: 's1', - }, - ], - }, - { - name: 'Third', - studentId: 3, - courseOfferings: [ - { - courseId: 1, - semester: 's2', - }, - { - courseId: 2, - semester: 's3', - }, - { - courseId: 4, - semester: 's4', - }, - ], - }, - { - name: 'Fourth', - studentId: 4, - courseOfferings: [ - { - courseId: 3, - semester: 's1', - }, - { - courseId: 4, - semester: 's3', - }, - ], - }, - ]); -}); - -test('[Find Many .through] Through with uneven relation column count - reverse', async () => { - await db.insert(students).values([{ - studentId: 1, - name: 'First', - }, { - studentId: 2, - name: 'Second', - }, { - studentId: 3, - name: 'Third', - }, { - studentId: 4, - name: 'Fourth', - }]); - - await db.insert(studentGrades).values([ - { - studentId: 1, - courseId: 1, - semester: 's1', - grade: '44', - }, - { - studentId: 1, - courseId: 2, - semester: 's2', - grade: '35', - }, - { - studentId: 2, - courseId: 1, - semester: 's1', - grade: '58', - }, - { - studentId: 2, - courseId: 3, - semester: 's2', - grade: '72', - }, - { - studentId: 3, - courseId: 4, - semester: 's4', - grade: '99', - }, - { - studentId: 3, - courseId: 2, - semester: 's3', - grade: '85', - }, - { - studentId: 3, - courseId: 1, - semester: 's2', - grade: '48', - }, - { - studentId: 4, - courseId: 3, - semester: 's1', - grade: '63', - }, - { - studentId: 4, - courseId: 4, - semester: 's3', - grade: '51', - }, - ]); - - await db.insert(courseOfferings).values([{ - courseId: 1, - semester: 's3', - }, { - courseId: 2, - semester: 's4', - }, { - courseId: 4, - semester: 's1', - }, { - courseId: 4, - semester: 's3', - }, { - courseId: 1, - semester: 's1', - }, { - courseId: 1, - semester: 's2', - }, { - courseId: 2, - semester: 's1', - }, { - courseId: 2, - semester: 's2', - }, { - courseId: 2, - semester: 's3', - }, { - courseId: 3, - semester: 's3', - }, { - courseId: 3, - semester: 's4', - }, { - courseId: 4, - semester: 's4', - }, { - courseId: 3, - semester: 's1', - }]); - - const res = await db.query.courseOfferings.findMany({ - with: { - students: { - orderBy: { - studentId: 'asc', - }, - }, - }, - orderBy: { - courseId: 'asc', - semester: 'asc', - }, - }); - - expectTypeOf(res).toEqualTypeOf<{ - courseId: number; - semester: string; - students: { - studentId: number; - name: string; - }[]; - }[]>(); - - expect(res).toStrictEqual([ - { - courseId: 1, - semester: 's1', - students: [ - { - name: 'First', - studentId: 1, - }, - { - name: 'Second', - studentId: 2, - }, - ], - }, - { - courseId: 1, - semester: 's2', - students: [ - { - name: 'Third', - studentId: 3, - }, - ], - }, - { - courseId: 1, - semester: 's3', - students: [], - }, - { - courseId: 2, - semester: 's1', - students: [], - }, - { - courseId: 2, - semester: 's2', - students: [ - { - name: 'First', - studentId: 1, - }, - ], - }, - { - courseId: 2, - semester: 's3', - students: [ - { - name: 'Third', - studentId: 3, - }, - ], - }, - { - courseId: 2, - semester: 's4', - students: [], - }, - { - courseId: 3, - semester: 's1', - students: [ - { - name: 'Fourth', - studentId: 4, - }, - ], - }, - { - courseId: 3, - semester: 's3', - students: [], - }, - { - courseId: 3, - semester: 's4', - students: [], - }, - { - courseId: 4, - semester: 's1', - students: [], - }, - { - courseId: 4, - semester: 's3', - students: [ - { - name: 'Fourth', - studentId: 4, - }, - ], - }, - { - courseId: 4, - semester: 's4', - students: [ - { - name: 'Third', - studentId: 3, - }, - ], - }, - ]); -}); - -test('alltypes', async () => { - await db.execute(sql` - CREATE TABLE \`all_types\` ( - \`serial\` serial AUTO_INCREMENT, - \`bigint53\` bigint, - \`bigint64\` bigint, - \`bigint_string\` bigint, - \`binary\` binary, - \`boolean\` boolean, - \`char\` char, - \`date\` date, - \`date_str\` date, - \`datetime\` datetime, - \`datetime_str\` datetime, - \`decimal\` decimal, - \`decimal_num\` decimal(30), - \`decimal_big\` decimal(30), - \`double\` double, - \`float\` float, - \`int\` int, - \`json\` json, - \`med_int\` mediumint, - \`small_int\` smallint, - \`real\` real, - \`text\` text, - \`time\` time, - \`timestamp\` timestamp, - \`timestamp_str\` timestamp, - \`tiny_int\` tinyint, - \`varbin\` varbinary(16), - \`varchar\` varchar(255), - \`year\` year, - \`enum\` enum('enV1','enV2') - ); - `); - - await db.insert(usersTable).values({ - id: 1, - name: 'First', - }); - - await db.insert(allTypesTable).values({ - serial: 1, - bigint53: 9007199254740991, - bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', - binary: '1', - boolean: true, - char: 'c', - date: new Date(1741743161623), - dateStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - datetime: new Date(1741743161623), - datetimeStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - decimal: '47521', - decimalNum: 9007199254740991, - decimalBig: 5044565289845416380n, - double: 15.35325689124218, - enum: 'enV1', - float: 1.048596, - real: 1.048596, - text: 'C4-', - int: 621, - json: { - str: 'strval', - arr: ['str', 10], - }, - medInt: 560, - smallInt: 14, - time: '04:13:22', - timestamp: new Date(1741743161623), - timestampStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - tinyInt: 7, - varbin: '1010110101001101', - varchar: 'VCHAR', - year: 2025, - }); - - const rawRes = await db.select().from(allTypesTable); - const relationRootRes = await db.query.allTypesTable.findMany(); - const { alltypes: nestedRelationRes } = (await db.query.usersTable.findFirst({ - with: { - alltypes: true, - }, - }))!; - - expectTypeOf(relationRootRes).toEqualTypeOf(rawRes); - expectTypeOf(nestedRelationRes).toEqualTypeOf(rawRes); - - expect(nestedRelationRes).toStrictEqual(rawRes); - expect(relationRootRes).toStrictEqual(rawRes); - - const expectedRes = [ - { - serial: 1, - bigint53: 9007199254740991, - bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', - binary: '1', - boolean: true, - char: 'c', - date: new Date('2025-03-12T00:00:00.000Z'), - dateStr: '2025-03-12', - datetime: new Date('2025-03-12T01:32:42.000Z'), - datetimeStr: '2025-03-12 01:32:41', - decimal: '47521', - decimalNum: 9007199254740991, - decimalBig: 5044565289845416380n, - double: 15.35325689124218, - float: 1.0486, - int: 621, - json: { arr: ['str', 10], str: 'strval' }, - medInt: 560, - smallInt: 14, - real: 1.048596, - text: 'C4-', - time: '04:13:22', - timestamp: new Date('2025-03-12T01:32:42.000Z'), - timestampStr: '2025-03-12 01:32:41', - tinyInt: 7, - varbin: '1010110101001101', - varchar: 'VCHAR', - year: 2025, - enum: 'enV1', - }, - ]; - - expect(rawRes).toStrictEqual(expectedRes); -}); - -test('custom types', async () => { - await db.execute(sql` - CREATE TABLE \`custom_types\` ( - \`id\` int, - \`big\` bigint, - \`bytes\` blob, - \`time\` timestamp, - \`int\` int - ); - `); - - await db.insert(customTypesTable).values({ - id: 1, - big: 5044565289845416380n, - bytes: Buffer.from('BYTES'), - time: new Date(1741743161000), - int: 250, - }); - - const rawRes = await db.select().from(customTypesTable); - const relationRootRes = await db.query.customTypesTable.findMany(); - const { self: nestedRelationRes } = (await db.query.customTypesTable.findFirst({ - with: { - self: true, - }, - }))!; - - type ExpectedType = { - id: number | null; - big: bigint | null; - bytes: Buffer | null; - time: Date | null; - int: number | null; - }[]; - - expectTypeOf().toEqualTypeOf(rawRes); - expectTypeOf(relationRootRes).toEqualTypeOf(rawRes); - expectTypeOf(nestedRelationRes).toEqualTypeOf(rawRes); - - expect(nestedRelationRes).toStrictEqual(rawRes); - expect(relationRootRes).toStrictEqual(rawRes); - - const expectedRes: ExpectedType = [ - { - id: 1, - big: 5044565289845416380n, - bytes: Buffer.from('BYTES'), - time: new Date(1741743161000), - int: 250, - }, - ]; - - expect(rawRes).toStrictEqual(expectedRes); -}); - -test('.toSQL()', () => { - const query = db.query.usersTable.findFirst().toSQL(); - - expect(query).toHaveProperty('sql', expect.any(String)); - expect(query).toHaveProperty('params', expect.any(Array)); -}); diff --git a/integration-tests/tests/relational/vercel.test.ts b/integration-tests/tests/relational/vercel.test.ts deleted file mode 100644 index 19ba1317ce..0000000000 --- a/integration-tests/tests/relational/vercel.test.ts +++ /dev/null @@ -1,8879 +0,0 @@ -import 'dotenv/config'; -import { createClient, type VercelClient } from '@vercel/postgres'; -import Docker from 'dockerode'; -import { DrizzleError, sql, TransactionRollbackError } from 'drizzle-orm'; -import { drizzle, type VercelPgDatabase } from 'drizzle-orm/vercel-postgres'; -import getPort from 'get-port'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './pg.relations.ts'; -import { commentsTable, groupsTable, postsTable, usersTable, usersToGroupsTable } from './pg.schema.ts'; - -const ENABLE_LOGGING = false; - -declare module 'vitest' { - export interface TestContext { - docker: Docker; - vpgContainer: Docker.Container; - vpgDbV2: VercelPgDatabase; - vpgClient: VercelClient; - } -} - -let globalDocker: Docker; -let pgContainer: Docker.Container; -let db: VercelPgDatabase; -let client: VercelClient; - -async function createDockerDB(): Promise { - const docker = (globalDocker = new Docker()); - const port = await getPort({ port: 5432 }); - const image = 'postgres:14'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) - ); - - pgContainer = await docker.createContainer({ - Image: image, - Env: [ - 'POSTGRES_PASSWORD=postgres', - 'POSTGRES_USER=postgres', - 'POSTGRES_DB=postgres', - ], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '5432/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await pgContainer.start(); - - return `postgres://postgres:postgres@localhost:${port}/postgres`; -} - -beforeAll(async () => { - const connectionString = process.env['PG_CONNECTION_STRING'] ?? (await createDockerDB()); - - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - do { - try { - client = createClient({ connectionString }); - await client.connect(); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to Postgres'); - await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); - throw lastError; - } - db = drizzle(client, { relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); -}); - -afterAll(async () => { - await client?.end().catch(console.error); - await pgContainer?.stop().catch(console.error); -}); - -beforeEach(async (ctx) => { - ctx.vpgDbV2 = db; - ctx.vpgClient = client; - ctx.docker = globalDocker; - ctx.vpgContainer = pgContainer; - - await ctx.vpgDbV2.execute(sql`drop schema public cascade`); - await ctx.vpgDbV2.execute(sql`create schema public`); - await ctx.vpgDbV2.execute( - sql` - CREATE TABLE "users" ( - "id" serial PRIMARY KEY NOT NULL, - "name" text NOT NULL, - "verified" boolean DEFAULT false NOT NULL, - "invited_by" int REFERENCES "users"("id") - ); - `, - ); - await ctx.vpgDbV2.execute( - sql` - CREATE TABLE IF NOT EXISTS "groups" ( - "id" serial PRIMARY KEY NOT NULL, - "name" text NOT NULL, - "description" text - ); - `, - ); - await ctx.vpgDbV2.execute( - sql` - CREATE TABLE IF NOT EXISTS "users_to_groups" ( - "id" serial PRIMARY KEY NOT NULL, - "user_id" int REFERENCES "users"("id"), - "group_id" int REFERENCES "groups"("id") - ); - `, - ); - await ctx.vpgDbV2.execute( - sql` - CREATE TABLE IF NOT EXISTS "posts" ( - "id" serial PRIMARY KEY NOT NULL, - "content" text NOT NULL, - "owner_id" int REFERENCES "users"("id"), - "created_at" timestamp with time zone DEFAULT now() NOT NULL - ); - `, - ); - await ctx.vpgDbV2.execute( - sql` - CREATE TABLE IF NOT EXISTS "comments" ( - "id" serial PRIMARY KEY NOT NULL, - "content" text NOT NULL, - "creator" int REFERENCES "users"("id"), - "post_id" int REFERENCES "posts"("id"), - "created_at" timestamp with time zone DEFAULT now() NOT NULL - ); - `, - ); - await ctx.vpgDbV2.execute( - sql` - CREATE TABLE IF NOT EXISTS "comment_likes" ( - "id" serial PRIMARY KEY NOT NULL, - "creator" int REFERENCES "users"("id"), - "comment_id" int REFERENCES "comments"("id"), - "created_at" timestamp with time zone DEFAULT now() NOT NULL - ); - `, - ); -}); - -test('[Find Many] Get users with posts', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: true, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts.length).eq(3); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - expect(usersWithPosts[2]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[2]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + limit posts', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts.length).eq(3); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - expect(usersWithPosts[2]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[2]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + limit posts and users', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - limit: 2, - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts.length).eq(2); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + custom fields', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: true, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lowerName: string; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts.length).toEqual(3); - expect(usersWithPosts[0]?.posts.length).toEqual(3); - expect(usersWithPosts[1]?.posts.length).toEqual(2); - expect(usersWithPosts[2]?.posts.length).toEqual(2); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - lowerName: 'dan', - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { - id: 2, - ownerId: 1, - content: 'Post1.2', - createdAt: usersWithPosts[0]?.posts[1]?.createdAt, - }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - lowerName: 'andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { - id: 5, - ownerId: 2, - content: 'Post2.1', - createdAt: usersWithPosts[1]?.posts[1]?.createdAt, - }], - }); - expect(usersWithPosts[2]).toEqual({ - id: 3, - name: 'Alex', - lowerName: 'alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { - id: 7, - ownerId: 3, - content: 'Post3.1', - createdAt: usersWithPosts[2]?.posts[1]?.createdAt, - }], - }); -}); - -test('[Find Many] Get users with posts + custom fields + limits', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - limit: 1, - with: { - posts: { - limit: 1, - }, - }, - extras: ({ - lowerName: (usersTable, { sql }) => sql`lower(${usersTable.name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lowerName: string; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(1); - expect(usersWithPosts[0]?.posts.length).toEqual(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - lowerName: 'dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: '1' }, - { ownerId: 1, content: '2' }, - { ownerId: 1, content: '3' }, - { ownerId: 2, content: '4' }, - { ownerId: 2, content: '5' }, - { ownerId: 3, content: '6' }, - { ownerId: 3, content: '7' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - posts: { - orderBy: { - content: 'desc', - }, - }, - }, - orderBy: { - id: 'desc', - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(3); - expect(usersWithPosts[0]?.posts.length).eq(2); - expect(usersWithPosts[1]?.posts.length).eq(2); - expect(usersWithPosts[2]?.posts.length).eq(3); - - expect(usersWithPosts[2]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { - id: 2, - ownerId: 1, - content: '2', - createdAt: usersWithPosts[2]?.posts[1]?.createdAt, - }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ - id: 5, - ownerId: 2, - content: '5', - createdAt: usersWithPosts[1]?.posts[1]?.createdAt, - }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts[0]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ - id: 7, - ownerId: 3, - content: '7', - createdAt: usersWithPosts[0]?.posts[1]?.createdAt, - }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - where: { - id: 1, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + where + partial', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: { - id: true, - name: true, - }, - with: { - posts: { - columns: { - id: true, - content: true, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - posts: { - id: number; - content: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - posts: [{ id: 1, content: 'Post1' }], - }); -}); - -test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: { - id: true, - name: true, - }, - with: { - posts: { - columns: { - id: true, - content: true, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - posts: { - id: number; - content: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - posts: [{ id: 1, content: 'Post1' }], - }); -}); - -test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: { - id: true, - name: false, - }, - with: { - posts: { - columns: { - id: true, - content: false, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - posts: { - id: number; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - posts: [{ id: 1 }], - }); -}); - -test('[Find Many] Get users with posts + where + partial(false)', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: { - name: false, - }, - with: { - posts: { - columns: { - content: false, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts in transaction', async (t) => { - const { vpgDbV2: db } = t; - - let usersWithPosts: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[] = []; - - await db.transaction(async (tx) => { - await tx.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await tx.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - usersWithPosts = await tx.query.usersTable.findMany({ - where: { - id: 1, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }); - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { - const { vpgDbV2: db } = t; - - let usersWithPosts: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[] = []; - - await expect(db.transaction(async (tx) => { - await tx.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await tx.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - tx.rollback(); - - usersWithPosts = await tx.query.usersTable.findMany({ - where: { - id: 1, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }); - })).rejects.toThrowError(new TransactionRollbackError()); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(0); -}); - -test('[Find Many] Get only custom fields', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - lowerName: string; - posts: { - lowerName: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(3); - expect(usersWithPosts[0]?.posts.length).toEqual(3); - expect(usersWithPosts[1]?.posts.length).toEqual(2); - expect(usersWithPosts[2]?.posts.length).toEqual(2); - - expect(usersWithPosts).toContainEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1' }, { - lowerName: 'post1.2', - }, { lowerName: 'post1.3' }], - }); - expect(usersWithPosts).toContainEqual({ - lowerName: 'andrew', - posts: [{ lowerName: 'post2' }, { - lowerName: 'post2.1', - }], - }); - expect(usersWithPosts).toContainEqual({ - lowerName: 'alex', - posts: [{ lowerName: 'post3' }, { - lowerName: 'post3.1', - }], - }); -}); - -test('[Find Many] Get only custom fields + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - lowerName: string; - posts: { - lowerName: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(1); - expect(usersWithPosts[0]?.posts.length).toEqual(2); - - expect(usersWithPosts).toContainEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], - }); -}); - -test('[Find Many] Get only custom fields + where + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - limit: 1, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - lowerName: string; - posts: { - lowerName: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(1); - expect(usersWithPosts[0]?.posts.length).toEqual(1); - - expect(usersWithPosts).toContainEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.2' }], - }); -}); - -test('[Find Many] Get only custom fields + where + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - orderBy: { - id: 'desc', - }, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - lowerName: string; - posts: { - lowerName: string; - }[]; - }[]>(); - - expect(usersWithPosts.length).toEqual(1); - expect(usersWithPosts[0]?.posts.length).toEqual(2); - - expect(usersWithPosts).toContainEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], - }); -}); - -test('[Find One] Get only custom fields', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - lowerName: string; - posts: { - lowerName: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts?.posts.length).toEqual(3); - - expect(usersWithPosts).toEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1' }, { - lowerName: 'post1.2', - }, { lowerName: 'post1.3' }], - }); -}); - -test('[Find One] Get only custom fields + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - lowerName: string; - posts: { - lowerName: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts?.posts.length).toEqual(2); - - expect(usersWithPosts).toEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], - }); -}); - -test('[Find One] Get only custom fields + where + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - limit: 1, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - lowerName: string; - posts: { - lowerName: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts?.posts.length).toEqual(1); - - expect(usersWithPosts).toEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.2' }], - }); -}); - -test('[Find One] Get only custom fields + where + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - where: { - id: { - gte: 2, - }, - }, - orderBy: { - id: 'desc', - }, - extras: ({ - lowerName: ({ content }) => sql`lower(${content})`.as('content_lower'), - }), - }, - }, - where: { - id: 1, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - lowerName: string; - posts: { - lowerName: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts?.posts.length).toEqual(2); - - expect(usersWithPosts).toEqual({ - lowerName: 'dan', - posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], - }); -}); - -test('[Find Many] Get select {}', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await expect(async () => - await db.query.usersTable.findMany({ - columns: {}, - }) - ).rejects.toThrow(DrizzleError); -}); - -test('[Find One] Get select {}', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await expect(async () => - await db.query.usersTable.findFirst({ - columns: {}, - }) - ).rejects.toThrow(DrizzleError); -}); - -test('[Find Many] Get deep select {}', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - await expect(async () => - await db.query.usersTable.findMany({ - columns: {}, - with: { - posts: { - columns: {}, - }, - }, - }) - ).rejects.toThrow(DrizzleError); -}); - -test('[Find One] Get deep select {}', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - await expect(async () => - await db.query.usersTable.findFirst({ - columns: {}, - with: { - posts: { - columns: {}, - }, - }, - }) - ).rejects.toThrow(DrizzleError); -}); - -test('[Find Many] Get users with posts + prepared limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const prepared = db.query.usersTable.findMany({ - with: { - posts: { - limit: sql.placeholder('limit'), - }, - }, - }).prepare('query1'); - - const usersWithPosts = await prepared.execute({ limit: 1 }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(3); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - expect(usersWithPosts[2]?.posts.length).eq(1); - - expect(usersWithPosts).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + prepared limit + offset', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const prepared = db.query.usersTable.findMany({ - limit: sql.placeholder('uLimit'), - offset: sql.placeholder('uOffset'), - with: { - posts: { - limit: sql.placeholder('pLimit'), - }, - }, - }).prepare('query2'); - - const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(2); - expect(usersWithPosts[0]?.posts.length).eq(1); - expect(usersWithPosts[1]?.posts.length).eq(1); - - expect(usersWithPosts).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); - expect(usersWithPosts).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + prepared where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const prepared = db.query.usersTable.findMany({ - where: { - id: { - eq: sql.placeholder('id'), - }, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }).prepare('query3'); - - const usersWithPosts = await prepared.execute({ id: 1 }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const prepared = db.query.usersTable.findMany({ - limit: sql.placeholder('uLimit'), - offset: sql.placeholder('uOffset'), - where: { - id: { - OR: [{ eq: sql.placeholder('id') }, 3], - }, - }, - with: { - posts: { - where: { - id: { - eq: sql.placeholder('pid'), - }, - }, - limit: sql.placeholder('pLimit'), - }, - }, - }).prepare('query4'); - - const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - }[]>(); - - expect(usersWithPosts.length).eq(1); - expect(usersWithPosts[0]?.posts.length).eq(1); - - expect(usersWithPosts).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: true, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + limit posts', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts no results found', async (t) => { - const { vpgDbV2: db } = t; - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts).toBeUndefined(); -}); - -test('[Find One] Get users with posts + limit posts and users', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + custom fields', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: true, - }, - extras: ({ - lowerName: ({ name }) => sql`lower(${name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lowerName: string; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).toEqual(3); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - lowerName: 'dan', - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }, { - id: 2, - ownerId: 1, - content: 'Post1.2', - createdAt: usersWithPosts?.posts[1]?.createdAt, - }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + custom fields + limits', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.2' }, - { ownerId: 1, content: 'Post1.3' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - limit: 1, - }, - }, - extras: ({ - lowerName: (usersTable, { sql }) => sql`lower(${usersTable.name})`.as('name_lower'), - }), - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lowerName: string; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).toEqual(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - lowerName: 'dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: '1' }, - { ownerId: 1, content: '2' }, - { ownerId: 1, content: '3' }, - { ownerId: 2, content: '4' }, - { ownerId: 2, content: '5' }, - { ownerId: 3, content: '6' }, - { ownerId: 3, content: '7' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - with: { - posts: { - orderBy: { - content: 'desc', - }, - }, - }, - orderBy: { - id: 'desc', - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(2); - - expect(usersWithPosts).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - posts: [{ - id: 7, - ownerId: 3, - content: '7', - createdAt: usersWithPosts?.posts[1]?.createdAt, - }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - where: { - id: 1, - }, - with: { - posts: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('[Find One] Get users with posts + where + partial', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: { - id: true, - name: true, - }, - with: { - posts: { - columns: { - id: true, - content: true, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - posts: { - id: number; - content: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - posts: [{ id: 1, content: 'Post1' }], - }); -}); - -test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: { - id: true, - name: true, - }, - with: { - posts: { - columns: { - id: true, - content: true, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - name: string; - posts: { - id: number; - content: string; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - name: 'Dan', - posts: [{ id: 1, content: 'Post1' }], - }); -}); - -test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: { - id: true, - name: false, - }, - with: { - posts: { - columns: { - id: true, - content: false, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - posts: { - id: number; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - posts: [{ id: 1 }], - }); -}); - -test('[Find One] Get users with posts + where + partial(false)', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findFirst({ - columns: { - name: false, - }, - with: { - posts: { - columns: { - content: false, - }, - where: { - id: 1, - }, - }, - }, - where: { - id: 1, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf< - { - id: number; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - ownerId: number | null; - createdAt: Date; - }[]; - } | undefined - >(); - - expect(usersWithPosts!.posts.length).eq(1); - - expect(usersWithPosts).toEqual({ - id: 1, - verified: false, - invitedBy: null, - posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], - }); -}); - -test('Get user with invitee', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - with: { - invitee: true, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(4); - expect(usersWithInvitee[0]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).toBeNull(); - expect(usersWithInvitee[2]?.invitee).not.toBeNull(); - expect(usersWithInvitee[3]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee[3]).toEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - }); -}); - -test('Get user + limit with invitee', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew', invitedBy: 1 }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - with: { - invitee: true, - }, - limit: 2, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee and custom fields', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - with: { - invitee: { - extras: ({ lower: (invitee, { sql }) => sql`lower(${invitee.name})`.as('lower_name') }), - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(4); - expect(usersWithInvitee[0]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).toBeNull(); - expect(usersWithInvitee[2]?.invitee).not.toBeNull(); - expect(usersWithInvitee[3]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee[3]).toEqual({ - id: 4, - name: 'John', - lower: 'john', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee and custom fields + limits', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - limit: 3, - with: { - invitee: { - extras: ({ lower: (invitee, { sql }) => sql`lower(${invitee.name})`.as('lower_name') }), - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(3); - expect(usersWithInvitee[0]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).toBeNull(); - expect(usersWithInvitee[2]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee + order by', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - invitee: true, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(4); - expect(usersWithInvitee[3]?.invitee).toBeNull(); - expect(usersWithInvitee[2]?.invitee).toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - - expect(usersWithInvitee[3]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee[0]).toEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - with: { - invitee: true, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee).toContainEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee + where + partial', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - columns: { - id: true, - name: true, - }, - with: { - invitee: { - columns: { - id: true, - name: true, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - invitee: { - id: number; - name: string; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - id: 3, - name: 'Alex', - invitee: { id: 1, name: 'Dan' }, - }); - expect(usersWithInvitee).toContainEqual({ - id: 4, - name: 'John', - invitee: { id: 2, name: 'Andrew' }, - }); -}); - -test('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - columns: { - name: true, - }, - with: { - invitee: { - columns: { - id: true, - name: true, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - name: string; - invitee: { - id: number; - name: string; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - name: 'Alex', - invitee: { id: 1, name: 'Dan' }, - }); - expect(usersWithInvitee).toContainEqual({ - name: 'John', - invitee: { id: 2, name: 'Andrew' }, - }); -}); - -test('Get user with invitee + where + partial(true+false)', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - columns: { - id: true, - name: true, - verified: false, - }, - with: { - invitee: { - columns: { - id: true, - name: true, - verified: false, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - invitee: { - id: number; - name: string; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - id: 3, - name: 'Alex', - invitee: { id: 1, name: 'Dan' }, - }); - expect(usersWithInvitee).toContainEqual({ - id: 4, - name: 'John', - invitee: { id: 2, name: 'Andrew' }, - }); -}); - -test('Get user with invitee + where + partial(false)', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - columns: { - verified: false, - }, - with: { - invitee: { - columns: { - name: false, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - invitedBy: number | null; - invitee: { - id: number; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(usersWithInvitee.length).eq(2); - expect(usersWithInvitee[0]?.invitee).not.toBeNull(); - expect(usersWithInvitee[1]?.invitee).not.toBeNull(); - - expect(usersWithInvitee).toContainEqual({ - id: 3, - name: 'Alex', - invitedBy: 1, - invitee: { id: 1, verified: false, invitedBy: null }, - }); - expect(usersWithInvitee).toContainEqual({ - id: 4, - name: 'John', - invitedBy: 2, - invitee: { id: 2, verified: false, invitedBy: null }, - }); -}); - -test('Get user with invitee and posts', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - invitee: true, - posts: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(4); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).toBeNull(); - expect(response[2]?.invitee).not.toBeNull(); - expect(response[3]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + limit posts and users', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 3, - with: { - invitee: true, - posts: { - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(3); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).toBeNull(); - expect(response[2]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], - }); -}); - -test('Get user with invitee and posts + limits + custom fields in each', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 3, - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - with: { - invitee: { - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_invitee_name') }), - }, - posts: { - limit: 1, - extras: ({ lower: (posts, { sql }) => sql`lower(${posts.content})`.as('lower_content') }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - lower: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(3); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).toBeNull(); - expect(response[2]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, - posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], - }); -}); - -test('Get user with invitee and posts + custom fields in each', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const response = await db.query.usersTable.findMany({ - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - with: { - invitee: { - extras: ({ lower: (users, { sql }) => sql`lower(${users.name})`.as('lower_name') }), - }, - posts: { - extras: ({ lower: (posts, { sql }) => sql`lower(${posts.content})`.as('lower_name') }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - lower: string; - invitedBy: number | null; - posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - lower: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(4); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).toBeNull(); - expect(response[2]?.invitee).not.toBeNull(); - expect(response[3]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(2); - expect(response[1]?.posts.length).eq(2); - expect(response[2]?.posts.length).eq(2); - expect(response[3]?.posts.length).eq(0); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { - id: 2, - ownerId: 1, - content: 'Post1.1', - lower: 'post1.1', - createdAt: response[0]?.posts[1]?.createdAt, - }], - }); - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { - id: 4, - ownerId: 2, - content: 'Post2.1', - lower: 'post2.1', - createdAt: response[1]?.posts[1]?.createdAt, - }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, - posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { - id: 6, - ownerId: 3, - content: 'Post3.1', - lower: 'post3.1', - createdAt: response[2]?.posts[1]?.createdAt, - }], - }); - expect(response).toContainEqual({ - id: 4, - name: 'John', - lower: 'john', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - invitee: true, - posts: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(response.length).eq(4); - - expect(response[3]?.invitee).toBeNull(); - expect(response[2]?.invitee).toBeNull(); - expect(response[1]?.invitee).not.toBeNull(); - expect(response[0]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(0); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(2); - expect(response[3]?.posts.length).eq(2); - - expect(response[3]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: response[3]?.posts[1]?.createdAt, - }], - }); - expect(response[2]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { - id: 3, - ownerId: 2, - content: 'Post2', - createdAt: response[2]?.posts[1]?.createdAt, - }], - }); - expect(response[1]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ - id: 5, - ownerId: 3, - content: 'Post3', - createdAt: response[3]?.posts[1]?.createdAt, - }], - }); - expect(response[0]).toEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - id: { - OR: [2, 3], - }, - }, - with: { - invitee: true, - posts: { - where: { - ownerId: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(2); - - expect(response[0]?.invitee).toBeNull(); - expect(response[1]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(0); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - invitee: null, - posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], - }); - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + limit posts and users + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - { ownerId: 3, content: 'Post3.1' }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - id: { - OR: [3, 4], - }, - }, - limit: 1, - with: { - invitee: true, - posts: { - where: { - ownerId: 3, - }, - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(response.length).eq(1); - - expect(response[0]?.invitee).not.toBeNull(); - expect(response[0]?.posts.length).eq(1); - - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], - }); -}); - -// DB doesn't support `lower()` -test.skip('Get user with invitee and posts + orderBy + where + custom', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - where: { - id: { - OR: [3, 4], - }, - }, - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - with: { - invitee: true, - posts: { - where: { - ownerId: 3, - }, - orderBy: { - id: 'desc', - }, - extras: ({ - lower: (content) => sql`lower(${content})`.as('lower_name'), - }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; - invitee: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - expect(response.length).eq(2); - - expect(response[1]?.invitee).not.toBeNull(); - expect(response[0]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(0); - expect(response[1]?.posts.length).eq(1); - - expect(response[1]).toEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: 1, - invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - posts: [{ - id: 5, - ownerId: 3, - content: 'Post3', - lower: 'post3', - createdAt: response[1]?.posts[0]?.createdAt, - }], - }); - expect(response[0]).toEqual({ - id: 4, - name: 'John', - lower: 'john', - verified: false, - invitedBy: 2, - invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, - posts: [], - }); -}); - -test('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - await db.insert(postsTable).values([ - { ownerId: 1, content: 'Post1' }, - { ownerId: 1, content: 'Post1.1' }, - { ownerId: 2, content: 'Post2' }, - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 3, content: 'Post3' }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - where: { - id: { - OR: [3, 4], - }, - }, - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - columns: { - id: true, - name: true, - }, - with: { - invitee: { - columns: { - id: true, - name: true, - }, - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - }, - posts: { - columns: { - id: true, - content: true, - }, - where: { - ownerId: 3, - }, - orderBy: { - id: 'desc', - }, - extras: ({ - lower: ({ content }) => sql`lower(${content})`.as('lower_name'), - }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - lower: string; - posts: { id: number; lower: string; content: string }[]; - invitee: { - id: number; - name: string; - lower: string; - } | null; - }[] - >(); - - expect(response.length).eq(2); - - expect(response[1]?.invitee).not.toBeNull(); - expect(response[0]?.invitee).not.toBeNull(); - - expect(response[0]?.posts.length).eq(0); - expect(response[1]?.posts.length).eq(1); - - expect(response[1]).toEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - invitee: { id: 1, name: 'Dan', lower: 'dan' }, - posts: [{ - id: 5, - content: 'Post3', - lower: 'post3', - }], - }); - expect(response[0]).toEqual({ - id: 4, - name: 'John', - lower: 'john', - invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, - posts: [], - }); -}); - -test('Get user with posts and posts with comments', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1' }, - { id: 2, ownerId: 2, content: 'Post2' }, - { id: 3, ownerId: 3, content: 'Post3' }, - ]); - - await db.insert(commentsTable).values([ - { postId: 1, content: 'Comment1', creator: 2 }, - { postId: 2, content: 'Comment2', creator: 2 }, - { postId: 3, content: 'Comment3', creator: 3 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - posts: { - with: { - comments: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - comments: { - id: number; - content: string; - createdAt: Date; - creator: number | null; - postId: number | null; - }[]; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(3); - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response[0]?.posts[0]?.comments.length).eq(1); - expect(response[1]?.posts[0]?.comments.length).eq(1); - expect(response[2]?.posts[0]?.comments.length).eq(1); - - expect(response[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: response[0]?.posts[0]?.createdAt, - comments: [ - { - id: 1, - content: 'Comment1', - creator: 2, - postId: 1, - createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); - expect(response[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: response[1]?.posts[0]?.createdAt, - comments: [ - { - id: 2, - content: 'Comment2', - creator: 2, - postId: 2, - createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); - // expect(response[2]).toEqual({ - // id: 3, - // name: 'Alex', - // verified: false, - // invitedBy: null, - // posts: [{ - // id: 3, - // ownerId: 3, - // content: 'Post3', - // createdAt: response[2]?.posts[0]?.createdAt, - // comments: [ - // { - // id: , - // content: 'Comment3', - // creator: 3, - // postId: 3, - // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, - // }, - // ], - // }], - // }); -}); - -test('Get user with posts and posts with comments and comments with owner', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1' }, - { id: 2, ownerId: 2, content: 'Post2' }, - { id: 3, ownerId: 3, content: 'Post3' }, - ]); - - await db.insert(commentsTable).values([ - { postId: 1, content: 'Comment1', creator: 2 }, - { postId: 2, content: 'Comment2', creator: 2 }, - { postId: 3, content: 'Comment3', creator: 3 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - posts: { - with: { - comments: { - with: { - author: true, - }, - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - posts: { - id: number; - content: string; - ownerId: number | null; - createdAt: Date; - comments: { - id: number; - content: string; - createdAt: Date; - creator: number | null; - postId: number | null; - author: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).eq(3); - expect(response[0]?.posts.length).eq(1); - expect(response[1]?.posts.length).eq(1); - expect(response[2]?.posts.length).eq(1); - - expect(response[0]?.posts[0]?.comments.length).eq(1); - expect(response[1]?.posts[0]?.comments.length).eq(1); - expect(response[2]?.posts[0]?.comments.length).eq(1); - - expect(response[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - posts: [{ - id: 1, - ownerId: 1, - content: 'Post1', - createdAt: response[0]?.posts[0]?.createdAt, - comments: [ - { - id: 1, - content: 'Comment1', - creator: 2, - author: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - postId: 1, - createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); - expect(response[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - posts: [{ - id: 2, - ownerId: 2, - content: 'Post2', - createdAt: response[1]?.posts[0]?.createdAt, - comments: [ - { - id: 2, - content: 'Comment2', - creator: 2, - author: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - postId: 2, - createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, - }, - ], - }], - }); -}); - -test('[Find Many] Get users with groups', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - usersToGroups: { - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - expect(response[2]?.usersToGroups.length).toEqual(2); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }, { - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - usersToGroups: { - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(2); - expect(response[2]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }, { - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find Many] Get users with groups + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 2, - with: { - usersToGroups: { - limit: 1, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - limit: 2, - with: { - usersToGroups: { - limit: 1, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find Many] Get users with groups + limit + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 1, - where: { - id: { - OR: [1, 2], - }, - }, - with: { - usersToGroups: { - where: { - groupId: 1, - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(1); - - expect(response[0]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users + limit + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - limit: 1, - where: { - id: { - gt: 1, - }, - }, - with: { - usersToGroups: { - where: { - userId: 2, - }, - limit: 1, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(1); - - expect(response[0]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find Many] Get users with groups + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - usersToGroups: { - where: { - groupId: 2, - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(0); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - where: { - id: { - gt: 1, - }, - }, - with: { - usersToGroups: { - where: { - userId: 2, - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(0); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [], - }); -}); - -test('[Find Many] Get users with groups + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1, id: 1 }, - { userId: 2, groupId: 2, id: 2 }, - { userId: 3, groupId: 2, id: 3 }, - { userId: 3, groupId: 3, id: 4 }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - orderBy: { - id: 'desc', - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(2); - expect(response[1]?.usersToGroups.length).toEqual(1); - expect(response[2]?.usersToGroups.length).toEqual(1); - - expect(response[2]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); - - expect(response[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); - - expect(response[0]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }, { - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find Many] Get groups with users + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - orderBy: { - userId: 'desc', - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[]>(); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(2); - expect(response[2]?.usersToGroups.length).toEqual(1); - - expect(response[2]).toEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response[1]).toEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }, { - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response[0]).toEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find Many] Get users with groups + orderBy + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1, id: 1 }, - { userId: 2, groupId: 2, id: 2 }, - { userId: 3, groupId: 2, id: 3 }, - { userId: 3, groupId: 3, id: 4 }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - limit: 2, - with: { - usersToGroups: { - limit: 1, - orderBy: { - id: 'desc', - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - }[]>(); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); - - expect(response[0]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - with: { - usersToGroups: { - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); -}); - -test('[Find One] Get groups with users', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - with: { - usersToGroups: { - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - with: { - usersToGroups: { - limit: 1, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); -}); - -test('[Find One] Get groups with users + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - with: { - usersToGroups: { - limit: 1, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + limit + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - usersToGroups: { - where: { - groupId: 1, - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - description: null, - }, - }], - }); -}); - -test('[Find One] Get groups with users + limit + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - where: { - id: { - gt: 1, - }, - }, - with: { - usersToGroups: { - where: { - userId: 2, - }, - limit: 1, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - usersToGroups: { - where: { - groupId: 2, - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(0); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - usersToGroups: [], - }); -}); - -test('[Find One] Get groups with users + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - where: { - id: { - gt: 1, - }, - }, - with: { - usersToGroups: { - where: { - userId: 2, - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1, id: 1 }, - { userId: 2, groupId: 2, id: 2 }, - { userId: 3, groupId: 2, id: 3 }, - { userId: 3, groupId: 3, id: 4 }, - ]); - - const response = await db.query.usersTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - orderBy: { - id: 'desc', - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(2); - - expect(response).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }, { - group: { - id: 2, - name: 'Group2', - description: null, - }, - }], - }); -}); - -test('[Find One] Get groups with users + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - orderBy: { - userId: 'desc', - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('[Find One] Get users with groups + orderBy + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1, id: 1 }, - { userId: 2, groupId: 2, id: 2 }, - { userId: 3, groupId: 2, id: 3 }, - { userId: 3, groupId: 3, id: 4 }, - ]); - - const response = await db.query.usersTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - usersToGroups: { - limit: 1, - orderBy: { - id: 'desc', - }, - columns: {}, - with: { - group: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - }; - }[]; - } | undefined - >(); - - expect(response?.usersToGroups.length).toEqual(1); - - expect(response).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - description: null, - }, - }], - }); -}); - -test('Get groups with users + orderBy + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - orderBy: { - id: 'desc', - }, - limit: 2, - with: { - usersToGroups: { - limit: 1, - orderBy: { - userId: 'desc', - }, - columns: {}, - with: { - user: true, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }; - }[]; - }[] - >(); - - expect(response.length).toEqual(2); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - - expect(response[1]).toEqual({ - id: 2, - name: 'Group2', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response[0]).toEqual({ - id: 3, - name: 'Group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('Get users with groups + custom', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - with: { - usersToGroups: { - columns: {}, - with: { - group: { - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - usersToGroups: { - group: { - id: number; - name: string; - description: string | null; - lower: string; - }; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(1); - expect(response[2]?.usersToGroups.length).toEqual(2); - - expect(response).toContainEqual({ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 1, - name: 'Group1', - lower: 'group1', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - usersToGroups: [{ - group: { - id: 3, - name: 'Group3', - lower: 'group3', - description: null, - }, - }, { - group: { - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - }, - }], - }); -}); - -test('Get groups with users + custom', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - extras: ({ - lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), - }), - with: { - usersToGroups: { - columns: {}, - with: { - user: { - extras: ({ - lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), - }), - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - lower: string; - usersToGroups: { - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - }; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response.length).toEqual(3); - - expect(response[0]?.usersToGroups.length).toEqual(1); - expect(response[1]?.usersToGroups.length).toEqual(2); - expect(response[2]?.usersToGroups.length).toEqual(1); - - expect(response).toContainEqual({ - id: 1, - name: 'Group1', - lower: 'group1', - description: null, - usersToGroups: [{ - user: { - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - usersToGroups: [{ - user: { - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - }, - }, { - user: { - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - }, - }], - }); - - expect(response).toContainEqual({ - id: 3, - name: 'Group3', - lower: 'group3', - description: null, - usersToGroups: [{ - user: { - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - }, - }], - }); -}); - -test('Force optional on where on non-optional relation query', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex', invitedBy: 1 }, - { id: 4, name: 'John', invitedBy: 2 }, - ]); - - const usersWithInvitee = await db.query.usersTable.findMany({ - with: { - inviteeRequired: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(usersWithInvitee).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - inviteeRequired: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[] - >(); - - usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithInvitee.length).eq(4); - expect(usersWithInvitee[0]?.inviteeRequired).toBeNull(); - expect(usersWithInvitee[1]?.inviteeRequired).toBeNull(); - expect(usersWithInvitee[2]?.inviteeRequired).not.toBeNull(); - expect(usersWithInvitee[3]?.inviteeRequired).toBeNull(); - - expect(usersWithInvitee[0]).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - inviteeRequired: null, - }); - expect(usersWithInvitee[1]).toEqual({ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - inviteeRequired: null, - }); - expect(usersWithInvitee[2]).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: 1, - inviteeRequired: { id: 1, name: 'Dan', verified: false, invitedBy: null }, - }); - expect(usersWithInvitee[3]).toEqual({ - id: 4, - name: 'John', - verified: false, - invitedBy: 2, - inviteeRequired: null, - }); -}); - -test('[Find Many .through] Get users with groups', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groups: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [ - { - id: 2, - name: 'Group2', - description: null, - }, - { - id: 3, - name: 'Group3', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with users', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - users: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.users.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 2, - with: { - groups: { - limit: 1, - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get groups with users + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - limit: 2, - with: { - users: { - limit: 1, - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + limit + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - limit: 1, - where: { - id: { - OR: [1, 2], - }, - }, - with: { - groups: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get groups with users + limit + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - limit: 1, - where: { - id: { gt: 1 }, - }, - with: { - users: { - where: { - id: 2, - }, - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - groups: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get groups with users + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - where: { - id: { gt: 1 }, - }, - with: { - users: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - users: [], - }]); -}); - -test('[Find Many .through] Get users with groups + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - groups: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 3, - name: 'Group3', - description: null, - }, { - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }]); -}); - -test('[Find Many .through] Get groups with users + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - orderBy: { - id: 'desc', - }, - with: { - users: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }, { - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + orderBy + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - orderBy: { - id: 'desc', - }, - limit: 2, - with: { - groups: { - limit: 1, - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 3, - name: 'Group3', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - description: null, - }], - }]); -}); - -test('[Find One .through] Get users with groups', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - with: { - groups: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }); -}); - -test('[Find One .through] Get groups with users', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - with: { - users: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - with: { - groups: { - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }); -}); - -test('[Find One .through] Get groups with users + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - with: { - users: { - limit: 1, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 1, - name: 'Group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + limit + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - groups: { - where: { - id: 1, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - description: null, - }], - }); -}); - -test('[Find One .through] Get groups with users + limit + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - where: { - id: { gt: 1 }, - }, - with: { - users: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - where: { - id: { - OR: [1, 2], - }, - }, - with: { - groups: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groups: [], - }); -}); - -test('[Find One .through] Get groups with users + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - where: { - id: { gt: 1 }, - }, - with: { - users: { - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - groups: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toStrictEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 3, - name: 'Group3', - description: null, - }, { - id: 2, - name: 'Group2', - description: null, - }], - }); -}); - -test('[Find One .through] Get groups with users + orderBy', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - users: { - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 3, - name: 'Group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }); -}); - -test('[Find One .through] Get users with groups + orderBy + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findFirst({ - orderBy: { - id: 'desc', - }, - with: { - groups: { - limit: 1, - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groups: { - id: number; - name: string; - description: string | null; - }[]; - } | undefined - >(); - - expect(response).toEqual({ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groups: [{ - id: 3, - name: 'Group3', - description: null, - }], - }); -}); - -test('[Find Many .through] Get groups with users + orderBy + limit', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - orderBy: { - id: 'desc', - }, - limit: 2, - with: { - users: { - limit: 1, - orderBy: { - id: 'desc', - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[] - >(); - - expect(response).toStrictEqual([{ - id: 3, - name: 'Group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - description: null, - users: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with groups + custom', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - with: { - groups: { - orderBy: { - id: 'asc', - }, - extras: ({ - lower: ({ name }) => sql`lower(${name})`.as('lower_name'), - }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - groups: { - id: number; - name: string; - description: string | null; - lower: string; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groups.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - groups: [{ - id: 1, - name: 'Group1', - lower: 'group1', - description: null, - }], - }, { - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - groups: [{ - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - groups: [ - { - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - }, - { - id: 3, - name: 'Group3', - lower: 'group3', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with users + custom', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - extras: ({ - lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), - }), - with: { - users: { - extras: ({ - lower: (table, { sql }) => sql`lower(${table.name})`.as('lower_name'), - }), - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf< - { - id: number; - name: string; - description: string | null; - lower: string; - users: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - lower: string; - }[]; - }[] - >(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.users.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - lower: 'group1', - description: null, - users: [{ - id: 1, - name: 'Dan', - lower: 'dan', - verified: false, - invitedBy: null, - }], - }, { - id: 2, - name: 'Group2', - lower: 'group2', - description: null, - users: [{ - id: 2, - name: 'Andrew', - lower: 'andrew', - verified: false, - invitedBy: null, - }, { - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - lower: 'group3', - description: null, - users: [{ - id: 3, - name: 'Alex', - lower: 'alex', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with first group', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 3, name: 'Group3' }, - { id: 2, name: 'Group2' }, - { id: 1, name: 'Group1' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 3, groupId: 2 }, - { userId: 2, groupId: 3 }, - { userId: 2, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - group: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - group: { - id: number; - name: string; - description: string | null; - } | null; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - group: null, - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - group: { - id: 3, - name: 'Group3', - description: null, - }, - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - group: { - id: 2, - name: 'Group2', - description: null, - }, - }]); -}); - -test('[Find Many .through] Get groups with first user', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - user: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - user: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - } | null; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - user: null, - }, { - id: 2, - name: 'Group2', - description: null, - user: { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, - }, { - id: 3, - name: 'Group3', - description: null, - user: { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }, - }]); -}); - -test('[Find Many .through] Get users with filtered groups', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groupsFiltered: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groupsFiltered: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groupsFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groupsFiltered: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groupsFiltered: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groupsFiltered: [ - { - id: 2, - name: 'Group2', - description: null, - }, - { - id: 3, - name: 'Group3', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with filtered users', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - usersFiltered: true, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersFiltered: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.usersFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - usersFiltered: [], - }, { - id: 2, - name: 'Group2', - description: null, - usersFiltered: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - usersFiltered: [{ - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - }], - }]); -}); - -test('[Find Many .through] Get users with filtered groups + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.usersTable.findMany({ - with: { - groupsFiltered: { - where: { - id: { - lt: 3, - }, - }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - groupsFiltered: { - id: number; - name: string; - description: string | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.groupsFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - groupsFiltered: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - groupsFiltered: [{ - id: 2, - name: 'Group2', - description: null, - }], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - groupsFiltered: [ - { - id: 2, - name: 'Group2', - description: null, - }, - ], - }]); -}); - -test('[Find Many .through] Get groups with filtered users + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(groupsTable).values([ - { id: 1, name: 'Group1' }, - { id: 2, name: 'Group2' }, - { id: 3, name: 'Group3' }, - ]); - - await db.insert(usersToGroupsTable).values([ - { userId: 1, groupId: 1 }, - { userId: 2, groupId: 2 }, - { userId: 3, groupId: 3 }, - { userId: 3, groupId: 2 }, - ]); - - const response = await db.query.groupsTable.findMany({ - with: { - usersFiltered: { - where: { id: { lt: 3 } }, - }, - }, - }); - - expectTypeOf(response).toEqualTypeOf<{ - id: number; - name: string; - description: string | null; - usersFiltered: { - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - }[]; - }[]>(); - - response.sort((a, b) => (a.id > b.id) ? 1 : -1); - for (const e of response) { - e.usersFiltered.sort((a, b) => (a.id > b.id) ? 1 : -1); - } - - expect(response).toStrictEqual([{ - id: 1, - name: 'Group1', - description: null, - usersFiltered: [], - }, { - id: 2, - name: 'Group2', - description: null, - usersFiltered: [{ - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - }], - }, { - id: 3, - name: 'Group3', - description: null, - usersFiltered: [], - }]); -}); - -test('[Find Many] Get users with filtered posts', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1.1' }, - { id: 2, ownerId: 2, content: 'Post2.1' }, - { id: 3, ownerId: 3, content: 'Post3.1' }, - { id: 4, ownerId: 1, content: 'Post1.2' }, - { id: 5, ownerId: 2, content: 'Post2.2' }, - { id: 6, ownerId: 3, content: 'Post3.2' }, - { id: 7, ownerId: 1, content: 'Post1.3' }, - { id: 8, ownerId: 2, content: 'Post2.3' }, - { id: 9, ownerId: 3, content: 'Post3.3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - postsFiltered: { - columns: { - ownerId: true, - content: true, - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - postsFiltered: { - ownerId: number | null; - content: string; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - postsFiltered: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - postsFiltered: [ - { ownerId: 2, content: 'Post2.1' }, - { ownerId: 2, content: 'Post2.2' }, - { ownerId: 2, content: 'Post2.3' }, - ], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - postsFiltered: [], - }]); -}); - -test('[Find Many] Get posts with filtered authors', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1.1' }, - { id: 2, ownerId: 2, content: 'Post2.1' }, - { id: 3, ownerId: 3, content: 'Post3.1' }, - { id: 4, ownerId: 1, content: 'Post1.2' }, - { id: 5, ownerId: 2, content: 'Post2.2' }, - { id: 6, ownerId: 3, content: 'Post3.2' }, - ]); - - const posts = await db.query.postsTable.findMany({ - columns: { - id: true, - content: true, - }, - with: { - authorFiltered: { - columns: { - name: true, - id: true, - }, - }, - }, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - id: number; - content: string; - authorFiltered: { - id: number; - name: string; - }; - }[]>(); - - posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(posts).toStrictEqual([ - { id: 1, content: 'Post1.1', authorFiltered: null }, - { - id: 2, - content: 'Post2.1', - authorFiltered: { - id: 2, - name: 'Andrew', - }, - }, - { id: 3, content: 'Post3.1', authorFiltered: null }, - { id: 4, content: 'Post1.2', authorFiltered: null }, - { - id: 5, - content: 'Post2.2', - authorFiltered: { - id: 2, - name: 'Andrew', - }, - }, - { id: 6, content: 'Post3.2', authorFiltered: null }, - ]); -}); - -test('[Find Many] Get users with filtered posts + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1.1' }, - { id: 2, ownerId: 2, content: 'Post2.1' }, - { id: 3, ownerId: 3, content: 'Post3.1' }, - { id: 4, ownerId: 1, content: 'Post1.2' }, - { id: 5, ownerId: 2, content: 'Post2.2' }, - { id: 6, ownerId: 3, content: 'Post3.2' }, - { id: 7, ownerId: 1, content: 'Post1.3' }, - { id: 8, ownerId: 2, content: 'Post2.3' }, - { id: 9, ownerId: 3, content: 'Post3.3' }, - ]); - - const usersWithPosts = await db.query.usersTable.findMany({ - with: { - postsFiltered: { - columns: { - ownerId: true, - content: true, - }, - where: { - content: { - like: '%.2', - }, - }, - }, - }, - }); - - expectTypeOf(usersWithPosts).toEqualTypeOf<{ - id: number; - name: string; - verified: boolean; - invitedBy: number | null; - postsFiltered: { - ownerId: number | null; - content: string; - }[]; - }[]>(); - - usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(usersWithPosts).toStrictEqual([{ - id: 1, - name: 'Dan', - verified: false, - invitedBy: null, - postsFiltered: [], - }, { - id: 2, - name: 'Andrew', - verified: false, - invitedBy: null, - postsFiltered: [ - { ownerId: 2, content: 'Post2.2' }, - ], - }, { - id: 3, - name: 'Alex', - verified: false, - invitedBy: null, - postsFiltered: [], - }]); -}); - -test('[Find Many] Get posts with filtered authors + where', async (t) => { - const { vpgDbV2: db } = t; - - await db.insert(usersTable).values([ - { id: 1, name: 'Dan' }, - { id: 2, name: 'Andrew' }, - { id: 3, name: 'Alex' }, - ]); - - await db.insert(postsTable).values([ - { id: 1, ownerId: 1, content: 'Post1.1' }, - { id: 2, ownerId: 2, content: 'Post2.1' }, - { id: 3, ownerId: 3, content: 'Post3.1' }, - { id: 4, ownerId: 1, content: 'Post1.2' }, - { id: 5, ownerId: 2, content: 'Post2.2' }, - { id: 6, ownerId: 3, content: 'Post3.2' }, - ]); - - const posts = await db.query.postsTable.findMany({ - columns: { - id: true, - content: true, - }, - with: { - authorAltFiltered: { - columns: { - name: true, - id: true, - }, - where: { - id: 2, - }, - }, - }, - }); - - expectTypeOf(posts).toEqualTypeOf<{ - id: number; - content: string; - authorAltFiltered: { - id: number; - name: string; - } | null; - }[]>(); - - posts.sort((a, b) => (a.id > b.id) ? 1 : -1); - - expect(posts).toStrictEqual([ - { id: 1, content: 'Post1.1', authorAltFiltered: null }, - { - id: 2, - content: 'Post2.1', - authorAltFiltered: { - id: 2, - name: 'Andrew', - }, - }, - { id: 3, content: 'Post3.1', authorAltFiltered: null }, - { id: 4, content: 'Post1.2', authorAltFiltered: null }, - { id: 5, content: 'Post2.2', authorAltFiltered: null }, - { id: 6, content: 'Post3.2', authorAltFiltered: null }, - ]); -}); - -test('.toSQL()', () => { - const query = db.query.usersTable.findFirst().toSQL(); - - expect(query).toHaveProperty('sql', expect.any(String)); - expect(query).toHaveProperty('params', expect.any(Array)); -}); diff --git a/integration-tests/tests/seeder/mysql.test.ts b/integration-tests/tests/seeder/mysql.test.ts index 22530a2aa3..6fa92d3f5e 100644 --- a/integration-tests/tests/seeder/mysql.test.ts +++ b/integration-tests/tests/seeder/mysql.test.ts @@ -8,7 +8,7 @@ import type { Connection } from 'mysql2/promise'; import { createConnection } from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; -import * as schema from './mysqlSchema.ts'; +import * as schema from './mysqlSchema'; let mysqlContainer: Docker.Container; let client: Connection; @@ -256,7 +256,7 @@ beforeAll(async () => { try { client = await createConnection(connectionString); await client.connect(); - db = drizzle(client); + db = drizzle({ client }); connected = true; break; } catch (e) { diff --git a/integration-tests/tests/seeder/pg.test.ts b/integration-tests/tests/seeder/pg.test.ts index 5c3fd82c27..04d610d0f5 100644 --- a/integration-tests/tests/seeder/pg.test.ts +++ b/integration-tests/tests/seeder/pg.test.ts @@ -4,7 +4,7 @@ import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; import { cities, countries, firstNames, lastNames, reset, seed } from 'drizzle-seed'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; -import * as schema from './pgSchema.ts'; +import * as schema from './pgSchema'; let client: PGlite; let db: PgliteDatabase; @@ -876,7 +876,7 @@ const createAllGeneratorsTables = async () => { beforeAll(async () => { client = new PGlite(); - db = drizzle(client); + db = drizzle({ client }); await db.execute(sql`CREATE SCHEMA IF NOT EXISTS "seeder_lib_pg";`); @@ -1242,8 +1242,8 @@ test('valuesFromArray unique generator test', async () => { }), valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ - { values: lastNames.slice(0, 14920), weight: 0.3 }, - { values: lastNames.slice(14920), weight: 0.7 }, + { values: lastNames.slice(0, 14894), weight: 0.3 }, + { values: lastNames.slice(14894), weight: 0.7 }, ], isUnique: true, }), diff --git a/integration-tests/tests/seeder/sqlite.test.ts b/integration-tests/tests/seeder/sqlite.test.ts index 9b1e3ff77b..3c731bf7f8 100644 --- a/integration-tests/tests/seeder/sqlite.test.ts +++ b/integration-tests/tests/seeder/sqlite.test.ts @@ -4,7 +4,7 @@ import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { reset, seed } from 'drizzle-seed'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; -import * as schema from './sqliteSchema.ts'; +import * as schema from './sqliteSchema'; let client: BetterSqlite3.Database; let db: BetterSQLite3Database; @@ -12,7 +12,7 @@ let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); - db = drizzle(client); + db = drizzle({ client }); db.run( sql.raw(` diff --git a/integration-tests/tests/seeder/sqliteSchema.ts b/integration-tests/tests/seeder/sqliteSchema.ts index 3388336593..126c35b992 100644 --- a/integration-tests/tests/seeder/sqliteSchema.ts +++ b/integration-tests/tests/seeder/sqliteSchema.ts @@ -34,12 +34,10 @@ export const employees = sqliteTable( reportsTo: integer('reports_to'), photoPath: text('photo_path'), }, - (table) => ({ - reportsToFk: foreignKey(() => ({ - columns: [table.reportsTo], - foreignColumns: [table.id], - })), - }), + (table) => [foreignKey({ + columns: [table.reportsTo], + foreignColumns: [table.id], + })], ); export const orders = sqliteTable('order', { diff --git a/integration-tests/tests/singlestore/common-1.ts b/integration-tests/tests/singlestore/common-1.ts new file mode 100644 index 0000000000..f320a9b575 --- /dev/null +++ b/integration-tests/tests/singlestore/common-1.ts @@ -0,0 +1,1170 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { and, asc, eq, exists, inArray, Name, notInArray, placeholder, sql } from 'drizzle-orm'; +import type { SingleStoreDatabase } from 'drizzle-orm/singlestore-core'; +import { + alias, + bigint, + boolean, + date, + datetime, + getTableConfig, + int, + json, + mediumint, + primaryKey, + serial, + singlestoreEnum, + singlestoreTable, + singlestoreTableCreator, + /* singlestoreView, */ + smallint, + text, + time, + timestamp, + tinyint, + unique, + uniqueIndex, + vector, + year, +} from 'drizzle-orm/singlestore-core'; +import { migrate } from 'drizzle-orm/singlestore/migrator'; +import { describe, expect } from 'vitest'; +import { toLocalDate } from '../utils'; +import type { Test } from './instrumentation'; + +const usersTable = singlestoreTable('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); + +const users2Table = singlestoreTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), +}); + +const citiesTable = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const datesTable = singlestoreTable('datestable', { + date: date('date'), + dateAsString: date('date_as_string', { mode: 'string' }), + time: time('time'), + datetime: datetime('datetime'), + datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), + timestamp: timestamp('timestamp'), + timestampAsString: timestamp('timestamp_as_string', { mode: 'string' }), + year: year('year'), +}); + +const coursesTable = singlestoreTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: int('category_id'), +}); + +const courseCategoriesTable = singlestoreTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const orders = singlestoreTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), +}); + +const usersMigratorTable = singlestoreTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}, (table) => [ + uniqueIndex('').on(table.name).using('btree'), +]); + +export function tests(test: Test) { + const connDict: Record = {}; + + describe('common', () => { + test.beforeEach(async ({ db, client }) => { + const connKey = `${client.config.user}:${client.config.password}@${client.config.host}:${client.config.port}`; + if (connDict[connKey] === undefined) { + connDict[connKey] = false; + + await Promise.all([ + db.execute(sql`drop table if exists userstest;`), + db.execute(sql`drop table if exists users2;`), + db.execute(sql`drop table if exists cities;`), + ]); + await Promise.all([ + db.execute(sql`create table userstest ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb json, + created_at timestamp not null default now() + );`), + db.execute(sql`create table users2 ( + id serial primary key, + name text not null, + city_id int + );`), + db.execute(sql`create table cities ( + id serial primary key, + name text not null + );`), + ]); + } + + await Promise.all([ + db.execute(sql`truncate table userstest;`), + db.execute(sql`truncate table users2;`), + db.execute(sql`truncate table cities;`), + ]); + }); + + test.concurrent('table config: unsigned ints', async () => { + const unsignedInts = singlestoreTable('cities1', { + bigint: bigint('bigint', { mode: 'number', unsigned: true }), + int: int('int', { unsigned: true }), + smallint: smallint('smallint', { unsigned: true }), + mediumint: mediumint('mediumint', { unsigned: true }), + tinyint: tinyint('tinyint', { unsigned: true }), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); + expect(intColumn.getSQLType()).toBe('int unsigned'); + expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); + expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); + expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); + }); + + test.concurrent('table config: signed ints', async () => { + const unsignedInts = singlestoreTable('cities1', { + bigint: bigint('bigint', { mode: 'number' }), + int: int('int'), + smallint: smallint('smallint'), + mediumint: mediumint('mediumint'), + tinyint: tinyint('tinyint'), + }); + + const tableConfig = getTableConfig(unsignedInts); + + const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; + const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; + const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; + const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; + const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; + + expect(bigintColumn.getSQLType()).toBe('bigint'); + expect(intColumn.getSQLType()).toBe('int'); + expect(smallintColumn.getSQLType()).toBe('smallint'); + expect(mediumintColumn.getSQLType()).toBe('mediumint'); + expect(tinyintColumn.getSQLType()).toBe('tinyint'); + }); + + test.concurrent('table config: primary keys name', async () => { + const table = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [primaryKey({ columns: [t.id, t.name], name: 'custom_pk' })]); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); + }); + + test.concurrent('table configs: unique third param', async () => { + const cities1Table = singlestoreTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => [unique('custom_name').on(t.name, t.state), unique('custom_name1').on(t.name, t.state)]); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + }); + + test.concurrent('table configs: unique in column', async () => { + const cities1Table = singlestoreTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull().unique(), + state: text('state').unique('custom'), + field: text('field').unique('custom_field'), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + expect(columnName?.uniqueName).toBe(undefined); + expect(columnName?.isUnique).toBeTruthy(); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBeTruthy(); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBeTruthy(); + }); + + test.concurrent('select all fields', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test.concurrent('select sql', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('select typed sql', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('select with empty array in inArray', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(inArray(usersTable.id, [])) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([]); + }); + + test.concurrent('select with empty array in notInArray', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(notInArray(usersTable.id, [])) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); + }); + + test.concurrent('select distinct', async ({ db }) => { + const usersDistinctTable = singlestoreTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test.concurrent('insert returning sql', async ({ db }) => { + const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test.concurrent('delete returning sql', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test.concurrent('update returning sql', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + expect(users[0].changedRows).toBe(1); + }); + + test.concurrent('update with returning all fields', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); + expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); + }); + + test.concurrent('update with returning partial', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); + + const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test.concurrent('delete with returning all fields', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test.concurrent('delete with returning partial', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test.concurrent('insert + select', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersTable).values({ id: 2, name: 'Jane' }); + const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test.concurrent('json insert', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); + }); + + test.concurrent('insert with overridden default values', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test.concurrent('insert many', async ({ db }) => { + await db.insert(usersTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test.concurrent('insert many with returning', async ({ db }) => { + const result = await db.insert(usersTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + + expect(result[0].affectedRows).toBe(4); + }); + + test.concurrent('select with group by as field', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.name) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test.concurrent('select with exists', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const user = alias(usersTable, 'user'); + const result = await db.select({ name: usersTable.name }).from(usersTable).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), + ), + ) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }]); + }); + + test.concurrent('select with group by as sql', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test.concurrent('$default function', async ({ db }) => { + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); + const selectedOrder = await db.select().from(orders); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test.concurrent('$default with empty array', async ({ db }) => { + await db.execute(sql`drop table if exists \`s_orders\``); + await db.execute( + sql` + create table \`s_orders\` ( + \`id\` serial primary key, + \`region\` text default 'Ukraine', + \`product\` text not null + ) + `, + ); + + const users = singlestoreTable('s_orders', { + id: serial('id').primaryKey(), + region: text('region').default('Ukraine'), + product: text('product').$defaultFn(() => 'random_string'), + }); + + await db.insert(users).values({ id: 1 }); + const selectedOrder = await db.select().from(users); + + expect(selectedOrder).toEqual([{ + id: 1, + region: 'Ukraine', + product: 'random_string', + }]); + }); + + test.concurrent('select with group by as sql + column', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test.concurrent('select with group by as column + sql', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test.concurrent('select with group by complex query', async ({ db }) => { + await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); + + const result = await db.select({ name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); + }); + + test.concurrent('build query', async ({ db }) => { + const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, + params: [], + }); + }); + + test.concurrent('Query check: Insert all defaults in 1 row', async ({ db }) => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', + params: [], + }); + }); + + test.concurrent('Query check: Insert all defaults in multiple rows', async ({ db }) => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', + params: [], + }); + }); + + test.concurrent('Insert all defaults in 1 row', async ({ db }) => { + const users = singlestoreTable('empty_insert_single', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values({ id: 1 }); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); + }); + + test.concurrent('Insert all defaults in multiple rows', async ({ db }) => { + const users = singlestoreTable('empty_insert_multiple', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values([{ id: 1 }, { id: 2 }]); + + const res = await db.select().from(users).orderBy(asc(users.id)); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); + }); + + test.concurrent('build query insert with onDuplicate', async ({ db }) => { + const query = db.insert(usersTable) + .values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }) + .onDuplicateKeyUpdate({ set: { id: 1, name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (?, ?, default, ?, default) on duplicate key update `id` = ?, `name` = ?', + params: [1, 'John', '["foo","bar"]', 1, 'John1'], + }); + }); + + test.concurrent('insert with onDuplicate', async ({ db }) => { + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .values({ id: 1, name: 'John' }) + .onDuplicateKeyUpdate({ set: { name: 'John1' } }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); + }); + + test.concurrent('insert conflict', async ({ db }) => { + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await expect((async () => { + db.insert(usersTable).values({ id: 1, name: 'John1' }); + })()).resolves.not.toThrowError(); + }); + + test.concurrent('insert conflict with ignore', async ({ db }) => { + await db.insert(usersTable) + .values({ id: 1, name: 'John' }); + + await db.insert(usersTable) + .ignore() + .values({ id: 1, name: 'John1' }); + + const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( + eq(usersTable.id, 1), + ); + + expect(res).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('insert sql', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('partial join with alias', async ({ db }) => { + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); + }); + + test.concurrent('full join with alias', async ({ db }) => { + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select().from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)) + .orderBy(asc(users.id)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test.concurrent('select from alias', async ({ db }) => { + const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)) + .orderBy(asc(user.id)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); + }); + + test.concurrent('insert with spaces', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test.concurrent('prepared statement', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const statement = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .prepare(); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('insert: placeholders on columns with encoder', async ({ db }) => { + const date = new Date('2024-08-07T15:30:00Z'); + + const statement = db.insert(usersTable).values({ + id: 1, + name: 'John', + createdAt: sql.placeholder('createdAt'), + }).prepare(); + + await statement.execute({ createdAt: date }); + + const result = await db + .select({ + id: usersTable.id, + createdAt: usersTable.createdAt, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, createdAt: date }, + ]); + }); + + test.concurrent('prepared statement reuse', async ({ db }) => { + const stmt = db.insert(usersTable).values({ + verified: true, + id: placeholder('id'), + name: placeholder('name'), + }).prepare(); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ id: i + 1, name: `John ${i}` }); + } + + const result = await db.select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }).from(usersTable) + .orderBy(asc(usersTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); + }); + + test.concurrent('prepared statement with placeholder in .where', async ({ db }) => { + await db.insert(usersTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersTable.id, + name: usersTable.name, + }).from(usersTable) + .where(eq(usersTable.id, placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('migrator', async ({ db }) => { + await db.execute(sql`drop table if exists cities_migration`); + await db.execute(sql`drop table if exists users_migration`); + await db.execute(sql`drop table if exists users12`); + await db.execute(sql`drop table if exists __drizzle_migrations`); + + await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); + + await db.insert(usersMigratorTable).values({ id: 1, name: 'John', email: 'email' }); + + const result = await db.select().from(usersMigratorTable); + + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.execute(sql`drop table cities_migration`); + await db.execute(sql`drop table users_migration`); + await db.execute(sql`drop table users12`); + await db.execute(sql`drop table __drizzle_migrations`); + }); + + test.concurrent('insert via db.execute + select via db.execute', async ({ db }) => { + await db.execute( + sql`insert into ${usersTable} (${new Name(usersTable.id.name)},${new Name( + usersTable.name.name, + )}) values (1,${'John'})`, + ); + + const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); + expect(result[0]).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('insert via db.execute w/ query builder', async ({ db }) => { + const inserted = await db.execute( + db.insert(usersTable).values({ id: 1, name: 'John' }), + ); + expect(inserted[0].affectedRows).toBe(1); + }); + + test.concurrent('insert + select all possible dates', async ({ db }) => { + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute( + sql` + create table \`datestable\` ( + \`date\` date, + \`date_as_string\` date, + \`time\` time, + \`datetime\` datetime, + \`datetime_as_string\` datetime, + \`timestamp\` timestamp(6), + \`timestamp_as_string\` timestamp(6), + \`year\` year + ) + `, + ); + + const date = new Date('2022-11-11'); + const dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); + + await db.insert(datesTable).values({ + date: date, + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: date, + year: 22, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: dateWithMilliseconds, + timestampAsString: '2022-11-11 12:12:12.123', + }); + + const res = await db.select().from(datesTable); + + expect(res[0]?.date).toBeInstanceOf(Date); + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(typeof res[0]?.dateAsString).toBe('string'); + expect(typeof res[0]?.datetimeAsString).toBe('string'); + + expect(res).toEqual([{ + date: toLocalDate(new Date('2022-11-11')), + dateAsString: '2022-11-11', + time: '12:12:12', + datetime: new Date('2022-11-11'), + year: 2022, + datetimeAsString: '2022-11-11 12:12:12', + timestamp: new Date('2022-11-11 12:12:12.123'), + timestampAsString: '2022-11-11 12:12:12.123000', + }]); + + await db.execute(sql`drop table if exists \`datestable\``); + }); + + const tableWithEnums = singlestoreTable('enums_test_case', { + id: serial('id').primaryKey(), + enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), + enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), + enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), + }); + + test.concurrent('SingleStore enum test case #1', async ({ db }) => { + await db.execute(sql`drop table if exists \`enums_test_case\``); + + await db.execute(sql` + create table \`enums_test_case\` ( + \`id\` serial primary key, + \`enum1\` ENUM('a', 'b', 'c') not null, + \`enum2\` ENUM('a', 'b', 'c') default 'a', + \`enum3\` ENUM('a', 'b', 'c') not null default 'b' + ) + `); + + await db.insert(tableWithEnums).values([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum3: 'c' }, + { id: 3, enum1: 'a' }, + ]); + + const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); + + await db.execute(sql`drop table \`enums_test_case\``); + + expect(res).toEqual([ + { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, + { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, + { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, + ]); + }); + + test.concurrent('left join (flat object fields)', async ({ db }) => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(users2Table.id); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); + }); + + test.concurrent('left join (grouped fields)', async ({ db }) => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }).from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); + }); + + test.concurrent('left join (all fields)', async ({ db }) => { + await db.insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); + + const res = await db.select().from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) + .orderBy(asc(users2Table.id)); + + expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId: 1, + }, + cities: { + id: 1, + name: 'Paris', + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: null, + }, + ]); + }); + + test.concurrent('join subquery', async ({ db }) => { + await db.execute(sql`drop table if exists \`courses\``); + await db.execute(sql`drop table if exists \`course_categories\``); + + await db.execute( + sql` + create table \`course_categories\` ( + \`id\` serial primary key, + \`name\` text not null + ) + `, + ); + + await db.execute( + sql` + create table \`courses\` ( + \`id\` serial primary key, + \`name\` text not null, + \`category_id\` int + ) + `, + ); + + await db.insert(courseCategoriesTable).values([ + { id: 1, name: 'Category 1' }, + { id: 2, name: 'Category 2' }, + { id: 3, name: 'Category 3' }, + { id: 4, name: 'Category 4' }, + ]); + + await db.insert(coursesTable).values([ + { id: 1, name: 'Development', categoryId: 2 }, + { id: 2, name: 'IT & Software', categoryId: 3 }, + { id: 3, name: 'Marketing', categoryId: 4 }, + { id: 4, name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); + + await db.execute(sql`drop table if exists \`courses\``); + await db.execute(sql`drop table if exists \`course_categories\``); + }); + }); +} diff --git a/integration-tests/tests/singlestore/common-2.ts b/integration-tests/tests/singlestore/common-2.ts new file mode 100644 index 0000000000..57a2c5fed9 --- /dev/null +++ b/integration-tests/tests/singlestore/common-2.ts @@ -0,0 +1,2786 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { + asc, + avg, + avgDistinct, + count, + countDistinct, + eq, + getTableColumns, + gt, + gte, + inArray, + lt, + max, + min, + sql, + sum, + sumDistinct, + TransactionRollbackError, +} from 'drizzle-orm'; +import type { SingleStoreDatabase } from 'drizzle-orm/singlestore-core'; +import { + alias, + bigint, + binary, + boolean, + char, + date, + datetime, + decimal, + double, + except, + float, + getTableConfig, + index, + int, + intersect, + json, + mediumint, + primaryKey, + real, + serial, + singlestoreEnum, + singlestoreSchema, + singlestoreTable, + singlestoreTableCreator, + /* singlestoreView, */ + smallint, + text, + time, + timestamp, + tinyint, + union, + unionAll, + varbinary, + varchar, + vector, + year, +} from 'drizzle-orm/singlestore-core'; +import { dotProduct, euclideanDistance } from 'drizzle-orm/singlestore-core/expressions'; +import { describe, expect, expectTypeOf } from 'vitest'; +import { Expect } from '../utils'; +import type { Equal } from '../utils'; +import type { Test } from './instrumentation'; +import type relations from './relations'; + +type TestSingleStoreDB = SingleStoreDatabase; + +const allTypesTable = singlestoreTable('all_types', { + serial: serial('scol'), + bigint53: bigint('bigint53', { + mode: 'number', + }), + bigint64: bigint('bigint64', { + mode: 'bigint', + }), + bigintString: bigint('bigint_string', { + mode: 'string', + }), + binary: binary('binary'), + boolean: boolean('boolean'), + char: char('char'), + date: date('date', { + mode: 'date', + }), + dateStr: date('date_str', { + mode: 'string', + }), + datetime: datetime('datetime', { + mode: 'date', + }), + datetimeStr: datetime('datetime_str', { + mode: 'string', + }), + decimal: decimal('decimal'), + decimalNum: decimal('decimal_num', { + scale: 30, + mode: 'number', + }), + decimalBig: decimal('decimal_big', { + scale: 30, + mode: 'bigint', + }), + double: double('double'), + float: float('float'), + int: int('int'), + json: json('json'), + medInt: mediumint('med_int'), + smallInt: smallint('small_int'), + real: real('real'), + text: text('text'), + time: time('time'), + timestamp: timestamp('timestamp', { + mode: 'date', + }), + timestampStr: timestamp('timestamp_str', { + mode: 'string', + }), + tinyInt: tinyint('tiny_int'), + varbin: varbinary('varbin', { + length: 16, + }), + varchar: varchar('varchar', { + length: 255, + }), + year: year('year'), + enum: singlestoreEnum('enum', ['enV1', 'enV2']), + vectorI8: vector('vec_i8', { + dimensions: 5, + elementType: 'I8', + }), + vectorI16: vector('vec_i16', { + dimensions: 5, + elementType: 'I16', + }), + vectorI32: vector('vec_i32', { + dimensions: 5, + elementType: 'I32', + }), + vectorI64: vector('vec_i64', { + dimensions: 5, + elementType: 'I64', + }), + vectorF32: vector('vec_f32', { + dimensions: 5, + elementType: 'F32', + }), + vectorF64: vector('vec_f64', { + dimensions: 5, + elementType: 'F64', + }), +}); + +const usersTable = singlestoreTable('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); + +const users2Table = singlestoreTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), +}); + +const citiesTable = singlestoreTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const usersOnUpdate = singlestoreTable('users_on_update', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: datetime('updated_at', { mode: 'date' }).$onUpdateFn(() => new Date()), + alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value +}); + +const orders = singlestoreTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: int('amount').notNull(), + quantity: int('quantity').notNull(), +}); + +// To test aggregate functions +const aggregateTable = singlestoreTable('aggregate_table', { + id: serial('id').notNull(), + name: text('name').notNull(), + a: int('a'), + b: int('b'), + c: int('c'), + nullOnly: int('null_only'), +}); + +const vectorSearchTable = singlestoreTable('vector_search', { + id: serial('id').notNull(), + text: text('text').notNull(), + embedding: vector('embedding', { dimensions: 10 }), +}); + +// To test another schema and multischema +const mySchema = singlestoreSchema(`mySchema`); + +const usersMySchemaTable = mySchema.table('userstest', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); + +export function tests(test: Test) { + const connDict: Record = {}; + + describe('common', () => { + test.beforeEach(async ({ db, client }) => { + const connKey = `${client.config.user}:${client.config.password}@${client.config.host}:${client.config.port}`; + + if (connDict[connKey] === undefined) { + connDict[connKey] = false; + + await Promise.all([ + db.execute(sql`drop schema if exists \`mySchema\`;`), + db.execute(sql`drop table if exists userstest;`), + db.execute(sql`drop table if exists users2;`), + db.execute(sql`drop table if exists cities;`), + db.execute(sql`drop table if exists aggregate_table;`), + db.execute(sql`drop table if exists vector_search;`), + db.execute(sql`drop table if exists users_default_fn;`), + ]); + await db.execute(sql`create schema \`mySchema\`;`); + await Promise.all([ + db.execute(sql`create table userstest ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb json, + created_at timestamp not null default now() + );`), + db.execute(sql`create table users2 ( + id serial primary key, + name text not null, + city_id int + );`), + db.execute(sql`create table cities ( + id serial primary key, + name text not null + );`), + db.execute(sql`create table \`mySchema\`.\`userstest\` ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb json, + created_at timestamp not null default now() + );`), + db.execute(sql`create table \`mySchema\`.\`cities\` ( + \`id\` serial primary key, + \`name\` text not null + );`), + db.execute(sql`create table \`mySchema\`.\`users2\` ( + \`id\` serial primary key, + \`name\` text not null, + \`city_id\` int + );`), + db.execute(sql`create table aggregate_table ( + id integer primary key auto_increment not null, + name text not null, + a integer, + b integer, + c integer, + null_only integer + );`), + db.execute(sql`create table vector_search ( + id integer primary key auto_increment not null, + text text not null, + embedding vector(10) not null + );`), + db.execute(sql`create table users_default_fn ( + id varchar(256) primary key, + name text not null + );`), + ]); + } + + await Promise.all([ + db.execute(sql`truncate table userstest;`), + db.execute(sql`truncate table users2;`), + db.execute(sql`truncate table cities;`), + db.execute(sql`truncate table aggregate_table;`), + db.execute(sql`truncate table vector_search;`), + db.execute(sql`truncate table users_default_fn;`), + + db.execute(sql`truncate table \`mySchema\`.\`userstest\`;`), + db.execute(sql`truncate table \`mySchema\`.\`cities\`;`), + db.execute(sql`truncate table \`mySchema\`.\`users2\`;`), + ]); + }); + + async function setupReturningFunctionsTest(db: SingleStoreDatabase) { + await db.execute(sql`truncate table users_default_fn`); + } + + async function setupSetOperationTest(db: TestSingleStoreDB) { + await Promise.all([db.execute(sql`truncate table \`users2\`;`), db.execute(sql`truncate table \`cities\``)]); + await Promise.all( + [ + db.insert(citiesTable).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]), + db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]), + ], + ); + } + + async function setupAggregateFunctionsTest(db: TestSingleStoreDB) { + await db.execute(sql`truncate table aggregate_table`); + await db.insert(aggregateTable).values([ + { id: 1, name: 'value 1', a: 5, b: 10, c: 20 }, + { id: 2, name: 'value 1', a: 5, b: 20, c: 30 }, + { id: 3, name: 'value 2', a: 10, b: 50, c: 60 }, + { id: 4, name: 'value 3', a: 20, b: 20, c: null }, + { id: 5, name: 'value 4', a: null, b: 90, c: 120 }, + { id: 6, name: 'value 5', a: 80, b: 10, c: null }, + { id: 7, name: 'value 6', a: null, b: null, c: 150 }, + ]); + } + + async function setupVectorSearchTest(db: TestSingleStoreDB) { + await db.execute(sql`truncate table vector_search`); + await db.insert(vectorSearchTable).values([ + { + id: 1, + text: 'I like dogs', + embedding: [0.6119, 0.1395, 0.2921, 0.3664, 0.4561, 0.7852, 0.1997, 0.5142, 0.5924, 0.0465], + }, + { + id: 2, + text: 'I like cats', + embedding: [0.6075, 0.1705, 0.0651, 0.9489, 0.9656, 0.8084, 0.3046, 0.0977, 0.6842, 0.4402], + }, + ]); + } + + test.concurrent('with ... select', async ({ db }) => { + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, + productSales: sql`cast(sum(${orders.amount}) as unsigned)`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + + expect(result).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + }); + + test.concurrent('with ... update', async ({ db }) => { + const products = singlestoreTable('products', { + id: serial('id').primaryKey(), + price: decimal('price', { + precision: 15, + scale: 2, + }).notNull(), + cheap: boolean('cheap').notNull().default(false), + }); + + await db.execute(sql`drop table if exists ${products}`); + await db.execute(sql` + create table ${products} ( + id serial primary key, + price decimal(15, 2) not null, + cheap boolean not null default false + ) + `); + + await db.insert(products).values([ + { id: 1, price: '10.99' }, + { id: 2, price: '25.85' }, + { id: 3, price: '32.99' }, + { id: 4, price: '2.50' }, + { id: 5, price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)); + + const result = await db + .select({ + id: products.id, + }) + .from(products) + .where(eq(products.cheap, true)) + .orderBy(asc(products.id)); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test.concurrent('with ... delete', async ({ db }) => { + await db.execute(sql`drop table if exists \`orders\``); + await db.execute( + sql` + create table \`orders\` ( + \`id\` serial primary key, + \`region\` text not null, + \`product\` text not null, + \`amount\` int not null, + \`quantity\` int not null + ) + `, + ); + + await db.insert(orders).values([ + { id: 1, region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { id: 2, region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { id: 3, region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { id: 4, region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { id: 5, region: 'US', product: 'A', amount: 30, quantity: 3 }, + { id: 6, region: 'US', product: 'A', amount: 40, quantity: 4 }, + { id: 7, region: 'US', product: 'B', amount: 40, quantity: 4 }, + { id: 8, region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); + + const result = await db + .select({ + id: orders.id, + }) + .from(orders) + .orderBy(asc(orders.id)); + + expect(result).toEqual([ + { id: 1 }, + { id: 2 }, + { id: 3 }, + { id: 4 }, + { id: 5 }, + ]); + }); + + test.concurrent('select from subquery sql', async ({ db }) => { + await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); + + const sq = db + .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) + .from(users2Table) + .orderBy(asc(users2Table.id)) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); + }); + + test.concurrent('select a field without joining its table', ({ db }) => { + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); + }); + + test.concurrent('select all fields from subquery without alias', ({ db }) => { + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare()).toThrowError(); + }); + + test.concurrent('select count()', async ({ db }) => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); + }); + + test.concurrent('select for ...', ({ db }) => { + { + const query = db.select().from(users2Table).for('update').toSQL(); + expect(query.sql).toMatch(/ for update$/); + } + { + const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); + expect(query.sql).toMatch(/ for share skip locked$/); + } + { + const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); + expect(query.sql).toMatch(/ for update nowait$/); + } + }); + + test.concurrent('having', async ({ db }) => { + await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { + id: 3, + name: 'New York', + }]); + + await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { + id: 3, + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); + }); + + // TODO: Unskip when views are supported + /* test.skip('view', async ({ db }) => { + + + const newYorkers1 = singlestoreView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = singlestoreView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); */ + + test.concurrent('select from raw sql', async ({ db }) => { + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + }); + + test.concurrent('select from raw sql with joins', async ({ db }) => { + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); + }); + + test.concurrent('join on aliased sql from select', async ({ db }) => { + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test.concurrent('join on aliased sql from with clause', async ({ db }) => { + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); + }); + + test.concurrent('prefixed table', async ({ db }) => { + const singlestoreTable = singlestoreTableCreator((name) => `myprefix_${name}`); + + const users = singlestoreTable('test_prefixed_table_with_unique_name', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); + }); + + test.concurrent('orderBy with aliased column', ({ db }) => { + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as `test` from `users2` order by `test`'); + }); + + test.concurrent('timestamp timezone', async ({ db }) => { + const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); + + await db.insert(usersTable).values({ id: 1, name: 'With default times' }); + await db.insert(usersTable).values({ + id: 2, + name: 'Without default times', + createdAt: date, + }); + const users = await db.select().from(usersTable).orderBy(asc(usersTable.id)); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(5000); + + // check that the timestamps are set correctly for non default times + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); + }); + + test.concurrent('transaction', async ({ db }) => { + const users = singlestoreTable('users_transactions', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + const products = singlestoreTable('products_transactions', { + id: serial('id').primaryKey(), + price: int('price').notNull(), + stock: int('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); + await db.execute( + sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, + ); + + const [{ insertId: userId }] = await db.insert(users).values({ id: 1, balance: 100 }); + const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); + const [{ insertId: productId }] = await db.insert(products).values({ id: 1, price: 10, stock: 10 }); + const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); + }); + + test.concurrent('transaction rollback', async ({ db }) => { + const users = singlestoreTable('users_transactions_rollback', { + id: serial('id').primaryKey(), + balance: int('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, + ); + + await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); + + await db.execute(sql`drop table ${users}`); + }); + + test.concurrent('join subquery with join', async ({ db }) => { + const internalStaff = singlestoreTable('internal_staff', { + userId: int('user_id').notNull(), + }); + + const customUser = singlestoreTable('custom_user', { + id: int('id').notNull(), + }); + + const ticket = singlestoreTable('ticket', { + staffId: int('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table internal_staff (user_id integer not null)`); + await db.execute(sql`create table custom_user (id integer not null)`); + await db.execute(sql`create table ticket (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table ${ticket}`); + }); + + // TODO: Unskip when views are supported + /* test.skip('subquery with view', async ({ db }) => { + + + const users = singlestoreTable('users_subquery_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq).orderBy(asc(sq.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); */ + + // TODO: Unskip when views are supported + /* test.skip('join view as subquery', async ({ db }) => { + + + const users = singlestoreTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }); + + const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 1 }, + { id: 4, name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).orderBy(asc(users.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); + }); */ + + test.concurrent('select iterator', async ({ db }) => { + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const iter = db.select().from(users) + .orderBy(asc(users.id)) + .iterator(); + + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test.concurrent('select iterator w/ prepared statement', async ({ db }) => { + const users = singlestoreTable('users_iterator', { + id: serial('id').primaryKey(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key)`); + + await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); + + const prepared = db.select().from(users) + .orderBy(asc(users.id)) + .prepare(); + const iter = prepared.iterator(); + const result: typeof users.$inferSelect[] = []; + + for await (const row of iter) { + result.push(row); + } + + expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + }); + + test.concurrent('insert undefined', async ({ db }) => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test.concurrent('update undefined', async ({ db }) => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await expect((async () => { + await db.update(users).set({ id: 1, name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); + }); + + test.concurrent('utc config for datetime', async ({ db }) => { + await db.execute(sql`drop table if exists \`datestable\``); + await db.execute( + sql` + create table \`datestable\` ( + \`datetime_utc\` datetime(6), + \`datetime\` datetime(6) + ) + `, + ); + const datesTable = singlestoreTable('datestable', { + datetimeUTC: datetime('datetime_utc', { mode: 'date' }), + datetime: datetime('datetime'), + }); + + const dateObj = new Date('2022-11-11'); + const dateUtc = new Date('2022-11-11T12:12:12.122Z'); + + await db.insert(datesTable).values({ + datetimeUTC: dateUtc, + datetime: dateObj, + }); + + const res = await db.select().from(datesTable); + + const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); + const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; + + expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122000'); + expect(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z')).toEqual(dateUtc); + + expect(res[0]?.datetime).toBeInstanceOf(Date); + expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); + + expect(res).toEqual([{ + datetimeUTC: dateUtc, + datetime: new Date('2022-11-11'), + }]); + + await db.execute(sql`drop table if exists \`datestable\``); + }); + + // TODO (https://memsql.atlassian.net/browse/MCDB-63261) allow chaining limit and orderby in subquery + test.concurrent('set operations (union) from query builder with subquery', async ({ db }) => { + await setupSetOperationTest(db); + const citiesQuery = db + .select({ + id: citiesTable.id, + name: citiesTable.name, + orderCol: sql`0`.as('orderCol'), + }) + .from(citiesTable); + + const usersQuery = db + .select({ + id: users2Table.id, + name: users2Table.name, + orderCol: sql`1`.as('orderCol'), + }) + .from(users2Table); + + const unionQuery = db + .select({ + id: sql`id`, + name: sql`name`, + }) + .from( + citiesQuery.union(usersQuery).as('combined'), + ) + .orderBy(sql`orderCol`, sql`id`) + .limit(8); + + const result = await unionQuery; + + expect(result).toHaveLength(8); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + ]); + + // union should throw if selected fields are not in the same order + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).union( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (union) as function', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + union( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (union all) from query builder', async ({ db }) => { + await setupSetOperationTest(db); + + const sq = db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).orderBy(asc(sql`id`)).limit(2).unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).orderBy(asc(sql`id`)).limit(2), + ).as('sq'); + + const result = await db.select().from(sq).orderBy(asc(sql`id`)).limit(3); + + expect(result).toHaveLength(3); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).limit(2).unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (union all) as function', async ({ db }) => { + await setupSetOperationTest(db); + + const sq = unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).limit(1); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + unionAll( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (intersect) from query builder', async ({ db }) => { + await setupSetOperationTest(db); + + const sq = db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ) + .as('sq'); + + const result = await db.select().from(sq).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (intersect) as function', async ({ db }) => { + await setupSetOperationTest(db); + + const sq = await intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).limit(1); + + expect(result).toHaveLength(0); + + expect(result).toEqual([]); + + await expect((async () => { + intersect( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(1); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (except) from query builder', async ({ db }) => { + await setupSetOperationTest(db); + + const result = await db + .select() + .from(citiesTable).except( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + }); + + test.concurrent('set operations (except) as function', async ({ db }) => { + await setupSetOperationTest(db); + + const sq = except( + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).as('sq'); + + const result = await db.select().from(sq).limit(3); + + expect(result).toHaveLength(2); + + expect(result).toContainEqual({ id: 2, name: 'London' }); + expect(result).toContainEqual({ id: 3, name: 'Tampa' }); + + await expect((async () => { + except( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable), + db + .select({ id: citiesTable.id, name: citiesTable.name }) + .from(citiesTable).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).limit(3); + })()).rejects.toThrowError(); + }); + + test.concurrent('define constraints as array', async ({ db }) => { + const table = singlestoreTable('name', { + id: int(), + }, (t) => [ + index('name').on(t.id), + primaryKey({ columns: [t.id], name: 'custom' }), + ]); + + const { indexes, primaryKeys } = getTableConfig(table); + + expect(indexes.length).toBe(1); + expect(primaryKeys.length).toBe(1); + }); + + test.concurrent('define constraints as array inside third param', async ({ db }) => { + const table = singlestoreTable('name', { + id: int(), + }, (t) => [ + [index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' })], + ]); + + const { indexes, primaryKeys } = getTableConfig(table); + + expect(indexes.length).toBe(1); + expect(primaryKeys.length).toBe(1); + }); + + test.skip('set operations (mixed) from query builder', async ({ db }) => { + await setupSetOperationTest(db); + + const sq1 = unionAll( + db + .select() + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ).as('sq1'); + + const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); + + const sq3 = await db.select().from(sq2).limit(1).offset(1).as('sq3'); + + const result = await db + .select() + .from(citiesTable) + .except( + db + .select() + .from(sq3), + ); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 3, name: 'Tampa' }, + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(citiesTable).except( + ({ unionAll }) => + unionAll( + db + .select({ name: citiesTable.name, id: citiesTable.id }) + .from(citiesTable).where(gt(citiesTable.id, 1)), + db.select().from(citiesTable).where(eq(citiesTable.id, 2)), + ), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('set operations (mixed all) as function with subquery', async ({ db }) => { + await setupSetOperationTest(db); + + const sq1 = except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 7)), + ).as('sq1'); + + const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); + + const sq3 = await db.select().from(sq2).limit(1).as('sq3'); + + const result = await union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db.select().from(sq3), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(4); + + // multiple results possible as a result of the filters >= 5 and ==7 because singlestore doesn't guarantee order + // dynamically validate results + const hasValidEntry = (entry: { id: number; name: string }) => { + if (entry.id === 1) return entry.name === 'John'; + if (entry.id > 1 && entry.id < 5) return entry.name === 'Tampa' || entry.name === 'London'; + if (entry.id >= 5 && entry.id !== 7) return true; // Accept any entry with id >= 5 and not 7 + return false; + }; + + for (const entry of result) { + expect(hasValidEntry(entry)).toBe(true); + } + + await expect((async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 7)), + ).limit(1), + db + .select().from(citiesTable).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); + }); + + test.concurrent('aggregate function: count', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: count() }).from(table); + const result2 = await db.select({ value: count(table.a) }).from(table); + const result3 = await db.select({ value: countDistinct(table.name) }).from(table); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(5); + expect(result3[0]?.value).toBe(6); + }); + + test.concurrent('aggregate function: avg', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: avg(table.b) }).from(table); + const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); + const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('33.3333'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('42.5000'); + }); + + test.concurrent('aggregate function: sum', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBe(null); + expect(result3[0]?.value).toBe('170'); + }); + + test.concurrent('aggregate function: max', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: max(table.b) }).from(table); + const result2 = await db.select({ value: max(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBe(null); + }); + + test.concurrent('aggregate function: min', async ({ db }) => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: min(table.b) }).from(table); + const result2 = await db.select({ value: min(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBe(null); + }); + + test.concurrent('simple vector search', async ({ db }) => { + const table = vectorSearchTable; + const embedding = [0.42, 0.93, 0.88, 0.57, 0.32, 0.64, 0.76, 0.52, 0.19, 0.81]; // ChatGPT's 10 dimension embedding for "dogs are cool" not sure how accurate but it works + await setupVectorSearchTest(db); + + const withRankEuclidean = db.select({ + id: table.id, + text: table.text, + rank: sql`row_number() over (order by ${euclideanDistance(table.embedding, embedding)})`.as('rank'), + }).from(table).as('with_rank'); + const withRankDotProduct = db.select({ + id: table.id, + text: table.text, + rank: sql`row_number() over (order by ${dotProduct(table.embedding, embedding)})`.as('rank'), + }).from(table).as('with_rank'); + const result1 = await db.select({ id: withRankEuclidean.id, text: withRankEuclidean.text }).from( + withRankEuclidean, + ).where(eq(withRankEuclidean.rank, 1)); + const result2 = await db.select({ id: withRankDotProduct.id, text: withRankDotProduct.text }).from( + withRankDotProduct, + ).where(eq(withRankDotProduct.rank, 1)); + + expect(result1.length).toEqual(1); + expect(result1[0]).toEqual({ id: 1, text: 'I like dogs' }); + + expect(result2.length).toEqual(1); + expect(result2[0]).toEqual({ id: 1, text: 'I like dogs' }); + }); + + test.concurrent('test $onUpdateFn and $onUpdate works as $default', async ({ db }) => { + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial not null primary key, + name text not null, + update_counter integer default 1 not null, + updated_at datetime(6), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 5000; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + test.concurrent('test $onUpdateFn and $onUpdate works updating', async ({ db }) => { + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial not null primary key, + name text not null, + update_counter integer default 1 not null, + updated_at datetime(6), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { id: 1, name: 'John', alwaysNull: 'this will will be null after updating' }, + { id: 2, name: 'Jane' }, + { id: 3, name: 'Jack' }, + { id: 4, name: 'Jill' }, + ]); + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + const initial = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from(usersOnUpdate); + + await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); + + const justDates = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from( + usersOnUpdate, + ); + + const response = await db.select().from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { id: 1, name: 'Angel', updateCounter: 2, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 2, name: 'Jane', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 3, name: 'Jack', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + { id: 4, name: 'Jill', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, + ]); + + const initialRecord = initial.find((record) => record.id === 1); + const updatedRecord = justDates.find((record) => record.id === 1); + + expect(initialRecord?.updatedAt?.valueOf()).not.toBe(updatedRecord?.updatedAt?.valueOf()); + + const msDelay = 5000; + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } + }); + + // mySchema tests + test.concurrent('mySchema :: select all fields', async ({ db }) => { + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + // not timezone based timestamp, thats why it should not work here + // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test.concurrent('mySchema :: select sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('mySchema :: select typed sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); + }); + + test.concurrent('mySchema :: select distinct', async ({ db }) => { + const usersDistinctTable = singlestoreTable('users_distinct', { + id: int('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users = await db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + }); + + test.concurrent('mySchema :: insert returning sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + const [result, _] = await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + + expect(result.insertId).toBe(1); + }); + + test.concurrent('mySchema :: delete returning sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(users[0].affectedRows).toBe(1); + }); + + test.concurrent('mySchema :: update with returning partial', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( + eq(usersMySchemaTable.name, 'John'), + ); + + const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ) + .where( + eq(usersMySchemaTable.id, 1), + ); + + expect(updatedUsers[0].changedRows).toBe(1); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); + }); + + test.concurrent('mySchema :: delete with returning all fields', async ({ db }) => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); + + expect(deletedUser[0].affectedRows).toBe(1); + }); + + test.concurrent('mySchema :: insert + select', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersMySchemaTable).values({ id: 2, name: 'Jane' }); + const result2 = await db.select().from(usersMySchemaTable).orderBy(asc(usersMySchemaTable.id)); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); + }); + + test.concurrent('mySchema :: insert with overridden default values', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John', verified: true }); + const result = await db.select().from(usersMySchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); + }); + + test.concurrent('mySchema :: insert many', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([ + { id: 1, name: 'John' }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, + { id: 3, name: 'Jane' }, + { id: 4, name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + jsonb: usersMySchemaTable.jsonb, + verified: usersMySchemaTable.verified, + }).from(usersMySchemaTable) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); + }); + + test.concurrent('mySchema :: select with group by as field', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { + id: 3, + name: 'Jane', + }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.name) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); + }); + + test.concurrent('mySchema :: select with group by as column + sql', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { + id: 3, + name: 'Jane', + }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`) + .orderBy(asc(usersMySchemaTable.id)); + + expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + }); + + test.concurrent('mySchema :: build query', async ({ db }) => { + const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: + `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`mySchema\`.\`userstest\`.\`id\`, \`mySchema\`.\`userstest\`.\`name\``, + params: [], + }); + }); + + test.concurrent('mySchema :: insert with spaces', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: sql`'Jo h n'` }); + const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); + }); + + test.concurrent('mySchema :: prepared statement with placeholder in .where', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); + const stmt = db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }).from(usersMySchemaTable) + .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) + .prepare(); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + }); + + test.concurrent('mySchema :: select from tables with same name from different schema using alias', async ({ db }) => { + await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); + + await db.execute(sql`drop table if exists \`userstest\``); + await db.execute( + sql` + create table \`userstest\` ( + \`id\` serial primary key, + \`name\` text not null, + \`verified\` boolean not null default false, + \`jsonb\` json, + \`created_at\` timestamp not null default now() + ) + `, + ); + + await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); + await db.insert(usersTable).values({ id: 11, name: 'Hans' }); + + const customerAlias = alias(usersTable, 'customer'); + + const result = await db + .select().from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersMySchemaTable.id, 10)); + + expect(result).toEqual([{ + userstest: { + id: 10, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.userstest.createdAt, + }, + customer: { + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]!.customer!.createdAt, + }, + }]); + }); + + test.concurrent('insert $returningId: serial as id', async ({ db }) => { + const result = await db.insert(usersTable).values({ id: 1, name: 'John' }).$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + expect(result).toStrictEqual([{ id: 1 }]); + }); + + test.concurrent('insert $returningId: serial as id, batch insert', async ({ db }) => { + const result = await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'John1' }]) + .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + id: number; + }[]>(); + + // singlestore auto increments when batch inserting, so the ids increment by one + expect(result).toStrictEqual([{ id: 2 }, { id: 3 }]); + }); + + test.concurrent('insert $returningId: $default as primary key', async ({ db }) => { + const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; + let iterator = 0; + + const usersTableDefFn = singlestoreTable('users_default_fn', { + customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { + const value = uniqueKeys[iterator]!; + iterator++; + return value; + }), + name: text('name').notNull(), + }); + + await setupReturningFunctionsTest(db); + + const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) + // ^? + .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + customId: string; + }[]>(); + + expect(result).toStrictEqual([{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { + customId: 'dyqs529eom0iczo2efxzbcut', + }]); + }); + + test.concurrent('insert $returningId: $default as primary key with value', async ({ db }) => { + const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; + let iterator = 0; + + const usersTableDefFn = singlestoreTable('users_default_fn', { + customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { + const value = uniqueKeys[iterator]!; + iterator++; + return value; + }), + name: text('name').notNull(), + }); + + await setupReturningFunctionsTest(db); + + const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) + // ^? + .$returningId(); + + expectTypeOf(result).toEqualTypeOf<{ + customId: string; + }[]>(); + + expect(result).toStrictEqual([{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); + }); + + // TODO: Unkip this test when views are supported + /* test.skip('mySchema :: view', async ({ db }) => { + + + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); + }); */ + + test.concurrent('limit 0', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select() + .from(usersTable) + .limit(0); + + expect(users).toEqual([]); + }); + + test.concurrent('limit -1', async ({ db }) => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select() + .from(usersTable) + .limit(-1); + + expect(users.length).toBeGreaterThan(0); + }); + + test.concurrent('sql operator as cte', async ({ db }) => { + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(sql`select * from ${users} where ${users.name} = 'John'`); + const result1 = await db.with(sq1).select().from(sq1); + + const sq2 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); + const result2 = await db.with(sq2).select().from(sq1); + + expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); + expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); + }); + + test.concurrent('cross join', async ({ db }) => { + await db + .insert(usersTable) + .values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + await db + .insert(citiesTable) + .values([ + { name: 'Seattle' }, + { name: 'New York City' }, + ]); + + const result = await db + .select({ + user: usersTable.name, + city: citiesTable.name, + }) + .from(usersTable) + .crossJoin(citiesTable) + .orderBy(usersTable.name, citiesTable.name); + + expect(result).toStrictEqual([ + { city: 'New York City', user: 'Jane' }, + { city: 'Seattle', user: 'Jane' }, + { city: 'New York City', user: 'John' }, + { city: 'Seattle', user: 'John' }, + ]); + }); + + test.concurrent('left join (lateral)', async ({ db }) => { + await db + .insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const sq = db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: users2Table.cityId, + }) + .from(users2Table) + .where(eq(users2Table.cityId, citiesTable.id)) + .as('sq'); + + const res = await db + .select({ + cityId: citiesTable.id, + cityName: citiesTable.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(citiesTable) + .leftJoinLateral(sq, sql`true`) + .orderBy(citiesTable.id); + + expect(res).toStrictEqual([ + { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, + { cityId: 2, cityName: 'London', userId: null, userName: null }, + ]); + }); + + test.concurrent('inner join (lateral)', async ({ db }) => { + await db + .insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); + + const sq = db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: users2Table.cityId, + }) + .from(users2Table) + .where(eq(users2Table.cityId, citiesTable.id)) + .as('sq'); + + const res = await db + .select({ + cityId: citiesTable.id, + cityName: citiesTable.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(citiesTable) + .innerJoinLateral(sq, sql`true`); + + expect(res).toStrictEqual([ + { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, + ]); + }); + + test.concurrent('cross join (lateral)', async ({ db }) => { + await db + .insert(citiesTable) + .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { + name: 'Patrick', + cityId: 2, + }]); + + const sq = db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: users2Table.cityId, + }) + .from(users2Table) + .where(eq(users2Table.cityId, citiesTable.id)) + .as('sq'); + + const res = await db + .select({ + cityId: citiesTable.id, + cityName: citiesTable.name, + userId: sq.userId, + userName: sq.userName, + }) + .from(citiesTable) + .crossJoinLateral(sq) + .orderBy(sq.userId, citiesTable.id); + + expect(res).toStrictEqual([ + { + cityId: 1, + cityName: 'Paris', + userId: 1, + userName: 'John', + }, + { + cityId: 2, + cityName: 'London', + userId: 2, + userName: 'Jane', + }, + { + cityId: 2, + cityName: 'London', + userId: 3, + userName: 'Patrick', + }, + ]); + }); + + test.concurrent('column.as', async ({ db, push }) => { + const users = singlestoreTable('users_column_as', { + id: int('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const cities = singlestoreTable('cities_column_as', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ users, cities }); + + try { + await db.insert(cities).values([{ + id: 1, + name: 'Firstistan', + }, { + id: 2, + name: 'Secondaria', + }]); + + await db.insert(users).values([{ id: 1, name: 'First', cityId: 1 }, { + id: 2, + name: 'Second', + cityId: 2, + }, { + id: 3, + name: 'Third', + }]); + + const joinSelectReturn = await db.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)); + + expect(joinSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + } finally { + await db.execute(sql`DROP TABLE ${users}`).catch(() => null); + await db.execute(sql`DROP TABLE ${cities}`).catch(() => null); + } + }); + + test.concurrent('select from a many subquery', async ({ db, push }) => { + const users2Table = singlestoreTable('users_many_subquery', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const citiesTable = singlestoreTable('cities_many_subquery', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ citiesTable, users2Table }); + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 2 }, + ]); + + const res = await db.select({ + population: db.select({ count: count().as('count') }).from(users2Table).where( + eq(users2Table.cityId, citiesTable.id), + ).as( + 'population', + ), + name: citiesTable.name, + }).from(citiesTable); + + expectTypeOf(res).toEqualTypeOf< + { + population: number; + name: string; + }[] + >(); + + expect(res).toStrictEqual(expect.arrayContaining([{ + population: 1, + name: 'Paris', + }, { + population: 2, + name: 'London', + }])); + }); + + test.concurrent('select from a one subquery', async ({ db, push }) => { + const users2Table = singlestoreTable('users_one_subquery', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: int('city_id'), + }); + + const citiesTable = singlestoreTable('cities_one_subquery', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await push({ citiesTable, users2Table }); + + await db.insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 2 }, + ]); + + const res = await db.select({ + cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) + .as( + 'cityName', + ), + name: users2Table.name, + }).from(users2Table); + + expectTypeOf(res).toEqualTypeOf< + { + cityName: string; + name: string; + }[] + >(); + + expect(res).toStrictEqual(expect.arrayContaining([{ + cityName: 'Paris', + name: 'John', + }, { + cityName: 'London', + name: 'Jane', + }, { + cityName: 'London', + name: 'Jack', + }])); + }); + + test.concurrent('test $onUpdateFn and $onUpdate works with sql value', async ({ db, push }) => { + const users = singlestoreTable('users_on_update_sql', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updatedAt: timestamp('updated_at') + .notNull() + .$onUpdate(() => sql`current_timestamp`), + }); + + await push({ users }); + + await db.insert(users).values({ + name: 'John', + }); + const insertResp = await db.select({ updatedAt: users.updatedAt }).from(users); + await new Promise((resolve) => setTimeout(resolve, 1000)); + + const now = Date.now(); + await new Promise((resolve) => setTimeout(resolve, 1000)); + await db.update(users).set({ + name: 'John', + }); + const updateResp = await db.select({ updatedAt: users.updatedAt }).from(users); + + expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); + expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); + }); + + test.concurrent('all types', async ({ db }) => { + await db.execute(sql`drop table if exists ${allTypesTable};`); + await db.execute(sql` + CREATE TABLE \`all_types\` ( + \`scol\` serial, + \`bigint53\` bigint, + \`bigint64\` bigint, + \`bigint_string\` bigint, + \`binary\` binary, + \`boolean\` boolean, + \`char\` char, + \`date\` date, + \`date_str\` date, + \`datetime\` datetime, + \`datetime_str\` datetime, + \`decimal\` decimal, + \`decimal_num\` decimal(30), + \`decimal_big\` decimal(30), + \`double\` double, + \`float\` float, + \`int\` int, + \`json\` json, + \`med_int\` mediumint, + \`small_int\` smallint, + \`real\` real, + \`text\` text, + \`time\` time, + \`timestamp\` timestamp, + \`timestamp_str\` timestamp, + \`tiny_int\` tinyint, + \`varbin\` varbinary(16), + \`varchar\` varchar(255), + \`year\` year, + \`enum\` enum('enV1','enV2'), + \`vec_i8\` vector(5, I8), + \`vec_i16\` vector(5, I16), + \`vec_i32\` vector(5, I32), + \`vec_i64\` vector(5, I64), + \`vec_f32\` vector(5, F32), + \`vec_f64\` vector(5, F64), + shard key(\`scol\`) + ); + `); + + await db.insert(allTypesTable).values({ + serial: 1, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + binary: '1', + boolean: true, + char: 'c', + date: new Date(1741743161623), + dateStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + datetime: new Date(1741743161623), + datetimeStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + decimal: '47521', + decimalNum: 9007199254740991, + decimalBig: 5044565289845416380n, + double: 15.35325689124218, + enum: 'enV1', + float: 1.048596, + real: 1.048596, + text: 'C4-', + int: 621, + json: { + str: 'strval', + arr: ['str', 10], + }, + medInt: 560, + smallInt: 14, + time: '04:13:22', + timestamp: new Date(1741743161623), + timestampStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), + tinyInt: 7, + varbin: '1010110101001101', + varchar: 'VCHAR', + year: 2025, + vectorF32: [0.735482, -0.291647, 1.183529, -2.406378, 0.014263], + vectorF64: [ + 0.3918573842719283, + -1.682530118745203, + 2.014963587205109, + -0.005832741903218165, + 0.7841029456712038, + ], + vectorI8: [-2, 8, 127, 85, -128], + vectorI16: [-2, 8, 127, 85, -128], + vectorI32: [15342, -27894, 6271, -10385, 31056], + vectorI64: [ + 4829301283746501823n, + -7203847501293847201n, + 1623847561928374650n, + -5938475628374651983n, + 803745610293847561n, + ], + }); + + const rawRes = await db.select().from(allTypesTable); + + type ExpectedType = { + serial: number; + bigint53: number | null; + bigint64: bigint | null; + bigintString: string | null; + binary: string | null; + boolean: boolean | null; + char: string | null; + date: Date | null; + dateStr: string | null; + datetime: Date | null; + datetimeStr: string | null; + decimal: string | null; + decimalNum: number | null; + decimalBig: bigint | null; + double: number | null; + float: number | null; + int: number | null; + json: unknown; + medInt: number | null; + smallInt: number | null; + real: number | null; + text: string | null; + time: string | null; + timestamp: Date | null; + timestampStr: string | null; + tinyInt: number | null; + varbin: string | null; + varchar: string | null; + year: number | null; + enum: 'enV1' | 'enV2' | null; + vectorI8: number[] | null; + vectorI16: number[] | null; + vectorI32: number[] | null; + vectorI64: bigint[] | null; + vectorF32: number[] | null; + vectorF64: number[] | null; + }[]; + + const expectedRes: ExpectedType = [ + { + serial: 1, + bigint53: 9007199254740991, + bigint64: 5044565289845416380n, + bigintString: '5044565289845416380', + binary: '1', + boolean: true, + char: 'c', + date: new Date('2025-03-12T00:00:00.000Z'), + dateStr: '2025-03-12', + datetime: new Date('2025-03-12T01:32:41.000Z'), + datetimeStr: '2025-03-12 01:32:41', + decimal: '47521', + decimalNum: 9007199254740991, + decimalBig: 5044565289845416380n, + double: 15.35325689124218, + float: 1.0486, + int: 621, + json: { arr: ['str', 10], str: 'strval' }, + medInt: 560, + smallInt: 14, + real: 1.048596, + text: 'C4-', + time: '04:13:22', + timestamp: new Date('2025-03-12T01:32:41.000Z'), + timestampStr: '2025-03-12 01:32:41', + tinyInt: 7, + varbin: '1010110101001101', + varchar: 'VCHAR', + year: 2025, + enum: 'enV1', + vectorF32: [...new Float32Array([0.735482, -0.291647, 1.183529, -2.406378, 0.014263])], + vectorF64: [ + 0.3918573842719283, + -1.682530118745203, + 2.014963587205109, + -0.005832741903218165, + 0.7841029456712038, + ], + vectorI8: [-2, 8, 127, 85, -128], + vectorI16: [-2, 8, 127, 85, -128], + vectorI32: [15342, -27894, 6271, -10385, 31056], + vectorI64: [ + 4829301283746501823n, + -7203847501293847201n, + 1623847561928374650n, + -5938475628374651983n, + 803745610293847561n, + ], + }, + ]; + + expectTypeOf(rawRes).toEqualTypeOf(); + expect(rawRes).toStrictEqual(expectedRes); + }); + }); +} diff --git a/integration-tests/tests/singlestore/common-cache.ts b/integration-tests/tests/singlestore/common-cache.ts new file mode 100644 index 0000000000..7f69407ce5 --- /dev/null +++ b/integration-tests/tests/singlestore/common-cache.ts @@ -0,0 +1,284 @@ +import { eq, sql } from 'drizzle-orm'; +import { alias, boolean, int, json, serial, singlestoreTable, text, timestamp } from 'drizzle-orm/singlestore-core'; +import { describe, expect, vi } from 'vitest'; +import type { Test } from './instrumentation'; + +const usersTable = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: json('jsonb').$type(), + createdAt: timestamp('created_at').notNull().defaultNow(), +}); + +const postsTable = singlestoreTable('posts', { + id: serial().primaryKey(), + description: text().notNull(), + userId: int('city_id'), +}); + +export function tests(test: Test) { + describe('common_cache', () => { + test.beforeEach(async ({ caches, push }) => { + const { explicit, all } = caches; + await Promise.all([ + explicit.execute(sql`drop table if exists users`), + explicit.execute(sql`drop table if exists posts`), + ]); + await explicit.$cache?.invalidate({ tables: 'users' }); + await all.$cache?.invalidate({ tables: 'users' }); + // public users + await Promise.all([ + push({ usersTable }), + push({ postsTable }), + ]); + }); + + test.concurrent('test force invalidate', async ({ caches }) => { + const { explicit: db } = caches; + + using spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); + await db.$cache?.invalidate({ tables: 'users' }); + expect(spyInvalidate).toHaveBeenCalledTimes(1); + }); + + test.concurrent('default global config - no cache should be hit', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test.concurrent('default global config + enable cache on select: get, put', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache(); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test.concurrent('default global config + enable cache on select + write: get, put, onMutate', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + + spyPut.mockClear(); + spyGet.mockClear(); + spyInvalidate.mockClear(); + + await db.insert(usersTable).values({ name: 'John' }); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(1); + }); + + test.concurrent('default global config + enable cache on select + disable invalidate: get, put', async ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + + await db.insert(usersTable).values({ name: 'John' }); + + // invalidate force + await db.$cache?.invalidate({ tags: ['custom'] }); + }); + + test.concurrent('global: true + disable cache', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache(false); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test.concurrent('global: true - cache should be hit', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test.concurrent('global: true - cache: false on select - no cache hit', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache(false); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + }); + + test.concurrent('global: true - disable invalidate - cache hit + no invalidate', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ autoInvalidate: false }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + + spyPut.mockClear(); + spyGet.mockClear(); + spyInvalidate.mockClear(); + + await db.insert(usersTable).values({ name: 'John' }); + + expect(spyPut).toHaveBeenCalledTimes(0); + expect(spyGet).toHaveBeenCalledTimes(0); + expect(spyInvalidate).toHaveBeenCalledTimes(1); + }); + + test.concurrent('global: true - with custom tag', async ({ caches }) => { + const { all: db } = caches; + + // @ts-expect-error + using spyPut = vi.spyOn(db.$cache, 'put'); + // @ts-expect-error + using spyGet = vi.spyOn(db.$cache, 'get'); + // @ts-expect-error + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + + await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); + + expect(spyPut).toHaveBeenCalledTimes(1); + expect(spyGet).toHaveBeenCalledTimes(1); + expect(spyInvalidate).toHaveBeenCalledTimes(0); + + await db.insert(usersTable).values({ name: 'John' }); + + // invalidate force + await db.$cache?.invalidate({ tags: ['custom'] }); + }); + + // check select used tables + test.concurrent('check simple select used tables', ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); + // @ts-expect-error + expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); + }); + // check select+join used tables + test.concurrent('select+join', ({ caches }) => { + const { explicit: db } = caches; + + // @ts-expect-error + expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) + .toStrictEqual(['users', 'posts']); + expect( + // @ts-expect-error + db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), + ).toStrictEqual(['users', 'posts']); + }); + // check select+2join used tables + test.concurrent('select+2joins', ({ caches }) => { + const { explicit: db } = caches; + + expect( + db.select().from(usersTable).leftJoin( + postsTable, + eq(usersTable.id, postsTable.userId), + ).leftJoin( + alias(postsTable, 'post2'), + eq(usersTable.id, postsTable.userId), + ) + // @ts-expect-error + .getUsedTables(), + ) + .toStrictEqual(['users', 'posts']); + expect( + db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( + alias(postsTable, 'post2'), + eq(usersTable.id, postsTable.userId), + // @ts-expect-error + ).getUsedTables(), + ).toStrictEqual(['users', 'posts']); + }); + // select subquery used tables + test.concurrent('select+join', ({ caches }) => { + const { explicit: db } = caches; + + const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); + db.select().from(sq); + + // @ts-expect-error + expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); + }); + }); +} diff --git a/integration-tests/tests/singlestore/common-rqb.ts b/integration-tests/tests/singlestore/common-rqb.ts new file mode 100644 index 0000000000..06d4f98c3c --- /dev/null +++ b/integration-tests/tests/singlestore/common-rqb.ts @@ -0,0 +1,552 @@ +/* eslint-disable @typescript-eslint/no-unused-vars */ +import 'dotenv/config'; +import { sql } from 'drizzle-orm'; +import { describe, expect } from 'vitest'; +import type { Test } from './instrumentation'; +import { rqbPost, rqbUser } from './schema'; + +export function tests(test: Test) { + const connDict: Record = {}; + + describe('common', () => { + test.beforeEach(async ({ db, client, push }) => { + const connKey = `${client.config.user}:${client.config.password}@${client.config.host}:${client.config.port}`; + if (connDict[connKey] === undefined) { + connDict[connKey] = false; + + await Promise.all([ + db.execute(sql`drop table if exists ${rqbUser};`), + db.execute(sql`drop table if exists ${rqbPost};`), + ]); + + await Promise.all([ + push({ rqbUser }), + push({ rqbPost }), + ]); + } + + await Promise.all([ + db.execute(sql`truncate table ${rqbUser};`), + db.execute(sql`truncate table ${rqbPost};`), + ]); + }); + + test.concurrent('RQB v2 simple find first - no rows', async ({ db }) => { + const result = await db.query.rqbUser.findFirst(); + + expect(result).toStrictEqual(undefined); + }); + + test.concurrent('RQB v2 simple find first - multiple rows', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const result = await db.query.rqbUser.findFirst({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: date, + name: 'Second', + }); + }); + + test.concurrent('RQB v2 simple find first - with relation', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + const result = await db.query.rqbUser.findFirst({ + with: { + posts: { + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual({ + id: 1, + createdAt: date, + name: 'First', + posts: [{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }], + }); + }); + + test.concurrent('RQB v2 simple find first - placeholders', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const query = db.query.rqbUser.findFirst({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: date, + name: 'Second', + }); + }); + + test.concurrent('RQB v2 simple find many - no rows', async ({ db }) => { + const result = await db.query.rqbUser.findMany(); + + expect(result).toStrictEqual([]); + }); + + test.concurrent('RQB v2 simple find many - multiple rows', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const result = await db.query.rqbUser.findMany({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: date, + name: 'Second', + }, { + id: 1, + createdAt: date, + name: 'First', + }]); + }); + + test.concurrent('RQB v2 simple find many - with relation', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + const result = await db.query.rqbPost.findMany({ + with: { + author: true, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + author: { + id: 1, + createdAt: date, + name: 'First', + }, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + author: { + id: 1, + createdAt: date, + name: 'First', + }, + }]); + }); + + test.concurrent('RQB v2 simple find many - placeholders', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + const query = db.query.rqbUser.findMany({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: date, + name: 'Second', + }]); + }); + + test.concurrent('RQB v2 transaction find first - no rows', async ({ db }) => { + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findFirst(); + + expect(result).toStrictEqual(undefined); + }); + }); + + test.concurrent('RQB v2 transaction find first - multiple rows', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findFirst({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: date, + name: 'Second', + }); + }); + }); + + test.concurrent('RQB v2 transaction find first - with relation', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findFirst({ + with: { + posts: { + orderBy: { + id: 'asc', + }, + }, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual({ + id: 1, + createdAt: date, + name: 'First', + posts: [{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }], + }); + }); + }); + + test.concurrent('RQB v2 transaction find first - placeholders', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { + const query = db.query.rqbUser.findFirst({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual({ + id: 2, + createdAt: date, + name: 'Second', + }); + }); + }); + + test.concurrent('RQB v2 transaction find many - no rows', async ({ db }) => { + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findMany(); + + expect(result).toStrictEqual([]); + }); + }); + + test.concurrent('RQB v2 transaction find many - multiple rows', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { + const result = await db.query.rqbUser.findMany({ + orderBy: { + id: 'desc', + }, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: date, + name: 'Second', + }, { + id: 1, + createdAt: date, + name: 'First', + }]); + }); + }); + + test.concurrent('RQB v2 transaction find many - with relation', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.insert(rqbPost).values([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + }]); + + await db.transaction(async (db) => { + const result = await db.query.rqbPost.findMany({ + with: { + author: true, + }, + orderBy: { + id: 'asc', + }, + }); + + expect(result).toStrictEqual([{ + id: 1, + userId: 1, + createdAt: date, + content: null, + author: { + id: 1, + createdAt: date, + name: 'First', + }, + }, { + id: 2, + userId: 1, + createdAt: date, + content: 'Has message this time', + author: { + id: 1, + createdAt: date, + name: 'First', + }, + }]); + }); + }); + + test.concurrent('RQB v2 transaction find many - placeholders', async ({ db }) => { + const date = new Date(120000); + + await db.insert(rqbUser).values([{ + id: 1, + createdAt: date, + name: 'First', + }, { + id: 2, + createdAt: date, + name: 'Second', + }]); + + await db.transaction(async (db) => { + const query = db.query.rqbUser.findMany({ + where: { + id: { + eq: sql.placeholder('filter'), + }, + }, + orderBy: { + id: 'asc', + }, + }).prepare(); + + const result = await query.execute({ + filter: 2, + }); + + expect(result).toStrictEqual([{ + id: 2, + createdAt: date, + name: 'Second', + }]); + }); + }); + }); +} diff --git a/integration-tests/tests/singlestore/common.ts b/integration-tests/tests/singlestore/common.ts new file mode 100644 index 0000000000..b0a2ca713f --- /dev/null +++ b/integration-tests/tests/singlestore/common.ts @@ -0,0 +1,14 @@ +import { tests as tests1 } from './common-1'; +import { tests as tests2 } from './common-2'; +import { tests as tests3 } from './common-rqb'; +import type { Test } from './instrumentation'; + +export const tests = (test: Test, exclude: string[] = []) => { + test.beforeEach(({ task, skip }) => { + if (exclude.includes(task.name)) skip(); + }); + + tests1(test); + tests2(test); + tests3(test); +}; diff --git a/integration-tests/tests/singlestore/instrumentation.ts b/integration-tests/tests/singlestore/instrumentation.ts new file mode 100644 index 0000000000..4abca92d67 --- /dev/null +++ b/integration-tests/tests/singlestore/instrumentation.ts @@ -0,0 +1,467 @@ +import { + type AnyRelationsBuilderConfig, + defineRelations, + type ExtractTablesFromSchema, + type ExtractTablesWithRelations, + getTableName, + is, + type RelationsBuilder, + type RelationsBuilderConfig, + Table, +} from 'drizzle-orm'; +import { Cache, type MutationOption } from 'drizzle-orm/cache/core'; +import type { CacheConfig } from 'drizzle-orm/cache/core/types'; +import { drizzle as drizzleSingleStore, type SingleStoreDatabase } from 'drizzle-orm/singlestore'; +import type { SingleStoreEnumColumn, SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-core'; +import type { SingleStoreView } from 'drizzle-orm/singlestore-core/view'; +import { drizzle as drizzleProxy } from 'drizzle-orm/singlestore-proxy'; +import Keyv from 'keyv'; +import { type Connection, createConnection } from 'mysql2/promise'; +import { test as base } from 'vitest'; +import relations from './relations'; + +// oxlint-disable-next-line drizzle-internal/require-entity-kind +export class TestCache extends Cache { + private globalTtl: number = 1000; + private usedTablesPerKey: Record = {}; + + constructor(private readonly strat: 'explicit' | 'all', private kv: Keyv = new Keyv()) { + super(); + } + + override strategy() { + return this.strat; + } + + override async get(key: string, _tables: string[], _isTag: boolean): Promise { + const res = await this.kv.get(key) ?? undefined; + return res; + } + + override async put( + key: string, + response: any, + tables: string[], + isTag: boolean, + config?: CacheConfig, + ): Promise { + await this.kv.set(key, response, config ? config.ex : this.globalTtl); + for (const table of tables) { + const keys = this.usedTablesPerKey[table]; + if (keys === undefined) { + this.usedTablesPerKey[table] = [key]; + } else { + keys.push(key); + } + } + } + + override async onMutate(params: MutationOption): Promise { + const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; + const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; + + const keysToDelete = new Set(); + + for (const table of tablesArray) { + const tableName = is(table, Table) ? getTableName(table) : table as string; + const keys = this.usedTablesPerKey[tableName] ?? []; + for (const key of keys) keysToDelete.add(key); + } + + if (keysToDelete.size > 0 || tagsArray.length > 0) { + for (const tag of tagsArray) { + await this.kv.delete(tag); + } + + for (const key of keysToDelete) { + await this.kv.delete(key); + for (const table of tablesArray) { + const tableName = is(table, Table) ? getTableName(table) : table as string; + this.usedTablesPerKey[tableName] = []; + } + } + } + } +} + +// eslint-disable-next-line drizzle-internal/require-entity-kind +class ServerSimulator { + constructor(private db: Connection) {} + + async query(sql: string, params: any[], method: 'all' | 'execute') { + if (method === 'all') { + try { + const result = await this.db.query({ + sql, + values: params, + rowsAsArray: true, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result[0] as any }; + } catch (e: any) { + return { error: e }; + } + } else if (method === 'execute') { + try { + const result = await this.db.query({ + sql, + values: params, + typeCast: function(field: any, next: any) { + if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { + return field.string(); + } + return next(); + }, + }); + + return { data: result as any }; + } catch (e: any) { + return { error: e }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + async migrations(queries: string[]) { + await this.db.query('START TRANSACTION'); + try { + for (const query of queries) { + await this.db.query(query); + } + await this.db.query('COMMIT'); + } catch (e) { + await this.db.query('ROLLBACK'); + throw e; + } + + return {}; + } +} + +export const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, +) => { + const { diff } = await import('../../../drizzle-kit/tests/singlestore/mocks' as string); + + const res = await diff({}, schema, []); + + for (const s of res.sqlStatements) { + await query(s, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +export const prepareSingleStoreClient = async (uri: string) => { + const client = await createConnection({ + uri, + supportBigNumbers: true, + multipleStatements: true, + }); + await client.connect(); + + await Promise.all([ + client.query('drop database if exists "mySchema";'), + client.query('drop database if exists drizzle;'), + ]); + + await Promise.all([ + client.query('create database "mySchema";'), + client.query('create database drizzle'), + ]); + + await client.changeUser({ database: 'drizzle' }); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res[0] as any[]; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => client.query(x)), + ).then((results) => [results] as any); + }; + + return { client, query, batch }; +}; + +export const prepareProxy = async (uri: string) => { + const client = await createConnection({ + uri, + supportBigNumbers: true, + multipleStatements: true, + }); + await client.connect(); + + await Promise.all([ + client.query('drop database if exists "mySchema";'), + client.query('drop database if exists drizzle;'), + ]); + + await Promise.all([ + client.query('create database "mySchema";'), + client.query('create database drizzle'), + ]); + + await client.changeUser({ database: 'drizzle' }); + + const query = async (sql: string, params: any[] = []) => { + const res = await client.query(sql, params); + return res[0] as any[]; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => client.query(x)), + ).then((results) => [results] as any); + }; + + return { client, query, batch }; +}; + +const providerClosure = async (items: T[]) => { + return async () => { + while (true) { + const c = items.shift(); + if (!c) { + await new Promise((resolve) => setTimeout(resolve, 50)); + continue; + } + return { + ...c, + release: () => { + items.push(c); + }, + }; + } + }; +}; + +export const providerForSingleStore = async () => { + const url = process.env['SINGLESTORE_MANY_CONNECTION_STRING']; + if (url === undefined) throw new Error('SINGLESTORE_CONNECTION_STRING is not set.'); + const uris = url.split(';').filter((val) => val !== ''); + const clients = await Promise.all(uris.map(async (urlI) => await prepareSingleStoreClient(urlI))); + + return providerClosure(clients); +}; + +export const provideForProxy = async () => { + const url = process.env['SINGLESTORE_MANY_CONNECTION_STRING']; + if (url === undefined) throw new Error('SINGLESTORE_CONNECTION_STRING is not set.'); + const uris = url.split(';').filter((val) => val !== ''); + const clients = await Promise.all(uris.map(async (urlI) => await prepareSingleStoreClient(urlI))); + + return providerClosure(clients); +}; + +type ProviderSingleStore = Awaited>; +type ProvideForProxy = Awaited>; + +type Provider = + | ProviderSingleStore + | ProvideForProxy; + +export type SingleStoreSchema_ = Record< + string, + | SingleStoreTable + | SingleStoreEnumColumn + | SingleStoreSchema + | SingleStoreView + | unknown +>; + +const testFor = (vendor: 'singlestore' | 'proxy') => { + return base.extend<{ + provider: Provider; + kit: { + client: any; + query: (sql: string, params?: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + client: Connection; + db: SingleStoreDatabase; + push: (schema: any) => Promise; + createDB: { + ( + schema: S, + ): SingleStoreDatabase>>; + ( + schema: S, + cb: (helpers: RelationsBuilder>) => TConfig, + ): SingleStoreDatabase>>; + }; + caches: { + all: SingleStoreDatabase; + explicit: SingleStoreDatabase; + }; + }>({ + provider: [ + // oxlint-disable-next-line no-empty-pattern + async ({}, use) => { + const provider = vendor === 'singlestore' + ? await providerForSingleStore() + : vendor === 'proxy' + ? await provideForProxy() + : '' as never; + + await use(provider); + }, + { scope: 'file' }, + ], + kit: [ + async ({ provider }, use) => { + const { client, batch, query, release } = await provider(); + await use({ client: client, query, batch }); + release(); + }, + { scope: 'test' }, + ], + client: [ + async ({ kit }, use) => { + await use(kit.client); + }, + { scope: 'test' }, + ], + db: [ + async ({ kit }, use) => { + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from singlestore proxy server:', e.message); + throw e; + } + }; + await use(drizzleProxy(proxyHandler, { relations })); + return; + } + + const db = vendor === 'singlestore' + ? drizzleSingleStore({ client: kit.client as any, relations }) + : '' as never; + + await use(db); + }, + { scope: 'test' }, + ], + push: [ + async ({ kit }, use) => { + const push = ( + schema: any, + ) => _push(kit.query, schema); + + await use(push); + }, + { scope: 'test' }, + ], + createDB: [ + async ({ kit }, use) => { + const createDB = ( + schema: S, + cb?: ( + helpers: RelationsBuilder>, + ) => RelationsBuilderConfig>, + ) => { + const relations = cb ? defineRelations(schema, cb) : defineRelations(schema); + + if (vendor === 'singlestore') return drizzleSingleStore({ client: kit.client, relations }); + + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from singlestore proxy server:', e.message); + throw e; + } + }; + return drizzleProxy(proxyHandler, { relations }); + } + throw new Error(); + }; + + await use(createDB); + }, + { scope: 'test' }, + ], + caches: [ + async ({ kit }, use) => { + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from singlestore proxy server:', e.message); + throw e; + } + }; + const db1 = drizzleProxy(proxyHandler, { relations, cache: new TestCache('all') }); + const db2 = drizzleProxy(proxyHandler, { relations, cache: new TestCache('explicit') }); + await use({ all: db1, explicit: db2 }); + return; + } + + const config1 = { client: kit.client as any, relations, cache: new TestCache('all') }; + const config2 = { client: kit.client as any, relations, cache: new TestCache('explicit') }; + + const db1 = vendor === 'singlestore' + ? drizzleSingleStore(config1) + : '' as never; + + const db2 = vendor === 'singlestore' + ? drizzleSingleStore(config2) + : '' as never; + + await use({ all: db1, explicit: db2 }); + }, + { scope: 'test' }, + ], + }); +}; + +export const singleStoreTest = testFor('singlestore'); +export const proxyTest = testFor('proxy').extend<{ simulator: ServerSimulator }>({ + simulator: [ + async ({ client }, use) => { + const simulator = new ServerSimulator(client); + await use(simulator); + }, + { scope: 'test' }, + ], +}); + +export type Test = ReturnType; diff --git a/integration-tests/tests/replicas/singlestore.test.ts b/integration-tests/tests/singlestore/replicas.test.ts similarity index 100% rename from integration-tests/tests/replicas/singlestore.test.ts rename to integration-tests/tests/singlestore/replicas.test.ts diff --git a/integration-tests/tests/singlestore/schema.ts b/integration-tests/tests/singlestore/schema.ts index 44dbf1a74c..fb475c0761 100644 --- a/integration-tests/tests/singlestore/schema.ts +++ b/integration-tests/tests/singlestore/schema.ts @@ -12,6 +12,7 @@ export const rqbPost = singlestoreTable('post_rqb_test', { id: serial().primaryKey().notNull(), userId: bigint('user_id', { mode: 'number', + unsigned: true, }).notNull(), content: text(), createdAt: timestamp('created_at', { diff --git a/integration-tests/tests/singlestore/singlestore-cache.ts b/integration-tests/tests/singlestore/singlestore-cache.ts deleted file mode 100644 index 992849aa8a..0000000000 --- a/integration-tests/tests/singlestore/singlestore-cache.ts +++ /dev/null @@ -1,390 +0,0 @@ -import { eq, getTableName, is, sql, Table } from 'drizzle-orm'; -import type { MutationOption } from 'drizzle-orm/cache/core'; -import { Cache } from 'drizzle-orm/cache/core'; -import type { CacheConfig } from 'drizzle-orm/cache/core/types'; -import { - alias, - boolean, - int, - json, - serial, - type SingleStoreDatabase, - singlestoreTable, - text, - timestamp, -} from 'drizzle-orm/singlestore-core'; -import Keyv from 'keyv'; -import { beforeEach, describe, expect, test, vi } from 'vitest'; - -// eslint-disable-next-line drizzle-internal/require-entity-kind -export class TestGlobalCache extends Cache { - private globalTtl: number = 1000; - private usedTablesPerKey: Record = {}; - - constructor(private kv: Keyv = new Keyv()) { - super(); - } - - override strategy(): 'explicit' | 'all' { - return 'all'; - } - override async get(key: string, _tables: string[], _isTag: boolean): Promise { - const res = await this.kv.get(key) ?? undefined; - return res; - } - override async put( - key: string, - response: any, - tables: string[], - isTag: boolean, - config?: CacheConfig, - ): Promise { - await this.kv.set(key, response, config ? config.ex : this.globalTtl); - for (const table of tables) { - const keys = this.usedTablesPerKey[table]; - if (keys === undefined) { - this.usedTablesPerKey[table] = [key]; - } else { - keys.push(key); - } - } - } - override async onMutate(params: MutationOption): Promise { - const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; - const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; - - const keysToDelete = new Set(); - - for (const table of tablesArray) { - const tableName = is(table, Table) ? getTableName(table) : table as string; - const keys = this.usedTablesPerKey[tableName] ?? []; - for (const key of keys) keysToDelete.add(key); - } - - if (keysToDelete.size > 0 || tagsArray.length > 0) { - for (const tag of tagsArray) { - await this.kv.delete(tag); - } - - for (const key of keysToDelete) { - await this.kv.delete(key); - for (const table of tablesArray) { - const tableName = is(table, Table) ? getTableName(table) : table as string; - this.usedTablesPerKey[tableName] = []; - } - } - } - } -} - -// eslint-disable-next-line drizzle-internal/require-entity-kind -export class TestCache extends TestGlobalCache { - override strategy(): 'explicit' | 'all' { - return 'explicit'; - } -} - -type TestSingleStoreDB = SingleStoreDatabase; - -declare module 'vitest' { - interface TestContext { - cachedSingleStore: { - db: TestSingleStoreDB; - dbGlobalCached: TestSingleStoreDB; - }; - } -} - -const usersTable = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); - -const postsTable = singlestoreTable('posts', { - id: serial().primaryKey(), - description: text().notNull(), - userId: int('city_id'), -}); - -export function tests() { - describe('common_cache', () => { - beforeEach(async (ctx) => { - const { db, dbGlobalCached } = ctx.cachedSingleStore; - await db.execute(sql`drop table if exists users`); - await db.execute(sql`drop table if exists posts`); - await db.$cache?.invalidate({ tables: 'users' }); - await dbGlobalCached.$cache?.invalidate({ tables: 'users' }); - // public users - await db.execute( - sql` - create table users ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb json, - created_at timestamp not null default now() - ) - `, - ); - await db.execute( - sql` - create table posts ( - id serial primary key, - description text not null, - user_id int - ) - `, - ); - }); - - test('test force invalidate', async (ctx) => { - const { db } = ctx.cachedSingleStore; - - const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); - await db.$cache?.invalidate({ tables: 'users' }); - expect(spyInvalidate).toHaveBeenCalledTimes(1); - }); - - test('default global config - no cache should be hit', async (ctx) => { - const { db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('default global config + enable cache on select: get, put', async (ctx) => { - const { db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache(); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { - const { db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - - spyPut.mockClear(); - spyGet.mockClear(); - spyInvalidate.mockClear(); - - await db.insert(usersTable).values({ name: 'John' }); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(1); - }); - - test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { - const { db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - - await db.insert(usersTable).values({ name: 'John' }); - - // invalidate force - await db.$cache?.invalidate({ tags: ['custom'] }); - }); - - test('global: true + disable cache', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache(false); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('global: true - cache should be hit', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('global: true - cache: false on select - no cache hit', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache(false); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - }); - - test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ autoInvalidate: false }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - - spyPut.mockClear(); - spyGet.mockClear(); - spyInvalidate.mockClear(); - - await db.insert(usersTable).values({ name: 'John' }); - - expect(spyPut).toHaveBeenCalledTimes(0); - expect(spyGet).toHaveBeenCalledTimes(0); - expect(spyInvalidate).toHaveBeenCalledTimes(1); - }); - - test('global: true - with custom tag', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSingleStore; - - // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); - // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); - // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); - - await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); - - expect(spyPut).toHaveBeenCalledTimes(1); - expect(spyGet).toHaveBeenCalledTimes(1); - expect(spyInvalidate).toHaveBeenCalledTimes(0); - - await db.insert(usersTable).values({ name: 'John' }); - - // invalidate force - await db.$cache?.invalidate({ tags: ['custom'] }); - }); - - // check select used tables - test('check simple select used tables', (ctx) => { - const { db } = ctx.cachedSingleStore; - - // @ts-expect-error - expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); - // @ts-expect-error - expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); - }); - // check select+join used tables - test('select+join', (ctx) => { - const { db } = ctx.cachedSingleStore; - - // @ts-expect-error - expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) - .toStrictEqual(['users', 'posts']); - expect( - // @ts-expect-error - db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), - ).toStrictEqual(['users', 'posts']); - }); - // check select+2join used tables - test('select+2joins', (ctx) => { - const { db } = ctx.cachedSingleStore; - - expect( - db.select().from(usersTable).leftJoin( - postsTable, - eq(usersTable.id, postsTable.userId), - ).leftJoin( - alias(postsTable, 'post2'), - eq(usersTable.id, postsTable.userId), - ) - // @ts-expect-error - .getUsedTables(), - ) - .toStrictEqual(['users', 'posts']); - expect( - db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( - alias(postsTable, 'post2'), - eq(usersTable.id, postsTable.userId), - // @ts-expect-error - ).getUsedTables(), - ).toStrictEqual(['users', 'posts']); - }); - // select subquery used tables - test('select+join', (ctx) => { - const { db } = ctx.cachedSingleStore; - - const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); - db.select().from(sq); - - // @ts-expect-error - expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); - }); - }); -} diff --git a/integration-tests/tests/singlestore/singlestore-common.ts b/integration-tests/tests/singlestore/singlestore-common.ts deleted file mode 100644 index b3524c7250..0000000000 --- a/integration-tests/tests/singlestore/singlestore-common.ts +++ /dev/null @@ -1,4691 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ -import 'dotenv/config'; -import Docker from 'dockerode'; -import { - and, - asc, - avg, - avgDistinct, - count, - countDistinct, - eq, - exists, - getTableColumns, - gt, - gte, - inArray, - lt, - max, - min, - Name, - notInArray, - placeholder, - sql, - sum, - sumDistinct, - TransactionRollbackError, -} from 'drizzle-orm'; -import type { SingleStoreDatabase } from 'drizzle-orm/singlestore-core'; -import { - alias, - bigint, - binary, - boolean, - char, - date, - datetime, - decimal, - double, - except, - float, - getTableConfig, - index, - int, - intersect, - json, - mediumint, - primaryKey, - real, - serial, - singlestoreEnum, - singlestoreSchema, - singlestoreTable, - singlestoreTableCreator, - /* singlestoreView, */ - smallint, - text, - time, - timestamp, - tinyint, - union, - unionAll, - unique, - uniqueIndex, - uniqueKeyName, - varbinary, - varchar, - vector, - year, -} from 'drizzle-orm/singlestore-core'; -import { dotProduct, euclideanDistance } from 'drizzle-orm/singlestore-core/expressions'; -import { migrate } from 'drizzle-orm/singlestore/migrator'; -import getPort from 'get-port'; -import { v4 as uuid } from 'uuid'; -import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; -import { Expect, toLocalDate } from '~/utils.ts'; -import type { Equal } from '~/utils.ts'; -import type relations from './relations'; -import { rqbPost, rqbUser } from './schema'; - -type TestSingleStoreDB = SingleStoreDatabase; - -declare module 'vitest' { - interface TestContext { - singlestore: { - db: TestSingleStoreDB; - }; - } -} - -const ENABLE_LOGGING = false; - -const allTypesTable = singlestoreTable('all_types', { - serial: serial('scol'), - bigint53: bigint('bigint53', { - mode: 'number', - }), - bigint64: bigint('bigint64', { - mode: 'bigint', - }), - bigintString: bigint('bigint_string', { - mode: 'string', - }), - binary: binary('binary'), - boolean: boolean('boolean'), - char: char('char'), - date: date('date', { - mode: 'date', - }), - dateStr: date('date_str', { - mode: 'string', - }), - datetime: datetime('datetime', { - mode: 'date', - }), - datetimeStr: datetime('datetime_str', { - mode: 'string', - }), - decimal: decimal('decimal'), - decimalNum: decimal('decimal_num', { - scale: 30, - mode: 'number', - }), - decimalBig: decimal('decimal_big', { - scale: 30, - mode: 'bigint', - }), - double: double('double'), - float: float('float'), - int: int('int'), - json: json('json'), - medInt: mediumint('med_int'), - smallInt: smallint('small_int'), - real: real('real'), - text: text('text'), - time: time('time'), - timestamp: timestamp('timestamp', { - mode: 'date', - }), - timestampStr: timestamp('timestamp_str', { - mode: 'string', - }), - tinyInt: tinyint('tiny_int'), - varbin: varbinary('varbin', { - length: 16, - }), - varchar: varchar('varchar', { - length: 255, - }), - year: year('year'), - enum: singlestoreEnum('enum', ['enV1', 'enV2']), - vectorI8: vector('vec_i8', { - dimensions: 5, - elementType: 'I8', - }), - vectorI16: vector('vec_i16', { - dimensions: 5, - elementType: 'I16', - }), - vectorI32: vector('vec_i32', { - dimensions: 5, - elementType: 'I32', - }), - vectorI64: vector('vec_i64', { - dimensions: 5, - elementType: 'I64', - }), - vectorF32: vector('vec_f32', { - dimensions: 5, - elementType: 'F32', - }), - vectorF64: vector('vec_f64', { - dimensions: 5, - elementType: 'F64', - }), -}); - -const usersTable = singlestoreTable('userstest', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); - -const users2Table = singlestoreTable('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id'), -}); - -const citiesTable = singlestoreTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const usersOnUpdate = singlestoreTable('users_on_update', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), - updatedAt: datetime('updated_at', { mode: 'date' }).$onUpdateFn(() => new Date()), - alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value -}); - -const datesTable = singlestoreTable('datestable', { - date: date('date'), - dateAsString: date('date_as_string', { mode: 'string' }), - time: time('time'), - datetime: datetime('datetime'), - datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), - timestamp: timestamp('timestamp'), - timestampAsString: timestamp('timestamp_as_string', { mode: 'string' }), - year: year('year'), -}); - -const coursesTable = singlestoreTable('courses', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - categoryId: int('category_id'), -}); - -const courseCategoriesTable = singlestoreTable('course_categories', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -const orders = singlestoreTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull().$default(() => 'random_string'), - amount: int('amount').notNull(), - quantity: int('quantity').notNull(), -}); - -const usersMigratorTable = singlestoreTable('users12', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - email: text('email').notNull(), -}, (table) => { - return { - name: uniqueIndex('').on(table.name).using('btree'), - }; -}); - -// To test aggregate functions -const aggregateTable = singlestoreTable('aggregate_table', { - id: serial('id').notNull(), - name: text('name').notNull(), - a: int('a'), - b: int('b'), - c: int('c'), - nullOnly: int('null_only'), -}); - -const vectorSearchTable = singlestoreTable('vector_search', { - id: serial('id').notNull(), - text: text('text').notNull(), - embedding: vector('embedding', { dimensions: 10 }), -}); - -// To test another schema and multischema -const mySchema = singlestoreSchema(`mySchema`); - -const usersMySchemaTable = mySchema.table('userstest', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - verified: boolean('verified').notNull().default(false), - jsonb: json('jsonb').$type(), - createdAt: timestamp('created_at').notNull().defaultNow(), -}); - -const users2MySchemaTable = mySchema.table('users2', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id'), -}); - -const citiesMySchemaTable = mySchema.table('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), -}); - -let singlestoreContainer: Docker.Container; -export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { - const docker = new Docker(); - const port = await getPort({ port: 3306 }); - const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:0.2.67'; - - const pullStream = await docker.pull(image); - await new Promise((resolve, reject) => - docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) - ); - - singlestoreContainer = await docker.createContainer({ - Image: image, - Env: ['ROOT_PASSWORD=singlestore'], - name: `drizzle-integration-tests-${uuid()}`, - HostConfig: { - AutoRemove: true, - PortBindings: { - '3306/tcp': [{ HostPort: `${port}` }], - }, - }, - }); - - await singlestoreContainer.start(); - await new Promise((resolve) => setTimeout(resolve, 4000)); - - return { - connectionString: `singlestore://root:singlestore@localhost:${port}/`, - container: singlestoreContainer, - }; -} - -export function tests(driver?: string) { - describe('common', () => { - afterAll(async () => { - await singlestoreContainer?.stop().catch(console.error); - }); - - beforeEach(async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`drop table if exists userstest`); - await db.execute(sql`drop table if exists users2`); - await db.execute(sql`drop table if exists cities`); - await db.execute(sql`drop table if exists ${allTypesTable}`); - await db.execute(sql`DROP TABLE IF EXISTS ${rqbUser};`); - await db.execute(sql`DROP TABLE IF EXISTS ${rqbPost};`); - - await db.execute(sql`drop schema if exists \`mySchema\``); - await db.execute(sql`create schema if not exists \`mySchema\``); - - await db.execute( - sql` - create table userstest ( - id serial primary key, - name text not null, - verified boolean not null default false, - jsonb json, - created_at timestamp not null default now() - ) - `, - ); - - await db.execute( - sql` - create table users2 ( - id serial primary key, - name text not null, - city_id int - ) - `, - ); - - await db.execute( - sql` - create table cities ( - id serial primary key, - name text not null - ) - `, - ); - - // mySchema - await db.execute( - sql` - create table \`mySchema\`.\`userstest\` ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await db.execute( - sql` - create table \`mySchema\`.\`cities\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.execute( - sql` - create table \`mySchema\`.\`users2\` ( - \`id\` serial primary key, - \`name\` text not null, - \`city_id\` int - ) - `, - ); - - await db.execute(sql` - CREATE TABLE ${rqbUser} ( - \`id\` SERIAL PRIMARY KEY NOT NULL, - \`name\` TEXT NOT NULL, - \`created_at\` TIMESTAMP NOT NULL - ) - `); - - await db.execute(sql` - CREATE TABLE ${rqbPost} ( - \`id\` SERIAL PRIMARY KEY NOT NULL, - \`user_id\` BIGINT(20) UNSIGNED NOT NULL, - \`content\` TEXT, - \`created_at\` TIMESTAMP NOT NULL - ) - `); - }); - - async function setupReturningFunctionsTest(db: SingleStoreDatabase) { - await db.execute(sql`drop table if exists \`users_default_fn\``); - await db.execute( - sql` - create table \`users_default_fn\` ( - \`id\` varchar(256) primary key, - \`name\` text not null - ); - `, - ); - } - - async function setupSetOperationTest(db: TestSingleStoreDB) { - await db.execute(sql`drop table if exists \`users2\``); - await db.execute(sql`drop table if exists \`cities\``); - await db.execute( - sql` - create table \`users2\` ( - \`id\` serial primary key, - \`name\` text not null, - \`city_id\` int - ) - `, - ); - - await db.execute( - sql` - create table \`cities\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.insert(citiesTable).values([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await db.insert(users2Table).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 2 }, - { id: 3, name: 'Jack', cityId: 3 }, - { id: 4, name: 'Peter', cityId: 3 }, - { id: 5, name: 'Ben', cityId: 2 }, - { id: 6, name: 'Jill', cityId: 1 }, - { id: 7, name: 'Mary', cityId: 2 }, - { id: 8, name: 'Sally', cityId: 1 }, - ]); - } - - async function setupAggregateFunctionsTest(db: TestSingleStoreDB) { - await db.execute(sql`drop table if exists \`aggregate_table\``); - await db.execute( - sql` - create table \`aggregate_table\` ( - \`id\` integer primary key auto_increment not null, - \`name\` text not null, - \`a\` integer, - \`b\` integer, - \`c\` integer, - \`null_only\` integer - ); - `, - ); - await db.insert(aggregateTable).values([ - { id: 1, name: 'value 1', a: 5, b: 10, c: 20 }, - { id: 2, name: 'value 1', a: 5, b: 20, c: 30 }, - { id: 3, name: 'value 2', a: 10, b: 50, c: 60 }, - { id: 4, name: 'value 3', a: 20, b: 20, c: null }, - { id: 5, name: 'value 4', a: null, b: 90, c: 120 }, - { id: 6, name: 'value 5', a: 80, b: 10, c: null }, - { id: 7, name: 'value 6', a: null, b: null, c: 150 }, - ]); - } - - async function setupVectorSearchTest(db: TestSingleStoreDB) { - await db.execute(sql`drop table if exists \`vector_search\``); - await db.execute( - sql` - create table \`vector_search\` ( - \`id\` integer primary key auto_increment not null, - \`text\` text not null, - \`embedding\` vector(10) not null - ) - `, - ); - await db.insert(vectorSearchTable).values([ - { - id: 1, - text: 'I like dogs', - embedding: [0.6119, 0.1395, 0.2921, 0.3664, 0.4561, 0.7852, 0.1997, 0.5142, 0.5924, 0.0465], - }, - { - id: 2, - text: 'I like cats', - embedding: [0.6075, 0.1705, 0.0651, 0.9489, 0.9656, 0.8084, 0.3046, 0.0977, 0.6842, 0.4402], - }, - ]); - } - - test('table config: unsigned ints', async () => { - const unsignedInts = singlestoreTable('cities1', { - bigint: bigint('bigint', { mode: 'number', unsigned: true }), - int: int('int', { unsigned: true }), - smallint: smallint('smallint', { unsigned: true }), - mediumint: mediumint('mediumint', { unsigned: true }), - tinyint: tinyint('tinyint', { unsigned: true }), - }); - - const tableConfig = getTableConfig(unsignedInts); - - const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; - const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; - const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; - const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; - const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; - - expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); - expect(intColumn.getSQLType()).toBe('int unsigned'); - expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); - expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); - expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); - }); - - test('table config: signed ints', async () => { - const unsignedInts = singlestoreTable('cities1', { - bigint: bigint('bigint', { mode: 'number' }), - int: int('int'), - smallint: smallint('smallint'), - mediumint: mediumint('mediumint'), - tinyint: tinyint('tinyint'), - }); - - const tableConfig = getTableConfig(unsignedInts); - - const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; - const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; - const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; - const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; - const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; - - expect(bigintColumn.getSQLType()).toBe('bigint'); - expect(intColumn.getSQLType()).toBe('int'); - expect(smallintColumn.getSQLType()).toBe('smallint'); - expect(mediumintColumn.getSQLType()).toBe('mediumint'); - expect(tinyintColumn.getSQLType()).toBe('tinyint'); - }); - - test('table config: primary keys name', async () => { - const table = singlestoreTable('cities', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); - - const tableConfig = getTableConfig(table); - - expect(tableConfig.primaryKeys).toHaveLength(1); - expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); - }); - - test('table configs: unique third param', async () => { - const cities1Table = singlestoreTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: unique('custom_name').on(t.name, t.state), - f1: unique('custom_name1').on(t.name, t.state), - })); - - const tableConfig = getTableConfig(cities1Table); - - expect(tableConfig.uniqueConstraints).toHaveLength(2); - - expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); - expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); - - expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); - expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); - }); - - test('table configs: unique in column', async () => { - const cities1Table = singlestoreTable('cities1', { - id: serial('id').primaryKey(), - name: text('name').notNull().unique(), - state: text('state').unique('custom'), - field: text('field').unique('custom_field'), - }); - - const tableConfig = getTableConfig(cities1Table); - - const columnName = tableConfig.columns.find((it) => it.name === 'name'); - expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); - expect(columnName?.isUnique).toBeTruthy(); - - const columnState = tableConfig.columns.find((it) => it.name === 'state'); - expect(columnState?.uniqueName).toBe('custom'); - expect(columnState?.isUnique).toBeTruthy(); - - const columnField = tableConfig.columns.find((it) => it.name === 'field'); - expect(columnField?.uniqueName).toBe('custom_field'); - expect(columnField?.isUnique).toBeTruthy(); - }); - - test('select all fields', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: 'John' }); - const result = await db.select().from(usersTable); - - expect(result[0]!.createdAt).toBeInstanceOf(Date); - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - }); - - test('select sql', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - expect(users).toEqual([{ name: 'JOHN' }]); - }); - - test('select typed sql', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersTable.name})`, - }).from(usersTable); - - expect(users).toEqual([{ name: 'JOHN' }]); - }); - - test('select with empty array in inArray', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); - const result = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable) - .where(inArray(usersTable.id, [])) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([]); - }); - - test('select with empty array in notInArray', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); - const result = await db - .select({ - name: sql`upper(${usersTable.name})`, - }) - .from(usersTable) - .where(notInArray(usersTable.id, [])) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); - }); - - test('select distinct', async (ctx) => { - const { db } = ctx.singlestore; - - const usersDistinctTable = singlestoreTable('users_distinct', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - }); - - test('insert returning sql', async (ctx) => { - const { db } = ctx.singlestore; - - const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); - - expect(result.insertId).toBe(1); - }); - - test('delete returning sql', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - expect(users[0].affectedRows).toBe(1); - }); - - test('update returning sql', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - expect(users[0].changedRows).toBe(1); - }); - - test('update with returning all fields', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); - - expect(updatedUsers[0].changedRows).toBe(1); - - expect(users[0]!.createdAt).toBeInstanceOf(Date); - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); - expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); - }); - - test('update with returning partial', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: 'John' }); - const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); - - const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - expect(updatedUsers[0].changedRows).toBe(1); - - expect(users).toEqual([{ id: 1, name: 'Jane' }]); - }); - - test('delete with returning all fields', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - expect(deletedUser[0].affectedRows).toBe(1); - }); - - test('delete with returning partial', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); - - expect(deletedUser[0].affectedRows).toBe(1); - }); - - test('insert + select', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: 'John' }); - const result = await db.select().from(usersTable); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersTable).values({ id: 2, name: 'Jane' }); - const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); - expect(result2).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); - }); - - test('json insert', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - }).from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); - }); - - test('insert with overridden default values', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); - const result = await db.select().from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); - }); - - test('insert many', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([ - { id: 1, name: 'John' }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, - { id: 3, name: 'Jane' }, - { id: 4, name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - jsonb: usersTable.jsonb, - verified: usersTable.verified, - }).from(usersTable) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); - }); - - test('insert many with returning', async (ctx) => { - const { db } = ctx.singlestore; - - const result = await db.insert(usersTable).values([ - { name: 'John' }, - { name: 'Bruce', jsonb: ['foo', 'bar'] }, - { name: 'Jane' }, - { name: 'Austin', verified: true }, - ]); - - expect(result[0].affectedRows).toBe(4); - }); - - test('select with group by as field', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.name) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); - }); - - test('select with exists', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); - - const user = alias(usersTable, 'user'); - const result = await db.select({ name: usersTable.name }).from(usersTable).where( - exists( - db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), - ), - ) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([{ name: 'John' }]); - }); - - test('select with group by as sql', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); - }); - - test('$default function', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); - const selectedOrder = await db.select().from(orders); - - expect(selectedOrder).toEqual([{ - id: 1, - amount: 1, - quantity: 1, - region: 'Ukraine', - product: 'random_string', - }]); - }); - - test('$default with empty array', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists \`s_orders\``); - await db.execute( - sql` - create table \`s_orders\` ( - \`id\` serial primary key, - \`region\` text default 'Ukraine', - \`product\` text not null - ) - `, - ); - - const users = singlestoreTable('s_orders', { - id: serial('id').primaryKey(), - region: text('region').default('Ukraine'), - product: text('product').$defaultFn(() => 'random_string'), - }); - - await db.insert(users).values({ id: 1 }); - const selectedOrder = await db.select().from(users); - - expect(selectedOrder).toEqual([{ - id: 1, - region: 'Ukraine', - product: 'random_string', - }]); - }); - - test('select with group by as sql + column', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(sql`${usersTable.name}`, usersTable.id) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - }); - - test('select with group by as column + sql', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - }); - - test('select with group by complex query', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); - - const result = await db.select({ name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, sql`${usersTable.name}`) - .orderBy(asc(usersTable.name)) - .limit(1); - - expect(result).toEqual([{ name: 'Jane' }]); - }); - - test('build query', async (ctx) => { - const { db } = ctx.singlestore; - - const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) - .groupBy(usersTable.id, usersTable.name) - .toSQL(); - - expect(query).toEqual({ - sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, - params: [], - }); - }); - - test('Query check: Insert all defaults in 1 row', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - const query = db - .insert(users) - .values({}) - .toSQL(); - - expect(query).toEqual({ - sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', - params: [], - }); - }); - - test('Query check: Insert all defaults in multiple rows', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state').default('UA'), - }); - - const query = db - .insert(users) - .values([{}, {}]) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', - params: [], - }); - }); - - test('Insert all defaults in 1 row', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('empty_insert_single', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values({ id: 1 }); - - const res = await db.select().from(users); - - expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); - }); - - test('Insert all defaults in multiple rows', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('empty_insert_multiple', { - id: serial('id').primaryKey(), - name: text('name').default('Dan'), - state: text('state'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, - ); - - await db.insert(users).values([{ id: 1 }, { id: 2 }]); - - const res = await db.select().from(users).orderBy(asc(users.id)); - - expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); - }); - - test('build query insert with onDuplicate', async (ctx) => { - const { db } = ctx.singlestore; - - const query = db.insert(usersTable) - .values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }) - .onDuplicateKeyUpdate({ set: { id: 1, name: 'John1' } }) - .toSQL(); - - expect(query).toEqual({ - sql: - 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (?, ?, default, ?, default) on duplicate key update `id` = ?, `name` = ?', - params: [1, 'John', '["foo","bar"]', 1, 'John1'], - }); - }); - - test('insert with onDuplicate', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }); - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }) - .onDuplicateKeyUpdate({ set: { name: 'John1' } }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - expect(res).toEqual([{ id: 1, name: 'John1' }]); - }); - - test('insert conflict', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }); - - await expect((async () => { - db.insert(usersTable).values({ id: 1, name: 'John1' }); - })()).resolves.not.toThrowError(); - }); - - test('insert conflict with ignore', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable) - .values({ id: 1, name: 'John' }); - - await db.insert(usersTable) - .ignore() - .values({ id: 1, name: 'John1' }); - - const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( - eq(usersTable.id, 1), - ); - - expect(res).toEqual([{ id: 1, name: 'John' }]); - }); - - test('insert sql', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - expect(result).toEqual([{ id: 1, name: 'John' }]); - }); - - test('partial join with alias', async (ctx) => { - const { db } = ctx.singlestore; - const customerAlias = alias(usersTable, 'customer'); - - await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select({ - user: { - id: usersTable.id, - name: usersTable.name, - }, - customer: { - id: customerAlias.id, - name: customerAlias.name, - }, - }).from(usersTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersTable.id, 10)) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([{ - user: { id: 10, name: 'Ivan' }, - customer: { id: 11, name: 'Hans' }, - }]); - }); - - test('full join with alias', async (ctx) => { - const { db } = ctx.singlestore; - - const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); - - const users = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select().from(users) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(users.id, 10)) - .orderBy(asc(users.id)); - - expect(result).toEqual([{ - users: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); - }); - - test('select from alias', async (ctx) => { - const { db } = ctx.singlestore; - - const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); - - const users = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); - - const user = alias(users, 'user'); - const customers = alias(users, 'customer'); - - await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); - const result = await db - .select() - .from(user) - .leftJoin(customers, eq(customers.id, 11)) - .where(eq(user.id, 10)) - .orderBy(asc(user.id)); - - expect(result).toEqual([{ - user: { - id: 10, - name: 'Ivan', - }, - customer: { - id: 11, - name: 'Hans', - }, - }]); - - await db.execute(sql`drop table ${users}`); - }); - - test('insert with spaces', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); - const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); - - expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); - }); - - test('prepared statement', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: 'John' }); - const statement = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .prepare(); - const result = await statement.execute(); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - }); - - test('insert: placeholders on columns with encoder', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date('2024-08-07T15:30:00Z'); - - const statement = db.insert(usersTable).values({ - id: 1, - name: 'John', - createdAt: sql.placeholder('createdAt'), - }).prepare(); - - await statement.execute({ createdAt: date }); - - const result = await db - .select({ - id: usersTable.id, - createdAt: usersTable.createdAt, - }) - .from(usersTable); - - expect(result).toEqual([ - { id: 1, createdAt: date }, - ]); - }); - - test('prepared statement reuse', async (ctx) => { - const { db } = ctx.singlestore; - - const stmt = db.insert(usersTable).values({ - verified: true, - id: placeholder('id'), - name: placeholder('name'), - }).prepare(); - - for (let i = 0; i < 10; i++) { - await stmt.execute({ id: i + 1, name: `John ${i}` }); - } - - const result = await db.select({ - id: usersTable.id, - name: usersTable.name, - verified: usersTable.verified, - }).from(usersTable) - .orderBy(asc(usersTable.id)); - - expect(result).toEqual([ - { id: 1, name: 'John 0', verified: true }, - { id: 2, name: 'John 1', verified: true }, - { id: 3, name: 'John 2', verified: true }, - { id: 4, name: 'John 3', verified: true }, - { id: 5, name: 'John 4', verified: true }, - { id: 6, name: 'John 5', verified: true }, - { id: 7, name: 'John 6', verified: true }, - { id: 8, name: 'John 7', verified: true }, - { id: 9, name: 'John 8', verified: true }, - { id: 10, name: 'John 9', verified: true }, - ]); - }); - - test('prepared statement with placeholder in .where', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ id: 1, name: 'John' }); - const stmt = db.select({ - id: usersTable.id, - name: usersTable.name, - }).from(usersTable) - .where(eq(usersTable.id, placeholder('id'))) - .prepare(); - const result = await stmt.execute({ id: 1 }); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - }); - - test('migrator', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists cities_migration`); - await db.execute(sql`drop table if exists users_migration`); - await db.execute(sql`drop table if exists users12`); - await db.execute(sql`drop table if exists __drizzle_migrations`); - - await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); - - await db.insert(usersMigratorTable).values({ id: 1, name: 'John', email: 'email' }); - - const result = await db.select().from(usersMigratorTable); - - expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); - - await db.execute(sql`drop table cities_migration`); - await db.execute(sql`drop table users_migration`); - await db.execute(sql`drop table users12`); - await db.execute(sql`drop table __drizzle_migrations`); - }); - - test('insert via db.execute + select via db.execute', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute( - sql`insert into ${usersTable} (${new Name(usersTable.id.name)},${new Name( - usersTable.name.name, - )}) values (1,${'John'})`, - ); - - const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); - expect(result[0]).toEqual([{ id: 1, name: 'John' }]); - }); - - test('insert via db.execute w/ query builder', async (ctx) => { - const { db } = ctx.singlestore; - - const inserted = await db.execute( - db.insert(usersTable).values({ id: 1, name: 'John' }), - ); - expect(inserted[0].affectedRows).toBe(1); - }); - - test('insert + select all possible dates', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists \`datestable\``); - await db.execute( - sql` - create table \`datestable\` ( - \`date\` date, - \`date_as_string\` date, - \`time\` time, - \`datetime\` datetime, - \`datetime_as_string\` datetime, - \`timestamp\` timestamp(6), - \`timestamp_as_string\` timestamp(6), - \`year\` year - ) - `, - ); - - const date = new Date('2022-11-11'); - const dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); - - await db.insert(datesTable).values({ - date: date, - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: date, - year: 22, - datetimeAsString: '2022-11-11 12:12:12', - timestamp: dateWithMilliseconds, - timestampAsString: '2022-11-11 12:12:12.123', - }); - - const res = await db.select().from(datesTable); - - expect(res[0]?.date).toBeInstanceOf(Date); - expect(res[0]?.datetime).toBeInstanceOf(Date); - expect(typeof res[0]?.dateAsString).toBe('string'); - expect(typeof res[0]?.datetimeAsString).toBe('string'); - - expect(res).toEqual([{ - date: toLocalDate(new Date('2022-11-11')), - dateAsString: '2022-11-11', - time: '12:12:12', - datetime: new Date('2022-11-11'), - year: 2022, - datetimeAsString: '2022-11-11 12:12:12', - timestamp: new Date('2022-11-11 12:12:12.123'), - timestampAsString: '2022-11-11 12:12:12.123000', - }]); - - await db.execute(sql`drop table if exists \`datestable\``); - }); - - const tableWithEnums = singlestoreTable('enums_test_case', { - id: serial('id').primaryKey(), - enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), - enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), - enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), - }); - - test('SingleStore enum test case #1', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists \`enums_test_case\``); - - await db.execute(sql` - create table \`enums_test_case\` ( - \`id\` serial primary key, - \`enum1\` ENUM('a', 'b', 'c') not null, - \`enum2\` ENUM('a', 'b', 'c') default 'a', - \`enum3\` ENUM('a', 'b', 'c') not null default 'b' - ) - `); - - await db.insert(tableWithEnums).values([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum3: 'c' }, - { id: 3, enum1: 'a' }, - ]); - - const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); - - await db.execute(sql`drop table \`enums_test_case\``); - - expect(res).toEqual([ - { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, - { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, - { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, - ]); - }); - - test('left join (flat object fields)', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - - await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); - - const res = await db.select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: citiesTable.id, - cityName: citiesTable.name, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .orderBy(users2Table.id); - - expect(res).toEqual([ - { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, - { userId: 2, userName: 'Jane', cityId: null, cityName: null }, - ]); - }); - - test('left join (grouped fields)', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - - await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); - - const res = await db.select({ - id: users2Table.id, - user: { - name: users2Table.name, - nameUpper: sql`upper(${users2Table.name})`, - }, - city: { - id: citiesTable.id, - name: citiesTable.name, - nameUpper: sql`upper(${citiesTable.name})`, - }, - }).from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .orderBy(asc(users2Table.id)); - - expect(res).toEqual([ - { - id: 1, - user: { name: 'John', nameUpper: 'JOHN' }, - city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, - }, - { - id: 2, - user: { name: 'Jane', nameUpper: 'JANE' }, - city: null, - }, - ]); - }); - - test('select from a many subquery', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 2 }, - ]); - - const res = await db.select({ - population: db.select({ count: count().as('count') }).from(users2Table).where( - eq(users2Table.cityId, citiesTable.id), - ).as( - 'population', - ), - name: citiesTable.name, - }).from(citiesTable); - - expectTypeOf(res).toEqualTypeOf< - { - population: number; - name: string; - }[] - >(); - - expect(res).toStrictEqual(expect.arrayContaining([{ - population: 1, - name: 'Paris', - }, { - population: 2, - name: 'London', - }])); - }); - - test('select from a one subquery', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(citiesTable) - .values([{ name: 'Paris' }, { name: 'London' }]); - - await db.insert(users2Table).values([ - { name: 'John', cityId: 1 }, - { name: 'Jane', cityId: 2 }, - { name: 'Jack', cityId: 2 }, - ]); - - const res = await db.select({ - cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) - .as( - 'cityName', - ), - name: users2Table.name, - }).from(users2Table); - - expectTypeOf(res).toEqualTypeOf< - { - cityName: string; - name: string; - }[] - >(); - - expect(res).toStrictEqual(expect.arrayContaining([{ - cityName: 'Paris', - name: 'John', - }, { - cityName: 'London', - name: 'Jane', - }, { - cityName: 'London', - name: 'Jack', - }])); - }); - - test('left join (all fields)', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - - await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); - - const res = await db.select().from(users2Table) - .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) - .orderBy(asc(users2Table.id)); - - expect(res).toEqual([ - { - users2: { - id: 1, - name: 'John', - cityId: 1, - }, - cities: { - id: 1, - name: 'Paris', - }, - }, - { - users2: { - id: 2, - name: 'Jane', - cityId: null, - }, - cities: null, - }, - ]); - }); - - test('join subquery', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists \`courses\``); - await db.execute(sql`drop table if exists \`course_categories\``); - - await db.execute( - sql` - create table \`course_categories\` ( - \`id\` serial primary key, - \`name\` text not null - ) - `, - ); - - await db.execute( - sql` - create table \`courses\` ( - \`id\` serial primary key, - \`name\` text not null, - \`category_id\` int - ) - `, - ); - - await db.insert(courseCategoriesTable).values([ - { id: 1, name: 'Category 1' }, - { id: 2, name: 'Category 2' }, - { id: 3, name: 'Category 3' }, - { id: 4, name: 'Category 4' }, - ]); - - await db.insert(coursesTable).values([ - { id: 1, name: 'Development', categoryId: 2 }, - { id: 2, name: 'IT & Software', categoryId: 3 }, - { id: 3, name: 'Marketing', categoryId: 4 }, - { id: 4, name: 'Design', categoryId: 1 }, - ]); - - const sq2 = db - .select({ - categoryId: courseCategoriesTable.id, - category: courseCategoriesTable.name, - total: sql`count(${courseCategoriesTable.id})`, - }) - .from(courseCategoriesTable) - .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) - .as('sq2'); - - const res = await db - .select({ - courseName: coursesTable.name, - categoryId: sq2.categoryId, - }) - .from(coursesTable) - .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) - .orderBy(coursesTable.name); - - expect(res).toEqual([ - { courseName: 'Design', categoryId: 1 }, - { courseName: 'Development', categoryId: 2 }, - { courseName: 'IT & Software', categoryId: 3 }, - { courseName: 'Marketing', categoryId: 4 }, - ]); - - await db.execute(sql`drop table if exists \`courses\``); - await db.execute(sql`drop table if exists \`course_categories\``); - }); - - test('with ... select', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values([ - { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 30, quantity: 3 }, - { region: 'US', product: 'A', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 40, quantity: 4 }, - { region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const regionalSales = db - .$with('regional_sales') - .as( - db - .select({ - region: orders.region, - totalSales: sql`sum(${orders.amount})`.as('total_sales'), - }) - .from(orders) - .groupBy(orders.region), - ); - - const topRegions = db - .$with('top_regions') - .as( - db - .select({ - region: regionalSales.region, - }) - .from(regionalSales) - .where( - gt( - regionalSales.totalSales, - db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), - ), - ), - ); - - const result = await db - .with(regionalSales, topRegions) - .select({ - region: orders.region, - product: orders.product, - productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, - productSales: sql`cast(sum(${orders.amount}) as unsigned)`, - }) - .from(orders) - .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) - .groupBy(orders.region, orders.product) - .orderBy(orders.region, orders.product); - - expect(result).toEqual([ - { - region: 'Europe', - product: 'A', - productUnits: 3, - productSales: 30, - }, - { - region: 'Europe', - product: 'B', - productUnits: 5, - productSales: 50, - }, - { - region: 'US', - product: 'A', - productUnits: 7, - productSales: 70, - }, - { - region: 'US', - product: 'B', - productUnits: 9, - productSales: 90, - }, - ]); - }); - - test('with ... update', async (ctx) => { - const { db } = ctx.singlestore; - - const products = singlestoreTable('products', { - id: serial('id').primaryKey(), - price: decimal('price', { - precision: 15, - scale: 2, - }).notNull(), - cheap: boolean('cheap').notNull().default(false), - }); - - await db.execute(sql`drop table if exists ${products}`); - await db.execute(sql` - create table ${products} ( - id serial primary key, - price decimal(15, 2) not null, - cheap boolean not null default false - ) - `); - - await db.insert(products).values([ - { id: 1, price: '10.99' }, - { id: 2, price: '25.85' }, - { id: 3, price: '32.99' }, - { id: 4, price: '2.50' }, - { id: 5, price: '4.59' }, - ]); - - const averagePrice = db - .$with('average_price') - .as( - db - .select({ - value: sql`avg(${products.price})`.as('value'), - }) - .from(products), - ); - - await db - .with(averagePrice) - .update(products) - .set({ - cheap: true, - }) - .where(lt(products.price, sql`(select * from ${averagePrice})`)); - - const result = await db - .select({ - id: products.id, - }) - .from(products) - .where(eq(products.cheap, true)) - .orderBy(asc(products.id)); - - expect(result).toEqual([ - { id: 1 }, - { id: 4 }, - { id: 5 }, - ]); - }); - - test('with ... delete', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists \`orders\``); - await db.execute( - sql` - create table \`orders\` ( - \`id\` serial primary key, - \`region\` text not null, - \`product\` text not null, - \`amount\` int not null, - \`quantity\` int not null - ) - `, - ); - - await db.insert(orders).values([ - { id: 1, region: 'Europe', product: 'A', amount: 10, quantity: 1 }, - { id: 2, region: 'Europe', product: 'A', amount: 20, quantity: 2 }, - { id: 3, region: 'Europe', product: 'B', amount: 20, quantity: 2 }, - { id: 4, region: 'Europe', product: 'B', amount: 30, quantity: 3 }, - { id: 5, region: 'US', product: 'A', amount: 30, quantity: 3 }, - { id: 6, region: 'US', product: 'A', amount: 40, quantity: 4 }, - { id: 7, region: 'US', product: 'B', amount: 40, quantity: 4 }, - { id: 8, region: 'US', product: 'B', amount: 50, quantity: 5 }, - ]); - - const averageAmount = db - .$with('average_amount') - .as( - db - .select({ - value: sql`avg(${orders.amount})`.as('value'), - }) - .from(orders), - ); - - await db - .with(averageAmount) - .delete(orders) - .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); - - const result = await db - .select({ - id: orders.id, - }) - .from(orders) - .orderBy(asc(orders.id)); - - expect(result).toEqual([ - { id: 1 }, - { id: 2 }, - { id: 3 }, - { id: 4 }, - { id: 5 }, - ]); - }); - - test('select from subquery sql', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); - - const sq = db - .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) - .from(users2Table) - .orderBy(asc(users2Table.id)) - .as('sq'); - - const res = await db.select({ name: sq.name }).from(sq); - - expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); - }); - - test('select a field without joining its table', (ctx) => { - const { db } = ctx.singlestore; - - expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); - }); - - test('select all fields from subquery without alias', (ctx) => { - const { db } = ctx.singlestore; - - const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); - - expect(() => db.select().from(sq).prepare()).toThrowError(); - }); - - test('select count()', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); - - const res = await db.select({ count: sql`count(*)` }).from(usersTable); - - expect(res).toEqual([{ count: 2 }]); - }); - - test('select for ...', (ctx) => { - const { db } = ctx.singlestore; - - { - const query = db.select().from(users2Table).for('update').toSQL(); - expect(query.sql).toMatch(/ for update$/); - } - { - const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); - expect(query.sql).toMatch(/ for share skip locked$/); - } - { - const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); - expect(query.sql).toMatch(/ for update nowait$/); - } - }); - - test('having', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { - id: 3, - name: 'New York', - }]); - - await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { - id: 3, - name: 'Jack', - cityId: 2, - }]); - - const result = await db - .select({ - id: citiesTable.id, - name: sql`upper(${citiesTable.name})`.as('upper_name'), - usersCount: sql`count(${users2Table.id})`.as('users_count'), - }) - .from(citiesTable) - .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) - .where(({ name }) => sql`length(${name}) >= 3`) - .groupBy(citiesTable.id) - .having(({ usersCount }) => sql`${usersCount} > 0`) - .orderBy(({ name }) => name); - - expect(result).toEqual([ - { - id: 1, - name: 'LONDON', - usersCount: 2, - }, - { - id: 2, - name: 'PARIS', - usersCount: 1, - }, - ]); - }); - - // TODO: Unskip when views are supported - /* test.skip('view', async (ctx) => { - const { db } = ctx.singlestore; - - const newYorkers1 = singlestoreView('new_yorkers') - .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); - - const newYorkers2 = singlestoreView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); - - const newYorkers3 = singlestoreView('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); - - await db.insert(users2Table).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); - expect(result).toEqual([ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); - }); */ - - test('select from raw sql', async (ctx) => { - const { db } = ctx.singlestore; - - const result = await db.select({ - id: sql`id`, - name: sql`name`, - }).from(sql`(select 1 as id, 'John' as name) as users`); - - Expect>; - - expect(result).toEqual([ - { id: 1, name: 'John' }, - ]); - }); - - test('select from raw sql with joins', async (ctx) => { - const { db } = ctx.singlestore; - - const result = await db - .select({ - id: sql`users.id`, - name: sql`users.name`, - userCity: sql`users.city`, - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); - - Expect>; - - expect(result).toEqual([ - { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, - ]); - }); - - test('join on aliased sql from select', async (ctx) => { - const { db } = ctx.singlestore; - - const result = await db - .select({ - userId: sql`users.id`.as('userId'), - name: sql`users.name`, - userCity: sql`users.city`, - cityId: sql`cities.id`.as('cityId'), - cityName: sql`cities.name`, - }) - .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) - .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); - - Expect< - Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> - >; - - expect(result).toEqual([ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); - }); - - test('join on aliased sql from with clause', async (ctx) => { - const { db } = ctx.singlestore; - - const users = db.$with('users').as( - db.select({ - id: sql`id`.as('userId'), - name: sql`name`.as('userName'), - city: sql`city`.as('city'), - }).from( - sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, - ), - ); - - const cities = db.$with('cities').as( - db.select({ - id: sql`id`.as('cityId'), - name: sql`name`.as('cityName'), - }).from( - sql`(select 1 as id, 'Paris' as name) as cities`, - ), - ); - - const result = await db - .with(users, cities) - .select({ - userId: users.id, - name: users.name, - userCity: users.city, - cityId: cities.id, - cityName: cities.name, - }) - .from(users) - .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); - - Expect< - Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> - >; - - expect(result).toEqual([ - { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, - ]); - }); - - test('prefixed table', async (ctx) => { - const { db } = ctx.singlestore; - - const singlestoreTable = singlestoreTableCreator((name) => `myprefix_${name}`); - - const users = singlestoreTable('test_prefixed_table_with_unique_name', { - id: int('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, - ); - - await db.insert(users).values({ id: 1, name: 'John' }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - - await db.execute(sql`drop table ${users}`); - }); - - test('orderBy with aliased column', (ctx) => { - const { db } = ctx.singlestore; - - const query = db.select({ - test: sql`something`.as('test'), - }).from(users2Table).orderBy((fields) => fields.test).toSQL(); - - expect(query.sql).toBe('select something as `test` from `users2` order by `test`'); - }); - - test('timestamp timezone', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); - - await db.insert(usersTable).values({ id: 1, name: 'With default times' }); - await db.insert(usersTable).values({ - id: 2, - name: 'Without default times', - createdAt: date, - }); - const users = await db.select().from(usersTable).orderBy(asc(usersTable.id)); - - // check that the timestamps are set correctly for default times - expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); - - // check that the timestamps are set correctly for non default times - expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); - }); - - test('transaction', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users_transactions', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - const products = singlestoreTable('products_transactions', { - id: serial('id').primaryKey(), - price: int('price').notNull(), - stock: int('stock').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop table if exists ${products}`); - - await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); - await db.execute( - sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, - ); - - const [{ insertId: userId }] = await db.insert(users).values({ id: 1, balance: 100 }); - const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); - const [{ insertId: productId }] = await db.insert(products).values({ id: 1, price: 10, stock: 10 }); - const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); - - await db.transaction(async (tx) => { - await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); - await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); - }); - - const result = await db.select().from(users); - - expect(result).toEqual([{ id: 1, balance: 90 }]); - - await db.execute(sql`drop table ${users}`); - await db.execute(sql`drop table ${products}`); - }); - - test('transaction rollback', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users_transactions_rollback', { - id: serial('id').primaryKey(), - balance: int('balance').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, - ); - - await expect((async () => { - await db.transaction(async (tx) => { - await tx.insert(users).values({ balance: 100 }); - tx.rollback(); - }); - })()).rejects.toThrowError(TransactionRollbackError); - - const result = await db.select().from(users); - - expect(result).toEqual([]); - - await db.execute(sql`drop table ${users}`); - }); - - test('join subquery with join', async (ctx) => { - const { db } = ctx.singlestore; - - const internalStaff = singlestoreTable('internal_staff', { - userId: int('user_id').notNull(), - }); - - const customUser = singlestoreTable('custom_user', { - id: int('id').notNull(), - }); - - const ticket = singlestoreTable('ticket', { - staffId: int('staff_id').notNull(), - }); - - await db.execute(sql`drop table if exists ${internalStaff}`); - await db.execute(sql`drop table if exists ${customUser}`); - await db.execute(sql`drop table if exists ${ticket}`); - - await db.execute(sql`create table internal_staff (user_id integer not null)`); - await db.execute(sql`create table custom_user (id integer not null)`); - await db.execute(sql`create table ticket (staff_id integer not null)`); - - await db.insert(internalStaff).values({ userId: 1 }); - await db.insert(customUser).values({ id: 1 }); - await db.insert(ticket).values({ staffId: 1 }); - - const subq = db - .select() - .from(internalStaff) - .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) - .as('internal_staff'); - - const mainQuery = await db - .select() - .from(ticket) - .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); - - expect(mainQuery).toEqual([{ - ticket: { staffId: 1 }, - internal_staff: { - internal_staff: { userId: 1 }, - custom_user: { id: 1 }, - }, - }]); - - await db.execute(sql`drop table ${internalStaff}`); - await db.execute(sql`drop table ${customUser}`); - await db.execute(sql`drop table ${ticket}`); - }); - - // TODO: Unskip when views are supported - /* test.skip('subquery with view', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users_subquery_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 2 }, - { id: 3, name: 'Jack', cityId: 1 }, - { id: 4, name: 'Jill', cityId: 2 }, - ]); - - const sq = db.$with('sq').as(db.select().from(newYorkers)); - const result = await db.with(sq).select().from(sq).orderBy(asc(sq.id)); - - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 1 }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); - }); */ - - // TODO: Unskip when views are supported - /* test.skip('join view as subquery', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users_join_view', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }); - - const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`drop view if exists ${newYorkers}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, - ); - await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); - - await db.insert(users).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 2 }, - { id: 3, name: 'Jack', cityId: 1 }, - { id: 4, name: 'Jill', cityId: 2 }, - ]); - - const sq = db.select().from(newYorkers).as('new_yorkers_sq'); - - const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).orderBy(asc(users.id)); - - expect(result).toEqual([ - { - users_join_view: { id: 1, name: 'John', cityId: 1 }, - new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, - }, - { - users_join_view: { id: 2, name: 'Jane', cityId: 2 }, - new_yorkers_sq: null, - }, - { - users_join_view: { id: 3, name: 'Jack', cityId: 1 }, - new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, - }, - { - users_join_view: { id: 4, name: 'Jill', cityId: 2 }, - new_yorkers_sq: null, - }, - ]); - - await db.execute(sql`drop view ${newYorkers}`); - await db.execute(sql`drop table ${users}`); - }); */ - - test('select iterator', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users_iterator', { - id: serial('id').primaryKey(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key)`); - - await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); - - const iter = db.select().from(users) - .orderBy(asc(users.id)) - .iterator(); - - const result: typeof users.$inferSelect[] = []; - - for await (const row of iter) { - result.push(row); - } - - expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); - }); - - test('select iterator w/ prepared statement', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users_iterator', { - id: serial('id').primaryKey(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key)`); - - await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); - - const prepared = db.select().from(users) - .orderBy(asc(users.id)) - .prepare(); - const iter = prepared.iterator(); - const result: typeof users.$inferSelect[] = []; - - for await (const row of iter) { - result.push(row); - } - - expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); - }); - - test('insert undefined', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await expect((async () => { - await db.insert(users).values({ name: undefined }); - })()).resolves.not.toThrowError(); - - await db.execute(sql`drop table ${users}`); - }); - - test('update undefined', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name'), - }); - - await db.execute(sql`drop table if exists ${users}`); - - await db.execute( - sql`create table ${users} (id serial not null primary key, name text)`, - ); - - await expect((async () => { - await db.update(users).set({ name: undefined }); - })()).rejects.toThrowError(); - - await expect((async () => { - await db.update(users).set({ id: 1, name: undefined }); - })()).resolves.not.toThrowError(); - - await db.execute(sql`drop table ${users}`); - }); - - test('utc config for datetime', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists \`datestable\``); - await db.execute( - sql` - create table \`datestable\` ( - \`datetime_utc\` datetime(6), - \`datetime\` datetime(6) - ) - `, - ); - const datesTable = singlestoreTable('datestable', { - datetimeUTC: datetime('datetime_utc', { mode: 'date' }), - datetime: datetime('datetime'), - }); - - const dateObj = new Date('2022-11-11'); - const dateUtc = new Date('2022-11-11T12:12:12.122Z'); - - await db.insert(datesTable).values({ - datetimeUTC: dateUtc, - datetime: dateObj, - }); - - const res = await db.select().from(datesTable); - - const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); - const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; - - expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122000'); - expect(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z')).toEqual(dateUtc); - - expect(res[0]?.datetime).toBeInstanceOf(Date); - expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); - - expect(res).toEqual([{ - datetimeUTC: dateUtc, - datetime: new Date('2022-11-11'), - }]); - - await db.execute(sql`drop table if exists \`datestable\``); - }); - - // TODO (https://memsql.atlassian.net/browse/MCDB-63261) allow chaining limit and orderby in subquery - test('set operations (union) from query builder with subquery', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - const citiesQuery = db - .select({ - id: citiesTable.id, - name: citiesTable.name, - orderCol: sql`0`.as('orderCol'), - }) - .from(citiesTable); - - const usersQuery = db - .select({ - id: users2Table.id, - name: users2Table.name, - orderCol: sql`1`.as('orderCol'), - }) - .from(users2Table); - - const unionQuery = db - .select({ - id: sql`id`, - name: sql`name`, - }) - .from( - citiesQuery.union(usersQuery).as('combined'), - ) - .orderBy(sql`orderCol`, sql`id`) - .limit(8); - - const result = await unionQuery; - - expect(result).toHaveLength(8); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - { id: 1, name: 'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'Jack' }, - { id: 4, name: 'Peter' }, - { id: 5, name: 'Ben' }, - ]); - - // union should throw if selected fields are not in the same order - await expect((async () => { - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).union( - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (union) as function', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const result = await union( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - { id: 1, name: 'John' }, - ]); - - await expect((async () => { - union( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (union all) from query builder', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const sq = db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).orderBy(asc(sql`id`)).limit(2).unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).orderBy(asc(sql`id`)).limit(2), - ).as('sq'); - - const result = await db.select().from(sq).orderBy(asc(sql`id`)).limit(3); - - expect(result).toHaveLength(3); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - { id: 1, name: 'New York' }, - { id: 2, name: 'London' }, - ]); - - await expect((async () => { - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).limit(2).unionAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).limit(2), - ).orderBy(asc(sql`id`)); - })()).rejects.toThrowError(); - }); - - test('set operations (union all) as function', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const sq = unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).as('sq'); - - const result = await db.select().from(sq).limit(1); - - expect(result).toHaveLength(1); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - ]); - - await expect((async () => { - unionAll( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - })()).rejects.toThrowError(); - }); - - test('set operations (intersect) from query builder', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const sq = db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ) - .as('sq'); - - const result = await db.select().from(sq).orderBy(asc(sql`id`)); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 2, name: 'London' }, - { id: 3, name: 'Tampa' }, - ]); - - await expect((async () => { - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (intersect) as function', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const sq = await intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).as('sq'); - - const result = await db.select().from(sq).limit(1); - - expect(result).toHaveLength(0); - - expect(result).toEqual([]); - - await expect((async () => { - intersect( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(1); - })()).rejects.toThrowError(); - }); - - test('set operations (except) from query builder', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const result = await db - .select() - .from(citiesTable).except( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - expect(result).toHaveLength(1); - - expect(result).toEqual([ - { id: 1, name: 'New York' }, - ]); - }); - - test('set operations (except) as function', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const sq = except( - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable), - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).as('sq'); - - const result = await db.select().from(sq).limit(3); - - expect(result).toHaveLength(2); - - expect(result).toContainEqual({ id: 2, name: 'London' }); - expect(result).toContainEqual({ id: 3, name: 'Tampa' }); - - await expect((async () => { - except( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable), - db - .select({ id: citiesTable.id, name: citiesTable.name }) - .from(citiesTable).where(eq(citiesTable.id, 1)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - ).limit(3); - })()).rejects.toThrowError(); - }); - - test('define constraints as array', async (ctx) => { - const { db } = ctx.singlestore; - - const table = singlestoreTable('name', { - id: int(), - }, (t) => [ - index('name').on(t.id), - primaryKey({ columns: [t.id], name: 'custom' }), - ]); - - const { indexes, primaryKeys } = getTableConfig(table); - - expect(indexes.length).toBe(1); - expect(primaryKeys.length).toBe(1); - }); - - test('define constraints as array inside third param', async (ctx) => { - const { db } = ctx.singlestore; - - const table = singlestoreTable('name', { - id: int(), - }, (t) => [ - [index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' })], - ]); - - const { indexes, primaryKeys } = getTableConfig(table); - - expect(indexes.length).toBe(1); - expect(primaryKeys.length).toBe(1); - }); - - test.skip('set operations (mixed) from query builder', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const sq1 = unionAll( - db - .select() - .from(citiesTable).where(gt(citiesTable.id, 1)), - db.select().from(citiesTable).where(eq(citiesTable.id, 2)), - ).as('sq1'); - - const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); - - const sq3 = await db.select().from(sq2).limit(1).offset(1).as('sq3'); - - const result = await db - .select() - .from(citiesTable) - .except( - db - .select() - .from(sq3), - ); - - expect(result).toHaveLength(2); - - expect(result).toEqual([ - { id: 3, name: 'Tampa' }, - { id: 1, name: 'New York' }, - ]); - - await expect((async () => { - db - .select() - .from(citiesTable).except( - ({ unionAll }) => - unionAll( - db - .select({ name: citiesTable.name, id: citiesTable.id }) - .from(citiesTable).where(gt(citiesTable.id, 1)), - db.select().from(citiesTable).where(eq(citiesTable.id, 2)), - ), - ); - })()).rejects.toThrowError(); - }); - - test('set operations (mixed all) as function with subquery', async (ctx) => { - const { db } = ctx.singlestore; - - await setupSetOperationTest(db); - - const sq1 = except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 7)), - ).as('sq1'); - - const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); - - const sq3 = await db.select().from(sq2).limit(1).as('sq3'); - - const result = await union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - db.select().from(sq3), - db - .select().from(citiesTable).where(gt(citiesTable.id, 1)), - ); - - expect(result).toHaveLength(4); - - // multiple results possible as a result of the filters >= 5 and ==7 because singlestore doesn't guarantee order - // dynamically validate results - const hasValidEntry = (entry: { id: number; name: string }) => { - if (entry.id === 1) return entry.name === 'John'; - if (entry.id > 1 && entry.id < 5) return entry.name === 'Tampa' || entry.name === 'London'; - if (entry.id >= 5 && entry.id !== 7) return true; // Accept any entry with id >= 5 and not 7 - return false; - }; - - for (const entry of result) { - expect(hasValidEntry(entry)).toBe(true); - } - - await expect((async () => { - union( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(eq(users2Table.id, 1)), - except( - db - .select({ id: users2Table.id, name: users2Table.name }) - .from(users2Table).where(gte(users2Table.id, 5)), - db - .select({ name: users2Table.name, id: users2Table.id }) - .from(users2Table).where(eq(users2Table.id, 7)), - ).limit(1), - db - .select().from(citiesTable).where(gt(citiesTable.id, 1)), - ); - })()).rejects.toThrowError(); - }); - - test('aggregate function: count', async (ctx) => { - const { db } = ctx.singlestore; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: count() }).from(table); - const result2 = await db.select({ value: count(table.a) }).from(table); - const result3 = await db.select({ value: countDistinct(table.name) }).from(table); - - expect(result1[0]?.value).toBe(7); - expect(result2[0]?.value).toBe(5); - expect(result3[0]?.value).toBe(6); - }); - - test('aggregate function: avg', async (ctx) => { - const { db } = ctx.singlestore; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: avg(table.b) }).from(table); - const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); - const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); - - expect(result1[0]?.value).toBe('33.3333'); - expect(result2[0]?.value).toBe(null); - expect(result3[0]?.value).toBe('42.5000'); - }); - - test('aggregate function: sum', async (ctx) => { - const { db } = ctx.singlestore; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: sum(table.b) }).from(table); - const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); - const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); - - expect(result1[0]?.value).toBe('200'); - expect(result2[0]?.value).toBe(null); - expect(result3[0]?.value).toBe('170'); - }); - - test('aggregate function: max', async (ctx) => { - const { db } = ctx.singlestore; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: max(table.b) }).from(table); - const result2 = await db.select({ value: max(table.nullOnly) }).from(table); - - expect(result1[0]?.value).toBe(90); - expect(result2[0]?.value).toBe(null); - }); - - test('aggregate function: min', async (ctx) => { - const { db } = ctx.singlestore; - const table = aggregateTable; - await setupAggregateFunctionsTest(db); - - const result1 = await db.select({ value: min(table.b) }).from(table); - const result2 = await db.select({ value: min(table.nullOnly) }).from(table); - - expect(result1[0]?.value).toBe(10); - expect(result2[0]?.value).toBe(null); - }); - - test('simple vector search', async (ctx) => { - const { db } = ctx.singlestore; - const table = vectorSearchTable; - const embedding = [0.42, 0.93, 0.88, 0.57, 0.32, 0.64, 0.76, 0.52, 0.19, 0.81]; // ChatGPT's 10 dimension embedding for "dogs are cool" not sure how accurate but it works - await setupVectorSearchTest(db); - - const withRankEuclidean = db.select({ - id: table.id, - text: table.text, - rank: sql`row_number() over (order by ${euclideanDistance(table.embedding, embedding)})`.as('rank'), - }).from(table).as('with_rank'); - const withRankDotProduct = db.select({ - id: table.id, - text: table.text, - rank: sql`row_number() over (order by ${dotProduct(table.embedding, embedding)})`.as('rank'), - }).from(table).as('with_rank'); - const result1 = await db.select({ id: withRankEuclidean.id, text: withRankEuclidean.text }).from( - withRankEuclidean, - ).where(eq(withRankEuclidean.rank, 1)); - const result2 = await db.select({ id: withRankDotProduct.id, text: withRankDotProduct.text }).from( - withRankDotProduct, - ).where(eq(withRankDotProduct.rank, 1)); - - expect(result1.length).toEqual(1); - expect(result1[0]).toEqual({ id: 1, text: 'I like dogs' }); - - expect(result2.length).toEqual(1); - expect(result2[0]).toEqual({ id: 1, text: 'I like dogs' }); - }); - - test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial not null primary key, - name text not null, - update_counter integer default 1 not null, - updated_at datetime(6), - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { id: 1, name: 'John' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'Jack' }, - { id: 4, name: 'Jill' }, - ]); - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - - const justDates = await db.select({ updatedAt }).from(usersOnUpdate); - - const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - expect(response).toEqual([ - { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, - { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, - { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, - { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, - ]); - const msDelay = 750; - - for (const eachUser of justDates) { - expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); - } - }); - - test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql`drop table if exists ${usersOnUpdate}`); - - await db.execute( - sql` - create table ${usersOnUpdate} ( - id serial not null primary key, - name text not null, - update_counter integer default 1 not null, - updated_at datetime(6), - always_null text - ) - `, - ); - - await db.insert(usersOnUpdate).values([ - { id: 1, name: 'John', alwaysNull: 'this will will be null after updating' }, - { id: 2, name: 'Jane' }, - { id: 3, name: 'Jack' }, - { id: 4, name: 'Jill' }, - ]); - const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); - const initial = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from(usersOnUpdate); - - await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); - - const justDates = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from( - usersOnUpdate, - ); - - const response = await db.select().from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); - - expect(response).toEqual([ - { id: 1, name: 'Angel', updateCounter: 2, updatedAt: expect.any(Date), alwaysNull: null }, - { id: 2, name: 'Jane', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, - { id: 3, name: 'Jack', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, - { id: 4, name: 'Jill', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, - ]); - - const initialRecord = initial.find((record) => record.id === 1); - const updatedRecord = justDates.find((record) => record.id === 1); - - expect(initialRecord?.updatedAt?.valueOf()).not.toBe(updatedRecord?.updatedAt?.valueOf()); - - const msDelay = 2000; - - for (const eachUser of justDates) { - expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); - } - }); - - // mySchema tests - test('mySchema :: select all fields', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); - const result = await db.select().from(usersMySchemaTable); - - expect(result[0]!.createdAt).toBeInstanceOf(Date); - // not timezone based timestamp, thats why it should not work here - // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - }); - - test('mySchema :: select sql', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersMySchemaTable.name})`, - }).from(usersMySchemaTable); - - expect(users).toEqual([{ name: 'JOHN' }]); - }); - - test('mySchema :: select typed sql', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const users = await db.select({ - name: sql`upper(${usersMySchemaTable.name})`, - }).from(usersMySchemaTable); - - expect(users).toEqual([{ name: 'JOHN' }]); - }); - - test('mySchema :: select distinct', async (ctx) => { - const { db } = ctx.singlestore; - - const usersDistinctTable = singlestoreTable('users_distinct', { - id: int('id').notNull(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${usersDistinctTable}`); - await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); - - await db.insert(usersDistinctTable).values([ - { id: 1, name: 'John' }, - { id: 1, name: 'John' }, - { id: 2, name: 'John' }, - { id: 1, name: 'Jane' }, - ]); - const users = await db.selectDistinct().from(usersDistinctTable).orderBy( - usersDistinctTable.id, - usersDistinctTable.name, - ); - - await db.execute(sql`drop table ${usersDistinctTable}`); - - expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); - }); - - test('mySchema :: insert returning sql', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - const [result, _] = await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); - - expect(result.insertId).toBe(1); - }); - - test('mySchema :: delete returning sql', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); - - expect(users[0].affectedRows).toBe(1); - }); - - test('mySchema :: update with returning partial', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); - const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( - eq(usersMySchemaTable.name, 'John'), - ); - - const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( - usersMySchemaTable, - ) - .where( - eq(usersMySchemaTable.id, 1), - ); - - expect(updatedUsers[0].changedRows).toBe(1); - - expect(users).toEqual([{ id: 1, name: 'Jane' }]); - }); - - test('mySchema :: delete with returning all fields', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersMySchemaTable).values({ name: 'John' }); - const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); - - expect(deletedUser[0].affectedRows).toBe(1); - }); - - test('mySchema :: insert + select', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); - const result = await db.select().from(usersMySchemaTable); - expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); - - await db.insert(usersMySchemaTable).values({ id: 2, name: 'Jane' }); - const result2 = await db.select().from(usersMySchemaTable).orderBy(asc(usersMySchemaTable.id)); - expect(result2).toEqual([ - { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, - { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, - ]); - }); - - test('mySchema :: insert with overridden default values', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ id: 1, name: 'John', verified: true }); - const result = await db.select().from(usersMySchemaTable); - - expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); - }); - - test('mySchema :: insert many', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values([ - { id: 1, name: 'John' }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, - { id: 3, name: 'Jane' }, - { id: 4, name: 'Austin', verified: true }, - ]); - const result = await db.select({ - id: usersMySchemaTable.id, - name: usersMySchemaTable.name, - jsonb: usersMySchemaTable.jsonb, - verified: usersMySchemaTable.verified, - }).from(usersMySchemaTable) - .orderBy(asc(usersMySchemaTable.id)); - - expect(result).toEqual([ - { id: 1, name: 'John', jsonb: null, verified: false }, - { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, - { id: 3, name: 'Jane', jsonb: null, verified: false }, - { id: 4, name: 'Austin', jsonb: null, verified: true }, - ]); - }); - - test('mySchema :: select with group by as field', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { - id: 3, - name: 'Jane', - }]); - - const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) - .groupBy(usersMySchemaTable.name) - .orderBy(asc(usersMySchemaTable.id)); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); - }); - - test('mySchema :: select with group by as column + sql', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { - id: 3, - name: 'Jane', - }]); - - const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) - .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`) - .orderBy(asc(usersMySchemaTable.id)); - - expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); - }); - - test('mySchema :: build query', async (ctx) => { - const { db } = ctx.singlestore; - - const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) - .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) - .toSQL(); - - expect(query).toEqual({ - sql: - `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`mySchema\`.\`userstest\`.\`id\`, \`mySchema\`.\`userstest\`.\`name\``, - params: [], - }); - }); - - test('mySchema :: insert with spaces', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ id: 1, name: sql`'Jo h n'` }); - const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( - usersMySchemaTable, - ); - - expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); - }); - - test('mySchema :: prepared statement with placeholder in .where', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); - const stmt = db.select({ - id: usersMySchemaTable.id, - name: usersMySchemaTable.name, - }).from(usersMySchemaTable) - .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) - .prepare(); - const result = await stmt.execute({ id: 1 }); - - expect(result).toEqual([{ id: 1, name: 'John' }]); - }); - - test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { - const { db } = ctx.singlestore; - await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); - - await db.execute(sql`drop table if exists \`userstest\``); - await db.execute( - sql` - create table \`userstest\` ( - \`id\` serial primary key, - \`name\` text not null, - \`verified\` boolean not null default false, - \`jsonb\` json, - \`created_at\` timestamp not null default now() - ) - `, - ); - - await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); - await db.insert(usersTable).values({ id: 11, name: 'Hans' }); - - const customerAlias = alias(usersTable, 'customer'); - - const result = await db - .select().from(usersMySchemaTable) - .leftJoin(customerAlias, eq(customerAlias.id, 11)) - .where(eq(usersMySchemaTable.id, 10)); - - expect(result).toEqual([{ - userstest: { - id: 10, - name: 'Ivan', - verified: false, - jsonb: null, - createdAt: result[0]!.userstest.createdAt, - }, - customer: { - id: 11, - name: 'Hans', - verified: false, - jsonb: null, - createdAt: result[0]!.customer!.createdAt, - }, - }]); - }); - - test('insert $returningId: serial as id', async (ctx) => { - const { db } = ctx.singlestore; - - const result = await db.insert(usersTable).values({ id: 1, name: 'John' }).$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - }[]>(); - - expect(result).toStrictEqual([{ id: 1 }]); - }); - - test('insert $returningId: serial as id, batch insert', async (ctx) => { - const { db } = ctx.singlestore; - - const result = await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'John1' }]) - .$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - id: number; - }[]>(); - - // singlestore auto increments when batch inserting, so the ids increment by one - expect(result).toStrictEqual([{ id: 2 }, { id: 3 }]); - }); - - test('insert $returningId: $default as primary key', async (ctx) => { - const { db } = ctx.singlestore; - - const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; - let iterator = 0; - - const usersTableDefFn = singlestoreTable('users_default_fn', { - customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { - const value = uniqueKeys[iterator]!; - iterator++; - return value; - }), - name: text('name').notNull(), - }); - - await setupReturningFunctionsTest(db); - - const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) - // ^? - .$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - customId: string; - }[]>(); - - expect(result).toStrictEqual([{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { - customId: 'dyqs529eom0iczo2efxzbcut', - }]); - }); - - test('insert $returningId: $default as primary key with value', async (ctx) => { - const { db } = ctx.singlestore; - - const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; - let iterator = 0; - - const usersTableDefFn = singlestoreTable('users_default_fn', { - customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { - const value = uniqueKeys[iterator]!; - iterator++; - return value; - }), - name: text('name').notNull(), - }); - - await setupReturningFunctionsTest(db); - - const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) - // ^? - .$returningId(); - - expectTypeOf(result).toEqualTypeOf<{ - customId: string; - }[]>(); - - expect(result).toStrictEqual([{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); - }); - - // TODO: Unkip this test when views are supported - /* test.skip('mySchema :: view', async (ctx) => { - const { db } = ctx.singlestore; - - const newYorkers1 = mySchema.view('new_yorkers') - .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); - - const newYorkers2 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); - - const newYorkers3 = mySchema.view('new_yorkers', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - cityId: int('city_id').notNull(), - }).existing(); - - await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); - - await db.insert(citiesMySchemaTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); - - await db.insert(users2MySchemaTable).values([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - { id: 3, name: 'Jack', cityId: 2 }, - ]); - - { - const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); - expect(result).toEqual([ - { id: 1, name: 'John', cityId: 1 }, - { id: 2, name: 'Jane', cityId: 1 }, - ]); - } - - { - const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); - expect(result).toEqual([ - { name: 'John' }, - { name: 'Jane' }, - ]); - } - - await db.execute(sql`drop view ${newYorkers1}`); - }); */ - - test('limit 0', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select() - .from(usersTable) - .limit(0); - - expect(users).toEqual([]); - }); - - test('limit -1', async (ctx) => { - const { db } = ctx.singlestore; - - await db.insert(usersTable).values({ name: 'John' }); - const users = await db - .select() - .from(usersTable) - .limit(-1); - - expect(users.length).toBeGreaterThan(0); - }); - - test('sql operator as cte', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); - await db.insert(users).values([ - { name: 'John' }, - { name: 'Jane' }, - ]); - - const sq1 = db.$with('sq', { - userId: users.id, - data: { - name: users.name, - }, - }).as(sql`select * from ${users} where ${users.name} = 'John'`); - const result1 = await db.with(sq1).select().from(sq1); - - const sq2 = db.$with('sq', { - userId: users.id, - data: { - name: users.name, - }, - }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); - const result2 = await db.with(sq2).select().from(sq1); - - expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); - expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); - }); - - test('cross join', async (ctx) => { - const { db } = ctx.singlestore; - - await db - .insert(usersTable) - .values([ - { name: 'John' }, - { name: 'Jane' }, - ]); - - await db - .insert(citiesTable) - .values([ - { name: 'Seattle' }, - { name: 'New York City' }, - ]); - - const result = await db - .select({ - user: usersTable.name, - city: citiesTable.name, - }) - .from(usersTable) - .crossJoin(citiesTable) - .orderBy(usersTable.name, citiesTable.name); - - expect(result).toStrictEqual([ - { city: 'New York City', user: 'Jane' }, - { city: 'Seattle', user: 'Jane' }, - { city: 'New York City', user: 'John' }, - { city: 'Seattle', user: 'John' }, - ]); - }); - - test('left join (lateral)', async (ctx) => { - const { db } = ctx.singlestore; - - await db - .insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const sq = db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: users2Table.cityId, - }) - .from(users2Table) - .where(eq(users2Table.cityId, citiesTable.id)) - .as('sq'); - - const res = await db - .select({ - cityId: citiesTable.id, - cityName: citiesTable.name, - userId: sq.userId, - userName: sq.userName, - }) - .from(citiesTable) - .leftJoinLateral(sq, sql`true`) - .orderBy(citiesTable.id); - - expect(res).toStrictEqual([ - { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, - { cityId: 2, cityName: 'London', userId: null, userName: null }, - ]); - }); - - test('inner join (lateral)', async (ctx) => { - const { db } = ctx.singlestore; - - await db - .insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); - - const sq = db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: users2Table.cityId, - }) - .from(users2Table) - .where(eq(users2Table.cityId, citiesTable.id)) - .as('sq'); - - const res = await db - .select({ - cityId: citiesTable.id, - cityName: citiesTable.name, - userId: sq.userId, - userName: sq.userName, - }) - .from(citiesTable) - .innerJoinLateral(sq, sql`true`); - - expect(res).toStrictEqual([ - { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, - ]); - }); - - test('cross join (lateral)', async (ctx) => { - const { db } = ctx.singlestore; - - await db - .insert(citiesTable) - .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); - - await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { - name: 'Patrick', - cityId: 2, - }]); - - const sq = db - .select({ - userId: users2Table.id, - userName: users2Table.name, - cityId: users2Table.cityId, - }) - .from(users2Table) - .where(eq(users2Table.cityId, citiesTable.id)) - .as('sq'); - - const res = await db - .select({ - cityId: citiesTable.id, - cityName: citiesTable.name, - userId: sq.userId, - userName: sq.userName, - }) - .from(citiesTable) - .crossJoinLateral(sq) - .orderBy(sq.userId, citiesTable.id); - - expect(res).toStrictEqual([ - { - cityId: 1, - cityName: 'Paris', - userId: 1, - userName: 'John', - }, - { - cityId: 2, - cityName: 'London', - userId: 2, - userName: 'Jane', - }, - { - cityId: 2, - cityName: 'London', - userId: 3, - userName: 'Patrick', - }, - ]); - }); - - test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { - const { db } = ctx.singlestore; - - const users = singlestoreTable('users', { - id: serial('id').primaryKey(), - name: text('name').notNull(), - updatedAt: timestamp('updated_at') - .notNull() - .$onUpdate(() => sql`current_timestamp`), - }); - - await db.execute(sql`drop table if exists ${users}`); - await db.execute( - sql` - create table ${users} ( - \`id\` serial primary key, - \`name\` text not null, - \`updated_at\` timestamp not null - ) - `, - ); - - await db.insert(users).values({ - name: 'John', - }); - const insertResp = await db.select({ updatedAt: users.updatedAt }).from(users); - await new Promise((resolve) => setTimeout(resolve, 1000)); - - const now = Date.now(); - await new Promise((resolve) => setTimeout(resolve, 1000)); - await db.update(users).set({ - name: 'John', - }); - const updateResp = await db.select({ updatedAt: users.updatedAt }).from(users); - - expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); - expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); - }); - - test('RQB v2 simple find first - no rows', async (ctx) => { - const { db } = ctx.singlestore; - - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - }); - - test('RQB v2 simple find first - multiple rows', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - - test('RQB v2 simple find first - with relation', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - id: 1, - createdAt: date, - name: 'First', - posts: [{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - }); - - test('RQB v2 simple find first - placeholders', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - - test('RQB v2 simple find many - no rows', async (ctx) => { - const { db } = ctx.singlestore; - - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - }); - - test('RQB v2 simple find many - multiple rows', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }, { - id: 1, - createdAt: date, - name: 'First', - }]); - }); - - test('RQB v2 simple find many - with relation', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }]); - }); - - test('RQB v2 simple find many - placeholders', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }]); - }); - - test('RQB v2 transaction find first - no rows', async (ctx) => { - const { db } = ctx.singlestore; - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst(); - - expect(result).toStrictEqual(undefined); - }); - }); - - test('RQB v2 transaction find first - multiple rows', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - }); - - test('RQB v2 transaction find first - with relation', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findFirst({ - with: { - posts: { - orderBy: { - id: 'asc', - }, - }, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual({ - id: 1, - createdAt: date, - name: 'First', - posts: [{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }], - }); - }); - }); - - test('RQB v2 transaction find first - placeholders', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findFirst({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual({ - id: 2, - createdAt: date, - name: 'Second', - }); - }); - }); - - test('RQB v2 transaction find many - no rows', async (ctx) => { - const { db } = ctx.singlestore; - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany(); - - expect(result).toStrictEqual([]); - }); - }); - - test('RQB v2 transaction find many - multiple rows', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbUser.findMany({ - orderBy: { - id: 'desc', - }, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }, { - id: 1, - createdAt: date, - name: 'First', - }]); - }); - }); - - test('RQB v2 transaction find many - with relation', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.insert(rqbPost).values([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - }]); - - await db.transaction(async (db) => { - const result = await db.query.rqbPost.findMany({ - with: { - author: true, - }, - orderBy: { - id: 'asc', - }, - }); - - expect(result).toStrictEqual([{ - id: 1, - userId: 1, - createdAt: date, - content: null, - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }, { - id: 2, - userId: 1, - createdAt: date, - content: 'Has message this time', - author: { - id: 1, - createdAt: date, - name: 'First', - }, - }]); - }); - }); - - test('RQB v2 transaction find many - placeholders', async (ctx) => { - const { db } = ctx.singlestore; - - const date = new Date(120000); - - await db.insert(rqbUser).values([{ - id: 1, - createdAt: date, - name: 'First', - }, { - id: 2, - createdAt: date, - name: 'Second', - }]); - - await db.transaction(async (db) => { - const query = db.query.rqbUser.findMany({ - where: { - id: { - eq: sql.placeholder('filter'), - }, - }, - orderBy: { - id: 'asc', - }, - }).prepare(); - - const result = await query.execute({ - filter: 2, - }); - - expect(result).toStrictEqual([{ - id: 2, - createdAt: date, - name: 'Second', - }]); - }); - }); - - test('all types', async (ctx) => { - const { db } = ctx.singlestore; - - await db.execute(sql` - CREATE TABLE \`all_types\` ( - \`scol\` serial, - \`bigint53\` bigint, - \`bigint64\` bigint, - \`bigint_string\` bigint, - \`binary\` binary, - \`boolean\` boolean, - \`char\` char, - \`date\` date, - \`date_str\` date, - \`datetime\` datetime, - \`datetime_str\` datetime, - \`decimal\` decimal, - \`decimal_num\` decimal(30), - \`decimal_big\` decimal(30), - \`double\` double, - \`float\` float, - \`int\` int, - \`json\` json, - \`med_int\` mediumint, - \`small_int\` smallint, - \`real\` real, - \`text\` text, - \`time\` time, - \`timestamp\` timestamp, - \`timestamp_str\` timestamp, - \`tiny_int\` tinyint, - \`varbin\` varbinary(16), - \`varchar\` varchar(255), - \`year\` year, - \`enum\` enum('enV1','enV2'), - \`vec_i8\` vector(5, I8), - \`vec_i16\` vector(5, I16), - \`vec_i32\` vector(5, I32), - \`vec_i64\` vector(5, I64), - \`vec_f32\` vector(5, F32), - \`vec_f64\` vector(5, F64), - shard key(\`scol\`) - ); - `); - - await db.insert(allTypesTable).values({ - serial: 1, - bigint53: 9007199254740991, - bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', - binary: '1', - boolean: true, - char: 'c', - date: new Date(1741743161623), - dateStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - datetime: new Date(1741743161623), - datetimeStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - decimal: '47521', - decimalNum: 9007199254740991, - decimalBig: 5044565289845416380n, - double: 15.35325689124218, - enum: 'enV1', - float: 1.048596, - real: 1.048596, - text: 'C4-', - int: 621, - json: { - str: 'strval', - arr: ['str', 10], - }, - medInt: 560, - smallInt: 14, - time: '04:13:22', - timestamp: new Date(1741743161623), - timestampStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), - tinyInt: 7, - varbin: '1010110101001101', - varchar: 'VCHAR', - year: 2025, - vectorF32: [0.735482, -0.291647, 1.183529, -2.406378, 0.014263], - vectorF64: [ - 0.3918573842719283, - -1.682530118745203, - 2.014963587205109, - -0.005832741903218165, - 0.7841029456712038, - ], - vectorI8: [-2, 8, 127, 85, -128], - vectorI16: [-2, 8, 127, 85, -128], - vectorI32: [15342, -27894, 6271, -10385, 31056], - vectorI64: [ - 4829301283746501823n, - -7203847501293847201n, - 1623847561928374650n, - -5938475628374651983n, - 803745610293847561n, - ], - }); - - const rawRes = await db.select().from(allTypesTable); - - type ExpectedType = { - serial: number; - bigint53: number | null; - bigint64: bigint | null; - bigintString: string | null; - binary: string | null; - boolean: boolean | null; - char: string | null; - date: Date | null; - dateStr: string | null; - datetime: Date | null; - datetimeStr: string | null; - decimal: string | null; - decimalNum: number | null; - decimalBig: bigint | null; - double: number | null; - float: number | null; - int: number | null; - json: unknown; - medInt: number | null; - smallInt: number | null; - real: number | null; - text: string | null; - time: string | null; - timestamp: Date | null; - timestampStr: string | null; - tinyInt: number | null; - varbin: string | null; - varchar: string | null; - year: number | null; - enum: 'enV1' | 'enV2' | null; - vectorI8: number[] | null; - vectorI16: number[] | null; - vectorI32: number[] | null; - vectorI64: bigint[] | null; - vectorF32: number[] | null; - vectorF64: number[] | null; - }[]; - - const expectedRes: ExpectedType = [ - { - serial: 1, - bigint53: 9007199254740991, - bigint64: 5044565289845416380n, - bigintString: '5044565289845416380', - binary: '1', - boolean: true, - char: 'c', - date: new Date('2025-03-12T00:00:00.000Z'), - dateStr: '2025-03-12', - datetime: new Date('2025-03-12T01:32:41.000Z'), - datetimeStr: '2025-03-12 01:32:41', - decimal: '47521', - decimalNum: 9007199254740991, - decimalBig: 5044565289845416380n, - double: 15.35325689124218, - float: 1.0486, - int: 621, - json: { arr: ['str', 10], str: 'strval' }, - medInt: 560, - smallInt: 14, - real: 1.048596, - text: 'C4-', - time: '04:13:22', - timestamp: new Date('2025-03-12T01:32:41.000Z'), - timestampStr: '2025-03-12 01:32:41', - tinyInt: 7, - varbin: '1010110101001101', - varchar: 'VCHAR', - year: 2025, - enum: 'enV1', - vectorF32: [...new Float32Array([0.735482, -0.291647, 1.183529, -2.406378, 0.014263])], - vectorF64: [ - 0.3918573842719283, - -1.682530118745203, - 2.014963587205109, - -0.005832741903218165, - 0.7841029456712038, - ], - vectorI8: [-2, 8, 127, 85, -128], - vectorI16: [-2, 8, 127, 85, -128], - vectorI32: [15342, -27894, 6271, -10385, 31056], - vectorI64: [ - 4829301283746501823n, - -7203847501293847201n, - 1623847561928374650n, - -5938475628374651983n, - 803745610293847561n, - ], - }, - ]; - - expectTypeOf(rawRes).toEqualTypeOf(); - expect(rawRes).toStrictEqual(expectedRes); - }); - }); -} diff --git a/integration-tests/tests/singlestore/singlestore-custom.test.ts b/integration-tests/tests/singlestore/singlestore-custom.test.ts index bc57f313dd..4bc3648290 100644 --- a/integration-tests/tests/singlestore/singlestore-custom.test.ts +++ b/integration-tests/tests/singlestore/singlestore-custom.test.ts @@ -1,5 +1,4 @@ import retry from 'async-retry'; -import type Docker from 'dockerode'; import { asc, eq, Name, placeholder, sql } from 'drizzle-orm'; import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; import { drizzle } from 'drizzle-orm/singlestore'; @@ -24,23 +23,21 @@ import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { toLocalDate } from '~/utils'; import relations from './relations'; -import { createDockerDB } from './singlestore-common'; -const ENABLE_LOGGING = false; +type TestSingleStoreDB = SingleStoreDriverDatabase; +declare module 'vitest' { + interface TestContext { + singlestore: { + db: TestSingleStoreDB; + }; + } +} let db: SingleStoreDriverDatabase; let client: mysql2.Connection; -let container: Docker.Container | undefined; beforeAll(async () => { - let connectionString; - if (process.env['SINGLESTORE_CONNECTION_STRING']) { - connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } + const connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; client = await retry(async () => { client = await mysql2.createConnection({ uri: connectionString, supportBigNumbers: true }); await client.connect(); @@ -57,12 +54,11 @@ beforeAll(async () => { }); await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); await client.changeUser({ database: 'drizzle' }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); + db = drizzle({ client, relations }); }); afterAll(async () => { await client?.end(); - await container?.stop().catch(console.error); }); beforeEach((ctx) => { diff --git a/integration-tests/tests/singlestore/singlestore-prefixed.test.ts b/integration-tests/tests/singlestore/singlestore-prefixed.test.ts index 24a8b3509f..5822c4b0e4 100644 --- a/integration-tests/tests/singlestore/singlestore-prefixed.test.ts +++ b/integration-tests/tests/singlestore/singlestore-prefixed.test.ts @@ -1,5 +1,4 @@ import retry from 'async-retry'; -import type Docker from 'dockerode'; import { asc, eq, getTableName, gt, inArray, Name, sql, TransactionRollbackError } from 'drizzle-orm'; import type { Equal } from 'drizzle-orm'; import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; @@ -26,23 +25,16 @@ import { migrate } from 'drizzle-orm/singlestore/migrator'; import * as mysql2 from 'mysql2/promise'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { Expect, toLocalDate } from '~/utils'; -import { createDockerDB } from './singlestore-common'; const ENABLE_LOGGING = false; let db: SingleStoreDriverDatabase; let client: mysql2.Connection; -let container: Docker.Container | undefined; beforeAll(async () => { - let connectionString; - if (process.env['SINGLESTORE_CONNECTION_STRING']) { - connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; - } else { - const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); - connectionString = conStr; - container = contrainerObj; - } + const connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; + if (!connectionString) throw new Error(); + client = await retry(async () => { client = await mysql2.createConnection({ uri: connectionString, supportBigNumbers: true }); await client.connect(); @@ -60,12 +52,11 @@ beforeAll(async () => { await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); await client.changeUser({ database: 'drizzle' }); - db = drizzle(client, { logger: ENABLE_LOGGING }); + db = drizzle({ client, logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end(); - await container?.stop().catch(console.error); }); const tablePrefix = 'drizzle_tests_'; diff --git a/integration-tests/tests/singlestore/singlestore-proxy.test.ts b/integration-tests/tests/singlestore/singlestore-proxy.test.ts index 49b5a85e6f..be23f8c3c3 100644 --- a/integration-tests/tests/singlestore/singlestore-proxy.test.ts +++ b/integration-tests/tests/singlestore/singlestore-proxy.test.ts @@ -1,133 +1,7 @@ -import retry from 'async-retry'; -import type { SingleStoreRemoteDatabase } from 'drizzle-orm/singlestore-proxy'; -import { drizzle as proxyDrizzle } from 'drizzle-orm/singlestore-proxy'; -import * as mysql2 from 'mysql2/promise'; -import { afterAll, beforeAll, beforeEach } from 'vitest'; -import { skipTests } from '~/common'; -import relations from './relations'; -import { createDockerDB, tests } from './singlestore-common'; +import { tests } from './common'; +import { proxyTest } from './instrumentation'; -const ENABLE_LOGGING = false; - -// eslint-disable-next-line drizzle-internal/require-entity-kind -class ServerSimulator { - constructor(private db: mysql2.Connection) {} - - async query(sql: string, params: any[], method: 'all' | 'execute') { - if (method === 'all') { - try { - const result = await this.db.query({ - sql, - values: params, - rowsAsArray: true, - typeCast: function(field: any, next: any) { - if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { - return field.string(); - } - return next(); - }, - }); - - return { data: result[0] as any }; - } catch (e: any) { - return { error: e }; - } - } else if (method === 'execute') { - try { - const result = await this.db.query({ - sql, - values: params, - typeCast: function(field: any, next: any) { - if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { - return field.string(); - } - return next(); - }, - }); - - return { data: result as any }; - } catch (e: any) { - return { error: e }; - } - } else { - return { error: 'Unknown method value' }; - } - } - - async migrations(queries: string[]) { - await this.db.query('START TRANSACTION'); - try { - for (const query of queries) { - await this.db.query(query); - } - await this.db.query('COMMIT'); - } catch (e) { - await this.db.query('ROLLBACK'); - throw e; - } - - return {}; - } -} - -let db: SingleStoreRemoteDatabase; -let client: mysql2.Connection; -let serverSimulator: ServerSimulator; - -beforeAll(async () => { - let connectionString; - if (process.env['SINGLESTORE_CONNECTION_STRING']) { - connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; - } else { - const { connectionString: conStr } = await createDockerDB(); - connectionString = conStr; - } - client = await retry(async () => { - client = await mysql2.createConnection({ uri: connectionString, supportBigNumbers: true }); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - - await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); - await client.changeUser({ database: 'drizzle' }); - - serverSimulator = new ServerSimulator(client); - db = proxyDrizzle(async (sql, params, method) => { - try { - const response = await serverSimulator.query(sql, params, method); - - if (response.error !== undefined) { - throw response.error; - } - - return { rows: response.data }; - } catch (e: any) { - console.error('Error from singlestore proxy server:', e.message); - throw e; - } - }, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - await client?.end(); -}); - -beforeEach((ctx) => { - ctx.singlestore = { - db, - }; -}); - -skipTests([ +const exclude = [ 'select iterator w/ prepared statement', 'select iterator', 'nested transaction rollback', @@ -144,6 +18,6 @@ skipTests([ 'RQB v2 transaction find many - multiple rows', 'RQB v2 transaction find many - with relation', 'RQB v2 transaction find many - placeholders', -]); +]; -tests(); +tests(proxyTest, exclude); diff --git a/integration-tests/tests/relational/singlestore.relations.ts b/integration-tests/tests/singlestore/singlestore.relations.ts similarity index 98% rename from integration-tests/tests/relational/singlestore.relations.ts rename to integration-tests/tests/singlestore/singlestore.relations.ts index b465d65099..de78deffe9 100644 --- a/integration-tests/tests/relational/singlestore.relations.ts +++ b/integration-tests/tests/singlestore/singlestore.relations.ts @@ -1,5 +1,5 @@ import { defineRelations } from 'drizzle-orm'; -import * as schema from './singlestore.schema.ts'; +import * as schema from './singlestore.schema'; export default defineRelations(schema, (r) => ({ usersView: { diff --git a/integration-tests/tests/relational/singlestore.test.ts b/integration-tests/tests/singlestore/singlestore.rels.test.ts similarity index 99% rename from integration-tests/tests/relational/singlestore.test.ts rename to integration-tests/tests/singlestore/singlestore.rels.test.ts index a17603e4f8..21109cf436 100644 --- a/integration-tests/tests/relational/singlestore.test.ts +++ b/integration-tests/tests/singlestore/singlestore.rels.test.ts @@ -7,7 +7,7 @@ import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './singlestore.relations.ts'; +import relations from './singlestore.relations'; import { allTypesTable, commentsTable, @@ -23,7 +23,7 @@ import { students, usersTable, usersToGroupsTable, -} from './singlestore.schema.ts'; +} from './singlestore.schema'; const ENABLE_LOGGING = false; @@ -100,7 +100,7 @@ beforeAll(async () => { } await client.query(`CREATE DATABASE IF NOT EXISTS drizzle_rqb;`); await client.changeUser({ database: 'drizzle_rqb' }); - db = drizzle(client, { relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); + db = drizzle({ client, relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); }); afterAll(async () => { diff --git a/integration-tests/tests/relational/singlestore.schema.ts b/integration-tests/tests/singlestore/singlestore.schema.ts similarity index 99% rename from integration-tests/tests/relational/singlestore.schema.ts rename to integration-tests/tests/singlestore/singlestore.schema.ts index 530566d421..7257c65d52 100644 --- a/integration-tests/tests/relational/singlestore.schema.ts +++ b/integration-tests/tests/singlestore/singlestore.schema.ts @@ -79,9 +79,7 @@ export const usersToGroupsTable = singlestoreTable( userId: bigint({ mode: 'number' }).notNull(), groupId: bigint({ mode: 'number' }).notNull(), }, - (t) => ({ - pk: primaryKey(t.userId, t.groupId), - }), + (t) => [primaryKey({ columns: [t.userId, t.groupId] })], ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { diff --git a/integration-tests/tests/singlestore/singlestore.test.ts b/integration-tests/tests/singlestore/singlestore.test.ts index cd68ccd4c6..441b357d06 100644 --- a/integration-tests/tests/singlestore/singlestore.test.ts +++ b/integration-tests/tests/singlestore/singlestore.test.ts @@ -1,62 +1,6 @@ -import retry from 'async-retry'; -import { drizzle } from 'drizzle-orm/singlestore'; -import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; -import * as mysql2 from 'mysql2/promise'; -import { afterAll, beforeAll, beforeEach } from 'vitest'; -import relations from './relations'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './singlestore-cache'; -import { createDockerDB, tests } from './singlestore-common'; +import { tests } from './common'; +import { tests as cacheTests } from './common-cache'; +import { singleStoreTest } from './instrumentation'; -const ENABLE_LOGGING = false; - -let db: SingleStoreDriverDatabase; -let dbGlobalCached: SingleStoreDriverDatabase; -let cachedDb: SingleStoreDriverDatabase; -let client: mysql2.Connection; - -beforeAll(async () => { - let connectionString; - if (process.env['SINGLESTORE_CONNECTION_STRING']) { - connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; - } else { - const { connectionString: conStr } = await createDockerDB(); - connectionString = conStr; - } - client = await retry(async () => { - client = await mysql2.createConnection({ uri: connectionString, supportBigNumbers: true }); - await client.connect(); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.end(); - }, - }); - - await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); - await client.changeUser({ database: 'drizzle' }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); -}); - -afterAll(async () => { - await client?.end(); -}); - -beforeEach((ctx) => { - ctx.singlestore = { - db, - }; - ctx.cachedSingleStore = { - db: cachedDb, - dbGlobalCached, - }; -}); - -cacheTests(); -tests(); +cacheTests(singleStoreTest); +tests(singleStoreTest); diff --git a/integration-tests/tests/sqlite/better-sqlite.test.ts b/integration-tests/tests/sqlite/better-sqlite.test.ts index a56b7c69b3..40412132fa 100644 --- a/integration-tests/tests/sqlite/better-sqlite.test.ts +++ b/integration-tests/tests/sqlite/better-sqlite.test.ts @@ -1,39 +1,18 @@ -import Database from 'better-sqlite3'; import { sql } from 'drizzle-orm'; -import { type BetterSQLite3Database, drizzle } from 'drizzle-orm/better-sqlite3'; +import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import { migrate } from 'drizzle-orm/better-sqlite3/migrator'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; +import { betterSqlite3Test as test } from './instrumentation'; import relations from './relations'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; -const ENABLE_LOGGING = false; - -let db: BetterSQLite3Database; -let client: Database.Database; - -beforeAll(async () => { - const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; - client = new Database(dbPath); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - client?.close(); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; -}); - -test('migrator', async () => { +test('migrator', async ({ db }) => { db.run(sql`drop table if exists another_users`); db.run(sql`drop table if exists users12`); db.run(sql`drop table if exists __drizzle_migrations`); - migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + migrate(db as BetterSQLite3Database, { migrationsFolder: './drizzle2/sqlite' }); db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = db.select().from(usersMigratorTable).all(); @@ -49,7 +28,105 @@ test('migrator', async () => { db.run(sql`drop table __drizzle_migrations`); }); -skipTests([ +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = migrate(db as BetterSQLite3Database, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = migrate(db as BetterSQLite3Database, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + migrate(db as BetterSQLite3Database, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = migrate(db as BetterSQLite3Database, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +const skip = [ /** * doesn't work properly: * Expect: should rollback transaction and don't insert/ update data @@ -57,5 +134,5 @@ skipTests([ */ 'transaction rollback', 'nested transaction rollback', -]); -tests(); +]; +tests(test, skip); diff --git a/integration-tests/tests/relational/bettersqlite-v1.test.ts b/integration-tests/tests/sqlite/bettersqlite-v1.test.ts similarity index 99% rename from integration-tests/tests/relational/bettersqlite-v1.test.ts rename to integration-tests/tests/sqlite/bettersqlite-v1.test.ts index 811103b4e6..4f2a82c286 100644 --- a/integration-tests/tests/relational/bettersqlite-v1.test.ts +++ b/integration-tests/tests/sqlite/bettersqlite-v1.test.ts @@ -3,7 +3,7 @@ import Database from 'better-sqlite3'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { type BetterSQLite3Database, drizzle } from 'drizzle-orm/better-sqlite3'; import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './sqlite.schema.ts'; +import * as schema from './sqlite.schema'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; @@ -19,7 +19,7 @@ let db: BetterSQLite3Database; beforeAll(() => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; - db = drizzle(new Database(dbPath), { schema, logger: ENABLE_LOGGING, casing: 'snake_case' }); + db = drizzle({ client: new Database(dbPath), schema, logger: ENABLE_LOGGING, casing: 'snake_case' }); }); beforeEach(() => { diff --git a/integration-tests/tests/relational/bettersqlite.test.ts b/integration-tests/tests/sqlite/bettersqlite.test.ts similarity index 99% rename from integration-tests/tests/relational/bettersqlite.test.ts rename to integration-tests/tests/sqlite/bettersqlite.test.ts index 4162b272f6..fb9cc1549b 100644 --- a/integration-tests/tests/relational/bettersqlite.test.ts +++ b/integration-tests/tests/sqlite/bettersqlite.test.ts @@ -4,7 +4,7 @@ import { defineRelations, DrizzleError, eq, sql, TransactionRollbackError } from import { type BetterSQLite3Database, drizzle } from 'drizzle-orm/better-sqlite3'; import { alias } from 'drizzle-orm/sqlite-core'; import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './sqlite.relations.ts'; +import relations from './sqlite.relations'; import { allTypesTable, commentsTable, @@ -16,7 +16,7 @@ import { students, usersTable, usersToGroupsTable, -} from './sqlite.schema.ts'; +} from './sqlite.schema'; const ENABLE_LOGGING = false; @@ -25,7 +25,7 @@ let db: BetterSQLite3Database; beforeAll(() => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; - db = drizzle(new Database(dbPath), { relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); + db = drizzle({ client: new Database(dbPath), relations, logger: ENABLE_LOGGING, casing: 'snake_case' }); }); beforeEach(() => { diff --git a/integration-tests/tests/sqlite/d1-batch.test.ts b/integration-tests/tests/sqlite/d1-batch.test.ts index 8228331151..f270a3a838 100644 --- a/integration-tests/tests/sqlite/d1-batch.test.ts +++ b/integration-tests/tests/sqlite/d1-batch.test.ts @@ -46,9 +46,7 @@ export const usersToGroupsTable = sqliteTable( () => groupsTable.id, ), }, - (t) => ({ - pk: primaryKey({ columns: [t.userId, t.groupId] }), - }), + (t) => [primaryKey({ columns: [t.userId, t.groupId] })], ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { diff --git a/integration-tests/tests/sqlite/d1.test.ts b/integration-tests/tests/sqlite/d1.test.ts index de66361dc9..4935553c8d 100644 --- a/integration-tests/tests/sqlite/d1.test.ts +++ b/integration-tests/tests/sqlite/d1.test.ts @@ -1,46 +1,21 @@ -import { D1Database, D1DatabaseAPI } from '@miniflare/d1'; -import { createSQLiteDB } from '@miniflare/shared'; import { sql } from 'drizzle-orm'; import type { DrizzleD1Database } from 'drizzle-orm/d1'; -import { drizzle } from 'drizzle-orm/d1'; import { migrate } from 'drizzle-orm/d1/migrator'; -import { beforeAll, beforeEach, expect, test } from 'vitest'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; +import { d1Test as test } from './instrumentation'; import relations from './relations'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './sqlite-common-cache'; +import { tests as cacheTests } from './sqlite-common-cache'; -const ENABLE_LOGGING = false; - -let db: DrizzleD1Database; -let dbGlobalCached: DrizzleD1Database; -let cachedDb: DrizzleD1Database; - -beforeAll(async () => { - const sqliteDb = await createSQLiteDB(':memory:'); - const d1db = new D1Database(new D1DatabaseAPI(sqliteDb)); - db = drizzle(d1db, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(d1db, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(d1db, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; - ctx.cachedSqlite = { - db: cachedDb, - dbGlobalCached, - }; -}); - -test('migrator', async () => { +test('migrator', async ({ db }) => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + await migrate(db as DrizzleD1Database, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); @@ -56,13 +31,16 @@ test('migrator', async () => { await db.run(sql`drop table __drizzle_migrations`); }); -test('migrator : migrate with custom table', async () => { +test('migrator : migrate with custom table', async ({ db }) => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + await migrate(db as DrizzleD1Database, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable: customTable, + }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); @@ -78,7 +56,105 @@ test('migrator : migrate with custom table', async () => { await db.run(sql`drop table ${sql.identifier(customTable)}`); }); -skipTests([ +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as DrizzleD1Database, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as DrizzleD1Database, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + await migrate(db as DrizzleD1Database, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as DrizzleD1Database, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +const skip = [ // Cannot convert 49,50,55 to a BigInt 'insert bigint values', // SyntaxError: Unexpected token , in JSON at position 2 @@ -96,6 +172,6 @@ skipTests([ 'select from alias', 'join view as subquery', 'cross join', -]); -cacheTests(); -tests(); +]; +cacheTests(test, skip); +tests(test, skip); diff --git a/integration-tests/tests/relational/db.ts b/integration-tests/tests/sqlite/db.ts similarity index 87% rename from integration-tests/tests/relational/db.ts rename to integration-tests/tests/sqlite/db.ts index 00199003d0..5ad3d3b814 100644 --- a/integration-tests/tests/relational/db.ts +++ b/integration-tests/tests/sqlite/db.ts @@ -3,11 +3,11 @@ import Database from 'better-sqlite3'; import { sql } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import util from 'node:util'; -import * as schema from './tables.ts'; +import * as schema from './tables'; async function main() { const bdb = new Database(process.env['SQLITE_DB_PATH']!); - const db = drizzle(bdb, { schema, logger: true }); + const db = drizzle({ client: bdb, schema, logger: true }); const result = db._query.users.findMany({ columns: { diff --git a/integration-tests/tests/sqlite/durable-objects/index.ts b/integration-tests/tests/sqlite/durable-objects/index.ts index d866544983..7a0f3036dc 100644 --- a/integration-tests/tests/sqlite/durable-objects/index.ts +++ b/integration-tests/tests/sqlite/durable-objects/index.ts @@ -1,6 +1,5 @@ /// -import { expect } from 'chai'; import { DurableObject } from 'cloudflare:workers'; import { and, @@ -45,6 +44,7 @@ import { union, unionAll, } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; import { type Equal, Expect } from '~/utils'; import migrations from './drizzle/migrations'; @@ -110,9 +110,7 @@ export const pkExampleTable = sqliteTable('pk_example', { id: integer('id').notNull(), name: text('name').notNull(), email: text('email').notNull(), -}, (table) => ({ - compositePk: primaryKey({ columns: [table.id, table.name] }), -})); +}, (table) => [primaryKey({ columns: [table.id, table.name] })]); export const bigIntExample = sqliteTable('big_int_example', { id: integer('id').primaryKey(), diff --git a/integration-tests/tests/sqlite/instrumentation.ts b/integration-tests/tests/sqlite/instrumentation.ts new file mode 100644 index 0000000000..ca46a78e42 --- /dev/null +++ b/integration-tests/tests/sqlite/instrumentation.ts @@ -0,0 +1,926 @@ +import { type Client as LibSQLClient, createClient, type InArgs, type InStatement } from '@libsql/client'; +import { + createClient as createHttpClient, + type InArgs as HttpInArgs, + type InStatement as HttpInStatement, +} from '@libsql/client/http'; +import { + createClient as createNodeClient, + type InArgs as NodeInArgs, + type InStatement as NodeInStatement, +} from '@libsql/client/node'; +import { + createClient as createSqlite3Client, + type InArgs as Sqlite3InArgs, + type InStatement as Sqlite3InStatement, +} from '@libsql/client/sqlite3'; +import { + type Client as LibSQLWsClient, + createClient as createWsClient, + type InArgs as WsInArgs, + type InStatement as WsInStatement, +} from '@libsql/client/ws'; +import { D1Database, D1DatabaseAPI } from '@miniflare/d1'; +import { createSQLiteDB } from '@miniflare/shared'; +import { Database as SqliteCloudDatabase, SQLiteCloudRowset } from '@sqlitecloud/drivers'; +import { Database as TursoDatabase } from '@tursodatabase/database'; +import retry from 'async-retry'; +import type BetterSqlite3 from 'better-sqlite3'; +import Client from 'better-sqlite3'; +import { + type AnyRelationsBuilderConfig, + defineRelations, + type ExtractTablesFromSchema, + type ExtractTablesWithRelations, + getTableName, + is, + type RelationsBuilder, + type RelationsBuilderConfig, + Table, +} from 'drizzle-orm'; +import { drizzle as drizzleBetterSqlite3 } from 'drizzle-orm/better-sqlite3'; +import { Cache, type MutationOption } from 'drizzle-orm/cache/core'; +import type { CacheConfig } from 'drizzle-orm/cache/core/types'; +import { drizzle as drizzleD1 } from 'drizzle-orm/d1'; +import { drizzle as drizzleLibSQL, type LibSQLDatabase } from 'drizzle-orm/libsql'; +import { drizzle as drizzleLibSQLHttp } from 'drizzle-orm/libsql/http'; +import { drizzle as drizzleLibSQLNode } from 'drizzle-orm/libsql/node'; +import { drizzle as drizzleLibSQLSqlite3 } from 'drizzle-orm/libsql/sqlite3'; +import { drizzle as drizzleLibSQLWs } from 'drizzle-orm/libsql/ws'; +import { drizzle as drizzleSqlJs } from 'drizzle-orm/sql-js'; +import { drizzle as drizzleSqliteCloud } from 'drizzle-orm/sqlite-cloud'; +import { BaseSQLiteDatabase, SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; +import { drizzle as drizzleProxy } from 'drizzle-orm/sqlite-proxy'; +import { drizzle as drizzleTursoDatabase } from 'drizzle-orm/tursodatabase/database'; +import Keyv from 'keyv'; +import type { Database as SQLJsDatabase } from 'sql.js'; +import initSqlJs from 'sql.js'; +import { test as base } from 'vitest'; +import relations from './relations'; +import sqliteRelations from './sqlite.relations'; +import * as sqliteSchema from './sqlite.schema'; + +// oxlint-disable-next-line drizzle-internal/require-entity-kind +export class TestCache extends Cache { + private globalTtl: number = 1000; + private usedTablesPerKey: Record = {}; + + constructor(private readonly strat: 'explicit' | 'all', private kv: Keyv = new Keyv()) { + super(); + } + + override strategy() { + return this.strat; + } + + override async get(key: string, _tables: string[], _isTag: boolean): Promise { + const res = await this.kv.get(key) ?? undefined; + return res; + } + + override async put( + key: string, + response: any, + tables: string[], + isTag: boolean, + config?: CacheConfig, + ): Promise { + await this.kv.set(key, response, config ? config.ex : this.globalTtl); + for (const table of tables) { + const keys = this.usedTablesPerKey[table]; + if (keys === undefined) { + this.usedTablesPerKey[table] = [key]; + } else { + keys.push(key); + } + } + } + + override async onMutate(params: MutationOption): Promise { + const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; + const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; + + const keysToDelete = new Set(); + + for (const table of tablesArray) { + const tableName = is(table, Table) ? getTableName(table) : table as string; + const keys = this.usedTablesPerKey[tableName] ?? []; + for (const key of keys) keysToDelete.add(key); + } + + if (keysToDelete.size > 0 || tagsArray.length > 0) { + for (const tag of tagsArray) { + await this.kv.delete(tag); + } + + for (const key of keysToDelete) { + await this.kv.delete(key); + for (const table of tablesArray) { + const tableName = is(table, Table) ? getTableName(table) : table as string; + this.usedTablesPerKey[tableName] = []; + } + } + } + } +} + +// eslint-disable-next-line drizzle-internal/require-entity-kind +class ServerSimulator { + constructor(private db: BetterSqlite3.Database) {} + + async query(sql: string, params: any[], method: string) { + if (method === 'run') { + try { + const result = this.db.prepare(sql).run(params); + return { data: result as any }; + } catch (e: any) { + return { error: e.message }; + } + } else if (method === 'all' || method === 'values') { + try { + const rows = this.db.prepare(sql).raw().all(params); + return { data: rows }; + } catch (e: any) { + return { error: e.message }; + } + } else if (method === 'get') { + try { + const row = this.db.prepare(sql).raw().get(params); + return { data: row }; + } catch (e: any) { + return { error: e.message }; + } + } else { + return { error: 'Unknown method value' }; + } + } + + migrations(queries: string[]) { + this.db.exec('BEGIN'); + try { + for (const query of queries) { + this.db.exec(query); + } + this.db.exec('COMMIT'); + } catch { + this.db.exec('ROLLBACK'); + } + + return {}; + } +} + +export const _push = async ( + query: (sql: string, params: any[]) => Promise, + schema: any, +) => { + const { diff } = await import('../../../drizzle-kit/tests/sqlite/mocks' as string); + + const res = await diff({}, schema, []); + + for (const s of res.sqlStatements) { + await query(s, []).catch((e) => { + console.error(s); + console.error(e); + throw e; + }); + } +}; + +export const prepareSQLiteCloudClient = async (uri: string) => { + const client = new SqliteCloudDatabase(uri); + + // TODO: revise: maybe I should create run and all funcs instead of query func + const all = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + return await new Promise((resolve, reject) => { + (params.length ? stmt.bind(...params) : stmt).all((e: Error | null, d: SQLiteCloudRowset) => { + if (e) return reject(e); + + return resolve(d.map((v) => Object.fromEntries(Object.entries(v)))); + }); + }); + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + return await new Promise((resolve, reject) => { + (params.length ? stmt.bind(...params) : stmt).run((e: Error | null, d: SQLiteCloudRowset) => { + if (e) return reject(e); + + return resolve(d); + }); + }); + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +export const prepareBetterSqlite3Client = () => { + const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; + const client = new Client(dbPath); + + const all = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + return stmt.all(...params); + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + return stmt.run(...params) as any; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +export const prepareTursoDatabaseClient = () => { + const client = new TursoDatabase(':memory:'); + + const all = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + return stmt.all(...params); + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + return stmt.run(...params) as any; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +export const prepareLibSQLClient = async (url: string, authToken?: string) => { + const client = createClient({ url, authToken }); + // TODO revise: should I add here do-while loop for client creation? + + // client = await retry(async () => { + // client = createClient({ url, authToken, intMode: 'number' }); + // return client; + // }, { + // retries: 20, + // factor: 1, + // minTimeout: 250, + // maxTimeout: 250, + // randomize: false, + // onRetry() { + // client?.close(); + // }, + // }); + + const all = async (sql: string, params: any[] = []) => { + const stmt: InStatement = { sql, args: params as InArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt: InStatement = { sql, args: params as InArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +export const prepareLibSQLWsClient = async (url: string, authToken?: string) => { + const client = createWsClient({ url, authToken }); + + const all = async (sql: string, params: any[] = []) => { + const stmt: WsInStatement = { sql, args: params as WsInArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt: WsInStatement = { sql, args: params as WsInArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +export const prepareLibSQLSqlite3Client = (url: string = ':memory:') => { + const client = createSqlite3Client({ url }); + + const all = async (sql: string, params: any[] = []) => { + const stmt: Sqlite3InStatement = { sql, args: params as Sqlite3InArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt: Sqlite3InStatement = { sql, args: params as Sqlite3InArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +export const prepareLibSQLNodeClient = async (url: string, authToken?: string) => { + const client = createNodeClient({ url, authToken }); + + const all = async (sql: string, params: any[] = []) => { + const stmt: NodeInStatement = { sql, args: params as NodeInArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt: NodeInStatement = { sql, args: params as NodeInArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +export const prepareLibSQLHttpClient = async (url: string, authToken?: string) => { + const client = createHttpClient({ url, authToken }); + + const all = async (sql: string, params: any[] = []) => { + const stmt: HttpInStatement = { sql, args: params as HttpInArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt: HttpInStatement = { sql, args: params as HttpInArgs }; + + const result = await client!.execute(stmt); + return result.rows; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +export const prepareD1Client = async () => { + const sqliteDb = await createSQLiteDB(':memory:'); + const client = new D1Database(new D1DatabaseAPI(sqliteDb)); + + const all = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + const result = await stmt.bind(...params).all(); + return result.results as any[]; + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + const result = await stmt.bind(...params).run(); + return result.results as any[]; + }; + + const batch = async (statements: string[]) => { + return await client.batch(statements.map((x) => client.prepare(x))); + }; + + return { client, all, run, batch }; +}; + +export const prepareSqlJs = async () => { + const SQL = await initSqlJs(); + const client = new SQL.Database(); + + const all = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + stmt.bind(params); + const rows: any[] = []; + while (stmt.step()) { + rows.push(stmt.getAsObject()); + } + stmt.free(); + + return rows; + }; + + const run = async (sql: string, params: any[] = []) => { + const stmt = client.prepare(sql); + const result = stmt.run(params); + stmt.free(); + return result as any; + }; + + const batch = async (statements: string[]) => { + return Promise.all( + statements.map((x) => run(x)), + ).then((results) => [results] as any); + }; + + return { client, all, run, batch }; +}; + +const providerClosure = async (items: T[]) => { + return async () => { + while (true) { + const c = items.shift(); + if (!c) { + await new Promise((resolve) => setTimeout(resolve, 50)); + continue; + } + return { + ...c, + release: () => { + items.push(c); + }, + }; + } + }; +}; + +export const providerForSQLiteCloud = async () => { + const url = process.env['SQLITE_MANY_CLOUD_CONNECTION_STRING']; + if (url === undefined) throw new Error('SQLITE_MANY_CLOUD_CONNECTION_STRING is not set.'); + const uris = url.split(';').filter((val) => val !== ''); + const clients = await Promise.all(uris.map(async (urlI) => await prepareSQLiteCloudClient(urlI))); + + return providerClosure(clients); +}; + +export const providerForTursoDatabase = async () => { + const clients = [prepareTursoDatabaseClient()]; + + return providerClosure(clients); +}; + +export const providerForLibSQL = async () => { + const url = process.env['LIBSQL_URL']; + const authToken = process.env['LIBSQL_AUTH_TOKEN']; + if (url === undefined) throw new Error('LIBSQL_URL is not set.'); + const uris = url.split(';').filter((val) => val !== ''); + const clients = await Promise.all(uris.map(async (urlI) => await prepareLibSQLClient(urlI, authToken))); + + return providerClosure(clients); +}; +export const providerForLibSQLWs = async () => { + const url = process.env['LIBSQL_REMOTE_MANY_URL']; + const authToken = process.env['LIBSQL_REMOTE_TOKEN']; + if (url === undefined) { + throw new Error('LIBSQL_REMOTE_MANY_URL is not set.'); + } + const uris = url.split(';').filter((val) => val !== ''); + const clients = await Promise.all(uris.map(async (urlI) => await prepareLibSQLWsClient(urlI, authToken))); + + return providerClosure(clients); +}; +export const providerForLibSQLSqlite3 = async () => { + const clients = [prepareLibSQLSqlite3Client()]; + + return providerClosure(clients); +}; + +export const providerForLibSQLNode = async () => { + const url = process.env['LIBSQL_URL']; + const authToken = process.env['LIBSQL_AUTH_TOKEN']; + if (url === undefined) { + throw new Error('LIBSQL_URL is not set.'); + } + const uris = url.split(';').filter((val) => val !== ''); + const clients = await Promise.all(uris.map(async (urlI) => await prepareLibSQLNodeClient(urlI, authToken))); + + return providerClosure(clients); +}; +export const providerForLibSQLHttp = async () => { + const url = process.env['LIBSQL_REMOTE_MANY_URL']; + const authToken = process.env['LIBSQL_REMOTE_TOKEN']; + if (url === undefined) { + throw new Error('LIBSQL_REMOTE_MANY_URL is not set.'); + } + const uris = url.split(';').filter((val) => val !== ''); + const clients = await Promise.all(uris.map(async (urlI) => await prepareLibSQLHttpClient(urlI, authToken))); + + return providerClosure(clients); +}; + +export const providerForBetterSqlite3 = async () => { + const clients = [prepareBetterSqlite3Client()]; + + return providerClosure(clients); +}; +export const providerForD1 = async () => { + const clients = [await prepareD1Client()]; + + return providerClosure(clients); +}; +export const providerForSqlJs = async () => { + const clients = [await prepareSqlJs()]; + + return providerClosure(clients); +}; + +type ProviderForSQLiteCloud = Awaited>; +type ProviderForTursoDatabase = Awaited>; +type ProviderForLibSQL = Awaited>; +type ProviderForLibSQLWs = Awaited>; +type ProviderForLibSQLSqlite3 = Awaited>; +type ProviderForLibSQLNode = Awaited>; +type ProviderForLibSQLHttp = Awaited>; +type ProviderForBetterSqlite3 = Awaited>; +type ProviderForD1 = Awaited>; +type ProviderForSqlJs = Awaited>; + +type Provider = + | ProviderForSQLiteCloud + | ProviderForTursoDatabase + | ProviderForLibSQL + | ProviderForLibSQLWs + | ProviderForLibSQLSqlite3 + | ProviderForLibSQLNode + | ProviderForLibSQLHttp + | ProviderForBetterSqlite3 + | ProviderForD1 + | ProviderForSqlJs; + +export type SqliteSchema_ = Record< + string, + | SQLiteTable + | SQLiteView + | unknown +>; + +const testFor = ( + vendor: + | 'sqlite-cloud' + | 'proxy' + | 'tursodatabase' + | 'libsql' + | 'libsql-turso' + | 'libsql-turso-v1' + | 'libsql-ws' + | 'libsql-sqlite3' + | 'libsql-node' + | 'libsql-http' + | 'better-sqlite3' + | 'd1' + | 'sql-js', +) => { + return base.extend<{ + provider: Provider; + kit: { + client: any; + all: (sql: string, params?: any[]) => Promise; + run: (sql: string, params?: any[]) => Promise; + batch: (statements: string[]) => Promise; + }; + client: + | BetterSqlite3.Database + | SqliteCloudDatabase + | TursoDatabase + | LibSQLClient + | LibSQLWsClient + | D1Database + | SQLJsDatabase; + db: BaseSQLiteDatabase<'async' | 'sync', any, any, typeof relations>; + push: (schema: any) => Promise; + createDB: { + ( + schema: S, + ): BaseSQLiteDatabase<'async' | 'sync', any, any, ReturnType>>; + ( + schema: S, + cb: (helpers: RelationsBuilder>) => TConfig, + ): BaseSQLiteDatabase< + 'async' | 'sync', + any, + any, + ExtractTablesWithRelations> + >; + }; + caches: { + all: BaseSQLiteDatabase<'async' | 'sync', any, any, typeof relations>; + explicit: BaseSQLiteDatabase<'async' | 'sync', any, any, typeof relations>; + }; + }>({ + provider: [ + // oxlint-disable-next-line no-empty-pattern + async ({}, use) => { + const provider = vendor === 'sqlite-cloud' + ? await providerForSQLiteCloud() + : vendor === 'tursodatabase' + ? await providerForTursoDatabase() + : vendor === 'libsql' || vendor === 'libsql-turso' || vendor === 'libsql-turso-v1' + ? await providerForLibSQL() + : vendor === 'libsql-ws' + ? await providerForLibSQLWs() + : vendor === 'libsql-sqlite3' + ? await providerForLibSQLSqlite3() + : vendor === 'libsql-node' + ? await providerForLibSQLNode() + : vendor === 'libsql-http' + ? await providerForLibSQLHttp() + : vendor === 'proxy' || vendor === 'better-sqlite3' + ? await providerForBetterSqlite3() + : vendor === 'd1' + ? await providerForD1() + : vendor === 'sql-js' + ? await providerForSqlJs() + : '' as never; + + await use(provider); + }, + { scope: 'file' }, + ], + kit: [ + async ({ provider }, use) => { + const { client, batch, all, run, release } = await provider(); + await use({ client: client, all, run, batch }); + release(); + }, + { scope: 'test' }, + ], + client: [ + async ({ kit }, use) => { + await use(kit.client); + }, + { scope: 'test' }, + ], + db: [ + async ({ kit }, use) => { + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from sqlite proxy server:', e.message); + throw e; + } + }; + await use(drizzleProxy(proxyHandler, { relations })); + return; + } + + const db = vendor === 'sqlite-cloud' + ? drizzleSqliteCloud({ client: kit.client as any, relations }) + : vendor === 'tursodatabase' + ? drizzleTursoDatabase({ client: kit.client, relations }) + : vendor === 'libsql' + ? drizzleLibSQL({ client: kit.client, relations }) + : vendor === 'libsql-ws' + ? drizzleLibSQLWs({ client: kit.client, relations }) + : vendor === 'libsql-sqlite3' + ? drizzleLibSQLSqlite3({ client: kit.client, relations }) + : vendor === 'libsql-node' + ? drizzleLibSQLNode({ client: kit.client, relations }) + : vendor === 'libsql-http' + ? drizzleLibSQLHttp({ client: kit.client, relations }) + : vendor === 'better-sqlite3' + ? drizzleBetterSqlite3({ client: kit.client, relations }) + : vendor === 'd1' + ? drizzleD1(kit.client, { relations }) + : vendor === 'sql-js' + ? drizzleSqlJs(kit.client, { relations }) + : '' as never; + + await use(db); + }, + { scope: 'test' }, + ], + push: [ + async ({ kit }, use) => { + const push = ( + schema: any, + ) => _push(kit.run, schema); + + await use(push); + }, + { scope: 'test' }, + ], + createDB: [ + async ({ kit }, use) => { + const createDB = ( + schema: S, + cb?: ( + helpers: RelationsBuilder>, + ) => RelationsBuilderConfig>, + ) => { + const relations = cb ? defineRelations(schema, cb) : defineRelations(schema); + + if (vendor === 'sqlite-cloud') return drizzleSqliteCloud({ client: kit.client, relations }); + if (vendor === 'tursodatabase') return drizzleTursoDatabase({ client: kit.client, relations }); + if (vendor === 'libsql' || vendor === 'libsql-turso' || vendor === 'libsql-turso-v1') { + return drizzleLibSQL({ client: kit.client, relations }); + } + if (vendor === 'libsql-ws') return drizzleLibSQLWs({ client: kit.client, relations }); + if (vendor === 'libsql-sqlite3') return drizzleLibSQLSqlite3({ client: kit.client, relations }); + if (vendor === 'libsql-node') return drizzleLibSQLNode({ client: kit.client, relations }); + if (vendor === 'libsql-http') return drizzleLibSQLHttp({ client: kit.client, relations }); + if (vendor === 'better-sqlite3') return drizzleBetterSqlite3({ client: kit.client, relations }); + if (vendor === 'd1') return drizzleD1(kit.client, { relations }); + if (vendor === 'sql-js') return drizzleSqlJs(kit.client, { relations }); + + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw response.error; + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from sqlite proxy server:', e.message); + throw e; + } + }; + return drizzleProxy(proxyHandler, { relations }); + } + throw new Error(); + }; + + await use(createDB); + }, + { scope: 'test' }, + ], + caches: [ + async ({ kit }, use) => { + if (vendor === 'proxy') { + const serverSimulator = new ServerSimulator(kit.client); + const proxyHandler = async (sql: string, params: any[], method: any) => { + try { + const response = await serverSimulator.query(sql, params, method); + + if (response.error !== undefined) { + throw new Error(response.error); + } + + return { rows: response.data }; + } catch (e: any) { + console.error('Error from sqlite proxy server:', e.message); + throw e; + } + }; + const db1 = drizzleProxy(proxyHandler, { relations, cache: new TestCache('all') }); + const db2 = drizzleProxy(proxyHandler, { relations, cache: new TestCache('explicit') }); + await use({ all: db1, explicit: db2 }); + return; + } + + const config1 = { client: kit.client as any, relations, cache: new TestCache('all') }; + const config2 = { client: kit.client as any, relations, cache: new TestCache('explicit') }; + + const db1 = vendor === 'sqlite-cloud' + ? drizzleSqliteCloud(config1) + : vendor === 'tursodatabase' + ? drizzleTursoDatabase(config1) + : vendor === 'libsql' || vendor === 'libsql-turso' || vendor === 'libsql-turso-v1' + ? drizzleLibSQL(config1) + : vendor === 'libsql-ws' + ? drizzleLibSQLWs(config1) + : vendor === 'libsql-sqlite3' + ? drizzleLibSQLSqlite3(config1) + : vendor === 'libsql-node' + ? drizzleLibSQLNode(config1) + : vendor === 'libsql-http' + ? drizzleLibSQLHttp(config1) + : vendor === 'better-sqlite3' + ? drizzleBetterSqlite3(config1) + : vendor === 'd1' + ? drizzleD1(config1.client, { cache: config1.cache, relations: config1.relations }) + : vendor === 'sql-js' + ? drizzleSqlJs(config1.client, { cache: config1.cache, relations: config1.relations }) + : '' as never; + + const db2 = vendor === 'sqlite-cloud' + ? drizzleSqliteCloud(config2) + : vendor === 'tursodatabase' + ? drizzleTursoDatabase(config2) + : vendor === 'libsql' || vendor === 'libsql-turso' || vendor === 'libsql-turso-v1' + ? drizzleLibSQL(config2) + : vendor === 'libsql-ws' + ? drizzleLibSQLWs(config2) + : vendor === 'libsql-sqlite3' + ? drizzleLibSQLSqlite3(config2) + : vendor === 'libsql-node' + ? drizzleLibSQLNode(config2) + : vendor === 'libsql-http' + ? drizzleLibSQLHttp(config2) + : vendor === 'better-sqlite3' + ? drizzleBetterSqlite3(config2) + : vendor === 'd1' + ? drizzleD1(config2.client, { cache: config2.cache, relations: config2.relations }) + : vendor === 'sql-js' + ? drizzleSqlJs(config2.client, { cache: config2.cache, relations: config2.relations }) + : '' as never; + + await use({ all: db1, explicit: db2 }); + }, + { scope: 'test' }, + ], + }); +}; + +export const sqliteCloudTest = testFor('sqlite-cloud'); +export const tursoDatabaseTest = testFor('tursodatabase'); +export const libSQLTest = testFor('libsql'); +export const libSQLWsTest = testFor('libsql-ws'); +export const libSQLSqlite3Test = testFor('libsql-sqlite3'); +export const libSQLNodeTest = testFor('libsql-node'); +export const libSQLHttpTest = testFor('libsql-http'); +export const betterSqlite3Test = testFor('better-sqlite3'); +export const d1Test = testFor('d1'); +export const sqlJsTest = testFor('sql-js'); +export const libSQLTursoTest = testFor('libsql-turso').extend<{ db: LibSQLDatabase }>({ + db: [ + async ({ kit }, use) => { + const db = drizzleLibSQL({ + client: kit.client, + relations: sqliteRelations, + casing: 'snake_case', + }) as LibSQLDatabase; + + await use(db); + }, + { scope: 'test' }, + ], +}); +export const libSQLTursoV1Test = testFor('libsql-turso-v1').extend<{ db: LibSQLDatabase }>({ + db: [ + async ({ kit }, use) => { + const db = drizzleLibSQL({ + client: kit.client, + schema: sqliteSchema, + casing: 'snake_case', + }) as LibSQLDatabase; + + await use(db); + }, + { scope: 'test' }, + ], +}); +export const proxyTest = testFor('proxy').extend<{ serverSimulator: ServerSimulator }>({ + serverSimulator: [ + async ({ client }, use) => { + const serverSimulator = new ServerSimulator(client as BetterSqlite3.Database); + await use(serverSimulator); + }, + { scope: 'test' }, + ], +}); + +export type Test = ReturnType; diff --git a/integration-tests/tests/sqlite/libsql-batch.test.ts b/integration-tests/tests/sqlite/libsql-batch.test.ts index a8e1eb426b..336826be2b 100644 --- a/integration-tests/tests/sqlite/libsql-batch.test.ts +++ b/integration-tests/tests/sqlite/libsql-batch.test.ts @@ -45,9 +45,7 @@ export const usersToGroupsTable = sqliteTable( () => groupsTable.id, ), }, - (t) => ({ - pk: primaryKey({ columns: [t.userId, t.groupId] }), - }), + (t) => [primaryKey({ columns: [t.userId, t.groupId] })], ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { @@ -159,7 +157,7 @@ beforeAll(async () => { client?.close(); }, }); - db = drizzle(client, { schema, logger: ENABLE_LOGGING, relations: relationsV2 }); + db = drizzle({ client, schema, logger: ENABLE_LOGGING, relations: relationsV2 }); }); afterAll(async () => { @@ -512,6 +510,32 @@ test('insert + findManyWith + db.all', async () => { ]); }); +test('reproduce "insert + update + select + select partial" test bug', async () => { + await client.execute('drop table if exists "users";'); + await client.execute(` + CREATE TABLE "users" ( + "id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, + "name" text NOT NULL, + "verified" integer DEFAULT 0 NOT NULL, + "invited_by" integer + ); + `); + + // const query1 = db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }).toSQL(); + // console.log(query1); + const sql1 = 'insert into "users" ("id", "name", "verified", "invited_by") values (?, ?, ?, null) returning "id"'; + const params1 = [1, 'John', 0]; + const res1 = await client.execute({ sql: sql1 as string, args: params1 as any[] }); + console.log(res1); + + // const query2 = db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)).toSQL(); + // console.log(query2); + const sql2 = 'update "users" set "name" = ? where "users"."id" = ?'; + const params2 = ['Dan', 1]; + const res2 = await client.execute({ sql: sql2 as string, args: params2 as any[] }); + console.log(res2); +}); + // batch api for insert + update + select test('insert + update + select + select partial', async () => { const batchResponse = await db.batch([ diff --git a/integration-tests/tests/sqlite/libsql-http.test.ts b/integration-tests/tests/sqlite/libsql-http.test.ts index dab6d7df8f..a1a9f76290 100644 --- a/integration-tests/tests/sqlite/libsql-http.test.ts +++ b/integration-tests/tests/sqlite/libsql-http.test.ts @@ -1,58 +1,19 @@ -import { type Client, createClient } from '@libsql/client/http'; -import retry from 'async-retry'; import { asc, eq, getTableColumns, sql } from 'drizzle-orm'; import type { LibSQLDatabase } from 'drizzle-orm/libsql'; -import { drizzle } from 'drizzle-orm/libsql/http'; import { migrate } from 'drizzle-orm/libsql/migrator'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; import { randomString } from '~/utils'; +import { libSQLHttpTest as test } from './instrumentation'; import relations from './relations'; import { anotherUsersMigratorTable, tests, usersMigratorTable, usersOnUpdate } from './sqlite-common'; -const ENABLE_LOGGING = false; - -let db: LibSQLDatabase; -let client: Client; - -beforeAll(async () => { - const url = process.env['LIBSQL_REMOTE_URL']; - const authToken = process.env['LIBSQL_REMOTE_TOKEN']; - if (!url) { - throw new Error('LIBSQL_REMOTE_URL is not set'); - } - client = await retry(async () => { - client = createClient({ url, authToken }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - client?.close(); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; -}); - -test('migrator', async () => { +test('migrator', async ({ db }) => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + await migrate(db as LibSQLDatabase, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); @@ -68,13 +29,16 @@ test('migrator', async () => { await db.run(sql`drop table __drizzle_migrations`); }); -test('migrator : migrate with custom table', async () => { +test('migrator : migrate with custom table', async ({ db }) => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable: customTable, + }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); @@ -90,9 +54,105 @@ test('migrator : migrate with custom table', async () => { await db.run(sql`drop table ${sql.identifier(customTable)}`); }); -test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { - const { db } = ctx.sqlite; +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +test('test $onUpdateFn and $onUpdate works as $default', async ({ db }) => { await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( @@ -132,9 +192,7 @@ test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { } }); -test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { - const { db } = ctx.sqlite; - +test('test $onUpdateFn and $onUpdate works updating', async ({ db }) => { await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( @@ -177,11 +235,11 @@ test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { } }); -skipTests([ +const skip = [ 'delete with limit and order by', 'update with limit and order by', 'test $onUpdateFn and $onUpdate works as $default', 'test $onUpdateFn and $onUpdate works updating', -]); +]; -tests(); +tests(test, skip); diff --git a/integration-tests/tests/sqlite/libsql-node.test.ts b/integration-tests/tests/sqlite/libsql-node.test.ts index eb8049c8d7..7ebe8c4d5c 100644 --- a/integration-tests/tests/sqlite/libsql-node.test.ts +++ b/integration-tests/tests/sqlite/libsql-node.test.ts @@ -1,58 +1,19 @@ -import { type Client, createClient } from '@libsql/client/node'; -import retry from 'async-retry'; import { sql } from 'drizzle-orm'; import type { LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; -import { drizzle } from 'drizzle-orm/libsql/node'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; import { randomString } from '~/utils'; +import { libSQLNodeTest as test } from './instrumentation'; import relations from './relations'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; -const ENABLE_LOGGING = false; - -let db: LibSQLDatabase; -let client: Client; - -beforeAll(async () => { - const url = process.env['LIBSQL_URL']; - const authToken = process.env['LIBSQL_AUTH_TOKEN']; - if (!url) { - throw new Error('LIBSQL_URL is not set'); - } - client = await retry(async () => { - client = createClient({ url, authToken }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - client?.close(); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; -}); - -test('migrator', async () => { +test('migrator', async ({ db }) => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + await migrate(db as LibSQLDatabase, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); @@ -68,13 +29,16 @@ test('migrator', async () => { await db.run(sql`drop table __drizzle_migrations`); }); -test('migrator : migrate with custom table', async () => { +test('migrator : migrate with custom table', async ({ db }) => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable: customTable, + }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); @@ -90,9 +54,107 @@ test('migrator : migrate with custom table', async () => { await db.run(sql`drop table ${sql.identifier(customTable)}`); }); -skipTests([ +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +const skip = [ 'delete with limit and order by', 'update with limit and order by', -]); +]; -tests(); +tests(test, skip); diff --git a/integration-tests/tests/sqlite/libsql-sqlite3.test.ts b/integration-tests/tests/sqlite/libsql-sqlite3.test.ts index 37f8986167..5a82a69607 100644 --- a/integration-tests/tests/sqlite/libsql-sqlite3.test.ts +++ b/integration-tests/tests/sqlite/libsql-sqlite3.test.ts @@ -1,54 +1,21 @@ -import { type Client, createClient } from '@libsql/client/sqlite3'; -import retry from 'async-retry'; import { sql } from 'drizzle-orm'; import type { LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; import { drizzle } from 'drizzle-orm/libsql/sqlite3'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; +import { libSQLSqlite3Test as test } from './instrumentation'; import relations from './relations'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; -const ENABLE_LOGGING = false; - -let db: LibSQLDatabase; -let client: Client; - -beforeAll(async () => { - const url = ':memory:'; - client = await retry(async () => { - client = createClient({ url }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - client?.close(); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; -}); - -test('migrator', async () => { +test('migrator', async ({ db }) => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + await migrate(db as LibSQLDatabase, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); @@ -64,13 +31,16 @@ test('migrator', async () => { await db.run(sql`drop table __drizzle_migrations`); }); -test('migrator : migrate with custom table', async () => { +test('migrator : migrate with custom table', async ({ db }) => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable: customTable, + }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); @@ -86,13 +56,111 @@ test('migrator : migrate with custom table', async () => { await db.run(sql`drop table ${sql.identifier(customTable)}`); }); -skipTests([ +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +const skip = [ 'delete with limit and order by', 'update with limit and order by', 'transaction', 'transaction rollback', 'nested transaction', 'nested transaction rollback', -]); +]; -tests(); +tests(test, skip); diff --git a/integration-tests/tests/sqlite/libsql-ws.test.ts b/integration-tests/tests/sqlite/libsql-ws.test.ts index 6ce7ac8029..196639481e 100644 --- a/integration-tests/tests/sqlite/libsql-ws.test.ts +++ b/integration-tests/tests/sqlite/libsql-ws.test.ts @@ -1,58 +1,21 @@ -import { type Client, createClient } from '@libsql/client/ws'; -import retry from 'async-retry'; import { asc, eq, getTableColumns, sql } from 'drizzle-orm'; import type { LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; import { drizzle } from 'drizzle-orm/libsql/ws'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; +import { libSQLWsTest as test } from './instrumentation'; import relations from './relations'; import { anotherUsersMigratorTable, tests, usersMigratorTable, usersOnUpdate } from './sqlite-common'; -const ENABLE_LOGGING = false; - -let db: LibSQLDatabase; -let client: Client; - -beforeAll(async () => { - const url = process.env['LIBSQL_REMOTE_URL']; - const authToken = process.env['LIBSQL_REMOTE_TOKEN']; - if (!url) { - throw new Error('LIBSQL_REMOTE_URL is not set'); - } - client = await retry(async () => { - client = createClient({ url, authToken }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - client?.close(); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; -}); - -test('migrator', async () => { +test('migrator', async ({ db }) => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + await migrate(db as LibSQLDatabase, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); @@ -68,13 +31,16 @@ test('migrator', async () => { await db.run(sql`drop table __drizzle_migrations`); }); -test('migrator : migrate with custom table', async () => { +test('migrator : migrate with custom table', async ({ db }) => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable: customTable, + }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); @@ -90,9 +56,105 @@ test('migrator : migrate with custom table', async () => { await db.run(sql`drop table ${sql.identifier(customTable)}`); }); -test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { - const { db } = ctx.sqlite; +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +test('test $onUpdateFn and $onUpdate works as $default', async ({ db }) => { await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( @@ -132,9 +194,7 @@ test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { } }); -test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { - const { db } = ctx.sqlite; - +test('test $onUpdateFn and $onUpdate works updating', async ({ db }) => { await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( @@ -177,13 +237,13 @@ test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { } }); -skipTests([ +const skip = [ 'delete with limit and order by', 'update with limit and order by', 'join view as subquery', 'test $onUpdateFn and $onUpdate works as $default', 'test $onUpdateFn and $onUpdate works updating', 'prepared statement reuse', -]); +]; -tests(); +tests(test, skip); diff --git a/integration-tests/tests/sqlite/libsql.test.ts b/integration-tests/tests/sqlite/libsql.test.ts index 70426ec1e3..83bf49c845 100644 --- a/integration-tests/tests/sqlite/libsql.test.ts +++ b/integration-tests/tests/sqlite/libsql.test.ts @@ -1,66 +1,21 @@ -import { type Client, createClient } from '@libsql/client'; -import retry from 'async-retry'; import { sql } from 'drizzle-orm'; -import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; +import type { LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; +import { libSQLTest as test } from './instrumentation'; import relations from './relations'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; -import { TestCache, TestGlobalCache, tests as cacheTests } from './sqlite-common-cache'; - -const ENABLE_LOGGING = false; - -let db: LibSQLDatabase; -let dbGlobalCached: LibSQLDatabase; -let cachedDb: LibSQLDatabase; -let client: Client; - -beforeAll(async () => { - const url = process.env['LIBSQL_URL']; - const authToken = process.env['LIBSQL_AUTH_TOKEN']; - if (!url) { - throw new Error('LIBSQL_URL is not set'); - } - client = await retry(async () => { - client = createClient({ url, authToken }); - return client; - }, { - retries: 20, - factor: 1, - minTimeout: 250, - maxTimeout: 250, - randomize: false, - onRetry() { - client?.close(); - }, - }); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); - cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); - dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); -}); +import { tests as cacheTests } from './sqlite-common-cache'; -afterAll(async () => { - client?.close(); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; - ctx.cachedSqlite = { - db: cachedDb, - dbGlobalCached, - }; -}); - -test('migrator', async () => { +test('migrator', async ({ db }) => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + await migrate(db as LibSQLDatabase, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); @@ -76,13 +31,16 @@ test('migrator', async () => { await db.run(sql`drop table __drizzle_migrations`); }); -test('migrator : migrate with custom table', async () => { +test('migrator : migrate with custom table', async ({ db }) => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable: customTable, + }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); @@ -98,10 +56,108 @@ test('migrator : migrate with custom table', async () => { await db.run(sql`drop table ${sql.identifier(customTable)}`); }); -skipTests([ +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as LibSQLDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +const skip = [ 'delete with limit and order by', 'update with limit and order by', -]); +]; -cacheTests(); -tests(); +cacheTests(test, skip); +tests(test, skip); diff --git a/integration-tests/tests/replicas/sqlite.test.ts b/integration-tests/tests/sqlite/replicas.test.ts similarity index 100% rename from integration-tests/tests/replicas/sqlite.test.ts rename to integration-tests/tests/sqlite/replicas.test.ts diff --git a/integration-tests/tests/sqlite/sql-js.test.ts b/integration-tests/tests/sqlite/sql-js.test.ts index f217bec22e..849373c248 100644 --- a/integration-tests/tests/sqlite/sql-js.test.ts +++ b/integration-tests/tests/sqlite/sql-js.test.ts @@ -1,41 +1,21 @@ import { sql } from 'drizzle-orm'; import type { SQLJsDatabase } from 'drizzle-orm/sql-js'; -import { drizzle } from 'drizzle-orm/sql-js'; import { migrate } from 'drizzle-orm/sql-js/migrator'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; import type { Database } from 'sql.js'; import initSqlJs from 'sql.js'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; +import { expect } from 'vitest'; import { skipTests } from '~/common'; +import { sqlJsTest as test } from './instrumentation'; import relations from './relations'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; -const ENABLE_LOGGING = false; - -let db: SQLJsDatabase; -let client: Database; - -beforeAll(async () => { - const SQL = await initSqlJs(); - client = new SQL.Database(); - db = drizzle(client, { logger: ENABLE_LOGGING, relations }); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; -}); - -afterAll(async () => { - client?.close(); -}); - -test('migrator', async () => { +test('migrator', async ({ db }) => { db.run(sql`drop table if exists another_users`); db.run(sql`drop table if exists users12`); db.run(sql`drop table if exists __drizzle_migrations`); - migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + migrate(db as SQLJsDatabase, { migrationsFolder: './drizzle2/sqlite' }); db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = db.select().from(usersMigratorTable).all(); @@ -51,7 +31,105 @@ test('migrator', async () => { db.run(sql`drop table __drizzle_migrations`); }); -skipTests([ +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = migrate(db as SQLJsDatabase, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = migrate(db as SQLJsDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + migrate(db as SQLJsDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = migrate(db as SQLJsDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +const skip = [ /** * doesn't work properly: * Expect: should rollback transaction and don't insert/ update data @@ -61,5 +139,5 @@ skipTests([ 'nested transaction rollback', 'delete with limit and order by', 'update with limit and order by', -]); -tests(); +]; +tests(test, skip); diff --git a/integration-tests/tests/sqlite/sqlite-cloud.test.ts b/integration-tests/tests/sqlite/sqlite-cloud.test.ts index 56347cbb97..8a3218f638 100644 --- a/integration-tests/tests/sqlite/sqlite-cloud.test.ts +++ b/integration-tests/tests/sqlite/sqlite-cloud.test.ts @@ -1,45 +1,12 @@ -import { Database } from '@sqlitecloud/drivers'; import { sql } from 'drizzle-orm'; import type { SQLiteCloudDatabase } from 'drizzle-orm/sqlite-cloud'; -import { drizzle } from 'drizzle-orm/sqlite-cloud'; import { migrate } from 'drizzle-orm/sqlite-cloud/migrator'; -import { type BaseSQLiteDatabase, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; +import { getTableConfig, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { expect } from 'vitest'; +import { sqliteCloudTest as test } from './instrumentation'; import relations from './relations'; import { tests } from './sqlite-common'; -declare module 'vitest' { - interface TestContext { - sqlite: { - db: BaseSQLiteDatabase<'async' | 'sync', any, Record, typeof relations>; - }; - } -} - -const ENABLE_LOGGING = false; - -let db: SQLiteCloudDatabase; -let client: Database | undefined; - -beforeAll(async () => { - const connectionString = process.env['SQLITE_CLOUD_CONNECTION_STRING']; - if (!connectionString) throw new Error('SQLITE_CLOUD_CONNECTION_STRING is not set'); - - client = new Database(connectionString); - db = drizzle(connectionString, { logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - client?.close(); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; -}); - export const usersMigratorTable = sqliteTable('users12', { id: integer('id').primaryKey(), name: text('name').notNull(), @@ -52,12 +19,12 @@ export const anotherUsersMigratorTable = sqliteTable('another_users', { email: text('email').notNull(), }); -test('migrator', async () => { +test.concurrent('migrator', async ({ db }) => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + await migrate(db as SQLiteCloudDatabase, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); @@ -72,9 +39,108 @@ test('migrator', async () => { await db.run(sql`drop table __drizzle_migrations`); }); -skipTests([ +test.concurrent('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as SQLiteCloudDatabase, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test.concurrent('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as SQLiteCloudDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.tableExists).toStrictEqual(false); +}); + +test.concurrent('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + await migrate(db as SQLiteCloudDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as SQLiteCloudDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ tableExists: boolean | number }>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.tableExists).toStrictEqual(true); +}); + +const skip = [ // Currently not supported by provider 'update with limit and order by', 'delete with limit and order by', -]); -tests(); +]; + +tests(test, skip); diff --git a/integration-tests/tests/sqlite/sqlite-common-cache.ts b/integration-tests/tests/sqlite/sqlite-common-cache.ts index 6d29656b61..e85d0985ef 100644 --- a/integration-tests/tests/sqlite/sqlite-common-cache.ts +++ b/integration-tests/tests/sqlite/sqlite-common-cache.ts @@ -1,91 +1,7 @@ -import { eq, getTableName, is, sql, Table } from 'drizzle-orm'; -import type { MutationOption } from 'drizzle-orm/cache/core'; -import { Cache } from 'drizzle-orm/cache/core'; -import type { CacheConfig } from 'drizzle-orm/cache/core/types'; -import { alias, type BaseSQLiteDatabase, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; -import Keyv from 'keyv'; -import { beforeEach, describe, expect, test, vi } from 'vitest'; -import type relations from './relations'; - -// eslint-disable-next-line drizzle-internal/require-entity-kind -export class TestGlobalCache extends Cache { - private globalTtl: number = 1000; - private usedTablesPerKey: Record = {}; - - constructor(private kv: Keyv = new Keyv()) { - super(); - } - - override strategy(): 'explicit' | 'all' { - return 'all'; - } - override async get(key: string, _tables: string[], _isTag: boolean): Promise { - const res = await this.kv.get(key) ?? undefined; - return res; - } - override async put( - key: string, - response: any, - tables: string[], - isTag: boolean, - config?: CacheConfig, - ): Promise { - await this.kv.set(key, response, config ? config.ex : this.globalTtl); - for (const table of tables) { - const keys = this.usedTablesPerKey[table]; - if (keys === undefined) { - this.usedTablesPerKey[table] = [key]; - } else { - keys.push(key); - } - } - } - override async onMutate(params: MutationOption): Promise { - const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; - const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; - - const keysToDelete = new Set(); - - for (const table of tablesArray) { - const tableName = is(table, Table) ? getTableName(table) : table as string; - const keys = this.usedTablesPerKey[tableName] ?? []; - for (const key of keys) keysToDelete.add(key); - } - - if (keysToDelete.size > 0 || tagsArray.length > 0) { - for (const tag of tagsArray) { - await this.kv.delete(tag); - } - - for (const key of keysToDelete) { - await this.kv.delete(key); - for (const table of tablesArray) { - const tableName = is(table, Table) ? getTableName(table) : table as string; - this.usedTablesPerKey[tableName] = []; - } - } - } - } -} - -// eslint-disable-next-line drizzle-internal/require-entity-kind -export class TestCache extends TestGlobalCache { - override strategy(): 'explicit' | 'all' { - return 'explicit'; - } -} - -declare module 'vitest' { - interface TestContext { - cachedSqlite: { - db: BaseSQLiteDatabase; - dbGlobalCached: BaseSQLiteDatabase; - }; - sqlite: { - db: BaseSQLiteDatabase<'async' | 'sync', any, Record, typeof relations>; - }; - } -} +import { eq, sql } from 'drizzle-orm'; +import { alias, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { describe, expect, vi } from 'vitest'; +import { Test } from './instrumentation'; const usersTable = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), @@ -101,16 +17,19 @@ const postsTable = sqliteTable('posts', { userId: integer('user_id').references(() => usersTable.id), }); -export function tests() { +export function tests(test: Test, exclude: string[] = []) { + test.beforeEach(({ task, skip }) => { + if (exclude.includes(task.name)) skip(); + }); describe('common_cache', () => { - beforeEach(async (ctx) => { - const { db, dbGlobalCached } = ctx.cachedSqlite; - await db.run(sql`drop table if exists users`); - await db.run(sql`drop table if exists posts`); - await db.$cache?.invalidate({ tables: 'users' }); - await dbGlobalCached.$cache?.invalidate({ tables: 'users' }); + test.beforeEach(async ({ caches }) => { + const { explicit, all } = caches; + await explicit.run(sql`drop table if exists users`); + await explicit.run(sql`drop table if exists posts`); + await explicit.$cache?.invalidate({ tables: 'users' }); + await all.$cache?.invalidate({ tables: 'users' }); // public users - await db.run( + await explicit.run( sql` create table users ( id integer primary key AUTOINCREMENT, @@ -121,7 +40,7 @@ export function tests() { ) `, ); - await db.run( + await explicit.run( sql` create table posts ( id integer primary key AUTOINCREMENT, @@ -132,23 +51,23 @@ export function tests() { ); }); - test('test force invalidate', async (ctx) => { - const { db } = ctx.cachedSqlite; + test('test force invalidate', async ({ caches }) => { + const { explicit: db } = caches; - const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); + using spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); await db.$cache?.invalidate({ tables: 'users' }); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); - test('default global config - no cache should be hit', async (ctx) => { - const { db } = ctx.cachedSqlite; + test('default global config - no cache should be hit', async ({ caches }) => { + const { explicit: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); @@ -157,15 +76,15 @@ export function tests() { expect(spyInvalidate).toHaveBeenCalledTimes(0); }); - test('default global config + enable cache on select: get, put', async (ctx) => { - const { db } = ctx.cachedSqlite; + test('default global config + enable cache on select: get, put', async ({ caches }) => { + const { explicit: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(); @@ -174,15 +93,15 @@ export function tests() { expect(spyInvalidate).toHaveBeenCalledTimes(0); }); - test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { - const { db } = ctx.cachedSqlite; + test('default global config + enable cache on select + write: get, put, onMutate', async ({ caches }) => { + const { explicit: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); @@ -201,15 +120,15 @@ export function tests() { expect(spyInvalidate).toHaveBeenCalledTimes(1); }); - test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { - const { db } = ctx.cachedSqlite; + test('default global config + enable cache on select + disable invalidate: get, put', async ({ caches }) => { + const { explicit: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); @@ -223,15 +142,15 @@ export function tests() { await db.$cache?.invalidate({ tags: ['custom'] }); }); - test('global: true + disable cache', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSqlite; + test('global: true + disable cache', async ({ caches }) => { + const { all: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); @@ -240,15 +159,15 @@ export function tests() { expect(spyInvalidate).toHaveBeenCalledTimes(0); }); - test('global: true - cache should be hit', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSqlite; + test('global: true - cache should be hit', async ({ caches }) => { + const { all: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); @@ -257,15 +176,15 @@ export function tests() { expect(spyInvalidate).toHaveBeenCalledTimes(0); }); - test('global: true - cache: false on select - no cache hit', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSqlite; + test('global: true - cache: false on select - no cache hit', async ({ caches }) => { + const { all: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); @@ -274,15 +193,15 @@ export function tests() { expect(spyInvalidate).toHaveBeenCalledTimes(0); }); - test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSqlite; + test('global: true - disable invalidate - cache hit + no invalidate', async ({ caches }) => { + const { all: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ autoInvalidate: false }); @@ -301,15 +220,15 @@ export function tests() { expect(spyInvalidate).toHaveBeenCalledTimes(1); }); - test('global: true - with custom tag', async (ctx) => { - const { dbGlobalCached: db } = ctx.cachedSqlite; + test('global: true - with custom tag', async ({ caches }) => { + const { all: db } = caches; // @ts-expect-error - const spyPut = vi.spyOn(db.$cache, 'put'); + using spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error - const spyGet = vi.spyOn(db.$cache, 'get'); + using spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error - const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); + using spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); @@ -324,8 +243,8 @@ export function tests() { }); // check select used tables - test('check simple select used tables', (ctx) => { - const { db } = ctx.cachedSqlite; + test('check simple select used tables', ({ caches }) => { + const { explicit: db } = caches; // @ts-expect-error expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); @@ -333,8 +252,8 @@ export function tests() { expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); }); // check select+join used tables - test('select+join', (ctx) => { - const { db } = ctx.cachedSqlite; + test('select+join', ({ caches }) => { + const { explicit: db } = caches; // @ts-expect-error expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) @@ -345,8 +264,8 @@ export function tests() { ).toStrictEqual(['users', 'posts']); }); // check select+2join used tables - test('select+2joins', (ctx) => { - const { db } = ctx.cachedSqlite; + test('select+2joins', ({ caches }) => { + const { explicit: db } = caches; expect( db.select().from(usersTable).leftJoin( @@ -369,8 +288,8 @@ export function tests() { ).toStrictEqual(['users', 'posts']); }); // select subquery used tables - test('select+join', (ctx) => { - const { db } = ctx.cachedSqlite; + test('select+join', ({ caches }) => { + const { explicit: db } = caches; const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); diff --git a/integration-tests/tests/sqlite/sqlite-common.ts b/integration-tests/tests/sqlite/sqlite-common.ts index 84f274e015..a3abe04a9b 100644 --- a/integration-tests/tests/sqlite/sqlite-common.ts +++ b/integration-tests/tests/sqlite/sqlite-common.ts @@ -45,20 +45,12 @@ import { unique, uniqueKeyName, } from 'drizzle-orm/sqlite-core'; -import { beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; +import { describe, expect, expectTypeOf } from 'vitest'; import type { Equal } from '~/utils'; import { Expect } from '~/utils'; -import type relations from './relations'; +import { Test } from './instrumentation'; import { clear, init, rqbPost, rqbUser } from './schema'; -declare module 'vitest' { - interface TestContext { - sqlite: { - db: BaseSQLiteDatabase<'async' | 'sync', any, Record, typeof relations>; - }; - } -} - const allTypesTable = sqliteTable('all_types', { int: integer('int', { mode: 'number', @@ -162,17 +154,13 @@ const pkExampleTable = sqliteTable('pk_example', { id: integer('id').notNull(), name: text('name').notNull(), email: text('email').notNull(), -}, (table) => ({ - compositePk: primaryKey({ columns: [table.id, table.name] }), -})); +}, (table) => [primaryKey({ columns: [table.id, table.name] })]); const conflictChainExampleTable = sqliteTable('conflict_chain_example', { id: integer('id').notNull().unique(), name: text('name').notNull(), email: text('email').notNull(), -}, (table) => ({ - compositePk: primaryKey({ columns: [table.id, table.name] }), -})); +}, (table) => [primaryKey({ columns: [table.id, table.name] })]); const bigIntExample = sqliteTable('big_int_example', { id: integer('id').primaryKey(), @@ -190,11 +178,13 @@ const aggregateTable = sqliteTable('aggregate_table', { nullOnly: integer('null_only'), }); -export function tests() { - describe('common', () => { - beforeEach(async (ctx) => { - const { db } = ctx.sqlite; +export function tests(test: Test, exclude: string[] = []) { + test.beforeEach(({ task, skip }) => { + if (exclude.includes(task.name)) skip(); + }); + describe('common', () => { + test.beforeEach(async ({ db }) => { await db.run(sql`drop table if exists ${usersTable}`); await db.run(sql`drop table if exists ${users2Table}`); await db.run(sql`drop table if exists ${citiesTable}`); @@ -341,15 +331,21 @@ export function tests() { ]); } - test('table config: foreign keys name', async () => { - const table = sqliteTable('cities', { - id: int('id').primaryKey(), - name: text('name').notNull(), - state: text('state'), - }, (t) => ({ - f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), - f1: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk_deprecated' }), - })); + test.concurrent('table config: foreign keys name', async () => { + const table = sqliteTable( + 'cities', + { + id: int('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, + ( + t, + ) => [ + foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), + foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk_deprecated' }), + ], + ); const tableConfig = getTableConfig(table); @@ -358,14 +354,12 @@ export function tests() { expect(tableConfig.foreignKeys[1]!.getName()).toBe('custom_fk_deprecated'); }); - test('table config: primary keys name', async () => { + test.concurrent('table config: primary keys name', async () => { const table = sqliteTable('cities', { id: int('id').primaryKey(), name: text('name').notNull(), state: text('state'), - }, (t) => ({ - f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), - })); + }, (t) => [primaryKey({ columns: [t.id, t.name], name: 'custom_pk' })]); const tableConfig = getTableConfig(table); @@ -373,9 +367,7 @@ export function tests() { expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); }); - test('insert bigint values', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert bigint values', async ({ db }) => { await db.insert(bigIntExample).values({ name: 'one', bigInt: BigInt('0') }).run(); await db.insert(bigIntExample).values({ name: 'two', bigInt: BigInt('127') }).run(); await db.insert(bigIntExample).values({ name: 'three', bigInt: BigInt('32767') }).run(); @@ -392,9 +384,7 @@ export function tests() { ]); }); - test('select all fields', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select all fields', async ({ db }) => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }).run(); @@ -404,18 +394,14 @@ export function tests() { expect(result).toEqual([{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); }); - test('select partial', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const result = await db.select({ name: usersTable.name }).from(usersTable).all(); expect(result).toEqual([{ name: 'John' }]); }); - test('select sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.select({ name: sql`upper(${usersTable.name})`, @@ -424,9 +410,7 @@ export function tests() { expect(users).toEqual([{ name: 'JOHN' }]); }); - test('select typed sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select typed sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.select({ name: sql`upper(${usersTable.name})`, @@ -435,9 +419,7 @@ export function tests() { expect(users).toEqual([{ name: 'JOHN' }]); }); - test('select with empty array in inArray', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select with empty array in inArray', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ @@ -449,9 +431,7 @@ export function tests() { expect(result).toEqual([]); }); - test('select with empty array in notInArray', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select with empty array in notInArray', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ @@ -463,9 +443,7 @@ export function tests() { expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); }); - test('select distinct', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select distinct', async ({ db }) => { const usersDistinctTable = sqliteTable('users_distinct', { id: integer('id').notNull(), name: text('name').notNull(), @@ -490,9 +468,7 @@ export function tests() { expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); - test('insert returning sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert returning sql', async ({ db }) => { const users = await db.insert(usersTable).values({ name: 'John' }).returning({ name: sql`upper(${usersTable.name})`, }).all(); @@ -500,9 +476,7 @@ export function tests() { expect(users).toEqual([{ name: 'JOHN' }]); }); - test('$default function', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('$default function', async ({ db }) => { await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); const selectedOrder = await db.select().from(orders); @@ -515,9 +489,7 @@ export function tests() { }]); }); - test('delete returning sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('delete returning sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, @@ -526,9 +498,7 @@ export function tests() { expect(users).toEqual([{ name: 'JOHN' }]); }); - test('query check: insert single empty row', (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('query check: insert single empty row', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').default('Dan'), @@ -546,9 +516,7 @@ export function tests() { }); }); - test('query check: insert multiple empty rows', (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('query check: insert multiple empty rows', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').default('Dan'), @@ -566,9 +534,7 @@ export function tests() { }); }); - test('Insert all defaults in 1 row', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('Insert all defaults in 1 row', async ({ db }) => { const users = sqliteTable('empty_insert_single', { id: integer('id').primaryKey(), name: text('name').default('Dan'), @@ -588,9 +554,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); }); - test('Insert all defaults in multiple rows', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('Insert all defaults in multiple rows', async ({ db }) => { const users = sqliteTable('empty_insert_multiple', { id: integer('id').primaryKey(), name: text('name').default('Dan'), @@ -610,9 +574,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); }); - test('update returning sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('update returning sql', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, @@ -621,9 +583,7 @@ export function tests() { expect(users).toEqual([{ name: 'JANE' }]); }); - test('insert with auto increment', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with auto increment', async ({ db }) => { await db.insert(usersTable).values([ { name: 'John' }, { name: 'Jane' }, @@ -640,27 +600,21 @@ export function tests() { ]); }); - test('insert with default values', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with default values', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const result = await db.select().from(usersTable).all(); expect(result).toEqual([{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); }); - test('insert with overridden default values', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with overridden default values', async ({ db }) => { await db.insert(usersTable).values({ name: 'John', verified: true }).run(); const result = await db.select().from(usersTable).all(); expect(result).toEqual([{ id: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt }]); }); - test('update with returning all fields', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('update with returning all fields', async ({ db }) => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }).run(); @@ -672,9 +626,7 @@ export function tests() { expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, json: null, createdAt: users[0]!.createdAt }]); }); - test('update with returning partial', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('update with returning partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, @@ -684,9 +636,7 @@ export function tests() { expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); - test('delete with returning all fields', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('delete with returning all fields', async ({ db }) => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }).run(); @@ -697,9 +647,7 @@ export function tests() { expect(users).toEqual([{ id: 1, name: 'John', verified: false, json: null, createdAt: users[0]!.createdAt }]); }); - test('delete with returning partial', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('delete with returning partial', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, @@ -709,9 +657,7 @@ export function tests() { expect(users).toEqual([{ id: 1, name: 'John' }]); }); - test('insert + select', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert + select', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); @@ -723,9 +669,7 @@ export function tests() { expect(result2).toEqual([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); }); - test('json insert', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('json insert', async ({ db }) => { await db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).run(); const result = await db.select({ id: usersTable.id, @@ -736,9 +680,7 @@ export function tests() { expect(result).toEqual([{ id: 1, name: 'John', json: ['foo', 'bar'] }]); }); - test('insert many', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert many', async ({ db }) => { await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', json: ['foo', 'bar'] }, @@ -760,9 +702,7 @@ export function tests() { ]); }); - test('insert many with returning', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert many with returning', async ({ db }) => { const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', json: ['foo', 'bar'] }, @@ -785,8 +725,7 @@ export function tests() { ]); }); - test('partial join with alias', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('partial join with alias', async ({ db }) => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); @@ -811,9 +750,7 @@ export function tests() { }]); }); - test('full join with alias', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('full join with alias', async ({ db }) => { const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); const users = sqliteTable('users', { @@ -847,9 +784,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('select from alias', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select from alias', async ({ db }) => { const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); const users = sqliteTable('users', { @@ -885,18 +820,14 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('insert with spaces', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with spaces', async ({ db }) => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }).run(); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); - test('prepared statement', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('prepared statement', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const statement = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); const result = await statement.all(); @@ -904,9 +835,7 @@ export function tests() { expect(result).toEqual([{ id: 1, name: 'John' }]); }); - test('prepared statement reuse', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('prepared statement reuse', async ({ db }) => { const stmt = db.insert(usersTable).values({ name: sql.placeholder('name') }).prepare(); for (let i = 0; i < 10; i++) { @@ -932,9 +861,7 @@ export function tests() { ]); }); - test('insert: placeholders on columns with encoder', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert: placeholders on columns with encoder', async ({ db }) => { const stmt = db.insert(usersTable).values({ name: 'John', verified: sql.placeholder('verified'), @@ -954,9 +881,7 @@ export function tests() { ]); }); - test('prepared statement with placeholder in .where', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('prepared statement with placeholder in .where', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const stmt = db.select({ id: usersTable.id, @@ -969,9 +894,7 @@ export function tests() { expect(result).toEqual([{ id: 1, name: 'John' }]); }); - test('prepared statement with placeholder in .limit', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('prepared statement with placeholder in .limit', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }).run(); const stmt = db .select({ @@ -989,9 +912,7 @@ export function tests() { expect(result).toHaveLength(1); }); - test('prepared statement with placeholder in .offset', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('prepared statement with placeholder in .offset', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]).run(); const stmt = db .select({ @@ -1008,9 +929,7 @@ export function tests() { expect(result).toEqual([{ id: 2, name: 'John1' }]); }); - test('prepared statement built using $dynamic', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('prepared statement built using $dynamic', async ({ db }) => { function withLimitOffset(qb: any) { return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); } @@ -1031,9 +950,7 @@ export function tests() { expect(result).toHaveLength(1); }); - test('select with group by as field', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select with group by as field', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -1043,9 +960,7 @@ export function tests() { expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); - test('select with exists', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select with exists', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const user = alias(usersTable, 'user'); @@ -1058,9 +973,7 @@ export function tests() { expect(result).toEqual([{ name: 'John' }]); }); - test('select with group by as sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select with group by as sql', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -1070,9 +983,7 @@ export function tests() { expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); - test('select with group by as sql + column', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select with group by as sql + column', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -1083,9 +994,7 @@ export function tests() { expect(result).toStrictEqual(expect.arrayContaining([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }])); }); - test('select with group by as column + sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select with group by as column + sql', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -1095,9 +1004,7 @@ export function tests() { expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); - test('select with group by complex query', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select with group by complex query', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) @@ -1109,9 +1016,7 @@ export function tests() { expect(result).toEqual([{ name: 'Jane' }]); }); - test('build query', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('build query', async ({ db }) => { const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); @@ -1122,18 +1027,14 @@ export function tests() { }); }); - test('insert via db.run + select via db.all', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert via db.run + select via db.all', async ({ db }) => { await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.all<{ id: number; name: string }>(sql`select id, name from "users"`); expect(result).toEqual([{ id: 1, name: 'John' }]); }); - test('insert via db.get', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert via db.get', async ({ db }) => { const inserted = await db.get<{ id: number; name: string }>( sql`insert into ${usersTable} (${new Name( usersTable.name.name, @@ -1142,9 +1043,7 @@ export function tests() { expect(inserted).toEqual({ id: 1, name: 'John' }); }); - test('insert via db.run + select via db.get', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert via db.run + select via db.get', async ({ db }) => { await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.get<{ id: number; name: string }>( @@ -1153,18 +1052,14 @@ export function tests() { expect(result).toEqual({ id: 1, name: 'John' }); }); - test('insert via db.get w/ query builder', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert via db.get w/ query builder', async ({ db }) => { const inserted = await db.get>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted).toEqual({ id: 1, name: 'John' }); }); - test('select from a many subquery', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select from a many subquery', async ({ db }) => { await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); @@ -1197,9 +1092,7 @@ export function tests() { }]); }); - test('select from a one subquery', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select from a one subquery', async ({ db }) => { await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); @@ -1234,9 +1127,7 @@ export function tests() { }]); }); - test('join subquery', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('join subquery', async ({ db }) => { await db.insert(courseCategoriesTable).values([ { name: 'Category 1' }, { name: 'Category 2' }, @@ -1279,9 +1170,7 @@ export function tests() { ]); }); - test('with ... select', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('with ... select', async ({ db }) => { await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, @@ -1363,9 +1252,7 @@ export function tests() { ]); }); - test('with ... update', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('with ... update', async ({ db }) => { const products = sqliteTable('products', { id: integer('id').primaryKey(), price: numeric('price').notNull(), @@ -1417,9 +1304,7 @@ export function tests() { ]); }); - test('with ... insert', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('with ... insert', async ({ db }) => { const users = sqliteTable('users', { username: text('username').notNull(), admin: integer('admin', { mode: 'boolean' }).notNull(), @@ -1451,9 +1336,7 @@ export function tests() { expect(result).toEqual([{ admin: true }]); }); - test('with ... delete', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('with ... delete', async ({ db }) => { await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, @@ -1490,9 +1373,7 @@ export function tests() { ]); }); - test('select from subquery sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select from subquery sql', async ({ db }) => { await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]).run(); const sq = db @@ -1505,23 +1386,17 @@ export function tests() { expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); - test('select a field without joining its table', (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select a field without joining its table', async ({ db }) => { expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); }); - test('select all fields from subquery without alias', (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select all fields from subquery without alias', async ({ db }) => { const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare()).toThrowError(); }); - test('select count()', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select count()', async ({ db }) => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]).run(); const res = await db.select({ count: sql`count(*)` }).from(usersTable).all(); @@ -1529,9 +1404,7 @@ export function tests() { expect(res).toEqual([{ count: 2 }]); }); - test('having', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('having', async ({ db }) => { await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]).run(); await db.insert(users2Table).values([ @@ -1568,9 +1441,7 @@ export function tests() { ]); }); - test('view', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('view', async ({ db }) => { const newYorkers1 = sqliteView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); @@ -1631,9 +1502,7 @@ export function tests() { await db.run(sql`drop view ${newYorkers1}`); }); - test('insert null timestamp', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert null timestamp', async ({ db }) => { const test = sqliteTable('test', { t: integer('t', { mode: 'timestamp' }), }); @@ -1647,9 +1516,7 @@ export function tests() { await db.run(sql`drop table ${test}`); }); - test('select from raw sql', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select from raw sql', async ({ db }) => { const result = await db.select({ id: sql`id`, name: sql`name`, @@ -1662,9 +1529,7 @@ export function tests() { ]); }); - test('select from raw sql with joins', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select from raw sql with joins', async ({ db }) => { const result = await db .select({ id: sql`users.id`, @@ -1683,9 +1548,7 @@ export function tests() { ]); }); - test('join on aliased sql from select', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('join on aliased sql from select', async ({ db }) => { const result = await db .select({ userId: sql`users.id`.as('userId'), @@ -1707,9 +1570,7 @@ export function tests() { ]); }); - test('join on aliased sql from with clause', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('join on aliased sql from with clause', async ({ db }) => { const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), @@ -1751,9 +1612,7 @@ export function tests() { ]); }); - test('prefixed table', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('prefixed table', async ({ db }) => { const sqliteTable = sqliteTableCreator((name) => `myprefix_${name}`); const users = sqliteTable('test_prefixed_table_with_unique_name', { @@ -1776,9 +1635,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('orderBy with aliased column', (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('orderBy with aliased column', async ({ db }) => { const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); @@ -1786,9 +1643,7 @@ export function tests() { expect(query.sql).toBe('select something as "test" from "users2" order by "test"'); }); - test('transaction', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('transaction', async ({ db }) => { const users = sqliteTable('users_transactions', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), @@ -1823,9 +1678,7 @@ export function tests() { await db.run(sql`drop table ${products}`); }); - test('transaction rollback', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('transaction rollback', async ({ db }) => { const users = sqliteTable('users_transactions_rollback', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), @@ -1850,9 +1703,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('nested transaction', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('nested transaction', async ({ db }) => { const users = sqliteTable('users_nested_transactions', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), @@ -1879,9 +1730,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('nested transaction rollback', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('nested transaction rollback', async ({ db }) => { const users = sqliteTable('users_nested_transactions_rollback', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), @@ -1911,9 +1760,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('join subquery with join', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('join subquery with join', async ({ db }) => { const internalStaff = sqliteTable('internal_staff', { userId: integer('user_id').notNull(), }); @@ -1963,9 +1810,7 @@ export function tests() { await db.run(sql`drop table ${ticket}`); }); - test('join view as subquery', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('join view as subquery', async ({ db }) => { const users = sqliteTable('users_join_view', { id: integer('id').primaryKey(), name: text('name').notNull(), @@ -2016,9 +1861,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('insert with onConflict do nothing', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict do nothing', async ({ db }) => { await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); await db @@ -2036,9 +1879,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'John' }]); }); - test('insert with onConflict do nothing using composite pk', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict do nothing using composite pk', async ({ db }) => { await db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john@example.com' }) @@ -2059,9 +1900,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'John', email: 'john@example.com' }]); }); - test('insert with onConflict do nothing using target', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict do nothing using target', async ({ db }) => { await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); await db @@ -2079,9 +1918,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'John' }]); }); - test('insert with onConflict do nothing using composite pk as target', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict do nothing using composite pk as target', async ({ db }) => { await db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john@example.com' }) @@ -2102,9 +1939,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'John', email: 'john@example.com' }]); }); - test('insert with onConflict do update', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict do update', async ({ db }) => { await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); await db @@ -2122,9 +1957,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'John1' }]); }); - test('insert with onConflict do update where', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict do update where', async ({ db }) => { await db .insert(usersTable) .values([{ id: 1, name: 'John', verified: false }]) @@ -2149,9 +1982,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'John1', verified: true }]); }); - test('insert with onConflict do update using composite pk', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict do update using composite pk', async ({ db }) => { await db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); await db @@ -2169,9 +2000,7 @@ export function tests() { expect(res).toEqual([{ id: 1, name: 'John', email: 'john1@example.com' }]); }); - test('insert with onConflict chained (.update -> .nothing)', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict chained (.update -> .nothing)', async ({ db }) => { await db.insert(conflictChainExampleTable).values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', @@ -2209,9 +2038,7 @@ export function tests() { }]); }); - test('insert with onConflict chained (.nothing -> .update)', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict chained (.nothing -> .update)', async ({ db }) => { await db.insert(conflictChainExampleTable).values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', @@ -2249,9 +2076,7 @@ export function tests() { }]); }); - test('insert with onConflict chained (.update -> .update)', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict chained (.update -> .update)', async ({ db }) => { await db.insert(conflictChainExampleTable).values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', @@ -2289,9 +2114,7 @@ export function tests() { }]); }); - test('insert with onConflict chained (.nothing -> .nothing)', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert with onConflict chained (.nothing -> .nothing)', async ({ db }) => { await db.insert(conflictChainExampleTable).values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', @@ -2328,9 +2151,7 @@ export function tests() { }]); }); - test('insert undefined', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert undefined', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), @@ -2349,9 +2170,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('update undefined', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('update undefined', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), @@ -2373,9 +2192,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('async api - CRUD', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('async api - CRUD', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), @@ -2408,9 +2225,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('async api - insert + select w/ prepare + async execute', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('async api - insert + select w/ prepare + async execute', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), @@ -2447,9 +2262,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('async api - insert + select w/ prepare + sync execute', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('async api - insert + select w/ prepare + sync execute', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), @@ -2486,9 +2299,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('select + .get() for empty result', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('select + .get() for empty result', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), @@ -2507,9 +2318,7 @@ export function tests() { await db.run(sql`drop table ${users}`); }); - test('set operations (union) from query builder with subquery', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (union) from query builder with subquery', async ({ db }) => { await setupSetOperationTest(db); const sq = db @@ -2543,9 +2352,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (union) as function', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (union) as function', async ({ db }) => { await setupSetOperationTest(db); const result = await union( @@ -2582,9 +2389,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (union all) from query builder', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (union all) from query builder', async ({ db }) => { await setupSetOperationTest(db); const result = await db @@ -2616,9 +2421,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (union all) as function', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (union all) as function', async ({ db }) => { await setupSetOperationTest(db); const result = await unionAll( @@ -2656,9 +2459,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (intersect) from query builder', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (intersect) from query builder', async ({ db }) => { await setupSetOperationTest(db); const result = await db @@ -2687,9 +2488,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (intersect) as function', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (intersect) as function', async ({ db }) => { await setupSetOperationTest(db); const result = await intersect( @@ -2723,9 +2522,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (except) from query builder', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (except) from query builder', async ({ db }) => { await setupSetOperationTest(db); const result = await db @@ -2753,9 +2550,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (except) as function', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (except) as function', async ({ db }) => { await setupSetOperationTest(db); const result = await except( @@ -2792,9 +2587,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (mixed) from query builder', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (mixed) from query builder', async ({ db }) => { await setupSetOperationTest(db); const result = await db @@ -2832,9 +2625,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('set operations (mixed all) as function with subquery', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('set operations (mixed all) as function with subquery', async ({ db }) => { await setupSetOperationTest(db); const sq = union( @@ -2884,7 +2675,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('define constraints as array', async (_ctx) => { + test.concurrent('define constraints as array', async () => { const table = sqliteTable('name', { id: int(), }, (t) => [ @@ -2898,7 +2689,7 @@ export function tests() { expect(primaryKeys.length).toBe(1); }); - test('define constraints as array inside third param', async (_ctx) => { + test.concurrent('define constraints as array inside third param', async () => { const table = sqliteTable('name', { id: int(), }, (t) => [ @@ -2912,8 +2703,7 @@ export function tests() { expect(primaryKeys.length).toBe(1); }); - test('aggregate function: count', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('aggregate function: count', async ({ db }) => { const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -2926,8 +2716,7 @@ export function tests() { expect(result3[0]?.value).toBe(6); }); - test('aggregate function: avg', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('aggregate function: avg', async ({ db }) => { const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -2940,8 +2729,7 @@ export function tests() { expect(result3[0]?.value).toBe('42.5'); }); - test('aggregate function: sum', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('aggregate function: sum', async ({ db }) => { const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -2954,8 +2742,7 @@ export function tests() { expect(result3[0]?.value).toBe('170'); }); - test('aggregate function: max', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('aggregate function: max', async ({ db }) => { const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -2966,8 +2753,7 @@ export function tests() { expect(result2[0]?.value).toBeNull(); }); - test('aggregate function: min', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('aggregate function: min', async ({ db }) => { const table = aggregateTable; await setupAggregateFunctionsTest(db); @@ -2978,9 +2764,7 @@ export function tests() { expect(result2[0]?.value).toBeNull(); }); - test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('test $onUpdateFn and $onUpdate works as $default', async ({ db }) => { await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( @@ -3020,9 +2804,7 @@ export function tests() { } }); - test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('test $onUpdateFn and $onUpdate works updating', async ({ db }) => { await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( @@ -3065,10 +2847,8 @@ export function tests() { } }); - test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { - const { db } = ctx.sqlite; - - const users = sqliteTable('users', { + test.concurrent('test $onUpdateFn and $onUpdate works with sql value', async ({ db }) => { + const users = sqliteTable('users_on_update', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), updatedAt: integer('updated_at') @@ -3108,9 +2888,7 @@ export function tests() { expect(updateResp[0]?.updatedAt ?? 0).greaterThan(now); }); - test('$count separate', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('$count separate', async ({ db }) => { const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), @@ -3133,9 +2911,7 @@ export function tests() { expect(count).toStrictEqual(4); }); - test('$count embedded', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('$count embedded', async ({ db }) => { const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), @@ -3165,9 +2941,7 @@ export function tests() { ]); }); - test('$count separate reuse', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('$count separate reuse', async ({ db }) => { const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), @@ -3202,9 +2976,7 @@ export function tests() { expect(count3).toStrictEqual(6); }); - test('$count embedded reuse', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('$count embedded reuse', async ({ db }) => { const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), @@ -3259,9 +3031,7 @@ export function tests() { ]); }); - test('$count separate with filters', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('$count separate with filters', async ({ db }) => { const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), @@ -3284,9 +3054,7 @@ export function tests() { expect(count).toStrictEqual(3); }); - test('$count embedded with filters', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('$count embedded with filters', async ({ db }) => { const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), @@ -3316,9 +3084,7 @@ export function tests() { ]); }); - test('update with limit and order by', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('update with limit and order by', async ({ db }) => { await db.insert(usersTable).values([ { name: 'Barry', verified: false }, { name: 'Alan', verified: false }, @@ -3338,9 +3104,7 @@ export function tests() { ]); }); - test('delete with limit and order by', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('delete with limit and order by', async ({ db }) => { await db.insert(usersTable).values([ { name: 'Barry', verified: false }, { name: 'Alan', verified: false }, @@ -3358,9 +3122,7 @@ export function tests() { ]); }); - test('cross join', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('cross join', async ({ db }) => { await db .insert(usersTable) .values([ @@ -3392,8 +3154,7 @@ export function tests() { ]); }); - test('RQB v2 simple find first - no rows', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 simple find first - no rows', async ({ db }) => { try { await init(db); @@ -3405,8 +3166,7 @@ export function tests() { } }); - test('RQB v2 simple find first - multiple rows', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 simple find first - multiple rows', async ({ db }) => { try { await init(db); @@ -3438,8 +3198,7 @@ export function tests() { } }); - test('RQB v2 simple find first - with relation', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 simple find first - with relation', async ({ db }) => { try { await init(db); @@ -3501,8 +3260,7 @@ export function tests() { } }); - test('RQB v2 simple find first - placeholders', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 simple find first - placeholders', async ({ db }) => { try { await init(db); @@ -3543,8 +3301,7 @@ export function tests() { } }); - test('RQB v2 simple find many - no rows', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 simple find many - no rows', async ({ db }) => { try { await init(db); @@ -3556,8 +3313,7 @@ export function tests() { } }); - test('RQB v2 simple find many - multiple rows', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 simple find many - multiple rows', async ({ db }) => { try { await init(db); @@ -3593,8 +3349,7 @@ export function tests() { } }); - test('RQB v2 simple find many - with relation', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 simple find many - with relation', async ({ db }) => { try { await init(db); @@ -3657,8 +3412,7 @@ export function tests() { } }); - test('RQB v2 simple find many - placeholders', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 simple find many - placeholders', async ({ db }) => { try { await init(db); @@ -3699,8 +3453,7 @@ export function tests() { } }); - test('RQB v2 transaction find first - no rows', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 transaction find first - no rows', async ({ db }) => { try { await init(db); @@ -3714,8 +3467,7 @@ export function tests() { } }); - test('RQB v2 transaction find first - multiple rows', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 transaction find first - multiple rows', async ({ db }) => { try { await init(db); @@ -3749,8 +3501,7 @@ export function tests() { } }); - test('RQB v2 transaction find first - with relation', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 transaction find first - with relation', async ({ db }) => { try { await init(db); @@ -3814,8 +3565,7 @@ export function tests() { } }); - test('RQB v2 transaction find first - placeholders', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 transaction find first - placeholders', async ({ db }) => { try { await init(db); @@ -3858,8 +3608,7 @@ export function tests() { } }); - test('RQB v2 transaction find many - no rows', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 transaction find many - no rows', async ({ db }) => { try { await init(db); @@ -3873,8 +3622,7 @@ export function tests() { } }); - test('RQB v2 transaction find many - multiple rows', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 transaction find many - multiple rows', async ({ db }) => { try { await init(db); @@ -3912,8 +3660,7 @@ export function tests() { } }); - test('RQB v2 transaction find many - with relation', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 transaction find many - with relation', async ({ db }) => { try { await init(db); @@ -3978,8 +3725,7 @@ export function tests() { } }); - test('RQB v2 transaction find many - placeholders', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('RQB v2 transaction find many - placeholders', async ({ db }) => { try { await init(db); @@ -4022,9 +3768,7 @@ export function tests() { } }); - test('limit 0', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('limit 0', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() @@ -4034,9 +3778,7 @@ export function tests() { expect(users).toEqual([]); }); - test('limit -1', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('limit -1', async ({ db }) => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() @@ -4046,9 +3788,161 @@ export function tests() { expect(users.length).toBeGreaterThan(0); }); - test('all types', async (ctx) => { - const { db } = ctx.sqlite; + test.concurrent('column.as', async ({ db }) => { + const users = sqliteTable('users_column_as', { + id: int('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => cities.id), + }); + + const cities = sqliteTable('cities_column_as', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }); + + const ucView = sqliteView('cities_users_column_as_view').as((qb) => + qb.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)) + ); + + try { + await db.run(sql`CREATE TABLE IF NOT EXISTS ${cities} ( + \`id\` INTEGER PRIMARY KEY, + \`name\` TEXT NOT NULL + );`); + + await db.run(sql`CREATE TABLE IF NOT EXISTS ${users} ( + \`id\` INTEGER PRIMARY KEY, + \`name\` TEXT NOT NULL, + \`city_id\` INTEGER REFERENCES ${cities}(\`id\`) + );`); + + await db.run( + sql`CREATE VIEW IF NOT EXISTS ${ucView} AS SELECT ${users.id} as \`user_id\`, ${cities.id} as \`city_id\`, ${users.name} as \`user_name\`, ${cities.name} as \`city_name\` FROM ${users} LEFT JOIN ${cities} ON ${ + eq(cities.id, users.cityId) + };`, + ); + + const citiesInsRet = await db.insert(cities).values([{ + id: 1, + name: 'Firstistan', + }, { + id: 2, + name: 'Secondaria', + }]).returning({ + cityId: cities.id.as('city_id'), + cityName: cities.name.as('city_name'), + }); + + expect(citiesInsRet).toStrictEqual(expect.arrayContaining([{ + cityId: 1, + cityName: 'Firstistan', + }, { + cityId: 2, + cityName: 'Secondaria', + }])); + + const usersInsRet = await db.insert(users).values([{ id: 1, name: 'First', cityId: 1 }, { + id: 2, + name: 'Second', + cityId: 2, + }, { + id: 3, + name: 'Third', + }]).returning({ + userId: users.id.as('user_id'), + userName: users.name.as('users_name'), + userCityId: users.cityId, + }); + + expect(usersInsRet).toStrictEqual(expect.arrayContaining([{ userId: 1, userName: 'First', userCityId: 1 }, { + userId: 2, + userName: 'Second', + userCityId: 2, + }, { + userId: 3, + userName: 'Third', + userCityId: null, + }])); + + const joinSelectReturn = await db.select({ + userId: users.id.as('user_id'), + cityId: cities.id.as('city_id'), + userName: users.name.as('user_name'), + cityName: cities.name.as('city_name'), + }).from(users).leftJoin(cities, eq(cities.id, users.cityId)); + + expect(joinSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewSelectReturn = await db.select().from(ucView); + + expect(viewSelectReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + + const viewJoinReturn = await db.select({ + userId: ucView.userId.as('user_id_ucv'), + cityId: cities.id.as('city_id'), + userName: ucView.userName.as('user_name_ucv'), + cityName: cities.name.as('city_name'), + }).from(ucView).leftJoin(cities, eq(cities.id, ucView.cityId)); + + expect(viewJoinReturn).toStrictEqual(expect.arrayContaining([{ + userId: 1, + userName: 'First', + cityId: 1, + cityName: 'Firstistan', + }, { + userId: 2, + userName: 'Second', + cityId: 2, + cityName: 'Secondaria', + }, { + userId: 3, + userName: 'Third', + cityId: null, + cityName: null, + }])); + } finally { + await db.run(sql`DROP TABLE IF EXISTS ${users};`); + await db.run(sql`DROP TABLE IF EXISTS ${cities};`); + await db.run(sql`DROP VIEW IF EXISTS ${ucView};`); + } + }); + test.concurrent('all types', async ({ db }) => { await db.run(sql` CREATE TABLE \`all_types\`( \`int\` integer, @@ -4174,15 +4068,15 @@ export function tests() { }); }); - test('table configs: unique third param', () => { + test.concurrent('table configs: unique third param', () => { const cities1Table = sqliteTable('cities1', { id: int('id').primaryKey(), name: text('name').notNull(), state: text('state'), - }, (t) => ({ - f: unique().on(t.name, t.state), - f1: unique('custom').on(t.name, t.state), - })); + }, (t) => [ + unique().on(t.name, t.state), + unique('custom').on(t.name, t.state), + ]); const tableConfig = getTableConfig(cities1Table); @@ -4199,7 +4093,7 @@ export function tests() { expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom'); }); - test('table configs: unique in column', () => { + test.concurrent('table configs: unique in column', () => { const cities1Table = sqliteTable('cities1', { id: int('id').primaryKey(), name: text('name').notNull().unique(), @@ -4211,7 +4105,7 @@ export function tests() { const columnName = tableConfig.columns.find((it) => it.name === 'name'); expect(columnName?.isUnique).toBeTruthy(); - expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.uniqueName).toBe(undefined); const columnState = tableConfig.columns.find((it) => it.name === 'state'); expect(columnState?.isUnique).toBeTruthy(); @@ -4219,12 +4113,10 @@ export function tests() { const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.isUnique).toBeTruthy(); - expect(columnField?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnField!.name])); + expect(columnField?.uniqueName).toBe(undefined); }); - test('update ... from', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('update ... from', async ({ db }) => { await db.run(sql`drop table if exists \`cities\``); await db.run(sql`drop table if exists \`users2\``); await db.run(sql` @@ -4266,9 +4158,7 @@ export function tests() { }]); }); - test('update ... from with alias', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('update ... from with alias', async ({ db }) => { await db.run(sql`drop table if exists \`users2\``); await db.run(sql`drop table if exists \`cities\``); await db.run(sql` @@ -4313,9 +4203,7 @@ export function tests() { await db.run(sql`drop table if exists \`users2\``); }); - test('update ... from with join', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('update ... from with join', async ({ db }) => { const states = sqliteTable('states', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), @@ -4401,9 +4289,7 @@ export function tests() { }]); }); - test('insert into ... select', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert into ... select', async ({ db }) => { const notifications = sqliteTable('notifications_insert_into', { id: integer('id').primaryKey({ autoIncrement: true }), sentAt: integer('sent_at', { mode: 'timestamp' }).notNull().default(sql`current_timestamp`), @@ -4416,9 +4302,7 @@ export function tests() { const userNotications = sqliteTable('user_notifications_insert_into', { userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), notificationId: integer('notification_id').notNull().references(() => notifications.id, { onDelete: 'cascade' }), - }, (t) => ({ - pk: primaryKey({ columns: [t.userId, t.notificationId] }), - })); + }, (t) => [primaryKey({ columns: [t.userId, t.notificationId] })]); await db.run(sql`drop table if exists notifications_insert_into`); await db.run(sql`drop table if exists users_insert_into`); @@ -4478,9 +4362,7 @@ export function tests() { ]); }); - test('insert into ... select with keys in different order', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('insert into ... select with keys in different order', async ({ db }) => { const users1 = sqliteTable('users1', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), @@ -4519,9 +4401,7 @@ export function tests() { }).rejects.toThrowError(); }); - test('Object keys as column names', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('Object keys as column names', async ({ db }) => { // Tests the following: // Column with optional config without providing a value // Column with optional config providing a value @@ -4559,9 +4439,7 @@ export function tests() { await db.run(sql`drop table users`); }); - test('sql operator as cte', async (ctx) => { - const { db } = ctx.sqlite; - + test.concurrent('sql operator as cte', async ({ db }) => { const users = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), diff --git a/integration-tests/tests/sqlite/sqlite-proxy.test.ts b/integration-tests/tests/sqlite/sqlite-proxy.test.ts index c2e57ae82d..a2ceb04d4e 100644 --- a/integration-tests/tests/sqlite/sqlite-proxy.test.ts +++ b/integration-tests/tests/sqlite/sqlite-proxy.test.ts @@ -1,141 +1,229 @@ -/* eslint-disable drizzle-internal/require-entity-kind */ -import type BetterSqlite3 from 'better-sqlite3'; -import Database from 'better-sqlite3'; import { Name, sql } from 'drizzle-orm'; +import { getTableConfig } from 'drizzle-orm/sqlite-core'; import type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy'; -import { drizzle as proxyDrizzle } from 'drizzle-orm/sqlite-proxy'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; +import { migrate } from 'drizzle-orm/sqlite-proxy/migrator'; +import { expect } from 'vitest'; +import { randomString } from '~/utils'; +import { proxyTest as test } from './instrumentation'; import relations from './relations'; -import { tests, usersTable } from './sqlite-common'; - -const ENABLE_LOGGING = false; -import { TestCache, TestGlobalCache, tests as cacheTests } from './sqlite-common-cache'; - -class ServerSimulator { - constructor(private db: BetterSqlite3.Database) {} - - async query(sql: string, params: any[], method: string) { - if (method === 'run') { - try { - const result = this.db.prepare(sql).run(params); - return { data: result as any }; - } catch (e: any) { - return { error: e.message }; - } - } else if (method === 'all' || method === 'values') { - try { - const rows = this.db.prepare(sql).raw().all(params); - return { data: rows }; - } catch (e: any) { - return { error: e.message }; - } - } else if (method === 'get') { - try { - const row = this.db.prepare(sql).raw().get(params); - return { data: row }; - } catch (e: any) { - return { error: e.message }; - } - } else { - return { error: 'Unknown method value' }; - } - } +import { anotherUsersMigratorTable, tests, usersMigratorTable, usersTable } from './sqlite-common'; +import { tests as cacheTests } from './sqlite-common-cache'; + +const skip = [ + // Different driver respond + 'insert via db.get w/ query builder', + 'insert via db.run + select via db.get', + 'insert via db.get', + 'insert via db.run + select via db.all', +]; +cacheTests(test, skip); +tests(test, skip); - migrations(queries: string[]) { - this.db.exec('BEGIN'); +test.beforeEach(async ({ db }) => { + await db.run(sql`drop table if exists ${usersTable}`); + + await db.run(sql` + create table ${usersTable} ( + id integer primary key, + name text not null, + verified integer not null default 0, + json blob, + created_at integer not null default (strftime('%s', 'now')) + ) + `); +}); + +test('migrator', async ({ db, serverSimulator }) => { + await db.run(sql`drop table if exists another_users`); + await db.run(sql`drop table if exists users12`); + await db.run(sql`drop table if exists __drizzle_migrations`); + + await migrate(db as SqliteRemoteDatabase, async (queries) => { try { - for (const query of queries) { - this.db.exec(query); - } - this.db.exec('COMMIT'); - } catch { - this.db.exec('ROLLBACK'); + serverSimulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); } + }, { migrationsFolder: './drizzle2/sqlite' }); + + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result = await db.select().from(usersMigratorTable).all(); - return {}; - } -} + await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); + const result2 = await db.select().from(anotherUsersMigratorTable).all(); -let db: SqliteRemoteDatabase; -let dbGlobalCached: SqliteRemoteDatabase; -let cachedDb: SqliteRemoteDatabase; -let client: Database.Database; -let serverSimulator: ServerSimulator; + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.run(sql`drop table another_users`); + await db.run(sql`drop table users12`); + await db.run(sql`drop table __drizzle_migrations`); +}); -beforeAll(async () => { - const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; - client = new Database(dbPath); - serverSimulator = new ServerSimulator(client); +test('migrator : migrate with custom table', async ({ db, serverSimulator }) => { + const customTable = randomString(); + await db.run(sql`drop table if exists another_users`); + await db.run(sql`drop table if exists users12`); + await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); - const callback = async (sql: string, params: any[], method: string) => { + await migrate(db as SqliteRemoteDatabase, async (queries) => { try { - const rows = await serverSimulator.query(sql, params, method); + serverSimulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); + + // test if the custom migrations table was created + const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); + expect(res.length > 0).toBeTruthy(); + + // test if the migrated table are working as expected + await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); + const result = await db.select().from(usersMigratorTable); + expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); + + await db.run(sql`drop table another_users`); + await db.run(sql`drop table users12`); + await db.run(sql`drop table ${sql.identifier(customTable)}`); +}); + +test('migrator : --init', async ({ db, serverSimulator }) => { + const migrationsTable = 'drzl_init'; - if (rows.error !== undefined) { - throw new Error(rows.error); - } + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); - return { rows: rows.data }; - } catch (e: any) { - console.error('Error from sqlite proxy server:', e.response?.data ?? e.message); - throw e; + const migratorRes = await migrate(db as SqliteRemoteDatabase, async (queries) => { + try { + serverSimulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); } - }; - db = proxyDrizzle(callback, { - logger: ENABLE_LOGGING, - relations, + }, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, }); - cachedDb = proxyDrizzle(callback, { cache: new TestCache() }); - dbGlobalCached = proxyDrizzle(callback, { cache: new TestGlobalCache() }); -}); -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; - ctx.cachedSqlite = { - db: cachedDb, - dbGlobalCached, - }; + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<[number]>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res?.[0]).toStrictEqual(false); }); -afterAll(async () => { - client?.close(); +test('migrator : --init - local migrations error', async ({ db, serverSimulator }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as SqliteRemoteDatabase, async (queries) => { + try { + serverSimulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<[number]>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res?.[0]).toStrictEqual(false); }); -skipTests([ - // Different driver respond - 'insert via db.get w/ query builder', - 'insert via db.run + select via db.get', - 'insert via db.get', - 'insert via db.run + select via db.all', -]); -cacheTests(); -tests(); +test('migrator : --init - db migrations error', async ({ db, serverSimulator }) => { + const migrationsTable = 'drzl_init'; -beforeEach(async () => { - await db.run(sql`drop table if exists ${usersTable}`); + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); - await db.run(sql` - create table ${usersTable} ( - id integer primary key, - name text not null, - verified integer not null default 0, - json blob, - created_at integer not null default (strftime('%s', 'now')) - ) - `); + await migrate(db as SqliteRemoteDatabase, async (queries) => { + try { + serverSimulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as SqliteRemoteDatabase, async (queries) => { + try { + serverSimulator.migrations(queries); + } catch (e) { + console.error(e); + throw new Error('Proxy server cannot run migrations'); + } + }, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<[number]>( + sql`SELECT EXISTS (SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${ + getTableConfig(usersMigratorTable).name + }) AS ${sql.identifier('tableExists')};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res?.[0]).toStrictEqual(true); }); -test('insert via db.get w/ query builder', async () => { +test('insert via db.get w/ query builder', async ({ db }) => { const inserted = await db.get>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted).toEqual([1, 'John']); }); -test('insert via db.run + select via db.get', async () => { +test('insert via db.run + select via db.get', async ({ db }) => { await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.get<{ id: number; name: string }>( @@ -144,7 +232,7 @@ test('insert via db.run + select via db.get', async () => { expect(result).toEqual([1, 'John']); }); -test('insert via db.get', async () => { +test('insert via db.get', async ({ db }) => { const inserted = await db.get<{ id: number; name: string }>( sql`insert into ${usersTable} (${new Name( usersTable.name.name, @@ -153,9 +241,7 @@ test('insert via db.get', async () => { expect(inserted).toEqual([1, 'John']); }); -test('insert via db.run + select via db.all', async (ctx) => { - const { db } = ctx.sqlite; - +test('insert via db.run + select via db.all', async ({ db }) => { await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.all<{ id: number; name: string }>(sql`select id, name from "users"`); diff --git a/integration-tests/tests/relational/sqlite.relations.ts b/integration-tests/tests/sqlite/sqlite.relations.ts similarity index 98% rename from integration-tests/tests/relational/sqlite.relations.ts rename to integration-tests/tests/sqlite/sqlite.relations.ts index 5106e26e06..0c525b9109 100644 --- a/integration-tests/tests/relational/sqlite.relations.ts +++ b/integration-tests/tests/sqlite/sqlite.relations.ts @@ -1,5 +1,5 @@ import { defineRelations } from 'drizzle-orm'; -import * as schema from './sqlite.schema.ts'; +import * as schema from './sqlite.schema'; export default defineRelations(schema, (r) => ({ usersView: { diff --git a/integration-tests/tests/relational/sqlite.schema.ts b/integration-tests/tests/sqlite/sqlite.schema.ts similarity index 99% rename from integration-tests/tests/relational/sqlite.schema.ts rename to integration-tests/tests/sqlite/sqlite.schema.ts index 86ccc60869..04770809f1 100644 --- a/integration-tests/tests/relational/sqlite.schema.ts +++ b/integration-tests/tests/sqlite/sqlite.schema.ts @@ -50,9 +50,7 @@ export const usersToGroupsTable = sqliteTable( () => groupsTable.id, ), }, - (t) => ({ - pk: primaryKey(t.userId, t.groupId), - }), + (t) => [primaryKey({ columns: [t.userId, t.groupId] })], ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { diff --git a/integration-tests/tests/relational/tables.ts b/integration-tests/tests/sqlite/tables.ts similarity index 91% rename from integration-tests/tests/relational/tables.ts rename to integration-tests/tests/sqlite/tables.ts index 701f4a26c5..2009956af2 100644 --- a/integration-tests/tests/relational/tables.ts +++ b/integration-tests/tests/sqlite/tables.ts @@ -67,11 +67,11 @@ export const node = sqliteTable('node', { parentId: int('parent_id'), leftId: int('left_id'), rightId: int('right_id'), -}, (node) => ({ - fk1: foreignKey(() => ({ columns: [node.parentId], foreignColumns: [node.id] })), - fk2: foreignKey(() => ({ columns: [node.leftId], foreignColumns: [node.id] })), - fk3: foreignKey(() => ({ columns: [node.rightId], foreignColumns: [node.id] })), -})); +}, (node) => [ + foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), + foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), +]); export const nodeRelations = relations(node, ({ one }) => ({ parent: one(node, { fields: [node.parentId], references: [node.id] }), left: one(node, { fields: [node.leftId], references: [node.id] }), diff --git a/integration-tests/tests/relational/turso-v1.test.ts b/integration-tests/tests/sqlite/turso-v1.test.ts similarity index 94% rename from integration-tests/tests/relational/turso-v1.test.ts rename to integration-tests/tests/sqlite/turso-v1.test.ts index b96bd81707..14eb93a83a 100644 --- a/integration-tests/tests/relational/turso-v1.test.ts +++ b/integration-tests/tests/sqlite/turso-v1.test.ts @@ -1,51 +1,17 @@ import 'dotenv/config'; -import { type Client, createClient } from '@libsql/client'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; -import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; -import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import * as schema from './sqlite.schema.ts'; +import { expect, expectTypeOf } from 'vitest'; +import { libSQLTursoV1Test as test } from './instrumentation'; +import * as schema from './sqlite.schema'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; -const ENABLE_LOGGING = false; - /* Test cases: - querying nested relation without PK with additional fields */ -let db: LibSQLDatabase; - -beforeAll(async () => { - const url = process.env['LIBSQL_URL']; - const authToken = process.env['LIBSQL_AUTH_TOKEN']; - if (!url) { - throw new Error('LIBSQL_URL is not set'); - } - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - let client: Client; - do { - try { - client = createClient({ url, authToken }); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to libsql'); - throw lastError; - } - db = drizzle(client!, { logger: ENABLE_LOGGING, schema, casing: 'snake_case' }); -}); - -beforeEach(async () => { +test.beforeEach(async ({ db }) => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); @@ -118,7 +84,7 @@ beforeEach(async () => { [Find Many] One relation users+posts */ -test('[Find Many] Get users with posts', async () => { +test.concurrent('[Find Many] Get users with posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -180,7 +146,7 @@ test('[Find Many] Get users with posts', async () => { }); }); -test('[Find Many] Get users with posts + limit posts', async () => { +test.concurrent('[Find Many] Get users with posts + limit posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -251,7 +217,7 @@ test('[Find Many] Get users with posts + limit posts', async () => { }); }); -test('[Find Many] Get users with posts + limit posts and users', async () => { +test.concurrent('[Find Many] Get users with posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -314,7 +280,7 @@ test('[Find Many] Get users with posts + limit posts and users', async () => { }); }); -test('[Find Many] Get users with posts + custom fields', async () => { +test.concurrent('[Find Many] Get users with posts + custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -405,7 +371,7 @@ test('[Find Many] Get users with posts + custom fields', async () => { }); }); -test('[Find Many] Get users with posts + custom fields + limits', async () => { +test.concurrent('[Find Many] Get users with posts + custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -462,7 +428,7 @@ test('[Find Many] Get users with posts + custom fields + limits', async () => { }); // TODO check order -test.skip('[Find Many] Get users with posts + orderBy', async () => { +test.skip('[Find Many] Get users with posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -544,7 +510,7 @@ test.skip('[Find Many] Get users with posts + orderBy', async () => { }); }); -test('[Find Many] Get users with posts + where', async () => { +test.concurrent('[Find Many] Get users with posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -592,7 +558,7 @@ test('[Find Many] Get users with posts + where', async () => { }); }); -test('[Find Many] Get users with posts + where + partial', async () => { +test.concurrent('[Find Many] Get users with posts + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -642,7 +608,7 @@ test('[Find Many] Get users with posts + where + partial', async () => { }); }); -test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async () => { +test.concurrent('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -692,7 +658,7 @@ test('[Find Many] Get users with posts + where + partial. Did not select posts i }); }); -test('[Find Many] Get users with posts + where + partial(true + false)', async () => { +test.concurrent('[Find Many] Get users with posts + where + partial(true + false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -739,7 +705,7 @@ test('[Find Many] Get users with posts + where + partial(true + false)', async ( }); }); -test('[Find Many] Get users with posts + where + partial(false)', async () => { +test.concurrent('[Find Many] Get users with posts + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -790,7 +756,7 @@ test('[Find Many] Get users with posts + where + partial(false)', async () => { }); }); -test('[Find Many] Get users with posts in transaction', async () => { +test.concurrent('[Find Many] Get users with posts in transaction', async ({ db }) => { let usersWithPosts: { id: number; name: string; @@ -853,7 +819,7 @@ test('[Find Many] Get users with posts in transaction', async () => { }); }); -test('[Find Many] Get users with posts in rollbacked transaction', async () => { +test.concurrent('[Find Many] Get users with posts in rollbacked transaction', async ({ db }) => { let usersWithPosts: { id: number; name: string; @@ -910,7 +876,7 @@ test('[Find Many] Get users with posts in rollbacked transaction', async () => { }); // select only custom -test('[Find Many] Get only custom fields', async () => { +test.concurrent('[Find Many] Get only custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -987,7 +953,7 @@ test('[Find Many] Get only custom fields', async () => { }); }); -test('[Find Many] Get only custom fields + where', async () => { +test.concurrent('[Find Many] Get only custom fields + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1037,7 +1003,7 @@ test('[Find Many] Get only custom fields + where', async () => { }); }); -test('[Find Many] Get only custom fields + where + limit', async () => { +test.concurrent('[Find Many] Get only custom fields + where + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1088,7 +1054,7 @@ test('[Find Many] Get only custom fields + where + limit', async () => { }); }); -test('[Find Many] Get only custom fields + where + orderBy', async () => { +test.concurrent('[Find Many] Get only custom fields + where + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1140,7 +1106,7 @@ test('[Find Many] Get only custom fields + where + orderBy', async () => { }); // select only custom find one -test('[Find One] Get only custom fields', async () => { +test.concurrent('[Find One] Get only custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1198,7 +1164,7 @@ test('[Find One] Get only custom fields', async () => { }); }); -test('[Find One] Get only custom fields + where', async () => { +test.concurrent('[Find One] Get only custom fields + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1249,7 +1215,7 @@ test('[Find One] Get only custom fields + where', async () => { }); }); -test('[Find One] Get only custom fields + where + limit', async () => { +test.concurrent('[Find One] Get only custom fields + where + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1301,7 +1267,7 @@ test('[Find One] Get only custom fields + where + limit', async () => { }); }); -test('[Find One] Get only custom fields + where + orderBy', async () => { +test.concurrent('[Find One] Get only custom fields + where + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1354,7 +1320,7 @@ test('[Find One] Get only custom fields + where + orderBy', async () => { }); // columns {} -test('[Find Many] Get select {}', async () => { +test.concurrent('[Find Many] Get select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1369,7 +1335,7 @@ test('[Find Many] Get select {}', async () => { }); // columns {} -test('[Find One] Get select {}', async () => { +test.concurrent('[Find One] Get select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1384,7 +1350,7 @@ test('[Find One] Get select {}', async () => { }); // deep select {} -test('[Find Many] Get deep select {}', async () => { +test.concurrent('[Find Many] Get deep select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1410,7 +1376,7 @@ test('[Find Many] Get deep select {}', async () => { }); // deep select {} -test('[Find One] Get deep select {}', async () => { +test.concurrent('[Find One] Get deep select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1438,7 +1404,7 @@ test('[Find One] Get deep select {}', async () => { /* Prepared statements for users+posts */ -test('[Find Many] Get users with posts + prepared limit', async () => { +test.concurrent('[Find Many] Get users with posts + prepared limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1506,7 +1472,7 @@ test('[Find Many] Get users with posts + prepared limit', async () => { }); }); -test('[Find Many] Get users with posts + prepared limit + offset', async () => { +test.concurrent('[Find Many] Get users with posts + prepared limit + offset', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1568,7 +1534,7 @@ test('[Find Many] Get users with posts + prepared limit + offset', async () => { }); }); -test('[Find Many] Get users with posts + prepared where', async () => { +test.concurrent('[Find Many] Get users with posts + prepared where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1618,7 +1584,7 @@ test('[Find Many] Get users with posts + prepared where', async () => { }); }); -test('[Find Many] Get users with posts + prepared + limit + offset + where', async () => { +test.concurrent('[Find Many] Get users with posts + prepared + limit + offset + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1678,7 +1644,7 @@ test('[Find Many] Get users with posts + prepared + limit + offset + where', asy [Find One] One relation users+posts */ -test('[Find One] Get users with posts', async () => { +test.concurrent('[Find One] Get users with posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1723,7 +1689,7 @@ test('[Find One] Get users with posts', async () => { }); }); -test('[Find One] Get users with posts + limit posts', async () => { +test.concurrent('[Find One] Get users with posts + limit posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1774,7 +1740,7 @@ test('[Find One] Get users with posts + limit posts', async () => { }); }); -test('[Find One] Get users with posts no results found', async () => { +test.concurrent('[Find One] Get users with posts no results found', async ({ db }) => { const usersWithPosts = await db._query.usersTable.findFirst({ with: { posts: { @@ -1801,7 +1767,7 @@ test('[Find One] Get users with posts no results found', async () => { expect(usersWithPosts).toBeUndefined(); }); -test('[Find One] Get users with posts + limit posts and users', async () => { +test.concurrent('[Find One] Get users with posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1852,7 +1818,7 @@ test('[Find One] Get users with posts + limit posts and users', async () => { }); }); -test('[Find One] Get users with posts + custom fields', async () => { +test.concurrent('[Find One] Get users with posts + custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1924,7 +1890,7 @@ test('[Find One] Get users with posts + custom fields', async () => { }); }); -test('[Find One] Get users with posts + custom fields + limits', async () => { +test.concurrent('[Find One] Get users with posts + custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1981,7 +1947,7 @@ test('[Find One] Get users with posts + custom fields + limits', async () => { }); // TODO. Check order -test.skip('[Find One] Get users with posts + orderBy', async () => { +test.skip('[Find One] Get users with posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2038,7 +2004,7 @@ test.skip('[Find One] Get users with posts + orderBy', async () => { }); }); -test('[Find One] Get users with posts + where', async () => { +test.concurrent('[Find One] Get users with posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2087,7 +2053,7 @@ test('[Find One] Get users with posts + where', async () => { }); }); -test('[Find One] Get users with posts + where + partial', async () => { +test.concurrent('[Find One] Get users with posts + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2138,7 +2104,7 @@ test('[Find One] Get users with posts + where + partial', async () => { }); }); -test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async () => { +test.concurrent('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2189,7 +2155,7 @@ test('[Find One] Get users with posts + where + partial. Did not select posts id }); }); -test('[Find One] Get users with posts + where + partial(true + false)', async () => { +test.concurrent('[Find One] Get users with posts + where + partial(true + false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2237,7 +2203,7 @@ test('[Find One] Get users with posts + where + partial(true + false)', async () }); }); -test('[Find One] Get users with posts + where + partial(false)', async () => { +test.concurrent('[Find One] Get users with posts + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2293,7 +2259,7 @@ test('[Find One] Get users with posts + where + partial(false)', async () => { One relation users+users. Self referencing */ -test('Get user with invitee', async () => { +test.concurrent('Get user with invitee', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2360,7 +2326,7 @@ test('Get user with invitee', async () => { }); }); -test('Get user + limit with invitee', async () => { +test.concurrent('Get user + limit with invitee', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, @@ -2412,7 +2378,7 @@ test('Get user + limit with invitee', async () => { }); }); -test('Get user with invitee and custom fields', async () => { +test.concurrent('Get user with invitee and custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2488,7 +2454,7 @@ test('Get user with invitee and custom fields', async () => { }); }); -test('Get user with invitee and custom fields + limits', async () => { +test.concurrent('Get user with invitee and custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2556,7 +2522,7 @@ test('Get user with invitee and custom fields + limits', async () => { }); }); -test('Get user with invitee + order by', async () => { +test.concurrent('Get user with invitee + order by', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2622,7 +2588,7 @@ test('Get user with invitee + order by', async () => { }); }); -test('Get user with invitee + where', async () => { +test.concurrent('Get user with invitee + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2672,7 +2638,7 @@ test('Get user with invitee + where', async () => { }); }); -test('Get user with invitee + where + partial', async () => { +test.concurrent('Get user with invitee + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2723,7 +2689,7 @@ test('Get user with invitee + where + partial', async () => { }); }); -test('Get user with invitee + where + partial. Did not select users id, but used it in where', async () => { +test.concurrent('Get user with invitee + where + partial. Did not select users id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2770,7 +2736,7 @@ test('Get user with invitee + where + partial. Did not select users id, but use }); }); -test('Get user with invitee + where + partial(true+false)', async () => { +test.concurrent('Get user with invitee + where + partial(true+false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2823,7 +2789,7 @@ test('Get user with invitee + where + partial(true+false)', async () => { }); }); -test('Get user with invitee + where + partial(false)', async () => { +test.concurrent('Get user with invitee + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2880,7 +2846,7 @@ test('Get user with invitee + where + partial(false)', async () => { Two first-level relations users+users and users+posts */ -test('Get user with invitee and posts', async () => { +test.concurrent('Get user with invitee and posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2964,7 +2930,7 @@ test('Get user with invitee and posts', async () => { }); }); -test('Get user with invitee and posts + limit posts and users', async () => { +test.concurrent('Get user with invitee and posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3045,7 +3011,7 @@ test('Get user with invitee and posts + limit posts and users', async () => { }); }); -test('Get user with invitee and posts + limits + custom fields in each', async () => { +test.concurrent('Get user with invitee and posts + limits + custom fields in each', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3135,7 +3101,7 @@ test('Get user with invitee and posts + limits + custom fields in each', async ( }); }); -test('Get user with invitee and posts + custom fields in each', async () => { +test.concurrent('Get user with invitee and posts + custom fields in each', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3257,7 +3223,7 @@ test('Get user with invitee and posts + custom fields in each', async () => { }); // TODO Check order -test.skip('Get user with invitee and posts + orderBy', async () => { +test.skip('Get user with invitee and posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3360,7 +3326,7 @@ test.skip('Get user with invitee and posts + orderBy', async () => { }); }); -test('Get user with invitee and posts + where', async () => { +test.concurrent('Get user with invitee and posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3428,7 +3394,7 @@ test('Get user with invitee and posts + where', async () => { }); }); -test('Get user with invitee and posts + limit posts and users + where', async () => { +test.concurrent('Get user with invitee and posts + limit posts and users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3488,7 +3454,7 @@ test('Get user with invitee and posts + limit posts and users + where', async () }); }); -test('Get user with invitee and posts + orderBy + where + custom', async () => { +test.concurrent('Get user with invitee and posts + orderBy + where + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3573,7 +3539,7 @@ test('Get user with invitee and posts + orderBy + where + custom', async () => { }); }); -test('Get user with invitee and posts + orderBy + where + partial + custom', async () => { +test.concurrent('Get user with invitee and posts + orderBy + where + partial + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3669,7 +3635,7 @@ test('Get user with invitee and posts + orderBy + where + partial + custom', asy One two-level relation users+posts+comments */ -test('Get user with posts and posts with comments', async () => { +test.concurrent('Get user with posts and posts with comments', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3824,7 +3790,7 @@ test('Get user with posts and posts with comments', async () => { One three-level relation users+posts+comments+comment_owner */ -test('Get user with posts and posts with comments and comments with owner', async () => { +test.concurrent('Get user with posts and posts with comments and comments with owner', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3966,7 +3932,7 @@ test('Get user with posts and posts with comments and comments with owner', asyn Users+users_to_groups+groups */ -test('[Find Many] Get users with groups', async () => { +test.concurrent('[Find Many] Get users with groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4069,7 +4035,7 @@ test('[Find Many] Get users with groups', async () => { }); }); -test('[Find Many] Get groups with users', async () => { +test.concurrent('[Find Many] Get groups with users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4172,7 +4138,7 @@ test('[Find Many] Get groups with users', async () => { }); }); -test('[Find Many] Get users with groups + limit', async () => { +test.concurrent('[Find Many] Get users with groups + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4255,7 +4221,7 @@ test('[Find Many] Get users with groups + limit', async () => { }); }); -test('[Find Many] Get groups with users + limit', async () => { +test.concurrent('[Find Many] Get groups with users + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4338,7 +4304,7 @@ test('[Find Many] Get groups with users + limit', async () => { }); }); -test('[Find Many] Get users with groups + limit + where', async () => { +test.concurrent('[Find Many] Get users with groups + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4407,7 +4373,7 @@ test('[Find Many] Get users with groups + limit + where', async () => { }); }); -test('[Find Many] Get groups with users + limit + where', async () => { +test.concurrent('[Find Many] Get groups with users + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4477,7 +4443,7 @@ test('[Find Many] Get groups with users + limit + where', async () => { }); }); -test('[Find Many] Get users with groups + where', async () => { +test.concurrent('[Find Many] Get users with groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4554,7 +4520,7 @@ test('[Find Many] Get users with groups + where', async () => { }); }); -test('[Find Many] Get groups with users + where', async () => { +test.concurrent('[Find Many] Get groups with users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4630,7 +4596,7 @@ test('[Find Many] Get groups with users + where', async () => { }); }); -test('[Find Many] Get users with groups + orderBy', async () => { +test.concurrent('[Find Many] Get users with groups + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4732,7 +4698,7 @@ test('[Find Many] Get users with groups + orderBy', async () => { }); }); -test('[Find Many] Get groups with users + orderBy', async () => { +test.concurrent('[Find Many] Get groups with users + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4835,7 +4801,7 @@ test('[Find Many] Get groups with users + orderBy', async () => { }); }); -test('[Find Many] Get users with groups + orderBy + limit', async () => { +test.concurrent('[Find Many] Get users with groups + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4924,7 +4890,7 @@ test('[Find Many] Get users with groups + orderBy + limit', async () => { Users+users_to_groups+groups */ -test('[Find One] Get users with groups', async () => { +test.concurrent('[Find One] Get users with groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4988,7 +4954,7 @@ test('[Find One] Get users with groups', async () => { }); }); -test('[Find One] Get groups with users', async () => { +test.concurrent('[Find One] Get groups with users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5052,7 +5018,7 @@ test('[Find One] Get groups with users', async () => { }); }); -test('[Find One] Get users with groups + limit', async () => { +test.concurrent('[Find One] Get users with groups + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5117,7 +5083,7 @@ test('[Find One] Get users with groups + limit', async () => { }); }); -test('[Find One] Get groups with users + limit', async () => { +test.concurrent('[Find One] Get groups with users + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5182,7 +5148,7 @@ test('[Find One] Get groups with users + limit', async () => { }); }); -test('[Find One] Get users with groups + limit + where', async () => { +test.concurrent('[Find One] Get users with groups + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5248,7 +5214,7 @@ test('[Find One] Get users with groups + limit + where', async () => { }); }); -test('[Find One] Get groups with users + limit + where', async () => { +test.concurrent('[Find One] Get groups with users + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5315,7 +5281,7 @@ test('[Find One] Get groups with users + limit + where', async () => { }); }); -test('[Find One] Get users with groups + where', async () => { +test.concurrent('[Find One] Get users with groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5375,7 +5341,7 @@ test('[Find One] Get users with groups + where', async () => { }); }); -test('[Find One] Get groups with users + where', async () => { +test.concurrent('[Find One] Get groups with users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5441,7 +5407,7 @@ test('[Find One] Get groups with users + where', async () => { }); }); -test('[Find One] Get users with groups + orderBy', async () => { +test.concurrent('[Find One] Get users with groups + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5513,7 +5479,7 @@ test('[Find One] Get users with groups + orderBy', async () => { }); }); -test('[Find One] Get groups with users + orderBy', async () => { +test.concurrent('[Find One] Get groups with users + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5579,7 +5545,7 @@ test('[Find One] Get groups with users + orderBy', async () => { }); }); -test('[Find One] Get users with groups + orderBy + limit', async () => { +test.concurrent('[Find One] Get users with groups + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5646,7 +5612,7 @@ test('[Find One] Get users with groups + orderBy + limit', async () => { }); }); -test('Get groups with users + orderBy + limit', async () => { +test.concurrent('Get groups with users + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5731,7 +5697,7 @@ test('Get groups with users + orderBy + limit', async () => { }); }); -test('Get users with groups + custom', async () => { +test.concurrent('Get users with groups + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5855,7 +5821,7 @@ test('Get users with groups + custom', async () => { }); }); -test('Get groups with users + custom', async () => { +test.concurrent('Get groups with users + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5976,13 +5942,13 @@ test('Get groups with users + custom', async () => { }); }); -test('async api', async () => { +test.concurrent('async api', async ({ db }) => { await db.insert(usersTable).values([{ id: 1, name: 'Dan' }]); const users = await db._query.usersTable.findMany(); expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); -test('async api - prepare', async () => { +test.concurrent('async api - prepare', async ({ db }) => { const insertStmt = db.insert(usersTable).values([{ id: 1, name: 'Dan' }]).prepare(); await insertStmt.execute(); const queryStmt = db._query.usersTable.findMany().prepare(); @@ -5990,7 +5956,7 @@ test('async api - prepare', async () => { expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); -test('.toSQL()', () => { +test.concurrent('.toSQL()', ({ db }) => { const query = db._query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); diff --git a/integration-tests/tests/relational/turso.test.ts b/integration-tests/tests/sqlite/turso.test.ts similarity index 93% rename from integration-tests/tests/relational/turso.test.ts rename to integration-tests/tests/sqlite/turso.test.ts index 045c969515..bdc5543dcd 100644 --- a/integration-tests/tests/relational/turso.test.ts +++ b/integration-tests/tests/sqlite/turso.test.ts @@ -1,45 +1,10 @@ import 'dotenv/config'; -import { type Client, createClient } from '@libsql/client'; import { DrizzleError, sql, TransactionRollbackError } from 'drizzle-orm'; -import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; -import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; -import relations from './sqlite.relations.ts'; -import { commentsTable, groupsTable, postsTable, usersTable, usersToGroupsTable } from './sqlite.schema.ts'; +import { expect, expectTypeOf } from 'vitest'; +import { libSQLTursoTest as test } from './instrumentation'; +import { commentsTable, groupsTable, postsTable, usersTable, usersToGroupsTable } from './sqlite.schema'; -const ENABLE_LOGGING = false; - -let db: LibSQLDatabase; - -beforeAll(async () => { - const url = process.env['LIBSQL_URL']; - const authToken = process.env['LIBSQL_AUTH_TOKEN']; - if (!url) { - throw new Error('LIBSQL_URL is not set'); - } - const sleep = 250; - let timeLeft = 5000; - let connected = false; - let lastError: unknown | undefined; - let client: Client; - do { - try { - client = createClient({ url, authToken }); - connected = true; - break; - } catch (e) { - lastError = e; - await new Promise((resolve) => setTimeout(resolve, sleep)); - timeLeft -= sleep; - } - } while (timeLeft > 0); - if (!connected) { - console.error('Cannot connect to libsql'); - throw lastError; - } - db = drizzle(client!, { logger: ENABLE_LOGGING, relations, casing: 'snake_case' }); -}); - -beforeEach(async () => { +test.beforeEach(async ({ db }) => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); @@ -112,7 +77,7 @@ beforeEach(async () => { [Find Many] One relation users+posts */ -test('[Find Many] Get users with posts', async () => { +test.concurrent('[Find Many] Get users with posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -174,7 +139,7 @@ test('[Find Many] Get users with posts', async () => { }); }); -test('[Find Many] Get users with posts + limit posts', async () => { +test.concurrent('[Find Many] Get users with posts + limit posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -245,7 +210,7 @@ test('[Find Many] Get users with posts + limit posts', async () => { }); }); -test('[Find Many] Get users with posts + limit posts and users', async () => { +test.concurrent('[Find Many] Get users with posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -308,7 +273,7 @@ test('[Find Many] Get users with posts + limit posts and users', async () => { }); }); -test('[Find Many] Get users with posts + custom fields', async () => { +test.concurrent('[Find Many] Get users with posts + custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -399,7 +364,7 @@ test('[Find Many] Get users with posts + custom fields', async () => { }); }); -test('[Find Many] Get users with posts + custom fields + limits', async () => { +test.concurrent('[Find Many] Get users with posts + custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -456,7 +421,7 @@ test('[Find Many] Get users with posts + custom fields + limits', async () => { }); // TODO check order -test.skip('[Find Many] Get users with posts + orderBy', async () => { +test.skip('[Find Many] Get users with posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -542,7 +507,7 @@ test.skip('[Find Many] Get users with posts + orderBy', async () => { }); }); -test('[Find Many] Get users with posts + where', async () => { +test.concurrent('[Find Many] Get users with posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -594,7 +559,7 @@ test('[Find Many] Get users with posts + where', async () => { }); }); -test('[Find Many] Get users with posts + where + partial', async () => { +test.concurrent('[Find Many] Get users with posts + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -648,7 +613,7 @@ test('[Find Many] Get users with posts + where + partial', async () => { }); }); -test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async () => { +test.concurrent('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -702,7 +667,7 @@ test('[Find Many] Get users with posts + where + partial. Did not select posts i }); }); -test('[Find Many] Get users with posts + where + partial(true + false)', async () => { +test.concurrent('[Find Many] Get users with posts + where + partial(true + false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -753,7 +718,7 @@ test('[Find Many] Get users with posts + where + partial(true + false)', async ( }); }); -test('[Find Many] Get users with posts + where + partial(false)', async () => { +test.concurrent('[Find Many] Get users with posts + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -808,7 +773,7 @@ test('[Find Many] Get users with posts + where + partial(false)', async () => { }); }); -test('[Find Many] Get users with posts in transaction', async () => { +test.concurrent('[Find Many] Get users with posts in transaction', async ({ db }) => { let usersWithPosts: { id: number; name: string; @@ -875,7 +840,7 @@ test('[Find Many] Get users with posts in transaction', async () => { }); }); -test('[Find Many] Get users with posts in rollbacked transaction', async () => { +test.concurrent('[Find Many] Get users with posts in rollbacked transaction', async ({ db }) => { let usersWithPosts: { id: number; name: string; @@ -936,7 +901,7 @@ test('[Find Many] Get users with posts in rollbacked transaction', async () => { }); // select only custom -test('[Find Many] Get only custom fields', async () => { +test.concurrent('[Find Many] Get only custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1013,7 +978,7 @@ test('[Find Many] Get only custom fields', async () => { }); }); -test('[Find Many] Get only custom fields + where', async () => { +test.concurrent('[Find Many] Get only custom fields + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1069,7 +1034,7 @@ test('[Find Many] Get only custom fields + where', async () => { }); }); -test('[Find Many] Get only custom fields + where + limit', async () => { +test.concurrent('[Find Many] Get only custom fields + where + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1126,7 +1091,7 @@ test('[Find Many] Get only custom fields + where + limit', async () => { }); }); -test('[Find Many] Get only custom fields + where + orderBy', async () => { +test.concurrent('[Find Many] Get only custom fields + where + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1186,7 +1151,7 @@ test('[Find Many] Get only custom fields + where + orderBy', async () => { }); // select only custom find one -test('[Find One] Get only custom fields', async () => { +test.concurrent('[Find One] Get only custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1244,7 +1209,7 @@ test('[Find One] Get only custom fields', async () => { }); }); -test('[Find One] Get only custom fields + where', async () => { +test.concurrent('[Find One] Get only custom fields + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1301,7 +1266,7 @@ test('[Find One] Get only custom fields + where', async () => { }); }); -test('[Find One] Get only custom fields + where + limit', async () => { +test.concurrent('[Find One] Get only custom fields + where + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1359,7 +1324,7 @@ test('[Find One] Get only custom fields + where + limit', async () => { }); }); -test('[Find One] Get only custom fields + where + orderBy', async () => { +test.concurrent('[Find One] Get only custom fields + where + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1420,7 +1385,7 @@ test('[Find One] Get only custom fields + where + orderBy', async () => { }); // columns {} -test('[Find Many] Get select {}', async () => { +test.concurrent('[Find Many] Get select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1435,7 +1400,7 @@ test('[Find Many] Get select {}', async () => { }); // columns {} -test('[Find One] Get select {}', async () => { +test.concurrent('[Find One] Get select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1450,7 +1415,7 @@ test('[Find One] Get select {}', async () => { }); // deep select {} -test('[Find Many] Get deep select {}', async () => { +test.concurrent('[Find Many] Get deep select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1476,7 +1441,7 @@ test('[Find Many] Get deep select {}', async () => { }); // deep select {} -test('[Find One] Get deep select {}', async () => { +test.concurrent('[Find One] Get deep select {}', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1504,7 +1469,7 @@ test('[Find One] Get deep select {}', async () => { /* Prepared statements for users+posts */ -test('[Find Many] Get users with posts + prepared limit', async () => { +test.concurrent('[Find Many] Get users with posts + prepared limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1572,7 +1537,7 @@ test('[Find Many] Get users with posts + prepared limit', async () => { }); }); -test('[Find Many] Get users with posts + prepared limit + offset', async () => { +test.concurrent('[Find Many] Get users with posts + prepared limit + offset', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1634,7 +1599,7 @@ test('[Find Many] Get users with posts + prepared limit + offset', async () => { }); }); -test('[Find Many] Get users with posts + prepared where', async () => { +test.concurrent('[Find Many] Get users with posts + prepared where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1690,7 +1655,7 @@ test('[Find Many] Get users with posts + prepared where', async () => { }); }); -test('[Find Many] Get users with posts + prepared + limit + offset + where', async () => { +test.concurrent('[Find Many] Get users with posts + prepared + limit + offset + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1760,7 +1725,7 @@ test('[Find Many] Get users with posts + prepared + limit + offset + where', asy [Find One] One relation users+posts */ -test('[Find One] Get users with posts', async () => { +test.concurrent('[Find One] Get users with posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1805,7 +1770,7 @@ test('[Find One] Get users with posts', async () => { }); }); -test('[Find One] Get users with posts + limit posts', async () => { +test.concurrent('[Find One] Get users with posts + limit posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1856,7 +1821,7 @@ test('[Find One] Get users with posts + limit posts', async () => { }); }); -test('[Find One] Get users with posts no results found', async () => { +test.concurrent('[Find One] Get users with posts no results found', async ({ db }) => { const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { @@ -1883,7 +1848,7 @@ test('[Find One] Get users with posts no results found', async () => { expect(usersWithPosts).toBeUndefined(); }); -test('[Find One] Get users with posts + limit posts and users', async () => { +test.concurrent('[Find One] Get users with posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -1934,7 +1899,7 @@ test('[Find One] Get users with posts + limit posts and users', async () => { }); }); -test('[Find One] Get users with posts + custom fields', async () => { +test.concurrent('[Find One] Get users with posts + custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2006,7 +1971,7 @@ test('[Find One] Get users with posts + custom fields', async () => { }); }); -test('[Find One] Get users with posts + custom fields + limits', async () => { +test.concurrent('[Find One] Get users with posts + custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2063,7 +2028,7 @@ test('[Find One] Get users with posts + custom fields + limits', async () => { }); // TODO. Check order -test.skip('[Find One] Get users with posts + orderBy', async () => { +test.skip('[Find One] Get users with posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2124,7 +2089,7 @@ test.skip('[Find One] Get users with posts + orderBy', async () => { }); }); -test('[Find One] Get users with posts + where', async () => { +test.concurrent('[Find One] Get users with posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2177,7 +2142,7 @@ test('[Find One] Get users with posts + where', async () => { }); }); -test('[Find One] Get users with posts + where + partial', async () => { +test.concurrent('[Find One] Get users with posts + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2232,7 +2197,7 @@ test('[Find One] Get users with posts + where + partial', async () => { }); }); -test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async () => { +test.concurrent('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2287,7 +2252,7 @@ test('[Find One] Get users with posts + where + partial. Did not select posts id }); }); -test('[Find One] Get users with posts + where + partial(true + false)', async () => { +test.concurrent('[Find One] Get users with posts + where + partial(true + false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2339,7 +2304,7 @@ test('[Find One] Get users with posts + where + partial(true + false)', async () }); }); -test('[Find One] Get users with posts + where + partial(false)', async () => { +test.concurrent('[Find One] Get users with posts + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2399,7 +2364,7 @@ test('[Find One] Get users with posts + where + partial(false)', async () => { One relation users+users. Self referencing */ -test('Get user with invitee', async () => { +test.concurrent('Get user with invitee', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2466,7 +2431,7 @@ test('Get user with invitee', async () => { }); }); -test('Get user + limit with invitee', async () => { +test.concurrent('Get user + limit with invitee', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, @@ -2518,7 +2483,7 @@ test('Get user + limit with invitee', async () => { }); }); -test('Get user with invitee and custom fields', async () => { +test.concurrent('Get user with invitee and custom fields', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2594,7 +2559,7 @@ test('Get user with invitee and custom fields', async () => { }); }); -test('Get user with invitee and custom fields + limits', async () => { +test.concurrent('Get user with invitee and custom fields + limits', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2662,7 +2627,7 @@ test('Get user with invitee and custom fields + limits', async () => { }); }); -test('Get user with invitee + order by', async () => { +test.concurrent('Get user with invitee + order by', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2730,7 +2695,7 @@ test('Get user with invitee + order by', async () => { }); }); -test('Get user with invitee + where', async () => { +test.concurrent('Get user with invitee + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2784,7 +2749,7 @@ test('Get user with invitee + where', async () => { }); }); -test('Get user with invitee + where + partial', async () => { +test.concurrent('Get user with invitee + where + partial', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2839,7 +2804,7 @@ test('Get user with invitee + where + partial', async () => { }); }); -test('Get user with invitee + where + partial. Did not select users id, but used it in where', async () => { +test.concurrent('Get user with invitee + where + partial. Did not select users id, but used it in where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2890,7 +2855,7 @@ test('Get user with invitee + where + partial. Did not select users id, but use }); }); -test('Get user with invitee + where + partial(true+false)', async () => { +test.concurrent('Get user with invitee + where + partial(true+false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -2947,7 +2912,7 @@ test('Get user with invitee + where + partial(true+false)', async () => { }); }); -test('Get user with invitee + where + partial(false)', async () => { +test.concurrent('Get user with invitee + where + partial(false)', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3008,7 +2973,7 @@ test('Get user with invitee + where + partial(false)', async () => { Two first-level relations users+users and users+posts */ -test('Get user with invitee and posts', async () => { +test.concurrent('Get user with invitee and posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3092,7 +3057,7 @@ test('Get user with invitee and posts', async () => { }); }); -test('Get user with invitee and posts + limit posts and users', async () => { +test.concurrent('Get user with invitee and posts + limit posts and users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3173,7 +3138,7 @@ test('Get user with invitee and posts + limit posts and users', async () => { }); }); -test('Get user with invitee and posts + limits + custom fields in each', async () => { +test.concurrent('Get user with invitee and posts + limits + custom fields in each', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3263,7 +3228,7 @@ test('Get user with invitee and posts + limits + custom fields in each', async ( }); }); -test('Get user with invitee and posts + custom fields in each', async () => { +test.concurrent('Get user with invitee and posts + custom fields in each', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3385,7 +3350,7 @@ test('Get user with invitee and posts + custom fields in each', async () => { }); // TODO Check order -test.skip('Get user with invitee and posts + orderBy', async () => { +test.skip('Get user with invitee and posts + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3492,7 +3457,7 @@ test.skip('Get user with invitee and posts + orderBy', async () => { }); }); -test('Get user with invitee and posts + where', async () => { +test.concurrent('Get user with invitee and posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3566,7 +3531,7 @@ test('Get user with invitee and posts + where', async () => { }); }); -test('Get user with invitee and posts + limit posts and users + where', async () => { +test.concurrent('Get user with invitee and posts + limit posts and users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3632,7 +3597,7 @@ test('Get user with invitee and posts + limit posts and users + where', async () }); }); -test('Get user with invitee and posts + orderBy + where + custom', async () => { +test.concurrent('Get user with invitee and posts + orderBy + where + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3727,7 +3692,7 @@ test('Get user with invitee and posts + orderBy + where + custom', async () => { }); }); -test('Get user with invitee and posts + orderBy + where + partial + custom', async () => { +test.concurrent('Get user with invitee and posts + orderBy + where + partial + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3833,7 +3798,7 @@ test('Get user with invitee and posts + orderBy + where + partial + custom', asy One two-level relation users+posts+comments */ -test('Get user with posts and posts with comments', async () => { +test.concurrent('Get user with posts and posts with comments', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -3988,7 +3953,7 @@ test('Get user with posts and posts with comments', async () => { One three-level relation users+posts+comments+comment_owner */ -test('Get user with posts and posts with comments and comments with owner', async () => { +test.concurrent('Get user with posts and posts with comments and comments with owner', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4130,7 +4095,7 @@ test('Get user with posts and posts with comments and comments with owner', asyn Users+users_to_groups+groups */ -test('[Find Many] Get users with groups', async () => { +test.concurrent('[Find Many] Get users with groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4235,7 +4200,7 @@ test('[Find Many] Get users with groups', async () => { }); }); -test('[Find Many] Get groups with users', async () => { +test.concurrent('[Find Many] Get groups with users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4338,7 +4303,7 @@ test('[Find Many] Get groups with users', async () => { }); }); -test('[Find Many] Get users with groups + limit', async () => { +test.concurrent('[Find Many] Get users with groups + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4421,7 +4386,7 @@ test('[Find Many] Get users with groups + limit', async () => { }); }); -test('[Find Many] Get groups with users + limit', async () => { +test.concurrent('[Find Many] Get groups with users + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4504,7 +4469,7 @@ test('[Find Many] Get groups with users + limit', async () => { }); }); -test('[Find Many] Get users with groups + limit + where', async () => { +test.concurrent('[Find Many] Get users with groups + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4579,7 +4544,7 @@ test('[Find Many] Get users with groups + limit + where', async () => { }); }); -test('[Find Many] Get groups with users + limit + where', async () => { +test.concurrent('[Find Many] Get groups with users + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4655,7 +4620,7 @@ test('[Find Many] Get groups with users + limit + where', async () => { }); }); -test('[Find Many] Get users with groups + where', async () => { +test.concurrent('[Find Many] Get users with groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4738,7 +4703,7 @@ test('[Find Many] Get users with groups + where', async () => { }); }); -test('[Find Many] Get groups with users + where', async () => { +test.concurrent('[Find Many] Get groups with users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4820,7 +4785,7 @@ test('[Find Many] Get groups with users + where', async () => { }); }); -test('[Find Many] Get users with groups + orderBy', async () => { +test.concurrent('[Find Many] Get users with groups + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -4926,7 +4891,7 @@ test('[Find Many] Get users with groups + orderBy', async () => { }); }); -test('[Find Many] Get groups with users + orderBy', async () => { +test.concurrent('[Find Many] Get groups with users + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5033,7 +4998,7 @@ test('[Find Many] Get groups with users + orderBy', async () => { }); }); -test('[Find Many] Get users with groups + orderBy + limit', async () => { +test.concurrent('[Find Many] Get users with groups + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5126,7 +5091,7 @@ test('[Find Many] Get users with groups + orderBy + limit', async () => { Users+users_to_groups+groups */ -test('[Find One] Get users with groups', async () => { +test.concurrent('[Find One] Get users with groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5190,7 +5155,7 @@ test('[Find One] Get users with groups', async () => { }); }); -test('[Find One] Get groups with users', async () => { +test.concurrent('[Find One] Get groups with users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5254,7 +5219,7 @@ test('[Find One] Get groups with users', async () => { }); }); -test('[Find One] Get users with groups + limit', async () => { +test.concurrent('[Find One] Get users with groups + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5319,7 +5284,7 @@ test('[Find One] Get users with groups + limit', async () => { }); }); -test('[Find One] Get groups with users + limit', async () => { +test.concurrent('[Find One] Get groups with users + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5384,7 +5349,7 @@ test('[Find One] Get groups with users + limit', async () => { }); }); -test('[Find One] Get users with groups + limit + where', async () => { +test.concurrent('[Find One] Get users with groups + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5456,7 +5421,7 @@ test('[Find One] Get users with groups + limit + where', async () => { }); }); -test('[Find One] Get groups with users + limit + where', async () => { +test.concurrent('[Find One] Get groups with users + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5529,7 +5494,7 @@ test('[Find One] Get groups with users + limit + where', async () => { }); }); -test('[Find One] Get users with groups + where', async () => { +test.concurrent('[Find One] Get users with groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5595,7 +5560,7 @@ test('[Find One] Get users with groups + where', async () => { }); }); -test('[Find One] Get groups with users + where', async () => { +test.concurrent('[Find One] Get groups with users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5667,7 +5632,7 @@ test('[Find One] Get groups with users + where', async () => { }); }); -test('[Find One] Get users with groups + orderBy', async () => { +test.concurrent('[Find One] Get users with groups + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5743,7 +5708,7 @@ test('[Find One] Get users with groups + orderBy', async () => { }); }); -test('[Find One] Get groups with users + orderBy', async () => { +test.concurrent('[Find One] Get groups with users + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5813,7 +5778,7 @@ test('[Find One] Get groups with users + orderBy', async () => { }); }); -test('[Find One] Get users with groups + orderBy + limit', async () => { +test.concurrent('[Find One] Get users with groups + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5884,7 +5849,7 @@ test('[Find One] Get users with groups + orderBy + limit', async () => { }); }); -test('Get groups with users + orderBy + limit', async () => { +test.concurrent('Get groups with users + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -5973,7 +5938,7 @@ test('Get groups with users + orderBy + limit', async () => { }); }); -test('Get users with groups + custom', async () => { +test.concurrent('Get users with groups + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6097,7 +6062,7 @@ test('Get users with groups + custom', async () => { }); }); -test('Get groups with users + custom', async () => { +test.concurrent('Get groups with users + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6218,13 +6183,13 @@ test('Get groups with users + custom', async () => { }); }); -test('async api', async () => { +test.concurrent('async api', async ({ db }) => { await db.insert(usersTable).values([{ id: 1, name: 'Dan' }]); const users = await db.query.usersTable.findMany(); expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); -test('async api - prepare', async () => { +test.concurrent('async api - prepare', async ({ db }) => { const insertStmt = db.insert(usersTable).values([{ id: 1, name: 'Dan' }]).prepare(); await insertStmt.execute(); const queryStmt = db.query.usersTable.findMany().prepare(); @@ -6232,7 +6197,7 @@ test('async api - prepare', async () => { expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); -test('Force optional on where on non-optional relation query', async () => { +test.concurrent('Force optional on where on non-optional relation query', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6303,7 +6268,7 @@ test('Force optional on where on non-optional relation query', async () => { }); }); -test('[Find Many .through] Get users with groups', async () => { +test.concurrent('[Find Many .through] Get users with groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6386,7 +6351,7 @@ test('[Find Many .through] Get users with groups', async () => { }]); }); -test('[Find Many .through] Get groups with users', async () => { +test.concurrent('[Find Many .through] Get groups with users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6467,7 +6432,7 @@ test('[Find Many .through] Get groups with users', async () => { }]); }); -test('[Find Many .through] Get users with groups + limit', async () => { +test.concurrent('[Find Many .through] Get users with groups + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6537,7 +6502,7 @@ test('[Find Many .through] Get users with groups + limit', async () => { }]); }); -test('[Find Many .through] Get groups with users + limit', async () => { +test.concurrent('[Find Many .through] Get groups with users + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6607,7 +6572,7 @@ test('[Find Many .through] Get groups with users + limit', async () => { }]); }); -test('[Find Many .through] Get users with groups + limit + where', async () => { +test.concurrent('[Find Many .through] Get users with groups + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6668,7 +6633,7 @@ test('[Find Many .through] Get users with groups + limit + where', async () => { }]); }); -test('[Find Many .through] Get groups with users + limit + where', async () => { +test.concurrent('[Find Many .through] Get groups with users + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6728,7 +6693,7 @@ test('[Find Many .through] Get groups with users + limit + where', async () => { }]); }); -test('[Find Many .through] Get users with groups + where', async () => { +test.concurrent('[Find Many .through] Get users with groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6796,7 +6761,7 @@ test('[Find Many .through] Get users with groups + where', async () => { }]); }); -test('[Find Many .through] Get groups with users + where', async () => { +test.concurrent('[Find Many .through] Get groups with users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6861,7 +6826,7 @@ test('[Find Many .through] Get groups with users + where', async () => { }]); }); -test('[Find Many .through] Get users with groups + orderBy', async () => { +test.concurrent('[Find Many .through] Get users with groups + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -6943,7 +6908,7 @@ test('[Find Many .through] Get users with groups + orderBy', async () => { }]); }); -test('[Find Many .through] Get groups with users + orderBy', async () => { +test.concurrent('[Find Many .through] Get groups with users + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7026,7 +6991,7 @@ test('[Find Many .through] Get groups with users + orderBy', async () => { }]); }); -test('[Find Many .through] Get users with groups + orderBy + limit', async () => { +test.concurrent('[Find Many .through] Get users with groups + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7096,7 +7061,7 @@ test('[Find Many .through] Get users with groups + orderBy + limit', async () => }]); }); -test('[Find One .through] Get users with groups', async () => { +test.concurrent('[Find One .through] Get users with groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7149,7 +7114,7 @@ test('[Find One .through] Get users with groups', async () => { }); }); -test('[Find One .through] Get groups with users', async () => { +test.concurrent('[Find One .through] Get groups with users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7202,7 +7167,7 @@ test('[Find One .through] Get groups with users', async () => { }); }); -test('[Find One .through] Get users with groups + limit', async () => { +test.concurrent('[Find One .through] Get users with groups + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7257,7 +7222,7 @@ test('[Find One .through] Get users with groups + limit', async () => { }); }); -test('[Find One .through] Get groups with users + limit', async () => { +test.concurrent('[Find One .through] Get groups with users + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7312,7 +7277,7 @@ test('[Find One .through] Get groups with users + limit', async () => { }); }); -test('[Find One .through] Get users with groups + limit + where', async () => { +test.concurrent('[Find One .through] Get users with groups + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7374,7 +7339,7 @@ test('[Find One .through] Get users with groups + limit + where', async () => { }); }); -test('[Find One .through] Get groups with users + limit + where', async () => { +test.concurrent('[Find One .through] Get groups with users + limit + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7434,7 +7399,7 @@ test('[Find One .through] Get groups with users + limit + where', async () => { }); }); -test('[Find One .through] Get users with groups + where', async () => { +test.concurrent('[Find One .through] Get users with groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7492,7 +7457,7 @@ test('[Find One .through] Get users with groups + where', async () => { }); }); -test('[Find One .through] Get groups with users + where', async () => { +test.concurrent('[Find One .through] Get groups with users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7552,7 +7517,7 @@ test('[Find One .through] Get groups with users + where', async () => { }); }); -test('[Find One .through] Get users with groups + orderBy', async () => { +test.concurrent('[Find One .through] Get users with groups + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7616,7 +7581,7 @@ test('[Find One .through] Get users with groups + orderBy', async () => { }); }); -test('[Find One .through] Get groups with users + orderBy', async () => { +test.concurrent('[Find One .through] Get groups with users + orderBy', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7676,7 +7641,7 @@ test('[Find One .through] Get groups with users + orderBy', async () => { }); }); -test('[Find One .through] Get users with groups + orderBy + limit', async () => { +test.concurrent('[Find One .through] Get users with groups + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7737,7 +7702,7 @@ test('[Find One .through] Get users with groups + orderBy + limit', async () => }); }); -test('[Find Many .through] Get groups with users + orderBy + limit', async () => { +test.concurrent('[Find Many .through] Get groups with users + orderBy + limit', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7809,7 +7774,7 @@ test('[Find Many .through] Get groups with users + orderBy + limit', async () => }]); }); -test('[Find Many .through] Get users with groups + custom', async () => { +test.concurrent('[Find Many .through] Get users with groups + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -7913,7 +7878,7 @@ test('[Find Many .through] Get users with groups + custom', async () => { }]); }); -test('[Find Many .through] Get groups with users + custom', async () => { +test.concurrent('[Find Many .through] Get groups with users + custom', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8012,7 +7977,7 @@ test('[Find Many .through] Get groups with users + custom', async () => { }]); }); -test('[Find Many .through] Get users with first group', async () => { +test.concurrent('[Find Many .through] Get users with first group', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8080,7 +8045,7 @@ test('[Find Many .through] Get users with first group', async () => { }]); }); -test('[Find Many .through] Get groups with first user', async () => { +test.concurrent('[Find Many .through] Get groups with first user', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8146,7 +8111,7 @@ test('[Find Many .through] Get groups with first user', async () => { }]); }); -test('[Find Many .through] Get users with filtered groups', async () => { +test.concurrent('[Find Many .through] Get users with filtered groups', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8225,7 +8190,7 @@ test('[Find Many .through] Get users with filtered groups', async () => { }]); }); -test('[Find Many .through] Get groups with filtered users', async () => { +test.concurrent('[Find Many .through] Get groups with filtered users', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8301,7 +8266,7 @@ test('[Find Many .through] Get groups with filtered users', async () => { }]); }); -test('[Find Many .through] Get users with filtered groups + where', async () => { +test.concurrent('[Find Many .through] Get users with filtered groups + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8381,7 +8346,7 @@ test('[Find Many .through] Get users with filtered groups + where', async () => }]); }); -test('[Find Many .through] Get groups with filtered users + where', async () => { +test.concurrent('[Find Many .through] Get groups with filtered users + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8449,7 +8414,7 @@ test('[Find Many .through] Get groups with filtered users + where', async () => }]); }); -test('[Find Many] Get users with filtered posts', async () => { +test.concurrent('[Find Many] Get users with filtered posts', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8517,7 +8482,7 @@ test('[Find Many] Get users with filtered posts', async () => { }]); }); -test('[Find Many] Get posts with filtered authors', async () => { +test.concurrent('[Find Many] Get posts with filtered authors', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8583,7 +8548,7 @@ test('[Find Many] Get posts with filtered authors', async () => { ]); }); -test('[Find Many] Get users with filtered posts + where', async () => { +test.concurrent('[Find Many] Get users with filtered posts + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8654,7 +8619,7 @@ test('[Find Many] Get users with filtered posts + where', async () => { }]); }); -test('[Find Many] Get posts with filtered authors + where', async () => { +test.concurrent('[Find Many] Get posts with filtered authors + where', async ({ db }) => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, @@ -8716,7 +8681,7 @@ test('[Find Many] Get posts with filtered authors + where', async () => { ]); }); -test('.toSQL()', () => { +test.concurrent('.toSQL()', ({ db }) => { const query = db.query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); diff --git a/integration-tests/tests/sqlite/tursodatabase.test.ts b/integration-tests/tests/sqlite/tursodatabase.test.ts index 81d026de82..9db45029ac 100644 --- a/integration-tests/tests/sqlite/tursodatabase.test.ts +++ b/integration-tests/tests/sqlite/tursodatabase.test.ts @@ -1,43 +1,12 @@ -import { Database } from '@tursodatabase/database'; import { sql } from 'drizzle-orm'; -import { type BaseSQLiteDatabase, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; +import { getTableConfig, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import type { TursoDatabaseDatabase } from 'drizzle-orm/tursodatabase'; -import { drizzle } from 'drizzle-orm/tursodatabase/database'; import { migrate } from 'drizzle-orm/tursodatabase/migrator'; -import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; -import { skipTests } from '~/common'; +import { expect } from 'vitest'; +import { tursoDatabaseTest as test } from './instrumentation'; import relations from './relations'; import { tests } from './sqlite-common'; -declare module 'vitest' { - interface TestContext { - sqlite: { - db: BaseSQLiteDatabase<'async' | 'sync', any, Record, typeof relations>; - }; - } -} - -const ENABLE_LOGGING = false; - -let db: TursoDatabaseDatabase; -let client: Database | undefined; - -beforeAll(async () => { - const dbPath = ':memory:'; - client = new Database(dbPath); - db = drizzle({ client, logger: ENABLE_LOGGING, relations }); -}); - -afterAll(async () => { - client?.close(); -}); - -beforeEach((ctx) => { - ctx.sqlite = { - db, - }; -}); - export const usersMigratorTable = sqliteTable('users12', { id: integer('id').primaryKey(), name: text('name').notNull(), @@ -50,12 +19,12 @@ export const anotherUsersMigratorTable = sqliteTable('another_users', { email: text('email').notNull(), }); -test('migrator', async () => { +test('migrator', async ({ db }) => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); - await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); + await migrate(db as TursoDatabaseDatabase, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); @@ -70,20 +39,99 @@ test('migrator', async () => { await db.run(sql`drop table __drizzle_migrations`); }); -beforeEach((ctx) => { - // FROM clause is not supported in UPDATE - const skip = [ - 'update ... from', - 'update ... from with alias', - 'update ... from with join', - ]; - - if (skip.includes(ctx.task.name)) { - ctx.skip(); - } +test('migrator : --init', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as TursoDatabaseDatabase, { + migrationsFolder: './drizzle2/sqlite', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ name: string }>( + sql`SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${getTableConfig(usersMigratorTable).name};`, + ); + + expect(migratorRes).toStrictEqual(undefined); + expect(meta.length).toStrictEqual(1); + expect(!!res).toStrictEqual(false); +}); + +test('migrator : --init - local migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + const migratorRes = await migrate(db as TursoDatabaseDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ name: string }>( + sql`SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${getTableConfig(usersMigratorTable).name};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'localMigrations' }); + expect(meta.length).toStrictEqual(0); + expect(!!res).toStrictEqual(false); +}); + +test('migrator : --init - db migrations error', async ({ db }) => { + const migrationsTable = 'drzl_init'; + + await db.run(sql`drop table if exists ${sql.identifier(migrationsTable)};`); + await db.run(sql`drop table if exists ${usersMigratorTable}`); + await db.run(sql`drop table if exists ${sql.identifier('another_users')}`); + + await migrate(db as TursoDatabaseDatabase, { + migrationsFolder: './drizzle2/sqlite', + migrationsTable, + }); + + const migratorRes = await migrate(db as TursoDatabaseDatabase, { + migrationsFolder: './drizzle2/sqlite-init', + + migrationsTable, + // @ts-ignore - internal param + init: true, + }); + + const meta = await db.select({ + hash: sql`${sql.identifier('hash')}`.as('hash'), + createdAt: sql`${sql.identifier('created_at')}`.mapWith(Number).as('created_at'), + }).from(sql`${sql.identifier(migrationsTable)}`); + + const res = await db.get<{ name: string }>( + sql`SELECT name FROM sqlite_master WHERE type = 'table' AND name = ${getTableConfig(usersMigratorTable).name};`, + ); + + expect(migratorRes).toStrictEqual({ exitCode: 'databaseMigrations' }); + expect(meta.length).toStrictEqual(1); + expect(!!res).toStrictEqual(true); }); -skipTests([ +const skip = [ // Subquery in WHERE clause is not supported 'RQB v2 simple find many - with relation', 'RQB v2 transaction find many - with relation', @@ -132,5 +180,5 @@ skipTests([ // TBD 'join on aliased sql from with clause', 'join view as subquery', -]); -tests(); +]; +tests(test, skip); diff --git a/integration-tests/tests/utils/is-config.test.ts b/integration-tests/tests/utils/is-config.test.ts index 054188b865..6031cc25b4 100644 --- a/integration-tests/tests/utils/is-config.test.ts +++ b/integration-tests/tests/utils/is-config.test.ts @@ -178,7 +178,7 @@ describe('Rejects drivers', (it) => { // expect(isConfig(cl)).toEqual(false); // }); - it.skip('vercel:Client', async () => { + it('vercel:Client', async () => { const cl = vcClient({ connectionString: process.env['NEON_CONNECTION_STRING']?.replace('-pooler', ''), }); @@ -372,7 +372,7 @@ describe('Accepts drivers in .client', (it) => { // expect(isConfig({client:cl})).toEqual(true); // }); - it.skip('vercel:Client', async () => { + it('vercel:Client', async () => { const cl = vcClient({ connectionString: process.env['NEON_CONNECTION_STRING']?.replace('-pooler', ''), }); diff --git a/integration-tests/tests/version.test.ts b/integration-tests/tests/utils/version.test.ts similarity index 100% rename from integration-tests/tests/version.test.ts rename to integration-tests/tests/utils/version.test.ts diff --git a/integration-tests/tsconfig.json b/integration-tests/tsconfig.json index 38541f8e26..a431e423f2 100644 --- a/integration-tests/tsconfig.json +++ b/integration-tests/tsconfig.json @@ -8,5 +8,5 @@ } }, "include": ["tests", "type-tests"], - "exclude": ["**/playground", "**/.sst", "tests/prisma/*/client/**/*.js"] + "exclude": ["**/playground", "**/.sst", "tests/prisma/*/client/**/*.js", "tests/mysql/instrumentation.ts"] } diff --git a/integration-tests/vitest-ci.config.ts b/integration-tests/vitest-ci.config.ts index 8f6ecf4af3..5e2dfc8d84 100644 --- a/integration-tests/vitest-ci.config.ts +++ b/integration-tests/vitest-ci.config.ts @@ -1,5 +1,6 @@ -import 'dotenv/config'; +import 'dotenv/config.js'; import tsconfigPaths from 'vite-tsconfig-paths'; +// oxlint-disable-next-line extensions import { defineConfig } from 'vitest/config'; export default defineConfig({ diff --git a/integration-tests/vitest.config.ts b/integration-tests/vitest.config.ts index 3df5b76201..b5a02cb072 100644 --- a/integration-tests/vitest.config.ts +++ b/integration-tests/vitest.config.ts @@ -1,29 +1,16 @@ -import 'dotenv/config'; +import 'dotenv/config.js'; import tsconfigPaths from 'vite-tsconfig-paths'; +// oxlint-disable-next-line extensions import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ - 'tests/seeder/**/*.test.ts', - 'tests/extensions/postgis/**/*', - 'tests/relational/**/*.test.ts', - 'tests/pg/**/*.test.ts', - 'tests/mysql/**/*.test.ts', - 'tests/singlestore/**/*.test.ts', - 'tests/sqlite/**/*.test.ts', - 'tests/replicas/**/*', - 'tests/imports/**/*', - 'tests/extensions/vectors/**/*', - 'tests/version.test.ts', - 'tests/pg/node-postgres.test.ts', - 'tests/utils/is-config.test.ts', - 'js-tests/driver-init/commonjs/*.test.cjs', - 'js-tests/driver-init/module/*.test.mjs', - 'tests/gel/**/*.test.ts', + 'tests/**/*.test.ts', + 'js-tests', ], exclude: [ - ...(process.env.SKIP_EXTERNAL_DB_TESTS + ...(process.env['SKIP_EXTERNAL_DB_TESTS'] ? [ 'tests/relational/mysql.planetscale.test.ts', 'tests/relational/mysql.planetscale-v1.test.ts', @@ -52,9 +39,7 @@ export default defineConfig({ 'js-tests/driver-init/module/vercel.test.mjs', ] : []), - 'tests/pg/awsdatapi.test.ts', 'tests/awsdatapi.alltypes.test.ts', - 'tests/pg/vercel-pg.test.ts', 'tests/relational/vercel.test.ts', 'tests/relational/vercel-v1.test.ts', // Have a strange "invalid SQL: ERROR: must be owner of schema public" error. Will need to check with xata team @@ -65,7 +50,6 @@ export default defineConfig({ // move back after decide on speed 'tests/sqlite/libsql-ws.test.ts', 'tests/sqlite/libsql-http.test.ts', - 'tests/mysql/tidb-serverless.test.ts', 'js-tests/driver-init/module/planetscale.test.mjs', 'js-tests/driver-init/module/planetscale.test.cjs', 'js-tests/driver-init/commonjs/planetscale.test.cjs', @@ -73,15 +57,8 @@ export default defineConfig({ typecheck: { tsconfig: 'tsconfig.json', }, - testTimeout: 100000, - hookTimeout: 200000, - isolate: true, - poolOptions: { - threads: { - singleThread: true, - }, - }, - maxWorkers: 1, + testTimeout: 120000, + hookTimeout: 60000, fileParallelism: false, }, plugins: [tsconfigPaths()], diff --git a/package.json b/package.json index eb6f8bdb03..696d655c63 100755 --- a/package.json +++ b/package.json @@ -1,43 +1,57 @@ { "name": "drizzle-root", "private": true, + "type": "module", "scripts": { + "postinstall": "pnpm husky", "build:orm": "turbo run build --filter drizzle-orm --color", "build": "turbo run build test:types //#lint --color", + "build:artifact": "turbo run build:artifact --color", "b": "pnpm build", "pack": "turbo run pack --color", + "pack:artifact": "turbo run pack:artifact --color", "test": "turbo run test --color", "t": "pnpm test", "test:types": "turbo run test:types --color", - "lint": "dprint check --list-different", - "lint:fix": "dprint fmt" + "test:types-lint": "turbo run test:types //#lint --color", + "lint": "pnpm oxlint", + "lint:check": "pnpm oxlint --max-warnings=0", + "format": "dprint fmt", + "format:check": "dprint check --list-different", + "fmt": "pnpm format" }, "devDependencies": { - "@arethetypeswrong/cli": "0.15.3", - "@trivago/prettier-plugin-sort-imports": "^5.2.2", - "@typescript-eslint/eslint-plugin": "^6.7.3", - "@typescript-eslint/experimental-utils": "^5.62.0", - "@typescript-eslint/parser": "^6.7.3", "bun-types": "^1.2.0", "concurrently": "^8.2.1", - "dprint": "^0.46.2", - "drizzle-kit": "^0.19.13", + "dprint": "^0.50.2", + "drizzle-kit": "workspace:./drizzle-kit/dist", "drizzle-orm": "workspace:./drizzle-orm/dist", "drizzle-orm-old": "npm:drizzle-orm@^0.27.2", - "eslint": "^8.50.0", "eslint-plugin-drizzle-internal": "link:eslint/eslint-plugin-drizzle-internal", - "eslint-plugin-import": "^2.28.1", - "eslint-plugin-no-instanceof": "^1.0.1", - "eslint-plugin-unicorn": "^48.0.1", - "eslint-plugin-unused-imports": "^3.0.0", "glob": "^10.3.10", - "prettier": "^3.0.3", + "husky": "^9.1.7", + "lint-staged": "^16.2.4", + "oxlint": "^1.28.0", "recast": "^0.23.9", "resolve-tspaths": "^0.8.16", "tsup": "^8.3.5", "tsx": "^4.10.5", "turbo": "^2.2.3", - "typescript": "5.9.2" + "typescript": "5.9.2", + "vite-tsconfig-paths": "^4.3.2", + "vitest": "4.0.13" }, - "packageManager": "pnpm@10.6.3" + "packageManager": "pnpm@10.15.0", + "engines": { + "node": ">=24" + }, + "lint-staged": { + "*": [ + "pnpm format:check --allow-no-files", + "pnpm lint:check" + ] + }, + "volta": { + "node": "24.10.0" + } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8cf1fed240..bb726be3fb 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -8,21 +8,6 @@ importers: .: devDependencies: - '@arethetypeswrong/cli': - specifier: 0.15.3 - version: 0.15.3 - '@trivago/prettier-plugin-sort-imports': - specifier: ^5.2.2 - version: 5.2.2(prettier@3.6.2) - '@typescript-eslint/eslint-plugin': - specifier: ^6.7.3 - version: 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/experimental-utils': - specifier: ^5.62.0 - version: 5.62.0(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/parser': - specifier: ^6.7.3 - version: 6.21.0(eslint@8.57.1)(typescript@5.9.2) bun-types: specifier: ^1.2.0 version: 1.3.3 @@ -30,41 +15,32 @@ importers: specifier: ^8.2.1 version: 8.2.2 dprint: - specifier: ^0.46.2 - version: 0.46.3 + specifier: ^0.50.2 + version: 0.50.2 drizzle-kit: - specifier: ^0.19.13 - version: 0.19.13 + specifier: workspace:./drizzle-kit/dist + version: link:drizzle-kit/dist drizzle-orm: specifier: workspace:./drizzle-orm/dist version: link:drizzle-orm/dist drizzle-orm-old: specifier: npm:drizzle-orm@^0.27.2 - version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@0.10.0)(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@11.9.1)(bun-types@1.3.3)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(sql.js@1.13.0)(sqlite3@5.1.7) - eslint: - specifier: ^8.50.0 - version: 8.57.1 + version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@1.0.2)(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@11.9.1)(bun-types@1.3.3)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(sql.js@1.13.0)(sqlite3@5.1.7) eslint-plugin-drizzle-internal: specifier: link:eslint/eslint-plugin-drizzle-internal version: link:eslint/eslint-plugin-drizzle-internal - eslint-plugin-import: - specifier: ^2.28.1 - version: 2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1) - eslint-plugin-no-instanceof: - specifier: ^1.0.1 - version: 1.0.1 - eslint-plugin-unicorn: - specifier: ^48.0.1 - version: 48.0.1(eslint@8.57.1) - eslint-plugin-unused-imports: - specifier: ^3.0.0 - version: 3.2.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1) glob: specifier: ^10.3.10 version: 10.5.0 - prettier: - specifier: ^3.0.3 - version: 3.6.2 + husky: + specifier: ^9.1.7 + version: 9.1.7 + lint-staged: + specifier: ^16.2.4 + version: 16.2.7 + oxlint: + specifier: ^1.28.0 + version: 1.30.0 recast: specifier: ^0.23.9 version: 0.23.11 @@ -83,15 +59,76 @@ importers: typescript: specifier: 5.9.2 version: 5.9.2 + vite-tsconfig-paths: + specifier: ^4.3.2 + version: 4.3.2(typescript@5.9.2)(vite@7.2.4(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) + vitest: + specifier: 4.0.13 + version: 4.0.13(@opentelemetry/api@1.9.0)(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) + + attw-fork: + dependencies: + '@andrewbranch/untar.js': + specifier: ^1.0.3 + version: 1.0.3 + '@loaderkit/resolve': + specifier: ^1.0.2 + version: 1.0.4 + chalk: + specifier: ^4.1.2 + version: 4.1.2 + cjs-module-lexer: + specifier: ^1.2.3 + version: 1.4.3 + cli-table3: + specifier: ^0.6.3 + version: 0.6.5 + fflate: + specifier: ^0.8.2 + version: 0.8.2 + lru-cache: + specifier: ^11.0.1 + version: 11.2.2 + marked: + specifier: 9.1.2 + version: 9.1.2 + marked-terminal: + specifier: 7.1.0 + version: 7.1.0(marked@9.1.2) + semver: + specifier: ^7.5.4 + version: 7.7.3 + typescript: + specifier: 5.9.2 + version: 5.9.2 + validate-npm-package-name: + specifier: ^5.0.0 + version: 5.0.1 + devDependencies: + '@types/marked-terminal': + specifier: 3.1.3 + version: 3.1.3 + '@types/node': + specifier: ^24.5.0 + version: 24.10.1 + '@types/semver': + specifier: ^7.5.0 + version: 7.7.1 + '@types/validate-npm-package-name': + specifier: ^4.0.0 + version: 4.0.2 + ts-expose-internals: + specifier: 5.6.3 + version: 5.6.3 drizzle-arktype: devDependencies: '@ark/attest': specifier: ^0.45.8 - version: 0.45.11(typescript@6.0.0-dev.20251126) + version: 0.45.11(typescript@5.9.2) '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@6.0.0-dev.20251126) + version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@5.9.2) '@types/node': specifier: ^18.15.10 version: 18.19.130 @@ -116,12 +153,6 @@ importers: tsx: specifier: ^4.19.3 version: 4.20.6 - vite-tsconfig-paths: - specifier: ^4.3.2 - version: 4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) - vitest: - specifier: ^3.1.3 - version: 3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) zx: specifier: ^7.2.2 version: 7.2.4 @@ -129,21 +160,18 @@ importers: drizzle-kit: dependencies: '@drizzle-team/brocli': - specifier: ^0.10.2 - version: 0.10.2 - '@esbuild-kit/esm-loader': - specifier: ^2.5.5 - version: 2.6.5 + specifier: ^0.11.0 + version: 0.11.0 + '@js-temporal/polyfill': + specifier: ^0.5.1 + version: 0.5.1 esbuild: - specifier: ^0.25.4 + specifier: ^0.25.10 version: 0.25.12 esbuild-register: - specifier: ^3.5.0 + specifier: ^3.6.0 version: 3.6.0(esbuild@0.25.12) devDependencies: - '@arethetypeswrong/cli': - specifier: ^0.15.3 - version: 0.15.3 '@aws-sdk/client-rds-data': specifier: ^3.556.0 version: 3.940.0 @@ -153,9 +181,6 @@ importers: '@electric-sql/pglite': specifier: ^0.2.12 version: 0.2.12 - '@hono/bun-compress': - specifier: ^0.1.0 - version: 0.1.0(hono@4.10.7) '@hono/node-server': specifier: ^1.9.0 version: 1.19.6(hono@4.10.7) @@ -166,13 +191,13 @@ importers: specifier: ^0.10.0 version: 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) '@neondatabase/serverless': - specifier: ^0.9.1 - version: 0.9.5 + specifier: ^1.0.2 + version: 1.0.2 '@originjs/vite-plugin-commonjs': specifier: ^1.0.3 version: 1.0.3 '@planetscale/database': - specifier: ^1.16.0 + specifier: ^1.19.0 version: 1.19.0 '@sqlitecloud/drivers': specifier: ^1.0.653 @@ -201,9 +226,12 @@ importers: '@types/minimatch': specifier: ^5.1.2 version: 5.1.2 + '@types/mssql': + specifier: ^9.1.4 + version: 9.1.8 '@types/node': - specifier: ^18.11.15 - version: 18.19.130 + specifier: ^24.7.2 + version: 24.10.1 '@types/pg': specifier: ^8.10.7 version: 8.15.6 @@ -219,12 +247,6 @@ importers: '@types/ws': specifier: ^8.5.10 version: 8.18.1 - '@typescript-eslint/eslint-plugin': - specifier: ^7.2.0 - version: 7.18.0(@typescript-eslint/parser@7.18.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/parser': - specifier: ^7.2.0 - version: 7.18.0(eslint@8.57.1)(typescript@5.9.2) '@vercel/postgres': specifier: ^0.8.0 version: 0.8.0 @@ -253,8 +275,8 @@ importers: specifier: ^16.0.3 version: 16.6.1 drizzle-kit: - specifier: 0.25.0-b1faa33 - version: 0.25.0-b1faa33 + specifier: ^0.31.6 + version: 0.31.7 drizzle-orm: specifier: workspace:./drizzle-orm/dist version: link:drizzle-orm/dist @@ -264,15 +286,6 @@ importers: esbuild-node-externals: specifier: ^1.9.0 version: 1.20.1(esbuild@0.25.12) - eslint: - specifier: ^8.57.0 - version: 8.57.1 - eslint-config-prettier: - specifier: ^9.1.0 - version: 9.1.2(eslint@8.57.1) - eslint-plugin-prettier: - specifier: ^5.1.3 - version: 5.5.4(eslint-config-prettier@9.1.2(eslint@8.57.1))(eslint@8.57.1)(prettier@3.6.2) gel: specifier: ^2.0.0 version: 2.2.0 @@ -297,6 +310,9 @@ importers: minimatch: specifier: ^7.4.3 version: 7.4.6 + mssql: + specifier: ^12.0.0 + version: 12.1.1 mysql2: specifier: 3.14.1 version: 3.14.1 @@ -306,6 +322,9 @@ importers: ohm-js: specifier: ^17.1.0 version: 17.2.1 + orm044: + specifier: npm:drizzle-orm@0.44.1 + version: drizzle-orm@0.44.1(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@electric-sql/pglite@0.2.12)(@libsql/client-wasm@0.10.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@1.0.2)(@op-engineering/op-sqlite@2.0.22(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1))(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@prisma/client@5.14.0(prisma@5.14.0))(@tidbcloud/serverless@0.1.1)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@upstash/redis@1.35.7)(@vercel/postgres@0.8.0)(@xata.io/client@0.29.5(typescript@5.9.3))(better-sqlite3@11.9.1)(bun-types@1.3.3)(expo-sqlite@14.0.6(expo@54.0.25(@babel/core@7.28.5)(bufferutil@4.0.8)(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)(utf-8-validate@6.0.3)))(gel@2.2.0)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(prisma@5.14.0)(sql.js@1.13.0)(sqlite3@5.1.7) pg: specifier: ^8.11.5 version: 8.16.3 @@ -321,27 +340,18 @@ importers: semver: specifier: ^7.7.2 version: 7.7.3 - superjson: - specifier: ^2.2.1 - version: 2.2.5 tsup: specifier: ^8.3.5 - version: 8.5.1(postcss@8.5.6)(tsx@3.14.0)(typescript@5.9.2)(yaml@2.8.1) + version: 8.5.1(postcss@8.5.6)(tsx@4.20.6)(typescript@5.9.3)(yaml@2.8.1) tsx: - specifier: ^3.12.1 - version: 3.14.0 + specifier: ^4.20.6 + version: 4.20.6 typescript: - specifier: ^5.9.2 - version: 5.9.2 + specifier: ^5.9.3 + version: 5.9.3 uuid: specifier: ^9.0.1 version: 9.0.1 - vite-tsconfig-paths: - specifier: ^4.3.2 - version: 4.3.2(typescript@5.9.2)(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1)) - vitest: - specifier: ^3.1.3 - version: 3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) ws: specifier: ^8.18.2 version: 8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.3) @@ -353,12 +363,19 @@ importers: version: 8.8.5 drizzle-orm: + dependencies: + '@types/mssql': + specifier: ^9.1.4 + version: 9.1.8 + mssql: + specifier: ^11.0.1 + version: 11.0.1 devDependencies: '@arktype/attest': specifier: ^0.46.0 - version: 0.46.0(typescript@6.0.0-dev.20251126) + version: 0.46.0(typescript@5.9.2) '@aws-sdk/client-rds-data': - specifier: ^3.549.0 + specifier: ^3.914.0 version: 3.940.0 '@cloudflare/workers-types': specifier: ^4.20251004.0 @@ -425,13 +442,13 @@ importers: version: 1.4.9 '@upstash/redis': specifier: ^1.34.3 - version: 1.35.6 + version: 1.35.7 '@vercel/postgres': specifier: ^0.8.0 version: 0.8.0 '@xata.io/client': specifier: ^0.29.3 - version: 0.29.5(typescript@6.0.0-dev.20251126) + version: 0.29.5(typescript@5.9.2) better-sqlite3: specifier: ^11.9.1 version: 11.9.1 @@ -480,12 +497,6 @@ importers: tsx: specifier: ^3.12.7 version: 3.14.0 - vite-tsconfig-paths: - specifier: ^4.3.2 - version: 4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1)) - vitest: - specifier: ^3.1.3 - version: 3.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) zod: specifier: ^3.20.2 version: 3.25.1 @@ -510,13 +521,19 @@ importers: version: 0.4.4(rollup@3.29.5) '@rollup/plugin-typescript': specifier: ^11.1.6 - version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@6.0.0-dev.20251126) + version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@5.9.2) + '@types/async-retry': + specifier: ^1.4.8 + version: 1.4.9 '@types/better-sqlite3': specifier: ^7.6.11 version: 7.6.13 '@types/dockerode': specifier: ^3.3.31 version: 3.3.47 + '@types/mssql': + specifier: ^9.1.4 + version: 9.1.8 '@types/node': specifier: ^22.5.4 version: 22.19.1 @@ -526,6 +543,9 @@ importers: '@types/uuid': specifier: ^10.0.0 version: 10.0.0 + async-retry: + specifier: ^1.3.3 + version: 1.3.3 better-sqlite3: specifier: ^11.1.2 version: 11.9.1 @@ -547,6 +567,9 @@ importers: get-port: specifier: ^7.1.0 version: 7.1.0 + mssql: + specifier: ^11.0.1 + version: 11.0.1 mysql2: specifier: ^3.14.1 version: 3.14.1 @@ -555,7 +578,7 @@ importers: version: 8.16.3 resolve-tspaths: specifier: ^0.8.19 - version: 0.8.23(typescript@6.0.0-dev.20251126) + version: 0.8.23(typescript@5.9.2) rollup: specifier: ^3.29.5 version: 3.29.5 @@ -568,9 +591,6 @@ importers: uuid: specifier: ^10.0.0 version: 10.0.0 - vitest: - specifier: ^3.1.3 - version: 3.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) zx: specifier: ^8.1.5 version: 8.8.5 @@ -579,7 +599,7 @@ importers: devDependencies: '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@6.0.0-dev.20251126) + version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@5.9.2) '@sinclair/typebox': specifier: ^0.34.8 version: 0.34.41 @@ -601,12 +621,6 @@ importers: rollup: specifier: ^3.29.5 version: 3.29.5 - vite-tsconfig-paths: - specifier: ^4.3.2 - version: 4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) - vitest: - specifier: ^3.1.3 - version: 3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) zx: specifier: ^7.2.2 version: 7.2.4 @@ -615,7 +629,7 @@ importers: devDependencies: '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@6.0.0-dev.20251126) + version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@5.9.2) '@types/node': specifier: ^18.15.10 version: 18.19.130 @@ -636,13 +650,7 @@ importers: version: 3.29.5 valibot: specifier: 1.0.0-beta.7 - version: 1.0.0-beta.7(typescript@6.0.0-dev.20251126) - vite-tsconfig-paths: - specifier: ^4.3.2 - version: 4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) - vitest: - specifier: ^3.1.3 - version: 3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) + version: 1.0.0-beta.7(typescript@5.9.2) zx: specifier: ^7.2.2 version: 7.2.4 @@ -651,7 +659,7 @@ importers: devDependencies: '@rollup/plugin-typescript': specifier: ^11.1.0 - version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@6.0.0-dev.20251126) + version: 11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@5.9.2) '@types/node': specifier: ^18.15.10 version: 18.19.130 @@ -670,12 +678,6 @@ importers: rollup: specifier: ^3.29.5 version: 3.29.5 - vite-tsconfig-paths: - specifier: ^4.3.2 - version: 4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) - vitest: - specifier: ^3.1.3 - version: 3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) zod: specifier: 3.25.1 version: 3.25.1 @@ -690,13 +692,13 @@ importers: version: 20.19.25 '@typescript-eslint/parser': specifier: ^6.10.0 - version: 6.21.0(eslint@8.57.1)(typescript@5.9.2) + version: 6.21.0(eslint@8.57.1)(typescript@5.9.3) '@typescript-eslint/rule-tester': specifier: ^6.10.0 - version: 6.21.0(@eslint/eslintrc@2.1.4)(eslint@8.57.1)(typescript@5.9.2) + version: 6.21.0(@eslint/eslintrc@2.1.4)(eslint@8.57.1)(typescript@5.9.3) '@typescript-eslint/utils': specifier: ^6.10.0 - version: 6.21.0(eslint@8.57.1)(typescript@5.9.2) + version: 6.21.0(eslint@8.57.1)(typescript@5.9.3) cpy-cli: specifier: ^5.0.0 version: 5.0.0 @@ -705,10 +707,7 @@ importers: version: 8.57.1 typescript: specifier: ^5.9.2 - version: 5.9.2 - vitest: - specifier: ^3.1.3 - version: 3.2.4(@types/node@20.19.25)(@vitest/ui@1.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) + version: 5.9.3 integration-tests: dependencies: @@ -721,9 +720,6 @@ importers: '@electric-sql/pglite': specifier: 0.2.12 version: 0.2.12 - '@libsql/client': - specifier: ^0.10.0 - version: 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) '@miniflare/d1': specifier: ^2.14.4 version: 2.14.4 @@ -759,7 +755,7 @@ importers: version: 0.8.0 '@xata.io/client': specifier: ^0.29.3 - version: 0.29.5(typescript@6.0.0-dev.20251126) + version: 0.29.5(typescript@5.9.2) async-retry: specifier: ^1.3.3 version: 1.3.3 @@ -796,6 +792,9 @@ importers: get-port: specifier: ^7.0.0 version: 7.1.0 + mssql: + specifier: ^11.0.1 + version: 11.0.1 mysql2: specifier: ^3.14.1 version: 3.14.1 @@ -826,9 +825,6 @@ importers: uvu: specifier: ^0.5.6 version: 0.5.6 - vitest: - specifier: ^3.2.4 - version: 3.2.4(@types/node@20.19.25)(@vitest/ui@1.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) ws: specifier: ^8.18.2 version: 8.18.3(bufferutil@4.0.8)(utf-8-validate@6.0.3) @@ -839,6 +835,9 @@ importers: '@cloudflare/workers-types': specifier: ^4.20241004.0 version: 4.20251126.0 + '@libsql/client': + specifier: ^0.10.0 + version: 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) '@neondatabase/serverless': specifier: 0.10.0 version: 0.10.0 @@ -857,6 +856,9 @@ importers: '@types/dockerode': specifier: ^3.3.18 version: 3.3.47 + '@types/mssql': + specifier: ^9.1.4 + version: 9.1.8 '@types/node': specifier: ^20.2.5 version: 20.19.25 @@ -874,10 +876,7 @@ importers: version: 8.18.1 '@upstash/redis': specifier: ^1.34.3 - version: 1.35.6 - '@vitest/ui': - specifier: ^1.6.0 - version: 1.6.1(vitest@3.2.4) + version: 1.35.7 ava: specifier: ^5.3.0 version: 5.3.1 @@ -895,13 +894,13 @@ importers: version: 5.5.4 ts-node: specifier: ^10.9.2 - version: 10.9.2(@types/node@20.19.25)(typescript@6.0.0-dev.20251126) + version: 10.9.2(@types/node@20.19.25)(typescript@5.9.2) tsx: specifier: ^4.14.0 version: 4.20.6 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) + version: 4.3.2(typescript@5.9.2)(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) zx: specifier: ^8.3.2 version: 8.8.5 @@ -910,7 +909,7 @@ importers: dependencies: drizzle-beta: specifier: npm:drizzle-orm@1.0.0-beta.1-c0277c0 - version: drizzle-orm@1.0.0-beta.1-c0277c0(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@electric-sql/pglite@0.2.12)(@libsql/client-wasm@0.10.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@0.10.0)(@op-engineering/op-sqlite@2.0.22(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1))(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@prisma/client@5.14.0(prisma@5.14.0))(@tidbcloud/serverless@0.1.1)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(@xata.io/client@0.29.5(typescript@6.0.0-dev.20251126))(better-sqlite3@11.9.1)(bun-types@1.3.3)(expo-sqlite@14.0.6(expo@54.0.25(@babel/core@7.28.5)(bufferutil@4.0.8)(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)(utf-8-validate@6.0.3)))(gel@2.2.0)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(prisma@5.14.0)(sql.js@1.13.0)(sqlite3@5.1.7) + version: drizzle-orm@1.0.0-beta.1-c0277c0(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@electric-sql/pglite@0.2.12)(@libsql/client-wasm@0.10.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@1.0.2)(@op-engineering/op-sqlite@2.0.22(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1))(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@prisma/client@5.14.0(prisma@5.14.0))(@tidbcloud/serverless@0.1.1)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(@xata.io/client@0.29.5(typescript@5.9.2))(better-sqlite3@11.9.1)(bun-types@1.3.3)(expo-sqlite@14.0.6(expo@54.0.25(@babel/core@7.28.5)(bufferutil@4.0.8)(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)(utf-8-validate@6.0.3)))(gel@2.2.0)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(prisma@5.14.0)(sql.js@1.13.0)(sqlite3@5.1.7) drizzle-seed: specifier: workspace:../drizzle-seed/dist version: link:../drizzle-seed/dist @@ -944,20 +943,11 @@ packages: '@andrewbranch/untar.js@1.0.3': resolution: {integrity: sha512-Jh15/qVmrLGhkKJBdXlK1+9tY4lZruYjsgkDFj08ZmDiWVBLJcqkok7Z0/R0In+i1rScBpJlSvrTS2Lm41Pbnw==} - '@arethetypeswrong/cli@0.15.3': - resolution: {integrity: sha512-sIMA9ZJBWDEg1+xt5RkAEflZuf8+PO8SdKj17x6PtETuUho+qlZJg4DgmKc3q+QwQ9zOB5VLK6jVRbFdNLdUIA==} - engines: {node: '>=18'} - hasBin: true - '@arethetypeswrong/cli@0.16.4': resolution: {integrity: sha512-qMmdVlJon5FtA+ahn0c1oAVNxiq4xW5lqFiTZ21XHIeVwAVIQ+uRz4UEivqRMsjVV1grzRgJSKqaOrq1MvlVyQ==} engines: {node: '>=18'} hasBin: true - '@arethetypeswrong/core@0.15.1': - resolution: {integrity: sha512-FYp6GBAgsNz81BkfItRz8RLZO03w5+BaeiPma1uCfmxTnxbtuMrI/dbzGiOk8VghO108uFI0oJo0OkewdSHw7g==} - engines: {node: '>=18'} - '@arethetypeswrong/core@0.16.4': resolution: {integrity: sha512-RI3HXgSuKTfcBf1hSEg1P9/cOvmI0flsMm6/QL3L3wju4AlHDqd55JFPfXs4pzgEAgy5L9pul4/HPPz99x2GvA==} engines: {node: '>=18'} @@ -1130,6 +1120,74 @@ packages: resolution: {integrity: sha512-sIyFcoPZkTtNu9xFeEoynMef3bPJIAbOfUh+ueYcfhVl6xm2VRtMcMclSxmZCMnHHd4hlYKJeq/aggmBEWynww==} engines: {node: '>=18.0.0'} + '@azure-rest/core-client@2.5.1': + resolution: {integrity: sha512-EHaOXW0RYDKS5CFffnixdyRPak5ytiCtU7uXDcP/uiY+A6jFRwNGzzJBiznkCzvi5EYpY+YWinieqHb0oY916A==} + engines: {node: '>=20.0.0'} + + '@azure/abort-controller@2.1.2': + resolution: {integrity: sha512-nBrLsEWm4J2u5LpAPjxADTlq3trDgVZZXHNKabeXZtpq3d3AbN/KGO82R87rdDz5/lYB024rtEf10/q0urNgsA==} + engines: {node: '>=18.0.0'} + + '@azure/core-auth@1.10.1': + resolution: {integrity: sha512-ykRMW8PjVAn+RS6ww5cmK9U2CyH9p4Q88YJwvUslfuMmN98w/2rdGRLPqJYObapBCdzBVeDgYWdJnFPFb7qzpg==} + engines: {node: '>=20.0.0'} + + '@azure/core-client@1.10.1': + resolution: {integrity: sha512-Nh5PhEOeY6PrnxNPsEHRr9eimxLwgLlpmguQaHKBinFYA/RU9+kOYVOQqOrTsCL+KSxrLLl1gD8Dk5BFW/7l/w==} + engines: {node: '>=20.0.0'} + + '@azure/core-http-compat@2.3.1': + resolution: {integrity: sha512-az9BkXND3/d5VgdRRQVkiJb2gOmDU8Qcq4GvjtBmDICNiQ9udFmDk4ZpSB5Qq1OmtDJGlQAfBaS4palFsazQ5g==} + engines: {node: '>=20.0.0'} + + '@azure/core-lro@2.7.2': + resolution: {integrity: sha512-0YIpccoX8m/k00O7mDDMdJpbr6mf1yWo2dfmxt5A8XVZVVMz2SSKaEbMCeJRvgQ0IaSlqhjT47p4hVIRRy90xw==} + engines: {node: '>=18.0.0'} + + '@azure/core-paging@1.6.2': + resolution: {integrity: sha512-YKWi9YuCU04B55h25cnOYZHxXYtEvQEbKST5vqRga7hWY9ydd3FZHdeQF8pyh+acWZvppw13M/LMGx0LABUVMA==} + engines: {node: '>=18.0.0'} + + '@azure/core-rest-pipeline@1.22.2': + resolution: {integrity: sha512-MzHym+wOi8CLUlKCQu12de0nwcq9k9Kuv43j4Wa++CsCpJwps2eeBQwD2Bu8snkxTtDKDx4GwjuR9E8yC8LNrg==} + engines: {node: '>=20.0.0'} + + '@azure/core-tracing@1.3.1': + resolution: {integrity: sha512-9MWKevR7Hz8kNzzPLfX4EAtGM2b8mr50HPDBvio96bURP/9C+HjdH3sBlLSNNrvRAr5/k/svoH457gB5IKpmwQ==} + engines: {node: '>=20.0.0'} + + '@azure/core-util@1.13.1': + resolution: {integrity: sha512-XPArKLzsvl0Hf0CaGyKHUyVgF7oDnhKoP85Xv6M4StF/1AhfORhZudHtOyf2s+FcbuQ9dPRAjB8J2KvRRMUK2A==} + engines: {node: '>=20.0.0'} + + '@azure/identity@4.13.0': + resolution: {integrity: sha512-uWC0fssc+hs1TGGVkkghiaFkkS7NkTxfnCH+Hdg+yTehTpMcehpok4PgUKKdyCH+9ldu6FhiHRv84Ntqj1vVcw==} + engines: {node: '>=20.0.0'} + + '@azure/keyvault-common@2.0.0': + resolution: {integrity: sha512-wRLVaroQtOqfg60cxkzUkGKrKMsCP6uYXAOomOIysSMyt1/YM0eUn9LqieAWM8DLcU4+07Fio2YGpPeqUbpP9w==} + engines: {node: '>=18.0.0'} + + '@azure/keyvault-keys@4.10.0': + resolution: {integrity: sha512-eDT7iXoBTRZ2n3fLiftuGJFD+yjkiB1GNqzU2KbY1TLYeXeSPVTVgn2eJ5vmRTZ11978jy2Kg2wI7xa9Tyr8ag==} + engines: {node: '>=18.0.0'} + + '@azure/logger@1.3.0': + resolution: {integrity: sha512-fCqPIfOcLE+CGqGPd66c8bZpwAji98tZ4JI9i/mlTNTlsIWslCfpg48s/ypyLxZTump5sypjrKn2/kY7q8oAbA==} + engines: {node: '>=20.0.0'} + + '@azure/msal-browser@4.26.2': + resolution: {integrity: sha512-F2U1mEAFsYGC5xzo1KuWc/Sy3CRglU9Ql46cDUx8x/Y3KnAIr1QAq96cIKCk/ZfnVxlvprXWRjNKoEpgLJXLhg==} + engines: {node: '>=0.8.0'} + + '@azure/msal-common@15.13.2': + resolution: {integrity: sha512-cNwUoCk3FF8VQ7Ln/MdcJVIv3sF73/OT86cRH81ECsydh7F4CNfIo2OAx6Cegtg8Yv75x4506wN4q+Emo6erOA==} + engines: {node: '>=0.8.0'} + + '@azure/msal-node@3.8.3': + resolution: {integrity: sha512-Ul7A4gwmaHzYWj2Z5xBDly/W8JSC1vnKgJ898zPMZr0oSf1ah0tiL15sytjycU/PMhDZAlkWtEL1+MzNMU6uww==} + engines: {node: '>=16'} + '@babel/code-frame@7.10.4': resolution: {integrity: sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==} @@ -1630,6 +1688,9 @@ packages: '@balena/dockerignore@1.0.2': resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} + '@braidai/lang@1.1.2': + resolution: {integrity: sha512-qBcknbBufNHlui137Hft8xauQMTZDKdophmLFv05r2eNmdIv/MlPuP4TdUknHG68UdWLgVZwgxVe735HzJNIwA==} + '@cloudflare/workers-types@4.20251126.0': resolution: {integrity: sha512-DSeI1Q7JYmh5/D/tw5eZCjrKY34v69rwj63hHt60nSQW5QLwWCbj/lLtNz9f2EPa+JCACwpLXHgCXfzJ29x66w==} @@ -1644,46 +1705,56 @@ packages: resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} engines: {node: '>=12'} - '@dprint/darwin-arm64@0.46.3': - resolution: {integrity: sha512-1ycDpGvclGHF3UG5V6peymPDg6ouNTqM6BjhVELQ6zwr+X98AMhq/1slgO8hwHtPcaS5qhTAS+PkzOmBJRegow==} + '@dprint/darwin-arm64@0.50.2': + resolution: {integrity: sha512-4d08INZlTxbPW9LK9W8+93viN543/qA2Kxn4azVnPW/xCb2Im03UqJBz8mMm3nJZdtNnK3uTVG3ib1VW+XJisw==} cpu: [arm64] os: [darwin] - '@dprint/darwin-x64@0.46.3': - resolution: {integrity: sha512-v5IpLmrY836Q5hJAxZuX097ZNQvoZgO6JKO4bK4l6XDhhHAw2XTIUr41+FM5r36ENxyASMk0NpHjhcHtih3o0g==} + '@dprint/darwin-x64@0.50.2': + resolution: {integrity: sha512-ZXWPBwdLojhdBATq+bKwJvB7D8bIzrD6eR/Xuq9UYE7evQazUiR069d9NPF0iVuzTo6wNf9ub9SXI7qDl11EGA==} cpu: [x64] os: [darwin] - '@dprint/linux-arm64-glibc@0.46.3': - resolution: {integrity: sha512-9P13g1vgV8RfQH2qBGa8YAfaOeWA42RIhj7lmWRpkDFtwau96reMKwnBBn8bHUnc5e6bSsbPUOMb/X1KMUKz/g==} + '@dprint/linux-arm64-glibc@0.50.2': + resolution: {integrity: sha512-marxQzRw8atXAnaawwZHeeUaaAVewrGTlFKKcDASGyjPBhc23J5fHPUPremm8xCbgYZyTlokzrV8/1rDRWhJcw==} cpu: [arm64] os: [linux] - '@dprint/linux-arm64-musl@0.46.3': - resolution: {integrity: sha512-AAcdcMSZ6DEIoY9E0xQHjkZP+THP7EWsQge4TWzglSIjzn31YltglHAGYFcLB4CTJYpF0NsFDNFktzgkO+s0og==} + '@dprint/linux-arm64-musl@0.50.2': + resolution: {integrity: sha512-oGDq44ydzo0ZkJk6RHcUzUN5sOMT5HC6WA8kHXI6tkAsLUkaLO2DzZFfW4aAYZUn+hYNpQfQD8iGew0sjkyLyg==} cpu: [arm64] os: [linux] - '@dprint/linux-x64-glibc@0.46.3': - resolution: {integrity: sha512-c5cQ3G1rC64nBZ8Pd2LGWwzkEk4D7Ax9NrBbwYmNPvs6mFbGlJPC1+RD95x2WwIrIlMIciLG+Kxmt25PzBphmg==} + '@dprint/linux-riscv64-glibc@0.50.2': + resolution: {integrity: sha512-QMmZoZYWsXezDcC03fBOwPfxhTpPEyHqutcgJ0oauN9QcSXGji9NSZITMmtLz2Ki3T1MIvdaLd1goGzNSvNqTQ==} + cpu: [riscv64] + os: [linux] + + '@dprint/linux-x64-glibc@0.50.2': + resolution: {integrity: sha512-KMeHEzb4teQJChTgq8HuQzc+reRNDnarOTGTQovAZ9WNjOtKLViftsKWW5HsnRHtP5nUIPE9rF1QLjJ/gUsqvw==} cpu: [x64] os: [linux] - '@dprint/linux-x64-musl@0.46.3': - resolution: {integrity: sha512-ONtk2QtLcV0TqWOCOqzUFQixgk3JC+vnJLB5L6tQwT7BX5LzeircfE/1f4dg459iqejNC9MBXZkHnXqabvWSow==} + '@dprint/linux-x64-musl@0.50.2': + resolution: {integrity: sha512-qM37T7H69g5coBTfE7SsA+KZZaRBky6gaUhPgAYxW+fOsoVtZSVkXtfTtQauHTpqqOEtbxfCtum70Hz1fr1teg==} cpu: [x64] os: [linux] - '@dprint/win32-x64@0.46.3': - resolution: {integrity: sha512-xvj4DSEilf0gGdT7CqnwNEgfWNuWqT6eIBxHDEUbmcn1vZ7IwirtqRq/nm3lmYtQaJ4EbtMQZvACHZwxC7G96w==} + '@dprint/win32-arm64@0.50.2': + resolution: {integrity: sha512-kuGVHGoxLwssVDsodefUIYQRoO2fQncurH/xKgXiZwMPOSzFcgUzYJQiyqmJEp+PENhO9VT1hXUHZtlyCAWBUQ==} + cpu: [arm64] + os: [win32] + + '@dprint/win32-x64@0.50.2': + resolution: {integrity: sha512-N3l9k31c3IMfVXqL0L6ygIhJFvCIrfQ+Z5Jph6RnCcBO6oDYWeYhAv/qBk1vLsF2y/e79TKsR1tvaEwnrQ03XA==} cpu: [x64] os: [win32] '@drizzle-team/brocli@0.10.2': resolution: {integrity: sha512-z33Il7l5dKjUgGULTqBsQBQwckHh5AbIuxhdsIxDDiZAzBOrZO6q9ogcWC65kU382AfynTfgNumVcNIjuIua6w==} - '@drizzle-team/studio@0.0.5': - resolution: {integrity: sha512-ps5qF0tMxWRVu+V5gvCRrQNqlY92aTnIKdq27gm9LZMSdaKYZt6AVvSK1dlUMzs6Rt0Jm80b+eWct6xShBKhIw==} + '@drizzle-team/brocli@0.11.0': + resolution: {integrity: sha512-hD3pekGiPg0WPCCGAZmusBBJsDqGUR66Y452YgQsZOnkdQ7ViEPKuyP4huUGEZQefp8g34RRodXYmJ2TbCH+tg==} '@electric-sql/pglite@0.2.12': resolution: {integrity: sha512-J/X42ujcoFEbOkgRyoNqZB5qcqrnJRWVlwpH3fKYoJkTz49N91uAK/rDSSG/85WRas9nC9mdV4FnMTxnQWE/rw==} @@ -1705,12 +1776,6 @@ packages: resolution: {integrity: sha512-FxEMIkJKnodyA1OaCUoEvbYRkoZlLZ4d/eXFu9Fh8CbBBgP5EmZxrfTRyN0qpXZ4vOvqnE5YdRdcrmUUXuU+dA==} deprecated: 'Merged into tsx: https://tsx.is' - '@esbuild/aix-ppc64@0.19.12': - resolution: {integrity: sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [aix] - '@esbuild/aix-ppc64@0.25.12': resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} engines: {node: '>=18'} @@ -1729,12 +1794,6 @@ packages: cpu: [arm64] os: [android] - '@esbuild/android-arm64@0.19.12': - resolution: {integrity: sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [android] - '@esbuild/android-arm64@0.25.12': resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==} engines: {node: '>=18'} @@ -1753,12 +1812,6 @@ packages: cpu: [arm] os: [android] - '@esbuild/android-arm@0.19.12': - resolution: {integrity: sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==} - engines: {node: '>=12'} - cpu: [arm] - os: [android] - '@esbuild/android-arm@0.25.12': resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==} engines: {node: '>=18'} @@ -1777,12 +1830,6 @@ packages: cpu: [x64] os: [android] - '@esbuild/android-x64@0.19.12': - resolution: {integrity: sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==} - engines: {node: '>=12'} - cpu: [x64] - os: [android] - '@esbuild/android-x64@0.25.12': resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==} engines: {node: '>=18'} @@ -1801,12 +1848,6 @@ packages: cpu: [arm64] os: [darwin] - '@esbuild/darwin-arm64@0.19.12': - resolution: {integrity: sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==} - engines: {node: '>=12'} - cpu: [arm64] - os: [darwin] - '@esbuild/darwin-arm64@0.25.12': resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==} engines: {node: '>=18'} @@ -1825,12 +1866,6 @@ packages: cpu: [x64] os: [darwin] - '@esbuild/darwin-x64@0.19.12': - resolution: {integrity: sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==} - engines: {node: '>=12'} - cpu: [x64] - os: [darwin] - '@esbuild/darwin-x64@0.25.12': resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==} engines: {node: '>=18'} @@ -1849,12 +1884,6 @@ packages: cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-arm64@0.19.12': - resolution: {integrity: sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [freebsd] - '@esbuild/freebsd-arm64@0.25.12': resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==} engines: {node: '>=18'} @@ -1873,12 +1902,6 @@ packages: cpu: [x64] os: [freebsd] - '@esbuild/freebsd-x64@0.19.12': - resolution: {integrity: sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==} - engines: {node: '>=12'} - cpu: [x64] - os: [freebsd] - '@esbuild/freebsd-x64@0.25.12': resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==} engines: {node: '>=18'} @@ -1897,12 +1920,6 @@ packages: cpu: [arm64] os: [linux] - '@esbuild/linux-arm64@0.19.12': - resolution: {integrity: sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==} - engines: {node: '>=12'} - cpu: [arm64] - os: [linux] - '@esbuild/linux-arm64@0.25.12': resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==} engines: {node: '>=18'} @@ -1921,12 +1938,6 @@ packages: cpu: [arm] os: [linux] - '@esbuild/linux-arm@0.19.12': - resolution: {integrity: sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==} - engines: {node: '>=12'} - cpu: [arm] - os: [linux] - '@esbuild/linux-arm@0.25.12': resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==} engines: {node: '>=18'} @@ -1945,12 +1956,6 @@ packages: cpu: [ia32] os: [linux] - '@esbuild/linux-ia32@0.19.12': - resolution: {integrity: sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==} - engines: {node: '>=12'} - cpu: [ia32] - os: [linux] - '@esbuild/linux-ia32@0.25.12': resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==} engines: {node: '>=18'} @@ -1975,12 +1980,6 @@ packages: cpu: [loong64] os: [linux] - '@esbuild/linux-loong64@0.19.12': - resolution: {integrity: sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==} - engines: {node: '>=12'} - cpu: [loong64] - os: [linux] - '@esbuild/linux-loong64@0.25.12': resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==} engines: {node: '>=18'} @@ -1999,12 +1998,6 @@ packages: cpu: [mips64el] os: [linux] - '@esbuild/linux-mips64el@0.19.12': - resolution: {integrity: sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==} - engines: {node: '>=12'} - cpu: [mips64el] - os: [linux] - '@esbuild/linux-mips64el@0.25.12': resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==} engines: {node: '>=18'} @@ -2023,12 +2016,6 @@ packages: cpu: [ppc64] os: [linux] - '@esbuild/linux-ppc64@0.19.12': - resolution: {integrity: sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==} - engines: {node: '>=12'} - cpu: [ppc64] - os: [linux] - '@esbuild/linux-ppc64@0.25.12': resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==} engines: {node: '>=18'} @@ -2047,12 +2034,6 @@ packages: cpu: [riscv64] os: [linux] - '@esbuild/linux-riscv64@0.19.12': - resolution: {integrity: sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==} - engines: {node: '>=12'} - cpu: [riscv64] - os: [linux] - '@esbuild/linux-riscv64@0.25.12': resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==} engines: {node: '>=18'} @@ -2071,12 +2052,6 @@ packages: cpu: [s390x] os: [linux] - '@esbuild/linux-s390x@0.19.12': - resolution: {integrity: sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==} - engines: {node: '>=12'} - cpu: [s390x] - os: [linux] - '@esbuild/linux-s390x@0.25.12': resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==} engines: {node: '>=18'} @@ -2095,12 +2070,6 @@ packages: cpu: [x64] os: [linux] - '@esbuild/linux-x64@0.19.12': - resolution: {integrity: sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==} - engines: {node: '>=12'} - cpu: [x64] - os: [linux] - '@esbuild/linux-x64@0.25.12': resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==} engines: {node: '>=18'} @@ -2131,12 +2100,6 @@ packages: cpu: [x64] os: [netbsd] - '@esbuild/netbsd-x64@0.19.12': - resolution: {integrity: sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==} - engines: {node: '>=12'} - cpu: [x64] - os: [netbsd] - '@esbuild/netbsd-x64@0.25.12': resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==} engines: {node: '>=18'} @@ -2167,12 +2130,6 @@ packages: cpu: [x64] os: [openbsd] - '@esbuild/openbsd-x64@0.19.12': - resolution: {integrity: sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==} - engines: {node: '>=12'} - cpu: [x64] - os: [openbsd] - '@esbuild/openbsd-x64@0.25.12': resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==} engines: {node: '>=18'} @@ -2203,12 +2160,6 @@ packages: cpu: [x64] os: [sunos] - '@esbuild/sunos-x64@0.19.12': - resolution: {integrity: sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==} - engines: {node: '>=12'} - cpu: [x64] - os: [sunos] - '@esbuild/sunos-x64@0.25.12': resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==} engines: {node: '>=18'} @@ -2227,12 +2178,6 @@ packages: cpu: [arm64] os: [win32] - '@esbuild/win32-arm64@0.19.12': - resolution: {integrity: sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==} - engines: {node: '>=12'} - cpu: [arm64] - os: [win32] - '@esbuild/win32-arm64@0.25.12': resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==} engines: {node: '>=18'} @@ -2251,12 +2196,6 @@ packages: cpu: [ia32] os: [win32] - '@esbuild/win32-ia32@0.19.12': - resolution: {integrity: sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==} - engines: {node: '>=12'} - cpu: [ia32] - os: [win32] - '@esbuild/win32-ia32@0.25.12': resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==} engines: {node: '>=18'} @@ -2275,12 +2214,6 @@ packages: cpu: [x64] os: [win32] - '@esbuild/win32-x64@0.19.12': - resolution: {integrity: sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==} - engines: {node: '>=12'} - cpu: [x64] - os: [win32] - '@esbuild/win32-x64@0.25.12': resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==} engines: {node: '>=18'} @@ -2451,11 +2384,6 @@ packages: engines: {node: '>=6'} hasBin: true - '@hono/bun-compress@0.1.0': - resolution: {integrity: sha512-wxy9PdC07Yc81NawIcdIiuGAEeDujwPWd01KdxubXJ33G9vdjUO85ec0UMjH0Cy7+zfNXlcWgJ+zoATjT/IfTg==} - peerDependencies: - hono: '*' - '@hono/node-server@1.19.6': resolution: {integrity: sha512-Shz/KjlIeAhfiuE93NDKVdZ7HdBVLQAfdbaXEaoAVO3ic9ibRSLGIQGkcBbFyuLr+7/1D5ZCINM8B+6IvXeMtw==} engines: {node: '>=18.14.1'} @@ -2558,9 +2486,16 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@js-joda/core@5.6.5': + resolution: {integrity: sha512-3zwefSMwHpu8iVUW8YYz227sIv6UFqO31p1Bf1ZH/Vom7CmNyUsXjDBlnNzcuhmOL1XfxZ3nvND42kR23XlbcQ==} + '@js-sdsl/ordered-map@4.4.2': resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + '@js-temporal/polyfill@0.5.1': + resolution: {integrity: sha512-hloP58zRVCRSpgDxmqCWJNlizAlUgJFqG2ypq79DCvyv9tHjRYMDOcPFjzfl/A1/YxDvRCZz8wvZvmapQnKwFQ==} + engines: {node: '>=12'} + '@jsep-plugin/assignment@1.3.0': resolution: {integrity: sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ==} engines: {node: '>= 10.16.0'} @@ -2632,6 +2567,9 @@ packages: cpu: [x64] os: [win32] + '@loaderkit/resolve@1.0.4': + resolution: {integrity: sha512-rJzYKVcV4dxJv+vW6jlvagF8zvGxHJ2+HTr1e2qOejfmGhAApgJHl8Aog4mMszxceTRiKTTbnpgmTO1bEZHV/A==} + '@miniflare/core@2.14.4': resolution: {integrity: sha512-FMmZcC1f54YpF4pDWPtdQPIO8NXfgUxCoR9uyrhxKJdZu7M6n8QKopPVNuaxR40jcsdxb7yKoQoFWnHfzJD9GQ==} engines: {node: '>=16.13'} @@ -2673,8 +2611,9 @@ packages: '@neondatabase/serverless@0.7.2': resolution: {integrity: sha512-wU3WA2uTyNO7wjPs3Mg0G01jztAxUxzd9/mskMmtPwPTjf7JKWi9AW5/puOGXLxmZ9PVgRFeBVRVYq5nBPhsCg==} - '@neondatabase/serverless@0.9.5': - resolution: {integrity: sha512-siFas6gItqv6wD/pZnvdu34wEqgG3nSE6zWZdq5j2DEsa+VvX8i/5HXJOo06qrw5axPXn+lGCxeR+NLaSPIXug==} + '@neondatabase/serverless@1.0.2': + resolution: {integrity: sha512-I5sbpSIAHiB+b6UttofhrN/UJXII+4tZPAq1qugzwCwLIL8EZLV7F/JyHUrEIiGgQpEXzpnjlJ+zwcEhheGvCw==} + engines: {node: '>=19.0.0'} '@noble/hashes@1.8.0': resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==} @@ -2713,6 +2652,46 @@ packages: '@originjs/vite-plugin-commonjs@1.0.3': resolution: {integrity: sha512-KuEXeGPptM2lyxdIEJ4R11+5ztipHoE7hy8ClZt3PYaOVQ/pyngd2alaSrPnwyFeOW1UagRBaQ752aA1dTMdOQ==} + '@oxlint/darwin-arm64@1.30.0': + resolution: {integrity: sha512-vIiTk1FBObXHvaG3WK/SpwCZP06fuHhSrn8mK2hEs2uSFWG63WgvLAHSGhI4U1X29JGf36zhfUzuKirDIVdKBQ==} + cpu: [arm64] + os: [darwin] + + '@oxlint/darwin-x64@1.30.0': + resolution: {integrity: sha512-DP4ueul8Vza52gJX1/+jyXQjUsgjmPe4Pq5uYOVa8k2V8cKRb2JzBls+DugbeP7yzKNTmlSp3J2mUnHQXuntLA==} + cpu: [x64] + os: [darwin] + + '@oxlint/linux-arm64-gnu@1.30.0': + resolution: {integrity: sha512-1e9NvTvjzG6t1tnUzNU9HZTVwNwhZw2BDQxwIsXV743g54BIkvyZNNXOEGic/Jw4IuIXHzpX3ztVWZaSzvDopg==} + cpu: [arm64] + os: [linux] + + '@oxlint/linux-arm64-musl@1.30.0': + resolution: {integrity: sha512-szb5RB8Tbk756/z/GAdmUn+H1E2815BbcM7s6JZYQgyCJxR0RCL1yFXgKyz3BjIDqzR98Tw8H3g4TeJbN2etAg==} + cpu: [arm64] + os: [linux] + + '@oxlint/linux-x64-gnu@1.30.0': + resolution: {integrity: sha512-yOWGu4a82yA8xLusaznW41IF5ZkvBNz/U++M2/tCYAQUoJKSfJuAS5AhApRMKZLKeX0Vmdagh0YwvC+e98QG0w==} + cpu: [x64] + os: [linux] + + '@oxlint/linux-x64-musl@1.30.0': + resolution: {integrity: sha512-qL1902VF4EMTZTZdJEIzzUQ+UD0IbH+IW6dhYZXbP9nTXJnItW1fk4cyJq5zfUVu1IoVwKK2FP1jUMqEsBlWTw==} + cpu: [x64] + os: [linux] + + '@oxlint/win32-arm64@1.30.0': + resolution: {integrity: sha512-QK6C1djHKI7g1l5g6W9vkz9sd+sn1QJe6PbaV2sWFjVVoT0tO6LWygVWaci09ZHYVJ+lnCbbaFEgZ9jQhIs05A==} + cpu: [arm64] + os: [win32] + + '@oxlint/win32-x64@1.30.0': + resolution: {integrity: sha512-tbPnJIBUKke9KpceV+DpGyfN3LdhGaEPJHSuD4/mUEwP9Kk6IKSoDNih681RVGhgvaEZg3uHmQr6n9Uh0P3Yrg==} + cpu: [x64] + os: [win32] + '@paralleldrive/cuid2@2.3.1': resolution: {integrity: sha512-XO7cAxhnTZl0Yggq6jOgjiOHhbgcO4NqFqwSmQpjK3b6TEE6Uj/jfSk6wzYyemh3+I0sHirKSetjQwn5cZktFw==} @@ -2723,17 +2702,10 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@pkgr/core@0.2.9': - resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} - engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} - '@planetscale/database@1.19.0': resolution: {integrity: sha512-Tv4jcFUFAFjOWrGSio49H6R2ijALv0ZzVBfJKIdm+kl9X046Fh4LLawrF9OMsglVbK6ukqMJsUCeucGAFTBcMA==} engines: {node: '>=16'} - '@polka/url@1.0.0-next.29': - resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} - '@prettier/sync@0.5.5': resolution: {integrity: sha512-6BMtNr7aQhyNcGzmumkL0tgr1YQGfm9d7ZdmRpWqWuqpc9vZBind4xMe5NMiRECOhjuSiWHfBWLBnXkpeE90bw==} peerDependencies: @@ -3023,9 +2995,6 @@ packages: cpu: [x64] os: [win32] - '@rtsao/scc@1.1.0': - resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==} - '@sinclair/typebox@0.27.8': resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} @@ -3230,6 +3199,15 @@ packages: react-native-tcp-socket: '*' react-native-url-polyfill: '*' + '@standard-schema/spec@1.0.0': + resolution: {integrity: sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==} + + '@tediousjs/connection-string@0.5.0': + resolution: {integrity: sha512-7qSgZbincDDDFyRweCIEvZULFAw5iz/DeunhvuxpL31nfntX3P4Yd4HkHBRg9H8CdqY1e5WFN1PZIz/REL9MVQ==} + + '@tediousjs/connection-string@0.6.0': + resolution: {integrity: sha512-GxlsW354Vi6QqbUgdPyQVcQjI7cZBdGV5vOYVYuCVDTylx2wl3WHR2HlhcxxHTrMigbelpXsdcZso+66uxPfow==} + '@tidbcloud/serverless@0.1.1': resolution: {integrity: sha512-km2P5Mgr9nqVah5p5aMYbO3dBqecSwZ0AU7+BhJH+03L2eJO6qCATcBR8UHPuVLhA7GCt3CambKvVYK79pVQ2g==} engines: {node: '>=16'} @@ -3238,22 +3216,6 @@ packages: resolution: {integrity: sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==} engines: {node: '>= 6'} - '@trivago/prettier-plugin-sort-imports@5.2.2': - resolution: {integrity: sha512-fYDQA9e6yTNmA13TLVSA+WMQRc5Bn/c0EUBditUHNfMMxN7M82c38b1kEggVE3pLpZ0FwkwJkUEKMiOi52JXFA==} - engines: {node: '>18.12'} - peerDependencies: - '@vue/compiler-sfc': 3.x - prettier: 2.x - 3.x - prettier-plugin-svelte: 3.x - svelte: 4.x || 5.x - peerDependenciesMeta: - '@vue/compiler-sfc': - optional: true - prettier-plugin-svelte: - optional: true - svelte: - optional: true - '@ts-morph/common@0.26.1': resolution: {integrity: sha512-Sn28TGl/4cFpcM+jwsH1wLncYq3FtN/BIpem+HOygfBWPT5pAeS5dB4VFVzV8FbnOKHpDLZmvAl4AjPEev5idA==} @@ -3373,12 +3335,15 @@ packages: '@types/json-schema@7.0.15': resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} - '@types/json5@0.0.29': - resolution: {integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==} - '@types/jsonfile@6.1.4': resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} + '@types/marked-terminal@3.1.3': + resolution: {integrity: sha512-dKgOLKlI5zFb2jTbRcyQqbdrHxeU74DCOkVIZtsoB2sc1ctXZ1iB2uxG2jjAuzoLdvwHP065ijN6Q8HecWdWYg==} + + '@types/marked@3.0.4': + resolution: {integrity: sha512-fzrd0O45A0hZl3+Fs3+BcuD3SF+kEkV0KHBXrSPi1B73PnDJI9wcUkpA8JoujFKqgyOijeKgIllFYsgJFhNB5g==} + '@types/micromatch@4.0.10': resolution: {integrity: sha512-5jOhFDElqr4DKTrTEbnW8DZ4Hz5LRUEmyrGpCMrD/NphYv3nUnaF08xmSLx1rGGnyEs/kFnhiw6dCgcDqMr5PQ==} @@ -3388,6 +3353,9 @@ packages: '@types/minimist@1.2.5': resolution: {integrity: sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==} + '@types/mssql@9.1.8': + resolution: {integrity: sha512-mt9h5jWj+DYE5jxnKaWSV/GqDf9FV52XYVk6T3XZF69noEe+JJV6MKirii48l81+cjmAkSq+qeKX+k61fHkYrQ==} + '@types/node@18.19.130': resolution: {integrity: sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==} @@ -3400,9 +3368,6 @@ packages: '@types/node@24.10.1': resolution: {integrity: sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==} - '@types/normalize-package-data@2.4.4': - resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} - '@types/pg@8.11.6': resolution: {integrity: sha512-/2WmmBXHLsfRqzfHW7BNZ8SbYzE8OSk7i3WjFYvfgRHj7S1xj+16Je5fUKv3lVdVzk/zn9TXOqf+avFCFIE0yQ==} @@ -3424,6 +3389,9 @@ packages: '@types/react@18.3.27': resolution: {integrity: sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==} + '@types/readable-stream@4.0.22': + resolution: {integrity: sha512-/FFhJpfCLAPwAcN3mFycNUa77ddnr8jTgF5VmSNetaemWB2cIlfCA9t0YTM3JAT0wOcv8D4tjPo7pkDhK3EJIg==} + '@types/retry@0.12.5': resolution: {integrity: sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==} @@ -3445,6 +3413,9 @@ packages: '@types/uuid@9.0.8': resolution: {integrity: sha512-jg+97EGIcY9AGHJJRaaPVgetKDsrTgbRjQ5Msgjh/DQKEFl0DtyRr/VCOyD1T2R1MNeWPK/u7JoGhlDZnKBAfA==} + '@types/validate-npm-package-name@4.0.2': + resolution: {integrity: sha512-lrpDziQipxCEeK5kWxvljWYhUvOiB2A9izZd9B2AFarYAkqZshb4lPbRs7zKEic6eGtH8V/2qJW+dPp9OtF6bw==} + '@types/which@3.0.4': resolution: {integrity: sha512-liyfuo/106JdlgSchJzXEQCVArk0CvevqPote8F8HgWgJ3dRCcTHgJIsLDuee0kxk/mhbInzIZk3QWSZJ8R+2w==} @@ -3457,34 +3428,6 @@ packages: '@types/yargs@17.0.35': resolution: {integrity: sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==} - '@typescript-eslint/eslint-plugin@6.21.0': - resolution: {integrity: sha512-oy9+hTPCUFpngkEZUSzbf9MxI65wbKFoQYsgPdILTfbUldp5ovUuphZVe4i30emU9M/kP+T64Di0mxl7dSw3MA==} - engines: {node: ^16.0.0 || >=18.0.0} - peerDependencies: - '@typescript-eslint/parser': ^6.0.0 || ^6.0.0-alpha - eslint: ^7.0.0 || ^8.0.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/eslint-plugin@7.18.0': - resolution: {integrity: sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - '@typescript-eslint/parser': ^7.0.0 - eslint: ^8.56.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/experimental-utils@5.62.0': - resolution: {integrity: sha512-RTXpeB3eMkpoclG3ZHft6vG/Z30azNHuqY6wKPBHlVMZFuEvrtlEDe8gMqDb+SO+9hjC/pLekeSCryf9vMZlCw==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - '@typescript-eslint/parser@6.21.0': resolution: {integrity: sha512-tbsV1jPne5CkFQCgPBcDOt30ItF7aJoZL997JSF7MhGQqOeT3svWRYxiqlfA5RUdlHN6Fi+EI9bxqbdyAUZjYQ==} engines: {node: ^16.0.0 || >=18.0.0} @@ -3495,16 +3438,6 @@ packages: typescript: optional: true - '@typescript-eslint/parser@7.18.0': - resolution: {integrity: sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - eslint: ^8.56.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - '@typescript-eslint/rule-tester@6.21.0': resolution: {integrity: sha512-twxQo4He8+AQ/YG70Xt7Fl/ImBLpi7qElxHN6/aK+U4z97JsITCG7DdIIUw5M+qKtDMCYkZCEE2If8dnHI7jWA==} engines: {node: ^16.0.0 || >=18.0.0} @@ -3512,59 +3445,14 @@ packages: '@eslint/eslintrc': '>=2' eslint: '>=8' - '@typescript-eslint/scope-manager@5.62.0': - resolution: {integrity: sha512-VXuvVvZeQCQb5Zgf4HAxc04q5j+WrNAtNh9OwCsCgpKqESMTu3tF/jhZ3xG6T4NZwWl65Bg8KuS2uEvhSfLl0w==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - '@typescript-eslint/scope-manager@6.21.0': resolution: {integrity: sha512-OwLUIWZJry80O99zvqXVEioyniJMa+d2GrqpUTqi5/v5D5rOrppJVBPa0yKCblcigC0/aYAzxxqQ1B+DS2RYsg==} engines: {node: ^16.0.0 || >=18.0.0} - '@typescript-eslint/scope-manager@7.18.0': - resolution: {integrity: sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA==} - engines: {node: ^18.18.0 || >=20.0.0} - - '@typescript-eslint/type-utils@6.21.0': - resolution: {integrity: sha512-rZQI7wHfao8qMX3Rd3xqeYSMCL3SoiSQLBATSiVKARdFGCYSRvmViieZjqc58jKgs8Y8i9YvVVhRbHSTA4VBag==} - engines: {node: ^16.0.0 || >=18.0.0} - peerDependencies: - eslint: ^7.0.0 || ^8.0.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/type-utils@7.18.0': - resolution: {integrity: sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - eslint: ^8.56.0 - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/types@5.62.0': - resolution: {integrity: sha512-87NVngcbVXUahrRTqIK27gD2t5Cu1yuCXxbLcFtCzZGlfyVWWh8mLHkoxzjsB6DDNnvdL+fW8MiwPEJyGJQDgQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - '@typescript-eslint/types@6.21.0': resolution: {integrity: sha512-1kFmZ1rOm5epu9NZEZm1kckCDGj5UJEf7P1kliH4LKu/RkwpsfqqGmY2OOcUs18lSlQBKLDYBOGxRVtrMN5lpg==} engines: {node: ^16.0.0 || >=18.0.0} - '@typescript-eslint/types@7.18.0': - resolution: {integrity: sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ==} - engines: {node: ^18.18.0 || >=20.0.0} - - '@typescript-eslint/typescript-estree@5.62.0': - resolution: {integrity: sha512-CmcQ6uY7b9y694lKdRB8FEel7JbU/40iSAPomu++SjLMntB+2Leay2LO6i8VnJk58MtE9/nQSFIH6jpyRWyYzA==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - '@typescript-eslint/typescript-estree@6.21.0': resolution: {integrity: sha512-6npJTkZcO+y2/kr+z0hc4HwNfrrP4kNYh57ek7yCNlrBjWQ1Y0OS7jiZTkgumrvkX5HkEKXFZkkdFNkaW2wmUQ==} engines: {node: ^16.0.0 || >=18.0.0} @@ -3574,45 +3462,16 @@ packages: typescript: optional: true - '@typescript-eslint/typescript-estree@7.18.0': - resolution: {integrity: sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - - '@typescript-eslint/utils@5.62.0': - resolution: {integrity: sha512-n8oxjeb5aIbPFEtmQxQYOLI0i9n5ySBEY/ZEHHZqKQSFnxio1rv6dthascc9dLuwrL0RC5mPCxB7vnAVGAYWAQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 - '@typescript-eslint/utils@6.21.0': resolution: {integrity: sha512-NfWVaC8HP9T8cbKQxHcsJBY5YE1O33+jpMwN45qzWWaPDZgLIbo12toGMWnmhvCpd3sIxkpDw3Wv1B3dYrbDQQ==} engines: {node: ^16.0.0 || >=18.0.0} peerDependencies: eslint: ^7.0.0 || ^8.0.0 - '@typescript-eslint/utils@7.18.0': - resolution: {integrity: sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw==} - engines: {node: ^18.18.0 || >=20.0.0} - peerDependencies: - eslint: ^8.56.0 - - '@typescript-eslint/visitor-keys@5.62.0': - resolution: {integrity: sha512-07ny+LHRzQXepkGg6w0mFY41fVUNBrL2Roj/++7V1txKugfjm/Ci/qSND03r2RhlJhJYMcTn9AhhSSqQp0Ysyw==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - '@typescript-eslint/visitor-keys@6.21.0': resolution: {integrity: sha512-JJtkDduxLi9bivAB+cYOVMtbkqdPOhZ+ZI5LC47MIRrDV4Yn2o+ZnW10Nkmr28xRpSpdJ6Sm42Hjf2+REYXm0A==} engines: {node: ^16.0.0 || >=18.0.0} - '@typescript-eslint/visitor-keys@7.18.0': - resolution: {integrity: sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg==} - engines: {node: ^18.18.0 || >=20.0.0} - '@typescript/analyze-trace@0.10.1': resolution: {integrity: sha512-RnlSOPh14QbopGCApgkSx5UBgGda5MX1cHqp2fsqfiDyCwGL/m1jaeB9fzu7didVS81LQqGZZuxFBcg8YU8EVw==} hasBin: true @@ -3622,11 +3481,15 @@ packages: peerDependencies: typescript: '*' + '@typespec/ts-http-runtime@0.3.2': + resolution: {integrity: sha512-IlqQ/Gv22xUC1r/WQm4StLkYQmaaTsXAhUVsNE0+xiyf0yRFiH5++q78U3bw6bLKDCTmh0uqKB9eG9+Bt75Dkg==} + engines: {node: '>=20.0.0'} + '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} - '@upstash/redis@1.35.6': - resolution: {integrity: sha512-aSEIGJgJ7XUfTYvhQcQbq835re7e/BXjs8Janq6Pvr6LlmTZnyqwT97RziZLO/8AVUL037RLXqqiQC6kCt+5pA==} + '@upstash/redis@1.35.7': + resolution: {integrity: sha512-bdCdKhke+kYUjcLLuGWSeQw7OLuWIx3eyKksyToLBAlGIMX9qiII0ptp8E0y7VFE1yuBxBd/3kSzJ8774Q4g+A==} '@urql/core@5.2.0': resolution: {integrity: sha512-/n0ieD0mvvDnVAXEQgX/7qJiVcvYvNkOHeBvkwtylfjydar123caCXcl58PXFY11oU1oquJocVXHxLAbtv4x1A==} @@ -3640,42 +3503,34 @@ packages: resolution: {integrity: sha512-/QUV9ExwaNdKooRjOQqvrKNVnRvsaXeukPNI5DB1ovUTesglfR/fparw7ngo1KUWWKIVpEj2TRrA+ObRHRdaLg==} engines: {node: '>=14.6'} - '@vitest/expect@3.2.4': - resolution: {integrity: sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==} + '@vitest/expect@4.0.13': + resolution: {integrity: sha512-zYtcnNIBm6yS7Gpr7nFTmq8ncowlMdOJkWLqYvhr/zweY6tFbDkDi8BPPOeHxEtK1rSI69H7Fd4+1sqvEGli6w==} - '@vitest/mocker@3.2.4': - resolution: {integrity: sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==} + '@vitest/mocker@4.0.13': + resolution: {integrity: sha512-eNCwzrI5djoauklwP1fuslHBjrbR8rqIVbvNlAnkq1OTa6XT+lX68mrtPirNM9TnR69XUPt4puBCx2Wexseylg==} peerDependencies: msw: ^2.4.9 - vite: ^5.0.0 || ^6.0.0 || ^7.0.0-0 + vite: ^6.0.0 || ^7.0.0-0 peerDependenciesMeta: msw: optional: true vite: optional: true - '@vitest/pretty-format@3.2.4': - resolution: {integrity: sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==} - - '@vitest/runner@3.2.4': - resolution: {integrity: sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==} + '@vitest/pretty-format@4.0.13': + resolution: {integrity: sha512-ooqfze8URWbI2ozOeLDMh8YZxWDpGXoeY3VOgcDnsUxN0jPyPWSUvjPQWqDGCBks+opWlN1E4oP1UYl3C/2EQA==} - '@vitest/snapshot@3.2.4': - resolution: {integrity: sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==} + '@vitest/runner@4.0.13': + resolution: {integrity: sha512-9IKlAru58wcVaWy7hz6qWPb2QzJTKt+IOVKjAx5vb5rzEFPTL6H4/R9BMvjZ2ppkxKgTrFONEJFtzvnyEpiT+A==} - '@vitest/spy@3.2.4': - resolution: {integrity: sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==} + '@vitest/snapshot@4.0.13': + resolution: {integrity: sha512-hb7Usvyika1huG6G6l191qu1urNPsq1iFc2hmdzQY3F5/rTgqQnwwplyf8zoYHkpt7H6rw5UfIw6i/3qf9oSxQ==} - '@vitest/ui@1.6.1': - resolution: {integrity: sha512-xa57bCPGuzEFqGjPs3vVLyqareG8DX0uMkr5U/v5vLv5/ZUrBrPL7gzxzTJedEyZxFMfsozwTIbbYfEQVo3kgg==} - peerDependencies: - vitest: 1.6.1 - - '@vitest/utils@1.6.1': - resolution: {integrity: sha512-jOrrUvXM4Av9ZWiG1EajNto0u96kWAhJ1LmPmJhXXQx/32MecEKd10pOLYgS2BQx1TgkGhloPU1ArDW2vvaY6g==} + '@vitest/spy@4.0.13': + resolution: {integrity: sha512-hSu+m4se0lDV5yVIcNWqjuncrmBgwaXa2utFLIrBkQCQkt+pSwyZTPFQAZiiF/63j8jYa8uAeUZ3RSfcdWaYWw==} - '@vitest/utils@3.2.4': - resolution: {integrity: sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==} + '@vitest/utils@4.0.13': + resolution: {integrity: sha512-ydozWyQ4LZuu8rLp47xFUWis5VOKMdHjXCWhs1LuJsTNKww+pTHQNK4e0assIB9K80TxFyskENL6vCu3j34EYA==} '@xata.io/client@0.29.5': resolution: {integrity: sha512-b55dmPVNVFOE5nj2F2G6t9l/d5yYBhIu5X5w3rznhhsriGHkrzn93tqJexIZPS77E7f/yDXcFz06KbvR3bHK5w==} @@ -3754,10 +3609,6 @@ packages: resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} engines: {node: '>=8'} - ansi-escapes@6.2.1: - resolution: {integrity: sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==} - engines: {node: '>=14.16'} - ansi-escapes@7.2.0: resolution: {integrity: sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==} engines: {node: '>=18'} @@ -3790,9 +3641,6 @@ packages: resolution: {integrity: sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} engines: {node: '>=12'} - ansicolors@0.3.2: - resolution: {integrity: sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg==} - any-promise@1.3.0: resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} @@ -3835,38 +3683,14 @@ packages: arktype@2.1.27: resolution: {integrity: sha512-enctOHxI4SULBv/TDtCVi5M8oLd4J5SVlPUblXDzSsOYQNMzmVbUosGBnJuZDKmFlN5Ie0/QVEuTE+Z5X1UhsQ==} - array-buffer-byte-length@1.0.2: - resolution: {integrity: sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==} - engines: {node: '>= 0.4'} - array-find-index@1.0.2: resolution: {integrity: sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw==} engines: {node: '>=0.10.0'} - array-includes@3.1.9: - resolution: {integrity: sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==} - engines: {node: '>= 0.4'} - array-union@2.1.0: resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} engines: {node: '>=8'} - array.prototype.findlastindex@1.2.6: - resolution: {integrity: sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==} - engines: {node: '>= 0.4'} - - array.prototype.flat@1.3.3: - resolution: {integrity: sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==} - engines: {node: '>= 0.4'} - - array.prototype.flatmap@1.3.3: - resolution: {integrity: sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==} - engines: {node: '>= 0.4'} - - arraybuffer.prototype.slice@1.0.4: - resolution: {integrity: sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==} - engines: {node: '>= 0.4'} - arrgv@1.0.2: resolution: {integrity: sha512-a4eg4yhp7mmruZDQFqVMlxNRFGi/i1r87pt8SDHy0/I8PqSXoUTlWZRdAZo0VXgvEARcujbtTk8kiZRi1uDGRw==} engines: {node: '>=8.0.0'} @@ -3889,10 +3713,6 @@ packages: resolution: {integrity: sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==} engines: {node: '>=4'} - async-function@1.0.0: - resolution: {integrity: sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==} - engines: {node: '>= 0.4'} - async-limiter@1.0.1: resolution: {integrity: sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==} @@ -4025,6 +3845,9 @@ packages: bl@4.1.0: resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + bl@6.1.5: + resolution: {integrity: sha512-XylDt2P3JBttAwLpORq/hOEX9eJzP0r6Voa46C/WVvad8D1J0jW5876txB8FnzKtbdnU6X4Y1vOEvC6PllJrDg==} + blueimp-md5@2.19.0: resolution: {integrity: sha512-DRQrD6gJyy8FbiE4s+bDoXS9hiW3Vbx5uCdwvcCf3zLHL+Iv7LtGHLpr+GZV8rHG8tK766FGYBwRbu8pELTt+w==} @@ -4064,6 +3887,9 @@ packages: bser@2.1.1: resolution: {integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==} + buffer-equal-constant-time@1.0.1: + resolution: {integrity: sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==} + buffer-from@1.1.2: resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} @@ -4084,16 +3910,16 @@ packages: resolution: {integrity: sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==} engines: {node: '>=10.0.0'} - builtin-modules@3.3.0: - resolution: {integrity: sha512-zhaCDicdLuWN5UbN5IMnFqNMhNfo919sH85y2/ea+5Yg9TsTkeZxpL+JLbp6cgYFS4sRLp3YV4S6yDuqVWHYOw==} - engines: {node: '>=6'} - builtins@5.1.0: resolution: {integrity: sha512-SW9lzGTLvWTP1AY8xeAMZimqDrIaSdLQUcVr9DMef51niJ022Ri87SwRRKYm4A6iHfkPaiVUu/Duw2Wc4J7kKg==} bun-types@1.3.3: resolution: {integrity: sha512-z3Xwlg7j2l9JY27x5Qn3Wlyos8YAp0kKRlrePAOjgjMGS5IG6E7Jnlx736vH9UVI4wUICwwhC9anYL++XeOgTQ==} + bundle-name@4.1.0: + resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} + engines: {node: '>=18'} + bundle-require@5.1.0: resolution: {integrity: sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -4151,10 +3977,6 @@ packages: caniuse-lite@1.0.30001757: resolution: {integrity: sha512-r0nnL/I28Zi/yjk1el6ilj27tKcdjLsNqAOZr0yVjWPrSQyHgKI2INaEWw21bAQSv2LXRt1XuCS/GomNpWOxsQ==} - cardinal@2.1.1: - resolution: {integrity: sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==} - hasBin: true - cbor@8.1.0: resolution: {integrity: sha512-DwGjNW9omn6EwP70aXsn7FQJx5kO12tX0bZkaTjzdVFM6/7nhA4t0EENocKGx6D2Bch9PE2KzCUf5SceBdeijg==} engines: {node: '>=12.19'} @@ -4163,6 +3985,10 @@ packages: resolution: {integrity: sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==} engines: {node: '>=18'} + chai@6.2.1: + resolution: {integrity: sha512-p4Z49OGG5W/WBCPSS/dH3jQ73kD6tiMmUM+bckNK6Jr5JHMG3k9bg/BvKR8lKmtVBKmOiuVaV2ws8s9oSbwysg==} + engines: {node: '>=18'} + chalk@2.4.2: resolution: {integrity: sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==} engines: {node: '>=4'} @@ -4226,10 +4052,6 @@ packages: cjs-module-lexer@1.4.3: resolution: {integrity: sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==} - clean-regexp@1.0.0: - resolution: {integrity: sha512-GfisEZEJvzKrmGWkvfhgzcz/BllN1USeqD2V6tg14OAOgaCD2Z/PUEuxnAZ/nPvmaHRG7a8y77p1T/IRQ4D1Hw==} - engines: {node: '>=4'} - clean-stack@2.2.0: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} @@ -4242,14 +4064,14 @@ packages: resolution: {integrity: sha512-3yONmlN9CSAkzNwnRCiJQ7Q2xK5mWuEfL3PuTZcAUzhObbXsfsnMptJzXwz93nc5zn9V9TwCVMmV7w4xsm43dw==} engines: {node: '>=0.10.0'} - cli-color@2.0.4: - resolution: {integrity: sha512-zlnpg0jNcibNrO7GG9IeHH7maWFeCz+Ja1wx/7tZNU5ASSSSZ+/qZciM0/LHCYxSdqv5h2sdbQ/PXYdOuetXvA==} - engines: {node: '>=0.10'} - cli-cursor@2.1.0: resolution: {integrity: sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw==} engines: {node: '>=4'} + cli-cursor@5.0.0: + resolution: {integrity: sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==} + engines: {node: '>=18'} + cli-highlight@2.1.11: resolution: {integrity: sha512-9KDcoEVwyUXrjcJNvHD0NFc/hiwe/WPVYIleQh2O1N2Zro5gWJZ/K+3DGn8w8P/F6FxOgzyC5bxDyHIgCSPhGg==} engines: {node: '>=8.0.0', npm: '>=5.0.0'} @@ -4267,6 +4089,10 @@ packages: resolution: {integrity: sha512-wfOBkjXteqSnI59oPcJkcPl/ZmwvMMOj340qUIY1SKZCv0B9Cf4D4fAucRkIKQmsIuYK3x1rrgU7MeGRruiuiA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + cli-truncate@5.1.1: + resolution: {integrity: sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A==} + engines: {node: '>=20'} + cliui@7.0.4: resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} @@ -4306,6 +4132,9 @@ packages: resolution: {integrity: sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==} hasBin: true + colorette@2.0.20: + resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + colors@1.4.0: resolution: {integrity: sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==} engines: {node: '>=0.1.90'} @@ -4314,10 +4143,18 @@ packages: resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} engines: {node: '>=14'} + commander@11.1.0: + resolution: {integrity: sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==} + engines: {node: '>=16'} + commander@12.1.0: resolution: {integrity: sha512-Vw8qHK3bZM9y/P10u3Vib8o/DdkvA2OtPtZvD871QKjy74Wj1WSKFILMPRPSdUSx5RFK1arlJzEtA4PkFgnbuA==} engines: {node: '>=18'} + commander@14.0.2: + resolution: {integrity: sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ==} + engines: {node: '>=20'} + commander@2.20.3: resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} @@ -4329,10 +4166,6 @@ packages: resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} engines: {node: '>= 10'} - commander@9.5.0: - resolution: {integrity: sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ==} - engines: {node: ^12.20.0 || >=14} - common-path-prefix@3.0.0: resolution: {integrity: sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==} @@ -4393,10 +4226,6 @@ packages: resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} engines: {node: '>= 0.6'} - copy-anything@4.0.5: - resolution: {integrity: sha512-7Vv6asjS4gMOuILabD3l739tsaxFQmC+a7pLZm02zyvs8p977bL3zEgq3yDk5rn9B0PbYgIv++jmHcuUab4RhA==} - engines: {node: '>=18'} - copy-file@11.1.0: resolution: {integrity: sha512-X8XDzyvYaA6msMyAM575CUoygY5b44QzLcGRKsK3MFmXcOvQa518dNPLsKYwkYsn72g3EiW+LE0ytd/FlqWmyw==} engines: {node: '>=18'} @@ -4452,26 +4281,10 @@ packages: resolution: {integrity: sha512-/fITjgjGU50vjQ4FH6eUoYu+iUoUKIXws2hL15JJpIR+BbTxaXQsMuuyjtNh2WqsSBS5nsaZHFsFecyw5CCAng==} engines: {node: '>=0.10.0'} - d@1.0.2: - resolution: {integrity: sha512-MOqHvMWF9/9MX6nza0KgvFH4HpMU0EF5uUDXqX/BtxtU8NfB0QzRtJ8Oe/6SuS4kbhyzVJwjd97EA4PKrzJ8bw==} - engines: {node: '>=0.12'} - data-uri-to-buffer@4.0.1: resolution: {integrity: sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==} engines: {node: '>= 12'} - data-view-buffer@1.0.2: - resolution: {integrity: sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==} - engines: {node: '>= 0.4'} - - data-view-byte-length@1.0.2: - resolution: {integrity: sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==} - engines: {node: '>= 0.4'} - - data-view-byte-offset@1.0.1: - resolution: {integrity: sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==} - engines: {node: '>= 0.4'} - date-fns@2.30.0: resolution: {integrity: sha512-fnULvOpxnC5/Vg3NCiWelDsLiUc9bRwAPs/+LfTLNvetFCtCTN+yQz15C/fs4AwX1R9K5GLtLfn8QW+dWisaAw==} engines: {node: '>=0.11'} @@ -4533,6 +4346,14 @@ packages: resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} engines: {node: '>=0.10.0'} + default-browser-id@5.0.1: + resolution: {integrity: sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==} + engines: {node: '>=18'} + + default-browser@5.4.0: + resolution: {integrity: sha512-XDuvSq38Hr1MdN47EDvYtx3U0MTqpCEn+F6ft8z2vYDzMrvQhVp0ui9oQdqW3MvK3vqUETglt1tVGgjLuJ5izg==} + engines: {node: '>=18'} + defaults@1.0.4: resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} @@ -4544,9 +4365,9 @@ packages: resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} engines: {node: '>=8'} - define-properties@1.2.1: - resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} - engines: {node: '>= 0.4'} + define-lazy-prop@3.0.0: + resolution: {integrity: sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==} + engines: {node: '>=12'} delegates@1.0.0: resolution: {integrity: sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==} @@ -4575,10 +4396,6 @@ packages: resolution: {integrity: sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==} engines: {node: '>=8'} - diff-sequences@29.6.3: - resolution: {integrity: sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==} - engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} - diff@4.0.2: resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} engines: {node: '>=0.3.1'} @@ -4587,9 +4404,6 @@ packages: resolution: {integrity: sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==} engines: {node: '>=0.3.1'} - difflib@0.2.4: - resolution: {integrity: sha512-9YVwmMb0wQHQNr5J9m6BSj6fk4pfGITGQOOs+D9Fl+INODWFOfvhIU1hNv6GgR1RBoC/9NJcwu77zShxV0kT7w==} - dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} @@ -4602,10 +4416,6 @@ packages: resolution: {integrity: sha512-iND4mcOWhPaCNh54WmK/KoSb35AFqPAUWFMffTQcp52uQt36b5uNwEJTSXntJZBbeGad72Crbi/hvDIv6us/6Q==} engines: {node: '>= 8.0'} - doctrine@2.1.0: - resolution: {integrity: sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==} - engines: {node: '>=0.10.0'} - doctrine@3.0.0: resolution: {integrity: sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==} engines: {node: '>=6.0.0'} @@ -4626,20 +4436,16 @@ packages: resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==} engines: {node: '>=12'} - dprint@0.46.3: - resolution: {integrity: sha512-ACEd7B7sO/uvPvV/nsHbtkIeMqeD2a8XGO1DokROtKDUmI5WbuflGZOwyjFCYwy4rkX6FXoYBzGdEQ6um7BjCA==} + dprint@0.50.2: + resolution: {integrity: sha512-+0Fzg+17jsMMUouK00/Fara5YtGOuE76EAJINHB8VpkXHd0n00rMXtw/03qorOgz23eo8Y0UpYvNZBJJo3aNtw==} hasBin: true dreamopt@0.8.0: resolution: {integrity: sha512-vyJTp8+mC+G+5dfgsY+r3ckxlz+QMX40VjPQsZc5gxVAxLmi64TBoVkP54A/pRAXMXsbu2GMMBrZPxNv23waMg==} engines: {node: '>=0.4.0'} - drizzle-kit@0.19.13: - resolution: {integrity: sha512-Rba5VW1O2JfJlwVBeZ8Zwt2E2us5oZ08PQBDiVSGlug53TOc8hzXjblZFuF+dnll9/RQEHrkzBmJFgqTvn5Rxg==} - hasBin: true - - drizzle-kit@0.25.0-b1faa33: - resolution: {integrity: sha512-WMRuEgxt1oTc62EPVQhGD+pGs6LiqzT8UqxuI6mKfA5SCeCEIt87nFzzJ5WlwsqbuoSgXBXc5zhsHvqXRD03DA==} + drizzle-kit@0.31.7: + resolution: {integrity: sha512-hOzRGSdyKIU4FcTSFYGKdXEjFsncVwHZ43gY3WU5Bz9j5Iadp6Rh6hxLSQ1IWXpKLBKt/d5y1cpSPcV+FcoQ1A==} hasBin: true drizzle-orm@0.27.2: @@ -4704,6 +4510,98 @@ packages: sqlite3: optional: true + drizzle-orm@0.44.1: + resolution: {integrity: sha512-prIWOlwJbiYInvcJxE+IMiJCtMiFVrSUJCwx6AXSJvGOdLu35qZ46QncTZDgloiLNCG0XxTC8agQElSmsl++TA==} + peerDependencies: + '@aws-sdk/client-rds-data': '>=3' + '@cloudflare/workers-types': '>=4' + '@electric-sql/pglite': '>=0.2.0' + '@libsql/client': '>=0.10.0' + '@libsql/client-wasm': '>=0.10.0' + '@neondatabase/serverless': '>=0.10.0' + '@op-engineering/op-sqlite': '>=2' + '@opentelemetry/api': ^1.4.1 + '@planetscale/database': '>=1.13' + '@prisma/client': '*' + '@tidbcloud/serverless': '*' + '@types/better-sqlite3': '*' + '@types/pg': '*' + '@types/sql.js': '*' + '@upstash/redis': '>=1.34.7' + '@vercel/postgres': '>=0.8.0' + '@xata.io/client': '*' + better-sqlite3: '>=7' + bun-types: '*' + expo-sqlite: '>=14.0.0' + gel: '>=2' + knex: '*' + kysely: '*' + mysql2: '>=2' + pg: '>=8' + postgres: '>=3' + prisma: '*' + sql.js: '>=1' + sqlite3: '>=5' + peerDependenciesMeta: + '@aws-sdk/client-rds-data': + optional: true + '@cloudflare/workers-types': + optional: true + '@electric-sql/pglite': + optional: true + '@libsql/client': + optional: true + '@libsql/client-wasm': + optional: true + '@neondatabase/serverless': + optional: true + '@op-engineering/op-sqlite': + optional: true + '@opentelemetry/api': + optional: true + '@planetscale/database': + optional: true + '@prisma/client': + optional: true + '@tidbcloud/serverless': + optional: true + '@types/better-sqlite3': + optional: true + '@types/pg': + optional: true + '@types/sql.js': + optional: true + '@upstash/redis': + optional: true + '@vercel/postgres': + optional: true + '@xata.io/client': + optional: true + better-sqlite3: + optional: true + bun-types: + optional: true + expo-sqlite: + optional: true + gel: + optional: true + knex: + optional: true + kysely: + optional: true + mysql2: + optional: true + pg: + optional: true + postgres: + optional: true + prisma: + optional: true + sql.js: + optional: true + sqlite3: + optional: true + drizzle-orm@1.0.0-beta.1-c0277c0: resolution: {integrity: sha512-4XnmY3CdFHUzJpbRwc6mElkpDzyZs8Ko98i+cRuuPlakFgZqItr+inoK0bFTH50Eh66E/UXbxfAW6U0JK/1wyw==} peerDependencies: @@ -4807,6 +4705,9 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + ecdsa-sig-formatter@1.0.11: + resolution: {integrity: sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==} + ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} @@ -4817,6 +4718,9 @@ packages: resolution: {integrity: sha512-KxdRyyFcS85pH3dnU8Y5yFUm2YJdaHwcBZWrfG8o89ZY9a13/f9itbN+YG3ELbBo9Pg5zvIozstmuV8bX13q6g==} engines: {node: '>=14.16'} + emoji-regex@10.6.0: + resolution: {integrity: sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==} + emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} @@ -4866,16 +4770,9 @@ packages: err-code@2.0.3: resolution: {integrity: sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==} - error-ex@1.3.4: - resolution: {integrity: sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==} - error-stack-parser@2.1.4: resolution: {integrity: sha512-Sk5V6wVazPhq5MhpO+AUxJn5x7XSXGl1R93Vn7i+zS15KDVxQijejNCrz8340/2bgLBjR9GtEG8ZVKONDjcqGQ==} - es-abstract@1.24.0: - resolution: {integrity: sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==} - engines: {node: '>= 0.4'} - es-define-property@1.0.1: resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} engines: {node: '>= 0.4'} @@ -4891,32 +4788,6 @@ packages: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} engines: {node: '>= 0.4'} - es-set-tostringtag@2.1.0: - resolution: {integrity: sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==} - engines: {node: '>= 0.4'} - - es-shim-unscopables@1.1.0: - resolution: {integrity: sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==} - engines: {node: '>= 0.4'} - - es-to-primitive@1.3.0: - resolution: {integrity: sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==} - engines: {node: '>= 0.4'} - - es5-ext@0.10.64: - resolution: {integrity: sha512-p2snDhiLaXe6dahss1LddxqEm+SkuDvV8dnIQG0MWjyHpcMNfXKPE+/Cc0y+PhxJX3A4xGNeFCj5oc0BUh6deg==} - engines: {node: '>=0.10'} - - es6-iterator@2.0.3: - resolution: {integrity: sha512-zw4SRzoUkd+cl+ZoE15A9o1oQd920Bb0iOJMQkQhl3jNc03YqVjAhG7scf9C5KWRU/R13Orf588uCC6525o02g==} - - es6-symbol@3.1.4: - resolution: {integrity: sha512-U9bFFjX8tFiATgtkJ1zg25+KviIXpgRvRHS8sau3GfhVzThRQrOeksPeT0BWW2MNZs1OEWJ1DPXOQMn0KKRkvg==} - engines: {node: '>=0.12'} - - es6-weak-map@2.0.3: - resolution: {integrity: sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==} - esbuild-android-64@0.14.54: resolution: {integrity: sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ==} engines: {node: '>=12'} @@ -5058,11 +4929,6 @@ packages: engines: {node: '>=12'} hasBin: true - esbuild@0.19.12: - resolution: {integrity: sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==} - engines: {node: '>=12'} - hasBin: true - esbuild@0.25.12: resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} engines: {node: '>=18'} @@ -5096,94 +4962,13 @@ packages: resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} engines: {node: '>=12'} - eslint-config-prettier@9.1.2: - resolution: {integrity: sha512-iI1f+D2ViGn+uvv5HuHVUamg8ll4tN+JRHGc6IJi4TP9Kl976C57fzPXgseXNs8v0iA8aSJpHsTWjDb9QJamGQ==} - hasBin: true - peerDependencies: - eslint: '>=7.0.0' + eslint-scope@7.2.2: + resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - eslint-import-resolver-node@0.3.9: - resolution: {integrity: sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==} - - eslint-module-utils@2.12.1: - resolution: {integrity: sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==} - engines: {node: '>=4'} - peerDependencies: - '@typescript-eslint/parser': '*' - eslint: '*' - eslint-import-resolver-node: '*' - eslint-import-resolver-typescript: '*' - eslint-import-resolver-webpack: '*' - peerDependenciesMeta: - '@typescript-eslint/parser': - optional: true - eslint: - optional: true - eslint-import-resolver-node: - optional: true - eslint-import-resolver-typescript: - optional: true - eslint-import-resolver-webpack: - optional: true - - eslint-plugin-import@2.32.0: - resolution: {integrity: sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==} - engines: {node: '>=4'} - peerDependencies: - '@typescript-eslint/parser': '*' - eslint: ^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9 - peerDependenciesMeta: - '@typescript-eslint/parser': - optional: true - - eslint-plugin-no-instanceof@1.0.1: - resolution: {integrity: sha512-zlqQ7EsfzbRO68uI+p8FIE7zYB4njs+nNbkNjSb5QmLi2et67zQLqSeaao5U9SpnlZTTJC87nS2oyHo2ACtajw==} - - eslint-plugin-prettier@5.5.4: - resolution: {integrity: sha512-swNtI95SToIz05YINMA6Ox5R057IMAmWZ26GqPxusAp1TZzj+IdY9tXNWWD3vkF/wEqydCONcwjTFpxybBqZsg==} - engines: {node: ^14.18.0 || >=16.0.0} - peerDependencies: - '@types/eslint': '>=8.0.0' - eslint: '>=8.0.0' - eslint-config-prettier: '>= 7.0.0 <10.0.0 || >=10.1.0' - prettier: '>=3.0.0' - peerDependenciesMeta: - '@types/eslint': - optional: true - eslint-config-prettier: - optional: true - - eslint-plugin-unicorn@48.0.1: - resolution: {integrity: sha512-FW+4r20myG/DqFcCSzoumaddKBicIPeFnTrifon2mWIzlfyvzwyqZjqVP7m4Cqr/ZYisS2aiLghkUWaPg6vtCw==} - engines: {node: '>=16'} - peerDependencies: - eslint: '>=8.44.0' - - eslint-plugin-unused-imports@3.2.0: - resolution: {integrity: sha512-6uXyn6xdINEpxE1MtDjxQsyXB37lfyO2yKGVVgtD7WEWQGORSOZjgrD6hBhvGv4/SO+TOlS+UnC6JppRqbuwGQ==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - peerDependencies: - '@typescript-eslint/eslint-plugin': 6 - 7 - eslint: '8' - peerDependenciesMeta: - '@typescript-eslint/eslint-plugin': - optional: true - - eslint-rule-composer@0.3.0: - resolution: {integrity: sha512-bt+Sh8CtDmn2OajxvNO+BX7Wn4CIWMpTRm3MaiKPCQcnnlm0CS2mhui6QaoeQugs+3Kj2ESKEEGJUdVafwhiCg==} - engines: {node: '>=4.0.0'} - - eslint-scope@5.1.1: - resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} - engines: {node: '>=8.0.0'} - - eslint-scope@7.2.2: - resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - - eslint-visitor-keys@3.4.3: - resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} - engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + eslint-visitor-keys@3.4.3: + resolution: {integrity: sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} eslint@8.57.1: resolution: {integrity: sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==} @@ -5191,10 +4976,6 @@ packages: deprecated: This version is no longer supported. Please see https://eslint.org/version-support for other options. hasBin: true - esniff@2.0.1: - resolution: {integrity: sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg==} - engines: {node: '>=0.10'} - espree@9.6.1: resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -5212,10 +4993,6 @@ packages: resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} engines: {node: '>=4.0'} - estraverse@4.3.0: - resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} - engines: {node: '>=4.0'} - estraverse@5.3.0: resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} engines: {node: '>=4.0'} @@ -5234,9 +5011,6 @@ packages: resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} engines: {node: '>= 0.6'} - event-emitter@0.3.5: - resolution: {integrity: sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA==} - event-stream@3.3.4: resolution: {integrity: sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g==} @@ -5257,6 +5031,10 @@ packages: resolution: {integrity: sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==} engines: {node: '>=0.4.x'} + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + engines: {node: '>=0.8.x'} + eventsource-parser@3.0.6: resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} engines: {node: '>=18.0.0'} @@ -5365,9 +5143,6 @@ packages: resolution: {integrity: sha512-DT9ck5YIRU+8GYzzU5kT3eHGA5iL+1Zd0EutOmTE9Dtk+Tvuzd23VBU+ec7HPNSTxXYO55gPV/hq4pSBJDjFpA==} engines: {node: '>= 18'} - ext@1.7.0: - resolution: {integrity: sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw==} - fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} @@ -5523,13 +5298,6 @@ packages: function-bind@1.1.2: resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} - function.prototype.name@1.1.8: - resolution: {integrity: sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==} - engines: {node: '>= 0.4'} - - functions-have-names@1.2.3: - resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} - fx@39.2.0: resolution: {integrity: sha512-z4HgJGGBD8ZWI6sdHs2N5JT0gEyVvl8SLOdmedKOkom9LDeqMHAUt0y2GBdI2tNgTalWhdO7Wd9KdeRZF6UwQA==} hasBin: true @@ -5559,8 +5327,9 @@ packages: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} - get-func-name@2.0.2: - resolution: {integrity: sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==} + get-east-asian-width@1.4.0: + resolution: {integrity: sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==} + engines: {node: '>=18'} get-intrinsic@1.3.0: resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} @@ -5586,10 +5355,6 @@ packages: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} engines: {node: '>=10'} - get-symbol-description@1.1.0: - resolution: {integrity: sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==} - engines: {node: '>= 0.4'} - get-tsconfig@4.13.0: resolution: {integrity: sha512-1VKTZJCwBrvbd+Wn3AOgQP/2Av+TfTCOlE4AcRJE72W1ksZXbAx8PPBR9RzgTeSPzlPMHrbANMH3LbltH73wxQ==} @@ -5634,10 +5399,6 @@ packages: resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==} engines: {node: '>=8'} - globalthis@1.0.4: - resolution: {integrity: sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==} - engines: {node: '>= 0.4'} - globby@11.1.0: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} engines: {node: '>=10'} @@ -5666,10 +5427,6 @@ packages: hanji@0.0.5: resolution: {integrity: sha512-Abxw1Lq+TnYiL4BueXqMau222fPSPMFtya8HdpWsz/xVAhifXou71mPh/kY2+08RgFcVccjG3uZHs6K5HAe3zw==} - has-bigints@1.1.0: - resolution: {integrity: sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==} - engines: {node: '>= 0.4'} - has-flag@3.0.0: resolution: {integrity: sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==} engines: {node: '>=4'} @@ -5681,10 +5438,6 @@ packages: has-property-descriptors@1.0.2: resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==} - has-proto@1.2.0: - resolution: {integrity: sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==} - engines: {node: '>= 0.4'} - has-symbols@1.1.0: resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} engines: {node: '>= 0.4'} @@ -5732,9 +5485,6 @@ packages: resolution: {integrity: sha512-Pst8FuGqz3L7tFF+u9Pu70eI0xa5S3LPUmrNd5Jm8nTHze9FxLTK9Kaj5g/k4UcwuJSXTP65SyHOPLrffpcAJg==} engines: {node: '>=16.9.0'} - hosted-git-info@2.8.9: - resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} - hosted-git-info@7.0.2: resolution: {integrity: sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==} engines: {node: ^16.14.0 || >=18.0.0} @@ -5754,6 +5504,10 @@ packages: resolution: {integrity: sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==} engines: {node: '>= 6'} + http-proxy-agent@7.0.2: + resolution: {integrity: sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==} + engines: {node: '>= 14'} + https-proxy-agent@5.0.1: resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} engines: {node: '>= 6'} @@ -5769,6 +5523,11 @@ packages: humanize-ms@1.2.1: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} + husky@9.1.7: + resolution: {integrity: sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA==} + engines: {node: '>=18'} + hasBin: true + iconv-lite@0.6.3: resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} engines: {node: '>=0.10.0'} @@ -5835,10 +5594,6 @@ packages: ini@1.3.8: resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} - internal-slot@1.1.0: - resolution: {integrity: sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==} - engines: {node: '>= 0.4'} - invariant@2.2.4: resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} @@ -5858,33 +5613,10 @@ packages: resolution: {integrity: sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA==} engines: {node: '>= 0.4'} - is-array-buffer@3.0.5: - resolution: {integrity: sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==} - engines: {node: '>= 0.4'} - - is-arrayish@0.2.1: - resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} - - is-async-function@2.1.1: - resolution: {integrity: sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==} - engines: {node: '>= 0.4'} - - is-bigint@1.1.0: - resolution: {integrity: sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==} - engines: {node: '>= 0.4'} - is-binary-path@2.1.0: resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} engines: {node: '>=8'} - is-boolean-object@1.2.2: - resolution: {integrity: sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==} - engines: {node: '>= 0.4'} - - is-builtin-module@3.2.1: - resolution: {integrity: sha512-BSLE3HnV2syZ0FK0iMA/yUGplUeMmNz4AW5fnTunbCIqZi4vG3WjJT9FHMy5D69xmAYBHXQhJdALdpwVxV501A==} - engines: {node: '>=6'} - is-callable@1.2.7: resolution: {integrity: sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==} engines: {node: '>= 0.4'} @@ -5893,19 +5625,16 @@ packages: resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} engines: {node: '>= 0.4'} - is-data-view@1.0.2: - resolution: {integrity: sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==} - engines: {node: '>= 0.4'} - - is-date-object@1.1.0: - resolution: {integrity: sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==} - engines: {node: '>= 0.4'} - is-docker@2.2.1: resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} engines: {node: '>=8'} hasBin: true + is-docker@3.0.0: + resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + hasBin: true + is-error@2.2.2: resolution: {integrity: sha512-IOQqts/aHWbiisY5DuPJQ0gcbvaLFCa7fBa9xoLfxBZvQ+ZI/Zh9xoI7Gk+G64N0FdK4AbibytHht2tWgpJWLg==} @@ -5913,10 +5642,6 @@ packages: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} - is-finalizationregistry@1.1.1: - resolution: {integrity: sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==} - engines: {node: '>= 0.4'} - is-fullwidth-code-point@3.0.0: resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} engines: {node: '>=8'} @@ -5925,6 +5650,10 @@ packages: resolution: {integrity: sha512-O4L094N2/dZ7xqVdrXhh9r1KODPJpFms8B5sGdJLPy664AgvXsreZUyCQQNItZRDlYug4xStLjNp/sz3HvBowQ==} engines: {node: '>=12'} + is-fullwidth-code-point@5.1.0: + resolution: {integrity: sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==} + engines: {node: '>=18'} + is-generator-function@1.1.2: resolution: {integrity: sha512-upqt1SkGkODW9tsGNG5mtXTXtECizwtS2kA161M+gJPc1xdb/Ax629af6YrTwcOeQHbewrPNlE5Dx7kzvXTizA==} engines: {node: '>= 0.4'} @@ -5933,21 +5662,14 @@ packages: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} + is-inside-container@1.0.0: + resolution: {integrity: sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==} + engines: {node: '>=14.16'} + hasBin: true + is-lambda@1.0.1: resolution: {integrity: sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==} - is-map@2.0.3: - resolution: {integrity: sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==} - engines: {node: '>= 0.4'} - - is-negative-zero@2.0.3: - resolution: {integrity: sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==} - engines: {node: '>= 0.4'} - - is-number-object@1.1.1: - resolution: {integrity: sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==} - engines: {node: '>= 0.4'} - is-number@7.0.0: resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} engines: {node: '>=0.12.0'} @@ -5960,9 +5682,6 @@ packages: resolution: {integrity: sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==} engines: {node: '>=0.10.0'} - is-promise@2.2.2: - resolution: {integrity: sha512-+lP4/6lKUBfQjZ2pdxThZvLUAafmZb8OAxFb8XXtiQmS35INgr85hdOGoEs124ez1FCnZJt6jau/T+alh58QFQ==} - is-promise@4.0.0: resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} @@ -5973,26 +5692,10 @@ packages: resolution: {integrity: sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==} engines: {node: '>= 0.4'} - is-set@2.0.3: - resolution: {integrity: sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==} - engines: {node: '>= 0.4'} - - is-shared-array-buffer@1.0.4: - resolution: {integrity: sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==} - engines: {node: '>= 0.4'} - is-stream@3.0.0: resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - is-string@1.1.1: - resolution: {integrity: sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==} - engines: {node: '>= 0.4'} - - is-symbol@1.1.1: - resolution: {integrity: sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==} - engines: {node: '>= 0.4'} - is-typed-array@1.1.15: resolution: {integrity: sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==} engines: {node: '>= 0.4'} @@ -6001,32 +5704,17 @@ packages: resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} engines: {node: '>=12'} - is-weakmap@2.0.2: - resolution: {integrity: sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==} - engines: {node: '>= 0.4'} - - is-weakref@1.1.1: - resolution: {integrity: sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==} - engines: {node: '>= 0.4'} - - is-weakset@2.0.4: - resolution: {integrity: sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==} - engines: {node: '>= 0.4'} - - is-what@5.5.0: - resolution: {integrity: sha512-oG7cgbmg5kLYae2N5IVd3jm2s+vldjxJzK1pcu9LfpGuQ93MQSzo0okvRna+7y5ifrD+20FE8FvjusyGaz14fw==} - engines: {node: '>=18'} - is-wsl@2.2.0: resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} engines: {node: '>=8'} + is-wsl@3.1.0: + resolution: {integrity: sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==} + engines: {node: '>=16'} + isarray@1.0.0: resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} - isarray@2.0.5: - resolution: {integrity: sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==} - isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} @@ -6049,9 +5737,6 @@ packages: resolution: {integrity: sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==} engines: {node: 20 || >=22} - javascript-natural-sort@0.7.1: - resolution: {integrity: sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==} - jest-environment-node@29.7.0: resolution: {integrity: sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -6108,6 +5793,9 @@ packages: js-base64@3.7.8: resolution: {integrity: sha512-hNngCeKxIUQiEUN3GPJOkz4wF/YvdUdbNL9hsBcMQTkKzboD7T/q3OYOuuPZLUE6dBxSGpwhk5mwuDud7JVAow==} + js-md4@0.3.2: + resolution: {integrity: sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA==} + js-string-escape@1.0.1: resolution: {integrity: sha512-Smw4xcfIQ5LVjAOuJCvN/zIodzA/BBSsluuoSykP+lUvScIi4U6RJLfwHet5cxFnCswUjISV8oAXaqaJDY3chg==} engines: {node: '>= 0.8'} @@ -6115,9 +5803,6 @@ packages: js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} - js-tokens@9.0.1: - resolution: {integrity: sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==} - js-yaml@3.14.2: resolution: {integrity: sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==} hasBin: true @@ -6126,6 +5811,9 @@ packages: resolution: {integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} hasBin: true + jsbi@4.3.2: + resolution: {integrity: sha512-9fqMSQbhJykSeii05nxKl4m6Eqn2P6rOlYiS+C5Dr/HPIU/7yZxu5qzbs40tgaFORiw2Amd0mirjxatXYMkIew==} + jsc-safe-url@0.2.4: resolution: {integrity: sha512-0wM3YBWtYePOjfyXQH5MWQ8H7sdk5EXSwZvmSLKk2RboVQ2Bu239jycHDz5J/8Blf3K0Qnoy2b6xD+z10MFB+Q==} @@ -6133,10 +5821,6 @@ packages: resolution: {integrity: sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==} engines: {node: '>= 10.16.0'} - jsesc@0.5.0: - resolution: {integrity: sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==} - hasBin: true - jsesc@3.1.0: resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} engines: {node: '>=6'} @@ -6145,17 +5829,10 @@ packages: json-buffer@3.0.1: resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} - json-diff@0.9.0: - resolution: {integrity: sha512-cVnggDrVkAAA3OvFfHpFEhOnmcsUpleEKq4d4O8sQWWSH40MBrWstKigVB1kGrgLWzuom+7rRdaCsnBD6VyObQ==} - hasBin: true - json-diff@1.0.6: resolution: {integrity: sha512-tcFIPRdlc35YkYdGxcamJjllUhXWv4n2rK9oJ2RsAzV4FBkuV4ojKEDgcZ+kpKxDmJKv+PFK65+1tVVOnSeEqA==} hasBin: true - json-parse-even-better-errors@2.3.1: - resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} - json-rules-engine@7.3.1: resolution: {integrity: sha512-NyRTQZllvAt7AQ3g9P7/t4nIwlEB+EyZV7y8/WgXfZWSlpcDryt1UH9CsoU+Z+MDvj8umN9qqEcbE6qnk9JAHw==} engines: {node: '>=18.0.0'} @@ -6166,10 +5843,6 @@ packages: json-stable-stringify-without-jsonify@1.0.1: resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} - json5@1.0.2: - resolution: {integrity: sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==} - hasBin: true - json5@2.2.3: resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} engines: {node: '>=6'} @@ -6192,10 +5865,20 @@ packages: engines: {node: '>=10'} hasBin: true + jsonwebtoken@9.0.2: + resolution: {integrity: sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==} + engines: {node: '>=12', npm: '>=6'} + junk@4.0.1: resolution: {integrity: sha512-Qush0uP+G8ZScpGMZvHUiRfI0YBWuB3gVBYlI0v0vvOJt5FLicco+IkP0a50LqTTQhmts/m6tP5SWE+USyIvcQ==} engines: {node: '>=12.20'} + jwa@1.4.2: + resolution: {integrity: sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==} + + jws@3.2.2: + resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} + keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} @@ -6307,6 +5990,15 @@ packages: lines-and-columns@1.2.4: resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + lint-staged@16.2.7: + resolution: {integrity: sha512-lDIj4RnYmK7/kXMya+qJsmkRFkGolciXjrsZ6PC25GdTfWOAWetR0ZbsNXRAj1EHHImRSalc+whZFg56F5DVow==} + engines: {node: '>=20.17'} + hasBin: true + + listr2@9.0.5: + resolution: {integrity: sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==} + engines: {node: '>=20.0.0'} + load-json-file@7.0.1: resolution: {integrity: sha512-Gnxj3ev3mB5TkVBGad0JM6dmLiQL+o0t23JPBZ9sd+yvSLk05mFoqKBw5N8gbbkU4TNXyqCgIrl/VM17OgUIgQ==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -6333,9 +6025,30 @@ packages: lodash.debounce@4.0.8: resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} + lodash.includes@4.3.0: + resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} + + lodash.isboolean@3.0.3: + resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} + + lodash.isinteger@4.0.4: + resolution: {integrity: sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==} + + lodash.isnumber@3.0.3: + resolution: {integrity: sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==} + + lodash.isplainobject@4.0.6: + resolution: {integrity: sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==} + + lodash.isstring@4.0.1: + resolution: {integrity: sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==} + lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + lodash.once@4.1.1: + resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==} + lodash.throttle@4.1.1: resolution: {integrity: sha512-wIkUCfVKpVsWo3JSZlc+8MB5it+2AN5W8J7YVMST30UrvcQNZ1Okbj+rbVniijTWE6FGYy4XJq/rHkas8qJMLQ==} @@ -6346,6 +6059,10 @@ packages: resolution: {integrity: sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==} engines: {node: '>=4'} + log-update@6.1.0: + resolution: {integrity: sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==} + engines: {node: '>=18'} + long@5.3.2: resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} @@ -6353,9 +6070,6 @@ packages: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true - loupe@2.3.7: - resolution: {integrity: sha512-zSMINGVYkdpYSOBmLi0D1Uo7JU9nVdQKrHxC8eYlV+9YKK9WePqAlL7lSlorG/U2Fw1w0hTBmaa/jrQ3UbPHtA==} - loupe@3.2.1: resolution: {integrity: sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==} @@ -6377,9 +6091,6 @@ packages: resolution: {integrity: sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==} engines: {node: '>=12'} - lru-queue@0.1.0: - resolution: {integrity: sha512-BpdYkt9EvGl8OfWHDQPISVpcl5xZthb+XPsbELj5AQXxIC8IriDZIQYjBJPEm5rS420sjZ0TLEzRcq5KdBhYrQ==} - lru.min@1.1.3: resolution: {integrity: sha512-Lkk/vx6ak3rYkRR0Nhu4lFUT2VDnQSxBe8Hbl7f36358p6ow8Bnvr8lrLt98H8J1aGxfhbX4Fs5tYg2+FTwr5Q==} engines: {bun: '>=1.0.0', deno: '>=1.30.0', node: '>=8.0.0'} @@ -6410,20 +6121,14 @@ packages: map-stream@0.1.0: resolution: {integrity: sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g==} - marked-terminal@6.2.0: - resolution: {integrity: sha512-ubWhwcBFHnXsjYNsu+Wndpg0zhY4CahSpPlA70PlO0rR9r2sZpkyU+rkCsOWH+KMEkx847UpALON+HWgxowFtw==} - engines: {node: '>=16.0.0'} - peerDependencies: - marked: '>=1 <12' - - marked-terminal@7.3.0: - resolution: {integrity: sha512-t4rBvPsHc57uE/2nJOLmMbZCQ4tgAccAED3ngXQqW6g+TxA488JzJ+FK3lQkzBQOI1mRV/r/Kq+1ZlJ4D0owQw==} + marked-terminal@7.1.0: + resolution: {integrity: sha512-+pvwa14KZL74MVXjYdPR3nSInhGhNvPce/3mqLVZT2oUvt654sL1XImFuLZ1pkA866IYZ3ikDTOFUIC7XzpZZg==} engines: {node: '>=16.0.0'} peerDependencies: - marked: '>=1 <16' + marked: '>=1 <14' - marked@9.1.6: - resolution: {integrity: sha512-jcByLnIFkd5gSXZmjNvS1TlmRhCXZjIzHYlaGkPlLIekG55JDR2Z4va9tZwCiP+/RDERiNhMOFu01xd6O5ct1Q==} + marked@9.1.2: + resolution: {integrity: sha512-qoKMJqK0w6vkLk8+KnKZAH6neUZSNaQqVZ/h2yZ9S7CbLuFHyS2viB0jnqcWF9UKjwsAbMrQtnQhdmdvOVOw9w==} engines: {node: '>= 16'} hasBin: true @@ -6453,10 +6158,6 @@ packages: memoize-one@5.2.1: resolution: {integrity: sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==} - memoizee@0.4.17: - resolution: {integrity: sha512-DGqD7Hjpi/1or4F/aYAspXKNm5Yili0QDAFAY4QYvpqpgiY6+1jOfqpmByzjxbWd/T9mChbCArXAbDAsTm5oXA==} - engines: {node: '>=0.12'} - meow@12.1.1: resolution: {integrity: sha512-BhXM0Au22RwUneMPwSCnyhTOizdWoIEPU9sp0Aqa1PnDMR5Wv2FGXYDjuzJEIX+Eo2Rb8xuYe5jrnm5QowQFkw==} engines: {node: '>=16.10'} @@ -6621,14 +6322,14 @@ packages: resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} engines: {node: '>=12'} + mimic-function@5.0.1: + resolution: {integrity: sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==} + engines: {node: '>=18'} + mimic-response@3.1.0: resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} engines: {node: '>=10'} - min-indent@1.0.1: - resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} - engines: {node: '>=4'} - minimatch@10.1.1: resolution: {integrity: sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==} engines: {node: 20 || >=22} @@ -6713,16 +6414,22 @@ packages: resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} engines: {node: '>=4'} - mrmime@2.0.1: - resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==} - engines: {node: '>=10'} - ms@2.0.0: resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + mssql@11.0.1: + resolution: {integrity: sha512-KlGNsugoT90enKlR8/G36H0kTxPthDhmtNUCwEHvgRza5Cjpjoj+P2X6eMpFUDN7pFrJZsKadL4x990G8RBE1w==} + engines: {node: '>=18'} + hasBin: true + + mssql@12.1.1: + resolution: {integrity: sha512-nUTXi0unU6p72YKe6KDR9vW2mSQWsmy1KZqV0JkaT2v3RSkxlwx4Y4srjYmH+DZNbyA53Ijp6o2OaLnLc4F2Qg==} + engines: {node: '>=18'} + hasBin: true + mysql2@3.14.1: resolution: {integrity: sha512-7ytuPQJjQB8TNAYX/H2yhL+iQOnIBjAMam361R7UAL0lOVXWjtdrmoL9HYKqKoLp/8UUTRcvo1QPvK9KL7wA8w==} engines: {node: '>= 8.0'} @@ -6737,6 +6444,10 @@ packages: nan@2.23.1: resolution: {integrity: sha512-r7bBUGKzlqk8oPBDYxt6Z0aEdF1G1rwlMcLk8LCOMbOzf0mG+JUfUzG4fIMWwHWP0iyaLWEQZJmtB7nOHEm/qw==} + nano-spawn@2.0.0: + resolution: {integrity: sha512-tacvGzUY5o2D8CBh2rrwxyNojUsZNU2zjNTzKQrkgGJQTbGAfArVWXSKMBokBeeg6C7OLRGUEyoFlYbfeWQIqw==} + engines: {node: '>=20.17'} + nanoid@3.3.11: resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -6745,6 +6456,9 @@ packages: napi-build-utils@2.0.0: resolution: {integrity: sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==} + native-duplexpair@1.0.0: + resolution: {integrity: sha512-E7QQoM+3jvNtlmyfqRZ0/U75VFgCls+fSkbml2MpgWkWyz3ox8Y58gNhfuziuQYGNNQAbFZJQck55LHCnCK6CA==} + natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} @@ -6766,9 +6480,6 @@ packages: nested-error-stacks@2.1.1: resolution: {integrity: sha512-9iN1ka/9zmX1ZvLV9ewJYEk9h7RyRRtqdK0woXcqohu8EWIerfPUjYJPg0ULy0UqP7cslmdGc8xKDJcojlKiaw==} - next-tick@1.1.0: - resolution: {integrity: sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==} - node-abi@3.85.0: resolution: {integrity: sha512-zsFhmbkAzwhTft6nd3VxcG0cvJsT70rL+BIGHWVq5fi6MwGrHwzqKaxXE+Hl2GmnGItnDKPPkO5/LQqjVkIdFg==} engines: {node: '>=10'} @@ -6820,9 +6531,6 @@ packages: engines: {node: '>=6'} hasBin: true - normalize-package-data@2.5.0: - resolution: {integrity: sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==} - normalize-path@3.0.0: resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} engines: {node: '>=0.10.0'} @@ -6866,26 +6574,6 @@ packages: resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} engines: {node: '>= 0.4'} - object-keys@1.1.1: - resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} - engines: {node: '>= 0.4'} - - object.assign@4.1.7: - resolution: {integrity: sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==} - engines: {node: '>= 0.4'} - - object.fromentries@2.0.8: - resolution: {integrity: sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==} - engines: {node: '>= 0.4'} - - object.groupby@1.0.3: - resolution: {integrity: sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==} - engines: {node: '>= 0.4'} - - object.values@1.2.1: - resolution: {integrity: sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==} - engines: {node: '>= 0.4'} - obuf@1.1.2: resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} @@ -6920,6 +6608,14 @@ packages: resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} engines: {node: '>=12'} + onetime@7.0.0: + resolution: {integrity: sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==} + engines: {node: '>=18'} + + open@10.2.0: + resolution: {integrity: sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==} + engines: {node: '>=18'} + open@7.4.2: resolution: {integrity: sha512-MVHddDVweXZF3awtlAS+6pgKLlm/JgxZ90+/NBurBoQctVOOB/zDdVjcyPzQ+0laDGbsWgrRkflI65sQeOgT9Q==} engines: {node: '>=8'} @@ -6943,9 +6639,15 @@ packages: resolution: {integrity: sha512-eNwHudNbO1folBP3JsZ19v9azXWtQZjICdr3Q0TDPIaeBQ3mXLrh54wM+er0+hSp+dWKf+Z8KM58CYzEyIYxYg==} engines: {node: '>=6'} - own-keys@1.0.1: - resolution: {integrity: sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==} - engines: {node: '>= 0.4'} + oxlint@1.30.0: + resolution: {integrity: sha512-6Mcpj7Gn26QNRUpue9kRZKQg623mH10kLPl597sNCOfXeUZHTglrc2O54eskHMRA+tR7c0u73nW4GPwSFePLkA==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + peerDependencies: + oxlint-tsgolint: '>=0.8.1' + peerDependenciesMeta: + oxlint-tsgolint: + optional: true p-defer@1.0.0: resolution: {integrity: sha512-wB3wfAxZpk2AzOfUMJNL+d36xothRSyj8EXOa4f6GMqYDN9BJaaSISbsk+wS9abmnebVw95C2Kb5t85UmpCxuw==} @@ -7026,10 +6728,6 @@ packages: resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} engines: {node: '>=6'} - parse-json@5.2.0: - resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} - engines: {node: '>=8'} - parse-ms@3.0.0: resolution: {integrity: sha512-Tpb8Z7r7XbbtBTrM9UhpkzzaMrqA2VXMT3YChzYltwV3P3pM6t8wl7TvpMnSTosz1aQAdVib7kdoys7vYOPerw==} engines: {node: '>=12'} @@ -7099,9 +6797,6 @@ packages: resolution: {integrity: sha512-Vj7sf++t5pBD637NSfkxpHSMfWaeig5+DKWLhcqIYx6mWQz5hdJTGDVMQiJcw1ZYkhs7AazKDGpRVji1LJCZUQ==} engines: {node: '>=18'} - pathe@1.1.2: - resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==} - pathe@2.0.3: resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} @@ -7169,6 +6864,11 @@ packages: resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} engines: {node: '>=12'} + pidtree@0.6.0: + resolution: {integrity: sha512-eG2dWTVw5bzqGRztnHExczNxt5VGsE6OwTeCG3fdUf9KBsZzO3R5OIIIzWR+iZA0NtZ+RDVdaoE2dK1cn6jH4g==} + engines: {node: '>=0.10'} + hasBin: true + pirates@4.0.7: resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} engines: {node: '>= 6'} @@ -7281,10 +6981,6 @@ packages: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} - prettier-linter-helpers@1.0.0: - resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} - engines: {node: '>=6.0.0'} - prettier@3.5.3: resolution: {integrity: sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==} engines: {node: '>=14'} @@ -7316,6 +7012,10 @@ packages: resolution: {integrity: sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==} engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + process@0.11.10: + resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} + engines: {node: '>= 0.6.0'} + progress@2.0.3: resolution: {integrity: sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==} engines: {node: '>=0.4.0'} @@ -7443,18 +7143,14 @@ packages: resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} engines: {node: '>=0.10.0'} - read-pkg-up@7.0.1: - resolution: {integrity: sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==} - engines: {node: '>=8'} - - read-pkg@5.2.0: - resolution: {integrity: sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==} - engines: {node: '>=8'} - readable-stream@3.6.2: resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} engines: {node: '>= 6'} + readable-stream@4.7.0: + resolution: {integrity: sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -7467,13 +7163,6 @@ packages: resolution: {integrity: sha512-YTUo+Flmw4ZXiWfQKGcwwc11KnoRAYgzAE2E7mXKCjSviTKShtxBsN6YUUBB2gtaBzKzeKunxhUwNHQuRryhWA==} engines: {node: '>= 4'} - redeyed@2.1.1: - resolution: {integrity: sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ==} - - reflect.getprototypeof@1.0.10: - resolution: {integrity: sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==} - engines: {node: '>= 0.4'} - regenerate-unicode-properties@10.2.2: resolution: {integrity: sha512-m03P+zhBeQd1RGnYxrGyDAPpWX/epKirLrp8e3qevZdVkKtnCrjjWczIbYc8+xd6vcTStVlqfycTx1KR4LOr0g==} engines: {node: '>=4'} @@ -7484,14 +7173,6 @@ packages: regenerator-runtime@0.13.11: resolution: {integrity: sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==} - regexp-tree@0.1.27: - resolution: {integrity: sha512-iETxpjK6YoRWJG5o6hXLwvjYAoW+FEZn9os0PD/b6AP6xQwsa/Y7lCVgIixBbUPMfhu+i2LtdeAqVTgGlQarfA==} - hasBin: true - - regexp.prototype.flags@1.5.4: - resolution: {integrity: sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==} - engines: {node: '>= 0.4'} - regexpu-core@6.4.0: resolution: {integrity: sha512-0ghuzq67LI9bLXpOX/ISfve/Mq33a4aFRzoQYhnnok1JOFpmE/A2TBGkNVenOGEeSBCjIiWcc6MVOG5HEQv0sA==} engines: {node: '>=4'} @@ -7499,10 +7180,6 @@ packages: regjsgen@0.8.0: resolution: {integrity: sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==} - regjsparser@0.10.0: - resolution: {integrity: sha512-qx+xQGZVsy55CH0a1hiVwHmqjLryfh7wQyF5HO07XJ9f7dQMY/gPQHhlyDkIzJKC+x2fUCpCcUODUUUFrm7SHA==} - hasBin: true - regjsparser@0.13.0: resolution: {integrity: sha512-NZQZdC5wOE/H3UT28fVGL+ikOZcEzfMGk/c3iN9UGxzWHMa1op7274oyiUVrAG4B2EuFhus8SvkaYnhvW92p9Q==} hasBin: true @@ -7563,6 +7240,10 @@ packages: resolution: {integrity: sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==} engines: {node: '>=4'} + restore-cursor@5.1.0: + resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==} + engines: {node: '>=18'} + retry@0.12.0: resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} engines: {node: '>= 4'} @@ -7575,6 +7256,9 @@ packages: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + rfdc@1.4.1: + resolution: {integrity: sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==} + rimraf@3.0.2: resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} deprecated: Rimraf versions prior to v4 are no longer supported @@ -7598,6 +7282,10 @@ packages: resolution: {integrity: sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==} engines: {node: '>= 18'} + run-applescript@7.1.0: + resolution: {integrity: sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==} + engines: {node: '>=18'} + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} @@ -7608,17 +7296,9 @@ packages: resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==} engines: {node: '>=6'} - safe-array-concat@1.1.3: - resolution: {integrity: sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==} - engines: {node: '>=0.4'} - safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - safe-push-apply@1.0.0: - resolution: {integrity: sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==} - engines: {node: '>= 0.4'} - safe-regex-test@1.1.0: resolution: {integrity: sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==} engines: {node: '>= 0.4'} @@ -7635,10 +7315,6 @@ packages: scheduler@0.26.0: resolution: {integrity: sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==} - semver@5.7.2: - resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} - hasBin: true - semver@6.3.1: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true @@ -7692,14 +7368,6 @@ packages: resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==} engines: {node: '>= 0.4'} - set-function-name@2.0.2: - resolution: {integrity: sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==} - engines: {node: '>= 0.4'} - - set-proto@1.0.0: - resolution: {integrity: sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==} - engines: {node: '>= 0.4'} - setprototypeof@1.2.0: resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} @@ -7750,10 +7418,6 @@ packages: simple-plist@1.3.1: resolution: {integrity: sha512-iMSw5i0XseMnrhtIzRb7XpQEXepa9xhWxGUojHBL43SIpQuDQkh3Wpy67ZbDzZVr6EKxvwVChnVpdl8hEVLDiw==} - sirv@2.0.4: - resolution: {integrity: sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==} - engines: {node: '>= 10'} - sisteransi@1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} @@ -7777,6 +7441,10 @@ packages: resolution: {integrity: sha512-FC+lgizVPfie0kkhqUScwRu1O/lF6NOgJmlCgK+/LYxDCTk8sGelYaHDhFcDN+Sn3Cv+3VSa4Byeo+IMCzpMgQ==} engines: {node: '>=12'} + slice-ansi@7.1.2: + resolution: {integrity: sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==} + engines: {node: '>=18'} + slugify@1.6.6: resolution: {integrity: sha512-h+z7HKHYXj6wJU+AnS/+IH8Uh9fdcX1Lrhg1/VMdf9PwoBQXFcXiAdsy2tSK0P6gKwJLXp02r90ahUCqHk9rrw==} engines: {node: '>=8.0.0'} @@ -7826,18 +7494,6 @@ packages: spawn-command@0.0.2: resolution: {integrity: sha512-zC8zGoGkmc8J9ndvml8Xksr1Amk9qBujgbF0JAIWO7kXr43w0h/0GJNM/Vustixu+YE8N/MTrQ7N31FvHUACxQ==} - spdx-correct@3.2.0: - resolution: {integrity: sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==} - - spdx-exceptions@2.5.0: - resolution: {integrity: sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==} - - spdx-expression-parse@3.0.1: - resolution: {integrity: sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==} - - spdx-license-ids@3.0.22: - resolution: {integrity: sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==} - split-ca@1.0.1: resolution: {integrity: sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==} @@ -7854,6 +7510,9 @@ packages: sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + sprintf-js@1.1.3: + resolution: {integrity: sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==} + sql.js@1.13.0: resolution: {integrity: sha512-RJbVP1HRDlUUXahJ7VMTcu9Rm1Nzw+EBpoPr94vnbD4LwR715F3CcxE2G2k45PewcaZ57pjetYa+LoSJLAASgA==} @@ -7945,10 +7604,6 @@ packages: std-env@3.10.0: resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} - stop-iteration-iterator@1.1.0: - resolution: {integrity: sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==} - engines: {node: '>= 0.4'} - stream-buffers@2.2.0: resolution: {integrity: sha512-uyQK/mx5QjHun80FLJTfaWE7JtwfRMKBLkMne6udYOmvH0CawotVa7TfgYHzAnpphn4+TweIx1QKMnRIbipmUg==} engines: {node: '>= 0.10.0'} @@ -7960,6 +7615,10 @@ packages: resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==} engines: {node: '>=10.0.0'} + string-argv@0.3.2: + resolution: {integrity: sha512-aqD2Q0144Z+/RqG52NeHEkZauTAUWJO8c6yTftGJKO3Tja5tUgIfmIl6kExvhtxSDP7fXB6DvzkfMpCd/F3G+Q==} + engines: {node: '>=0.6.19'} + string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -7968,17 +7627,13 @@ packages: resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} engines: {node: '>=12'} - string.prototype.trim@1.2.10: - resolution: {integrity: sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==} - engines: {node: '>= 0.4'} - - string.prototype.trimend@1.0.9: - resolution: {integrity: sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==} - engines: {node: '>= 0.4'} + string-width@7.2.0: + resolution: {integrity: sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==} + engines: {node: '>=18'} - string.prototype.trimstart@1.0.8: - resolution: {integrity: sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==} - engines: {node: '>= 0.4'} + string-width@8.1.0: + resolution: {integrity: sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==} + engines: {node: '>=20'} string_decoder@1.3.0: resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} @@ -7995,18 +7650,10 @@ packages: resolution: {integrity: sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==} engines: {node: '>=12'} - strip-bom@3.0.0: - resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} - engines: {node: '>=4'} - strip-final-newline@3.0.0: resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} engines: {node: '>=12'} - strip-indent@3.0.0: - resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} - engines: {node: '>=8'} - strip-json-comments@2.0.1: resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} engines: {node: '>=0.10.0'} @@ -8015,9 +7662,6 @@ packages: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} engines: {node: '>=8'} - strip-literal@3.1.0: - resolution: {integrity: sha512-8r3mkIM/2+PpjHoOtiAW8Rg3jJLHaV7xPwG+YRGrv6FP0wwk/toTpATxWYOW0BKdWwl82VT2tFYi5DlROa0Mxg==} - strnum@2.1.1: resolution: {integrity: sha512-7ZvoFTiCnGxBtDqJ//Cu6fWtZtc7Y3x+QOirG15wztbdngGSkht27o2pyGWrVy0b4WAy3jbKmnoK6g5VlVNUUw==} @@ -8034,10 +7678,6 @@ packages: engines: {node: '>=16 || 14 >=14.17'} hasBin: true - superjson@2.2.5: - resolution: {integrity: sha512-zWPTX96LVsA/eVYnqOM2+ofcdPqdS1dAF1LN4TS2/MWuUpfitd9ctTa87wt4xrYnZnkLtS69xpBdSxVBP5Rm6w==} - engines: {node: '>=16'} - supertap@3.0.1: resolution: {integrity: sha512-u1ZpIBCawJnO+0QePsEiOknOfCRq0yERxiAchT0i4li0WHNUJbf0evXXSXOcCAR4M8iMDoajXYmstm/qO81Isw==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -8066,10 +7706,6 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} - synckit@0.11.11: - resolution: {integrity: sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==} - engines: {node: ^14.18.0 || >=16.0.0} - tar-fs@2.1.4: resolution: {integrity: sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==} @@ -8085,6 +7721,18 @@ packages: resolution: {integrity: sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==} engines: {node: '>=18'} + tarn@3.0.2: + resolution: {integrity: sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==} + engines: {node: '>=8.0.0'} + + tedious@18.6.2: + resolution: {integrity: sha512-g7jC56o3MzLkE3lHkaFe2ZdOVFBahq5bsB60/M4NYUbocw/MCrS89IOEQUFr+ba6pb8ZHczZ/VqCyYeYq0xBAg==} + engines: {node: '>=18'} + + tedious@19.1.3: + resolution: {integrity: sha512-6O6efTeYtcnar3Cqf/ptqJs+U10fYYjp/SHRNm3VGuCTUDys+AUgIbxWbT2kzl4baXAzuy9byV3qCgOimrRfTA==} + engines: {node: '>=18.17'} + temp-dir@2.0.0: resolution: {integrity: sha512-aoBAniQmmwtcKp/7BzsH8Cxzv8OL736p7v1ihGb5e9DJ9kTwGWHrQrVB5+lfVDzfGrdRzXch+ig7LHaY1JTOrg==} engines: {node: '>=8'} @@ -8129,10 +7777,6 @@ packages: resolution: {integrity: sha512-TIsDdtKo6+XrPtiTm1ssmMngN1sAhyKnTO2kunQWqNPWIVvCm15Wmw4SWInwTVgJ5u/Tr04+8Ei9TNcw4x4ONA==} engines: {node: '>=4'} - timers-ext@0.1.8: - resolution: {integrity: sha512-wFH7+SEAcKfJpfLPkrgMPvvwnEtj8W4IurvEyrKsDleXnKLCDw71w8jltvfLa8Rm4qQxxT4jmDBYbJG/z7qoww==} - engines: {node: '>=0.12'} - tiny-invariant@1.3.3: resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} @@ -8149,16 +7793,8 @@ packages: resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} engines: {node: '>=12.0.0'} - tinypool@1.1.1: - resolution: {integrity: sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==} - engines: {node: ^18.0.0 || >=20.0.0} - - tinyrainbow@2.0.0: - resolution: {integrity: sha512-op4nsTR47R6p0vMUUoYl/a+ljLFVtlfaXkLQmqfLR1qHma1h/ysYk4hEXZ880bf2CYgTskvTa/e196Vd5dDQXw==} - engines: {node: '>=14.0.0'} - - tinyspy@4.0.4: - resolution: {integrity: sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==} + tinyrainbow@3.0.3: + resolution: {integrity: sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==} engines: {node: '>=14.0.0'} tmpl@1.0.5: @@ -8172,10 +7808,6 @@ packages: resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} engines: {node: '>=0.6'} - totalist@3.0.1: - resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} - engines: {node: '>=6'} - tr46@5.1.1: resolution: {integrity: sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==} engines: {node: '>=18'} @@ -8194,8 +7826,8 @@ packages: peerDependencies: typescript: '>=4.2.0' - ts-expose-internals-conditionally@1.0.0-empty.0: - resolution: {integrity: sha512-F8m9NOF6ZhdOClDVdlM8gj3fDCav4ZIFSs/EI3ksQbAAXVSCN/Jh5OCJDDZWBuBy9psFc6jULGDlPwjMYMhJDw==} + ts-expose-internals@5.6.3: + resolution: {integrity: sha512-reb+7TXGaC0odGjywnLocM4f2i8mBhSEjc3gnKqdM21wDy8FcGGVjKbtMNjn17hka34CrwvqNREs0R7CGIeH3w==} ts-interface-checker@0.1.13: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} @@ -8227,12 +7859,6 @@ packages: typescript: optional: true - tsconfig-paths@3.15.0: - resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==} - - tslib@1.14.1: - resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} - tslib@2.8.1: resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} @@ -8255,12 +7881,6 @@ packages: typescript: optional: true - tsutils@3.21.0: - resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} - engines: {node: '>= 6'} - peerDependencies: - typescript: '>=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta' - tsx@3.14.0: resolution: {integrity: sha512-xHtFaKtHxM9LOklMmJdI3BEnQq/D5F73Of2E1GDrITi9sgoVkvIsrQUTY1G8FlmGtA+awCI4EBlTRRYxkL2sRg==} hasBin: true @@ -8330,46 +7950,14 @@ packages: resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} engines: {node: '>=10'} - type-fest@0.6.0: - resolution: {integrity: sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==} - engines: {node: '>=8'} - type-fest@0.7.1: resolution: {integrity: sha512-Ne2YiiGN8bmrmJJEuTWTLJR32nh/JdL1+PSicowtNb0WFpn59GK8/lfD61bVtzguz7b3PBt74nxpv/Pw5po5Rg==} engines: {node: '>=8'} - type-fest@0.8.1: - resolution: {integrity: sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==} - engines: {node: '>=8'} - type-is@2.0.1: resolution: {integrity: sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==} engines: {node: '>= 0.6'} - type@2.7.3: - resolution: {integrity: sha512-8j+1QmAbPvLZow5Qpi6NCaN8FB60p/6x8/vfNqOk/hC+HuvFZhL4+WfekuhQLiqFZXOgQdrs3B+XxEmCc6b3FQ==} - - typed-array-buffer@1.0.3: - resolution: {integrity: sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==} - engines: {node: '>= 0.4'} - - typed-array-byte-length@1.0.3: - resolution: {integrity: sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==} - engines: {node: '>= 0.4'} - - typed-array-byte-offset@1.0.4: - resolution: {integrity: sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==} - engines: {node: '>= 0.4'} - - typed-array-length@1.0.7: - resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==} - engines: {node: '>= 0.4'} - - typescript@5.3.3: - resolution: {integrity: sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==} - engines: {node: '>=14.17'} - hasBin: true - typescript@5.6.1-rc: resolution: {integrity: sha512-E3b2+1zEFu84jB0YQi9BORDjz9+jGbwwy1Zi3G0LUNw7a7cePUrHMRNy8aPh53nXpkFGVHSxIZo5vKTfYaFiBQ==} engines: {node: '>=14.17'} @@ -8393,10 +7981,6 @@ packages: ufo@1.6.1: resolution: {integrity: sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==} - unbox-primitive@1.1.0: - resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} - engines: {node: '>= 0.4'} - uncrypto@0.1.3: resolution: {integrity: sha512-Ql87qFHB3s/De2ClA9e0gsnS6zXG27SkTiSJwjCc9MebbfapQfuPzumMIUMi38ezPZVNFcHI9sUIepeQfw8J8Q==} @@ -8500,6 +8084,10 @@ packages: resolution: {integrity: sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==} hasBin: true + uuid@8.3.2: + resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} + hasBin: true + uuid@9.0.1: resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} hasBin: true @@ -8520,9 +8108,6 @@ packages: typescript: optional: true - validate-npm-package-license@3.0.4: - resolution: {integrity: sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==} - validate-npm-package-name@4.0.0: resolution: {integrity: sha512-mzR0L8ZDktZjpX4OB46KT+56MAhl4EIazWP/+G/HPGuvfdaqg4YsCdtOm6U9+LOFyYDoh4dpnpxZRB9MQQns5Q==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} @@ -8535,11 +8120,6 @@ packages: resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} engines: {node: '>= 0.8'} - vite-node@3.2.4: - resolution: {integrity: sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==} - engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} - hasBin: true - vite-tsconfig-paths@4.3.2: resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} peerDependencies: @@ -8588,26 +8168,35 @@ packages: yaml: optional: true - vitest@3.2.4: - resolution: {integrity: sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==} - engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} + vitest@4.0.13: + resolution: {integrity: sha512-QSD4I0fN6uZQfftryIXuqvqgBxTvJ3ZNkF6RWECd82YGAYAfhcppBLFXzXJHQAAhVFyYEuFTrq6h0hQqjB7jIQ==} + engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} hasBin: true peerDependencies: '@edge-runtime/vm': '*' + '@opentelemetry/api': ^1.9.0 '@types/debug': ^4.1.12 - '@types/node': ^18.0.0 || ^20.0.0 || >=22.0.0 - '@vitest/browser': 3.2.4 - '@vitest/ui': 3.2.4 + '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 + '@vitest/browser-playwright': 4.0.13 + '@vitest/browser-preview': 4.0.13 + '@vitest/browser-webdriverio': 4.0.13 + '@vitest/ui': 4.0.13 happy-dom: '*' jsdom: '*' peerDependenciesMeta: '@edge-runtime/vm': optional: true + '@opentelemetry/api': + optional: true '@types/debug': optional: true '@types/node': optional: true - '@vitest/browser': + '@vitest/browser-playwright': + optional: true + '@vitest/browser-preview': + optional: true + '@vitest/browser-webdriverio': optional: true '@vitest/ui': optional: true @@ -8656,18 +8245,6 @@ packages: resolution: {integrity: sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==} engines: {node: '>=18'} - which-boxed-primitive@1.1.1: - resolution: {integrity: sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==} - engines: {node: '>= 0.4'} - - which-builtin-type@1.2.1: - resolution: {integrity: sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==} - engines: {node: '>= 0.4'} - - which-collection@1.0.2: - resolution: {integrity: sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==} - engines: {node: '>= 0.4'} - which-typed-array@1.1.19: resolution: {integrity: sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==} engines: {node: '>= 0.4'} @@ -8713,6 +8290,10 @@ packages: resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} engines: {node: '>=12'} + wrap-ansi@9.0.2: + resolution: {integrity: sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==} + engines: {node: '>=18'} + wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} @@ -8783,6 +8364,10 @@ packages: utf-8-validate: optional: true + wsl-utils@0.1.0: + resolution: {integrity: sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw==} + engines: {node: '>=18'} + xcode@3.0.1: resolution: {integrity: sha512-kCz5k7J7XbJtjABOvkc5lJmkiDh8VhjVCGNiqdKCscmVpdVUpEAyXv1xmCLkQJ5dsHqx3IPO4XW+NTDhU/fatA==} engines: {node: '>=10.0.0'} @@ -8893,34 +8478,15 @@ snapshots: '@andrewbranch/untar.js@1.0.3': {} - '@arethetypeswrong/cli@0.15.3': - dependencies: - '@arethetypeswrong/core': 0.15.1 - chalk: 4.1.2 - cli-table3: 0.6.5 - commander: 10.0.1 - marked: 9.1.6 - marked-terminal: 6.2.0(marked@9.1.6) - semver: 7.7.3 - '@arethetypeswrong/cli@0.16.4': dependencies: '@arethetypeswrong/core': 0.16.4 chalk: 4.1.2 cli-table3: 0.6.5 commander: 10.0.1 - marked: 9.1.6 - marked-terminal: 7.3.0(marked@9.1.6) - semver: 7.7.3 - - '@arethetypeswrong/core@0.15.1': - dependencies: - '@andrewbranch/untar.js': 1.0.3 - fflate: 0.8.2 + marked: 9.1.2 + marked-terminal: 7.1.0(marked@9.1.2) semver: 7.7.3 - ts-expose-internals-conditionally: 1.0.0-empty.0 - typescript: 5.3.3 - validate-npm-package-name: 5.0.1 '@arethetypeswrong/core@0.16.4': dependencies: @@ -8932,16 +8498,16 @@ snapshots: typescript: 5.6.1-rc validate-npm-package-name: 5.0.1 - '@ark/attest@0.45.11(typescript@6.0.0-dev.20251126)': + '@ark/attest@0.45.11(typescript@5.9.2)': dependencies: '@ark/fs': 0.45.10 '@ark/util': 0.45.10 '@prettier/sync': 0.5.5(prettier@3.5.3) '@typescript/analyze-trace': 0.10.1 - '@typescript/vfs': 1.6.1(typescript@6.0.0-dev.20251126) + '@typescript/vfs': 1.6.1(typescript@5.9.2) arktype: 2.1.19 prettier: 3.5.3 - typescript: 6.0.0-dev.20251126 + typescript: 5.9.2 transitivePeerDependencies: - supports-color @@ -8969,16 +8535,16 @@ snapshots: '@ark/util@0.55.0': {} - '@arktype/attest@0.46.0(typescript@6.0.0-dev.20251126)': + '@arktype/attest@0.46.0(typescript@5.9.2)': dependencies: '@ark/fs': 0.46.0 '@ark/util': 0.46.0 '@prettier/sync': 0.5.5(prettier@3.5.3) '@typescript/analyze-trace': 0.10.1 - '@typescript/vfs': 1.6.1(typescript@6.0.0-dev.20251126) + '@typescript/vfs': 1.6.1(typescript@5.9.2) arktype: 2.1.20 prettier: 3.5.3 - typescript: 6.0.0-dev.20251126 + typescript: 5.9.2 transitivePeerDependencies: - supports-color @@ -9428,6 +8994,151 @@ snapshots: '@aws/lambda-invoke-store@0.2.1': {} + '@azure-rest/core-client@2.5.1': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.10.1 + '@azure/core-rest-pipeline': 1.22.2 + '@azure/core-tracing': 1.3.1 + '@typespec/ts-http-runtime': 0.3.2 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/abort-controller@2.1.2': + dependencies: + tslib: 2.8.1 + + '@azure/core-auth@1.10.1': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-util': 1.13.1 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/core-client@1.10.1': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.10.1 + '@azure/core-rest-pipeline': 1.22.2 + '@azure/core-tracing': 1.3.1 + '@azure/core-util': 1.13.1 + '@azure/logger': 1.3.0 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/core-http-compat@2.3.1': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-client': 1.10.1 + '@azure/core-rest-pipeline': 1.22.2 + transitivePeerDependencies: + - supports-color + + '@azure/core-lro@2.7.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-util': 1.13.1 + '@azure/logger': 1.3.0 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/core-paging@1.6.2': + dependencies: + tslib: 2.8.1 + + '@azure/core-rest-pipeline@1.22.2': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.10.1 + '@azure/core-tracing': 1.3.1 + '@azure/core-util': 1.13.1 + '@azure/logger': 1.3.0 + '@typespec/ts-http-runtime': 0.3.2 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/core-tracing@1.3.1': + dependencies: + tslib: 2.8.1 + + '@azure/core-util@1.13.1': + dependencies: + '@azure/abort-controller': 2.1.2 + '@typespec/ts-http-runtime': 0.3.2 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/identity@4.13.0': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.10.1 + '@azure/core-client': 1.10.1 + '@azure/core-rest-pipeline': 1.22.2 + '@azure/core-tracing': 1.3.1 + '@azure/core-util': 1.13.1 + '@azure/logger': 1.3.0 + '@azure/msal-browser': 4.26.2 + '@azure/msal-node': 3.8.3 + open: 10.2.0 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/keyvault-common@2.0.0': + dependencies: + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.10.1 + '@azure/core-client': 1.10.1 + '@azure/core-rest-pipeline': 1.22.2 + '@azure/core-tracing': 1.3.1 + '@azure/core-util': 1.13.1 + '@azure/logger': 1.3.0 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/keyvault-keys@4.10.0': + dependencies: + '@azure-rest/core-client': 2.5.1 + '@azure/abort-controller': 2.1.2 + '@azure/core-auth': 1.10.1 + '@azure/core-http-compat': 2.3.1 + '@azure/core-lro': 2.7.2 + '@azure/core-paging': 1.6.2 + '@azure/core-rest-pipeline': 1.22.2 + '@azure/core-tracing': 1.3.1 + '@azure/core-util': 1.13.1 + '@azure/keyvault-common': 2.0.0 + '@azure/logger': 1.3.0 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/logger@1.3.0': + dependencies: + '@typespec/ts-http-runtime': 0.3.2 + tslib: 2.8.1 + transitivePeerDependencies: + - supports-color + + '@azure/msal-browser@4.26.2': + dependencies: + '@azure/msal-common': 15.13.2 + + '@azure/msal-common@15.13.2': {} + + '@azure/msal-node@3.8.3': + dependencies: + '@azure/msal-common': 15.13.2 + jsonwebtoken: 9.0.2 + uuid: 8.3.2 + '@babel/code-frame@7.10.4': dependencies: '@babel/highlight': 7.25.9 @@ -10035,6 +9746,8 @@ snapshots: '@balena/dockerignore@1.0.2': {} + '@braidai/lang@1.1.2': {} + '@cloudflare/workers-types@4.20251126.0': {} '@colors/colors@1.5.0': @@ -10060,30 +9773,36 @@ snapshots: dependencies: '@jridgewell/trace-mapping': 0.3.9 - '@dprint/darwin-arm64@0.46.3': + '@dprint/darwin-arm64@0.50.2': + optional: true + + '@dprint/darwin-x64@0.50.2': + optional: true + + '@dprint/linux-arm64-glibc@0.50.2': optional: true - '@dprint/darwin-x64@0.46.3': + '@dprint/linux-arm64-musl@0.50.2': optional: true - '@dprint/linux-arm64-glibc@0.46.3': + '@dprint/linux-riscv64-glibc@0.50.2': optional: true - '@dprint/linux-arm64-musl@0.46.3': + '@dprint/linux-x64-glibc@0.50.2': optional: true - '@dprint/linux-x64-glibc@0.46.3': + '@dprint/linux-x64-musl@0.50.2': optional: true - '@dprint/linux-x64-musl@0.46.3': + '@dprint/win32-arm64@0.50.2': optional: true - '@dprint/win32-x64@0.46.3': + '@dprint/win32-x64@0.50.2': optional: true '@drizzle-team/brocli@0.10.2': {} - '@drizzle-team/studio@0.0.5': {} + '@drizzle-team/brocli@0.11.0': {} '@electric-sql/pglite@0.2.12': {} @@ -10110,9 +9829,6 @@ snapshots: '@esbuild-kit/core-utils': 3.3.2 get-tsconfig: 4.13.0 - '@esbuild/aix-ppc64@0.19.12': - optional: true - '@esbuild/aix-ppc64@0.25.12': optional: true @@ -10122,9 +9838,6 @@ snapshots: '@esbuild/android-arm64@0.18.20': optional: true - '@esbuild/android-arm64@0.19.12': - optional: true - '@esbuild/android-arm64@0.25.12': optional: true @@ -10134,9 +9847,6 @@ snapshots: '@esbuild/android-arm@0.18.20': optional: true - '@esbuild/android-arm@0.19.12': - optional: true - '@esbuild/android-arm@0.25.12': optional: true @@ -10146,9 +9856,6 @@ snapshots: '@esbuild/android-x64@0.18.20': optional: true - '@esbuild/android-x64@0.19.12': - optional: true - '@esbuild/android-x64@0.25.12': optional: true @@ -10158,9 +9865,6 @@ snapshots: '@esbuild/darwin-arm64@0.18.20': optional: true - '@esbuild/darwin-arm64@0.19.12': - optional: true - '@esbuild/darwin-arm64@0.25.12': optional: true @@ -10170,9 +9874,6 @@ snapshots: '@esbuild/darwin-x64@0.18.20': optional: true - '@esbuild/darwin-x64@0.19.12': - optional: true - '@esbuild/darwin-x64@0.25.12': optional: true @@ -10182,9 +9883,6 @@ snapshots: '@esbuild/freebsd-arm64@0.18.20': optional: true - '@esbuild/freebsd-arm64@0.19.12': - optional: true - '@esbuild/freebsd-arm64@0.25.12': optional: true @@ -10194,9 +9892,6 @@ snapshots: '@esbuild/freebsd-x64@0.18.20': optional: true - '@esbuild/freebsd-x64@0.19.12': - optional: true - '@esbuild/freebsd-x64@0.25.12': optional: true @@ -10206,9 +9901,6 @@ snapshots: '@esbuild/linux-arm64@0.18.20': optional: true - '@esbuild/linux-arm64@0.19.12': - optional: true - '@esbuild/linux-arm64@0.25.12': optional: true @@ -10218,9 +9910,6 @@ snapshots: '@esbuild/linux-arm@0.18.20': optional: true - '@esbuild/linux-arm@0.19.12': - optional: true - '@esbuild/linux-arm@0.25.12': optional: true @@ -10230,9 +9919,6 @@ snapshots: '@esbuild/linux-ia32@0.18.20': optional: true - '@esbuild/linux-ia32@0.19.12': - optional: true - '@esbuild/linux-ia32@0.25.12': optional: true @@ -10245,9 +9931,6 @@ snapshots: '@esbuild/linux-loong64@0.18.20': optional: true - '@esbuild/linux-loong64@0.19.12': - optional: true - '@esbuild/linux-loong64@0.25.12': optional: true @@ -10257,9 +9940,6 @@ snapshots: '@esbuild/linux-mips64el@0.18.20': optional: true - '@esbuild/linux-mips64el@0.19.12': - optional: true - '@esbuild/linux-mips64el@0.25.12': optional: true @@ -10269,9 +9949,6 @@ snapshots: '@esbuild/linux-ppc64@0.18.20': optional: true - '@esbuild/linux-ppc64@0.19.12': - optional: true - '@esbuild/linux-ppc64@0.25.12': optional: true @@ -10281,9 +9958,6 @@ snapshots: '@esbuild/linux-riscv64@0.18.20': optional: true - '@esbuild/linux-riscv64@0.19.12': - optional: true - '@esbuild/linux-riscv64@0.25.12': optional: true @@ -10293,9 +9967,6 @@ snapshots: '@esbuild/linux-s390x@0.18.20': optional: true - '@esbuild/linux-s390x@0.19.12': - optional: true - '@esbuild/linux-s390x@0.25.12': optional: true @@ -10305,9 +9976,6 @@ snapshots: '@esbuild/linux-x64@0.18.20': optional: true - '@esbuild/linux-x64@0.19.12': - optional: true - '@esbuild/linux-x64@0.25.12': optional: true @@ -10323,9 +9991,6 @@ snapshots: '@esbuild/netbsd-x64@0.18.20': optional: true - '@esbuild/netbsd-x64@0.19.12': - optional: true - '@esbuild/netbsd-x64@0.25.12': optional: true @@ -10341,9 +10006,6 @@ snapshots: '@esbuild/openbsd-x64@0.18.20': optional: true - '@esbuild/openbsd-x64@0.19.12': - optional: true - '@esbuild/openbsd-x64@0.25.12': optional: true @@ -10359,9 +10021,6 @@ snapshots: '@esbuild/sunos-x64@0.18.20': optional: true - '@esbuild/sunos-x64@0.19.12': - optional: true - '@esbuild/sunos-x64@0.25.12': optional: true @@ -10371,9 +10030,6 @@ snapshots: '@esbuild/win32-arm64@0.18.20': optional: true - '@esbuild/win32-arm64@0.19.12': - optional: true - '@esbuild/win32-arm64@0.25.12': optional: true @@ -10383,9 +10039,6 @@ snapshots: '@esbuild/win32-ia32@0.18.20': optional: true - '@esbuild/win32-ia32@0.19.12': - optional: true - '@esbuild/win32-ia32@0.25.12': optional: true @@ -10395,9 +10048,6 @@ snapshots: '@esbuild/win32-x64@0.18.20': optional: true - '@esbuild/win32-x64@0.19.12': - optional: true - '@esbuild/win32-x64@0.25.12': optional: true @@ -10901,10 +10551,6 @@ snapshots: protobufjs: 7.5.4 yargs: 17.7.2 - '@hono/bun-compress@0.1.0(hono@4.10.7)': - dependencies: - hono: 4.10.7 - '@hono/node-server@1.19.6(hono@4.10.7)': dependencies: hono: 4.10.7 @@ -10967,14 +10613,14 @@ snapshots: dependencies: '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.19.25 + '@types/node': 24.10.1 jest-mock: 29.7.0 '@jest/fake-timers@29.7.0': dependencies: '@jest/types': 29.6.3 '@sinonjs/fake-timers': 10.3.0 - '@types/node': 20.19.25 + '@types/node': 24.10.1 jest-message-util: 29.7.0 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -11008,7 +10654,7 @@ snapshots: '@jest/schemas': 29.6.3 '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 20.19.25 + '@types/node': 24.10.1 '@types/yargs': 17.0.35 chalk: 4.1.2 @@ -11041,8 +10687,14 @@ snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@js-joda/core@5.6.5': {} + '@js-sdsl/ordered-map@4.4.2': {} + '@js-temporal/polyfill@0.5.1': + dependencies: + jsbi: 4.3.2 + '@jsep-plugin/assignment@1.3.0(jsep@1.4.0)': dependencies: jsep: 1.4.0 @@ -11114,6 +10766,10 @@ snapshots: '@libsql/win32-x64-msvc@0.4.7': optional: true + '@loaderkit/resolve@1.0.4': + dependencies: + '@braidai/lang': 1.1.2 + '@miniflare/core@2.14.4': dependencies: '@iarna/toml': 2.2.5 @@ -11177,9 +10833,10 @@ snapshots: dependencies: '@types/pg': 8.6.6 - '@neondatabase/serverless@0.9.5': + '@neondatabase/serverless@1.0.2': dependencies: - '@types/pg': 8.11.6 + '@types/node': 22.19.1 + '@types/pg': 8.15.6 '@noble/hashes@1.8.0': {} @@ -11224,6 +10881,30 @@ snapshots: dependencies: esbuild: 0.14.54 + '@oxlint/darwin-arm64@1.30.0': + optional: true + + '@oxlint/darwin-x64@1.30.0': + optional: true + + '@oxlint/linux-arm64-gnu@1.30.0': + optional: true + + '@oxlint/linux-arm64-musl@1.30.0': + optional: true + + '@oxlint/linux-x64-gnu@1.30.0': + optional: true + + '@oxlint/linux-x64-musl@1.30.0': + optional: true + + '@oxlint/win32-arm64@1.30.0': + optional: true + + '@oxlint/win32-x64@1.30.0': + optional: true + '@paralleldrive/cuid2@2.3.1': dependencies: '@noble/hashes': 1.8.0 @@ -11233,12 +10914,8 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true - '@pkgr/core@0.2.9': {} - '@planetscale/database@1.19.0': {} - '@polka/url@1.0.0-next.29': {} - '@prettier/sync@0.5.5(prettier@3.5.3)': dependencies: make-synchronized: 0.4.2 @@ -11470,11 +11147,11 @@ snapshots: optionalDependencies: rollup: 3.29.5 - '@rollup/plugin-typescript@11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@6.0.0-dev.20251126)': + '@rollup/plugin-typescript@11.1.6(rollup@3.29.5)(tslib@2.8.1)(typescript@5.9.2)': dependencies: '@rollup/pluginutils': 5.3.0(rollup@3.29.5) resolve: 1.22.11 - typescript: 6.0.0-dev.20251126 + typescript: 5.9.2 optionalDependencies: rollup: 3.29.5 tslib: 2.8.1 @@ -11553,8 +11230,6 @@ snapshots: '@rollup/rollup-win32-x64-msvc@4.53.3': optional: true - '@rtsao/scc@1.1.0': {} - '@sinclair/typebox@0.27.8': {} '@sinclair/typebox@0.34.41': {} @@ -11879,23 +11554,17 @@ snapshots: - supports-color - utf-8-validate + '@standard-schema/spec@1.0.0': {} + + '@tediousjs/connection-string@0.5.0': {} + + '@tediousjs/connection-string@0.6.0': {} + '@tidbcloud/serverless@0.1.1': {} '@tootallnate/once@1.1.2': optional: true - '@trivago/prettier-plugin-sort-imports@5.2.2(prettier@3.6.2)': - dependencies: - '@babel/generator': 7.28.5 - '@babel/parser': 7.28.5 - '@babel/traverse': 7.28.5 - '@babel/types': 7.28.5 - javascript-natural-sort: 0.7.1 - lodash: 4.17.21 - prettier: 3.6.2 - transitivePeerDependencies: - - supports-color - '@ts-morph/common@0.26.1': dependencies: fast-glob: 3.3.3 @@ -11975,7 +11644,7 @@ snapshots: '@types/better-sqlite3@7.6.13': dependencies: - '@types/node': 18.19.130 + '@types/node': 24.10.1 '@types/braces@3.0.5': {} @@ -11992,13 +11661,13 @@ snapshots: '@types/docker-modem@3.0.6': dependencies: - '@types/node': 18.19.130 + '@types/node': 24.10.1 '@types/ssh2': 1.15.5 '@types/dockerode@3.3.47': dependencies: '@types/docker-modem': 3.0.6 - '@types/node': 18.19.130 + '@types/node': 24.10.1 '@types/ssh2': 1.15.5 '@types/emscripten@1.41.5': {} @@ -12008,16 +11677,16 @@ snapshots: '@types/fs-extra@11.0.4': dependencies: '@types/jsonfile': 6.1.4 - '@types/node': 18.19.130 + '@types/node': 24.10.1 '@types/glob@8.1.0': dependencies: '@types/minimatch': 5.1.2 - '@types/node': 18.19.130 + '@types/node': 24.10.1 '@types/graceful-fs@4.1.9': dependencies: - '@types/node': 20.19.25 + '@types/node': 24.10.1 '@types/istanbul-lib-coverage@2.0.6': {} @@ -12033,11 +11702,16 @@ snapshots: '@types/json-schema@7.0.15': {} - '@types/json5@0.0.29': {} - '@types/jsonfile@6.1.4': dependencies: - '@types/node': 18.19.130 + '@types/node': 24.10.1 + + '@types/marked-terminal@3.1.3': + dependencies: + '@types/marked': 3.0.4 + chalk: 2.4.2 + + '@types/marked@3.0.4': {} '@types/micromatch@4.0.10': dependencies: @@ -12047,6 +11721,14 @@ snapshots: '@types/minimist@1.2.5': {} + '@types/mssql@9.1.8': + dependencies: + '@types/node': 24.10.1 + tarn: 3.0.2 + tedious: 19.1.3 + transitivePeerDependencies: + - supports-color + '@types/node@18.19.130': dependencies: undici-types: 5.26.5 @@ -12063,23 +11745,21 @@ snapshots: dependencies: undici-types: 7.16.0 - '@types/normalize-package-data@2.4.4': {} - '@types/pg@8.11.6': dependencies: - '@types/node': 18.19.130 + '@types/node': 20.19.25 pg-protocol: 1.10.3 pg-types: 4.1.0 '@types/pg@8.15.6': dependencies: - '@types/node': 18.19.130 + '@types/node': 24.10.1 pg-protocol: 1.10.3 pg-types: 2.2.0 '@types/pg@8.6.6': dependencies: - '@types/node': 18.19.130 + '@types/node': 24.10.1 pg-protocol: 1.10.3 pg-types: 2.2.0 @@ -12094,6 +11774,10 @@ snapshots: '@types/prop-types': 15.7.15 csstype: 3.2.3 + '@types/readable-stream@4.0.22': + dependencies: + '@types/node': 24.10.1 + '@types/retry@0.12.5': {} '@types/semver@7.7.1': {} @@ -12101,7 +11785,7 @@ snapshots: '@types/sql.js@1.4.9': dependencies: '@types/emscripten': 1.41.5 - '@types/node': 20.19.25 + '@types/node': 24.10.1 '@types/ssh2@1.15.5': dependencies: @@ -12113,11 +11797,13 @@ snapshots: '@types/uuid@9.0.8': {} + '@types/validate-npm-package-name@4.0.2': {} + '@types/which@3.0.4': {} '@types/ws@8.18.1': dependencies: - '@types/node': 18.19.130 + '@types/node': 24.10.1 '@types/yargs-parser@21.0.3': {} @@ -12125,83 +11811,24 @@ snapshots: dependencies: '@types/yargs-parser': 21.0.3 - '@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2)': - dependencies: - '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/scope-manager': 6.21.0 - '@typescript-eslint/type-utils': 6.21.0(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/visitor-keys': 6.21.0 - debug: 4.4.3 - eslint: 8.57.1 - graphemer: 1.4.0 - ignore: 5.3.2 - natural-compare: 1.4.0 - semver: 7.7.3 - ts-api-utils: 1.4.3(typescript@5.9.2) - optionalDependencies: - typescript: 5.9.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/eslint-plugin@7.18.0(@typescript-eslint/parser@7.18.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2)': - dependencies: - '@eslint-community/regexpp': 4.12.2 - '@typescript-eslint/parser': 7.18.0(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/scope-manager': 7.18.0 - '@typescript-eslint/type-utils': 7.18.0(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/utils': 7.18.0(eslint@8.57.1)(typescript@5.9.2) - '@typescript-eslint/visitor-keys': 7.18.0 - eslint: 8.57.1 - graphemer: 1.4.0 - ignore: 5.3.2 - natural-compare: 1.4.0 - ts-api-utils: 1.4.3(typescript@5.9.2) - optionalDependencies: - typescript: 5.9.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/experimental-utils@5.62.0(eslint@8.57.1)(typescript@5.9.2)': - dependencies: - '@typescript-eslint/utils': 5.62.0(eslint@8.57.1)(typescript@5.9.2) - eslint: 8.57.1 - transitivePeerDependencies: - - supports-color - - typescript - - '@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2)': + '@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.3)': dependencies: '@typescript-eslint/scope-manager': 6.21.0 '@typescript-eslint/types': 6.21.0 - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.2) + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.3) '@typescript-eslint/visitor-keys': 6.21.0 debug: 4.4.3 eslint: 8.57.1 optionalDependencies: - typescript: 5.9.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/parser@7.18.0(eslint@8.57.1)(typescript@5.9.2)': - dependencies: - '@typescript-eslint/scope-manager': 7.18.0 - '@typescript-eslint/types': 7.18.0 - '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.9.2) - '@typescript-eslint/visitor-keys': 7.18.0 - debug: 4.4.3 - eslint: 8.57.1 - optionalDependencies: - typescript: 5.9.2 + typescript: 5.9.3 transitivePeerDependencies: - supports-color - '@typescript-eslint/rule-tester@6.21.0(@eslint/eslintrc@2.1.4)(eslint@8.57.1)(typescript@5.9.2)': + '@typescript-eslint/rule-tester@6.21.0(@eslint/eslintrc@2.1.4)(eslint@8.57.1)(typescript@5.9.3)': dependencies: '@eslint/eslintrc': 2.1.4 - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.2) - '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.9.2) + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.3) + '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.9.3) ajv: 6.12.6 eslint: 8.57.1 lodash.merge: 4.6.2 @@ -12210,66 +11837,14 @@ snapshots: - supports-color - typescript - '@typescript-eslint/scope-manager@5.62.0': - dependencies: - '@typescript-eslint/types': 5.62.0 - '@typescript-eslint/visitor-keys': 5.62.0 - '@typescript-eslint/scope-manager@6.21.0': dependencies: '@typescript-eslint/types': 6.21.0 '@typescript-eslint/visitor-keys': 6.21.0 - '@typescript-eslint/scope-manager@7.18.0': - dependencies: - '@typescript-eslint/types': 7.18.0 - '@typescript-eslint/visitor-keys': 7.18.0 - - '@typescript-eslint/type-utils@6.21.0(eslint@8.57.1)(typescript@5.9.2)': - dependencies: - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.2) - '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.9.2) - debug: 4.4.3 - eslint: 8.57.1 - ts-api-utils: 1.4.3(typescript@5.9.2) - optionalDependencies: - typescript: 5.9.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/type-utils@7.18.0(eslint@8.57.1)(typescript@5.9.2)': - dependencies: - '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.9.2) - '@typescript-eslint/utils': 7.18.0(eslint@8.57.1)(typescript@5.9.2) - debug: 4.4.3 - eslint: 8.57.1 - ts-api-utils: 1.4.3(typescript@5.9.2) - optionalDependencies: - typescript: 5.9.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/types@5.62.0': {} - '@typescript-eslint/types@6.21.0': {} - '@typescript-eslint/types@7.18.0': {} - - '@typescript-eslint/typescript-estree@5.62.0(typescript@5.9.2)': - dependencies: - '@typescript-eslint/types': 5.62.0 - '@typescript-eslint/visitor-keys': 5.62.0 - debug: 4.4.3 - globby: 11.1.0 - is-glob: 4.0.3 - semver: 7.7.3 - tsutils: 3.21.0(typescript@5.9.2) - optionalDependencies: - typescript: 5.9.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/typescript-estree@6.21.0(typescript@5.9.2)': + '@typescript-eslint/typescript-estree@6.21.0(typescript@5.9.3)': dependencies: '@typescript-eslint/types': 6.21.0 '@typescript-eslint/visitor-keys': 6.21.0 @@ -12278,82 +11853,31 @@ snapshots: is-glob: 4.0.3 minimatch: 9.0.3 semver: 7.7.3 - ts-api-utils: 1.4.3(typescript@5.9.2) - optionalDependencies: - typescript: 5.9.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/typescript-estree@7.18.0(typescript@5.9.2)': - dependencies: - '@typescript-eslint/types': 7.18.0 - '@typescript-eslint/visitor-keys': 7.18.0 - debug: 4.4.3 - globby: 11.1.0 - is-glob: 4.0.3 - minimatch: 9.0.5 - semver: 7.7.3 - ts-api-utils: 1.4.3(typescript@5.9.2) + ts-api-utils: 1.4.3(typescript@5.9.3) optionalDependencies: - typescript: 5.9.2 - transitivePeerDependencies: - - supports-color - - '@typescript-eslint/utils@5.62.0(eslint@8.57.1)(typescript@5.9.2)': - dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1) - '@types/json-schema': 7.0.15 - '@types/semver': 7.7.1 - '@typescript-eslint/scope-manager': 5.62.0 - '@typescript-eslint/types': 5.62.0 - '@typescript-eslint/typescript-estree': 5.62.0(typescript@5.9.2) - eslint: 8.57.1 - eslint-scope: 5.1.1 - semver: 7.7.3 + typescript: 5.9.3 transitivePeerDependencies: - supports-color - - typescript - '@typescript-eslint/utils@6.21.0(eslint@8.57.1)(typescript@5.9.2)': + '@typescript-eslint/utils@6.21.0(eslint@8.57.1)(typescript@5.9.3)': dependencies: '@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1) '@types/json-schema': 7.0.15 '@types/semver': 7.7.1 '@typescript-eslint/scope-manager': 6.21.0 '@typescript-eslint/types': 6.21.0 - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.2) + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.3) eslint: 8.57.1 semver: 7.7.3 transitivePeerDependencies: - supports-color - typescript - '@typescript-eslint/utils@7.18.0(eslint@8.57.1)(typescript@5.9.2)': - dependencies: - '@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1) - '@typescript-eslint/scope-manager': 7.18.0 - '@typescript-eslint/types': 7.18.0 - '@typescript-eslint/typescript-estree': 7.18.0(typescript@5.9.2) - eslint: 8.57.1 - transitivePeerDependencies: - - supports-color - - typescript - - '@typescript-eslint/visitor-keys@5.62.0': - dependencies: - '@typescript-eslint/types': 5.62.0 - eslint-visitor-keys: 3.4.3 - '@typescript-eslint/visitor-keys@6.21.0': dependencies: '@typescript-eslint/types': 6.21.0 eslint-visitor-keys: 3.4.3 - '@typescript-eslint/visitor-keys@7.18.0': - dependencies: - '@typescript-eslint/types': 7.18.0 - eslint-visitor-keys: 3.4.3 - '@typescript/analyze-trace@0.10.1': dependencies: chalk: 4.1.2 @@ -12365,16 +11889,24 @@ snapshots: treeify: 1.1.0 yargs: 16.2.0 - '@typescript/vfs@1.6.1(typescript@6.0.0-dev.20251126)': + '@typescript/vfs@1.6.1(typescript@5.9.2)': dependencies: debug: 4.4.3 - typescript: 6.0.0-dev.20251126 + typescript: 5.9.2 + transitivePeerDependencies: + - supports-color + + '@typespec/ts-http-runtime@0.3.2': + dependencies: + http-proxy-agent: 7.0.2 + https-proxy-agent: 7.0.6 + tslib: 2.8.1 transitivePeerDependencies: - supports-color '@ungap/structured-clone@1.3.0': {} - '@upstash/redis@1.35.6': + '@upstash/redis@1.35.7': dependencies: uncrypto: 0.1.3 @@ -12397,101 +11929,53 @@ snapshots: utf-8-validate: 6.0.3 ws: 8.14.2(bufferutil@4.0.8)(utf-8-validate@6.0.3) - '@vitest/expect@3.2.4': + '@vitest/expect@4.0.13': dependencies: + '@standard-schema/spec': 1.0.0 '@types/chai': 5.2.3 - '@vitest/spy': 3.2.4 - '@vitest/utils': 3.2.4 - chai: 5.3.3 - tinyrainbow: 2.0.0 - - '@vitest/mocker@3.2.4(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1))': - dependencies: - '@vitest/spy': 3.2.4 - estree-walker: 3.0.3 - magic-string: 0.30.21 - optionalDependencies: - vite: 7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - - '@vitest/mocker@3.2.4(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1))': - dependencies: - '@vitest/spy': 3.2.4 - estree-walker: 3.0.3 - magic-string: 0.30.21 - optionalDependencies: - vite: 7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - - '@vitest/mocker@3.2.4(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1))': - dependencies: - '@vitest/spy': 3.2.4 - estree-walker: 3.0.3 - magic-string: 0.30.21 - optionalDependencies: - vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - - '@vitest/mocker@3.2.4(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1))': - dependencies: - '@vitest/spy': 3.2.4 - estree-walker: 3.0.3 - magic-string: 0.30.21 - optionalDependencies: - vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) + '@vitest/spy': 4.0.13 + '@vitest/utils': 4.0.13 + chai: 6.2.1 + tinyrainbow: 3.0.3 - '@vitest/mocker@3.2.4(vite@7.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1))': + '@vitest/mocker@4.0.13(vite@7.2.4(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1))': dependencies: - '@vitest/spy': 3.2.4 + '@vitest/spy': 4.0.13 estree-walker: 3.0.3 magic-string: 0.30.21 optionalDependencies: - vite: 7.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.2.4(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - '@vitest/pretty-format@3.2.4': + '@vitest/pretty-format@4.0.13': dependencies: - tinyrainbow: 2.0.0 + tinyrainbow: 3.0.3 - '@vitest/runner@3.2.4': + '@vitest/runner@4.0.13': dependencies: - '@vitest/utils': 3.2.4 + '@vitest/utils': 4.0.13 pathe: 2.0.3 - strip-literal: 3.1.0 - '@vitest/snapshot@3.2.4': + '@vitest/snapshot@4.0.13': dependencies: - '@vitest/pretty-format': 3.2.4 + '@vitest/pretty-format': 4.0.13 magic-string: 0.30.21 pathe: 2.0.3 - '@vitest/spy@3.2.4': - dependencies: - tinyspy: 4.0.4 - - '@vitest/ui@1.6.1(vitest@3.2.4)': - dependencies: - '@vitest/utils': 1.6.1 - fast-glob: 3.3.3 - fflate: 0.8.2 - flatted: 3.3.3 - pathe: 1.1.2 - picocolors: 1.1.1 - sirv: 2.0.4 - vitest: 3.2.4(@types/node@20.19.25)(@vitest/ui@1.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) + '@vitest/spy@4.0.13': {} - '@vitest/utils@1.6.1': + '@vitest/utils@4.0.13': dependencies: - diff-sequences: 29.6.3 - estree-walker: 3.0.3 - loupe: 2.3.7 - pretty-format: 29.7.0 + '@vitest/pretty-format': 4.0.13 + tinyrainbow: 3.0.3 - '@vitest/utils@3.2.4': + '@xata.io/client@0.29.5(typescript@5.9.2)': dependencies: - '@vitest/pretty-format': 3.2.4 - loupe: 3.2.1 - tinyrainbow: 2.0.0 + typescript: 5.9.2 - '@xata.io/client@0.29.5(typescript@6.0.0-dev.20251126)': + '@xata.io/client@0.29.5(typescript@5.9.3)': dependencies: - typescript: 6.0.0-dev.20251126 + typescript: 5.9.3 + optional: true '@xmldom/xmldom@0.8.11': {} @@ -12566,8 +12050,6 @@ snapshots: dependencies: type-fest: 0.21.3 - ansi-escapes@6.2.1: {} - ansi-escapes@7.2.0: dependencies: environment: 1.1.0 @@ -12590,8 +12072,6 @@ snapshots: ansi-styles@6.2.3: {} - ansicolors@0.3.2: {} - any-promise@1.3.0: {} anymatch@3.1.3: @@ -12640,60 +12120,10 @@ snapshots: '@ark/util': 0.55.0 arkregex: 0.0.3 - array-buffer-byte-length@1.0.2: - dependencies: - call-bound: 1.0.4 - is-array-buffer: 3.0.5 - array-find-index@1.0.2: {} - array-includes@3.1.9: - dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - define-properties: 1.2.1 - es-abstract: 1.24.0 - es-object-atoms: 1.1.1 - get-intrinsic: 1.3.0 - is-string: 1.1.1 - math-intrinsics: 1.1.0 - array-union@2.1.0: {} - array.prototype.findlastindex@1.2.6: - dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - define-properties: 1.2.1 - es-abstract: 1.24.0 - es-errors: 1.3.0 - es-object-atoms: 1.1.1 - es-shim-unscopables: 1.1.0 - - array.prototype.flat@1.3.3: - dependencies: - call-bind: 1.0.8 - define-properties: 1.2.1 - es-abstract: 1.24.0 - es-shim-unscopables: 1.1.0 - - array.prototype.flatmap@1.3.3: - dependencies: - call-bind: 1.0.8 - define-properties: 1.2.1 - es-abstract: 1.24.0 - es-shim-unscopables: 1.1.0 - - arraybuffer.prototype.slice@1.0.4: - dependencies: - array-buffer-byte-length: 1.0.2 - call-bind: 1.0.8 - define-properties: 1.2.1 - es-abstract: 1.24.0 - es-errors: 1.3.0 - get-intrinsic: 1.3.0 - is-array-buffer: 3.0.5 - arrgv@1.0.2: {} arrify@3.0.0: {} @@ -12710,8 +12140,6 @@ snapshots: dependencies: tslib: 2.8.1 - async-function@1.0.0: {} - async-limiter@1.0.1: {} async-retry@1.3.3: @@ -12984,6 +12412,13 @@ snapshots: inherits: 2.0.4 readable-stream: 3.6.2 + bl@6.1.5: + dependencies: + '@types/readable-stream': 4.0.22 + buffer: 6.0.3 + inherits: 2.0.4 + readable-stream: 4.7.0 + blueimp-md5@2.19.0: {} body-parser@2.2.1: @@ -13039,6 +12474,8 @@ snapshots: dependencies: node-int64: 0.4.0 + buffer-equal-constant-time@1.0.1: {} + buffer-from@1.1.2: {} buffer@4.9.2: @@ -13064,15 +12501,17 @@ snapshots: buildcheck@0.0.7: optional: true - builtin-modules@3.3.0: {} - builtins@5.1.0: dependencies: semver: 7.7.3 bun-types@1.3.3: dependencies: - '@types/node': 18.19.130 + '@types/node': 24.10.1 + + bundle-name@4.1.0: + dependencies: + run-applescript: 7.1.0 bundle-require@5.1.0(esbuild@0.27.0): dependencies: @@ -13140,11 +12579,6 @@ snapshots: caniuse-lite@1.0.30001757: {} - cardinal@2.1.1: - dependencies: - ansicolors: 0.3.2 - redeyed: 2.1.1 - cbor@8.1.0: dependencies: nofilter: 3.1.0 @@ -13157,6 +12591,8 @@ snapshots: loupe: 3.2.1 pathval: 2.0.1 + chai@6.2.1: {} + chalk@2.4.2: dependencies: ansi-styles: 3.2.1 @@ -13198,7 +12634,7 @@ snapshots: chrome-launcher@0.15.2: dependencies: - '@types/node': 20.19.25 + '@types/node': 24.10.1 escape-string-regexp: 4.0.0 is-wsl: 2.2.0 lighthouse-logger: 1.4.2 @@ -13207,7 +12643,7 @@ snapshots: chromium-edge-launcher@0.2.0: dependencies: - '@types/node': 20.19.25 + '@types/node': 24.10.1 escape-string-regexp: 4.0.0 is-wsl: 2.2.0 lighthouse-logger: 1.4.2 @@ -13226,10 +12662,6 @@ snapshots: cjs-module-lexer@1.4.3: {} - clean-regexp@1.0.0: - dependencies: - escape-string-regexp: 1.0.5 - clean-stack@2.2.0: optional: true @@ -13239,18 +12671,14 @@ snapshots: clean-yaml-object@0.1.0: {} - cli-color@2.0.4: - dependencies: - d: 1.0.2 - es5-ext: 0.10.64 - es6-iterator: 2.0.3 - memoizee: 0.4.17 - timers-ext: 0.1.8 - cli-cursor@2.1.0: dependencies: restore-cursor: 2.0.0 + cli-cursor@5.0.0: + dependencies: + restore-cursor: 5.1.0 + cli-highlight@2.1.11: dependencies: chalk: 4.1.2 @@ -13273,6 +12701,11 @@ snapshots: slice-ansi: 5.0.0 string-width: 5.1.2 + cli-truncate@5.1.1: + dependencies: + slice-ansi: 7.1.2 + string-width: 8.1.0 + cliui@7.0.4: dependencies: string-width: 4.2.3 @@ -13310,20 +12743,24 @@ snapshots: color-support@1.1.3: optional: true + colorette@2.0.20: {} + colors@1.4.0: {} commander@10.0.1: {} + commander@11.1.0: {} + commander@12.1.0: {} + commander@14.0.2: {} + commander@2.20.3: {} commander@4.1.1: {} commander@7.2.0: {} - commander@9.5.0: {} - common-path-prefix@3.0.0: {} compressible@2.0.18: @@ -13395,10 +12832,6 @@ snapshots: cookie@0.7.2: {} - copy-anything@4.0.5: - dependencies: - is-what: 5.5.0 - copy-file@11.1.0: dependencies: graceful-fs: 4.2.11 @@ -13470,31 +12903,8 @@ snapshots: dependencies: array-find-index: 1.0.2 - d@1.0.2: - dependencies: - es5-ext: 0.10.64 - type: 2.7.3 - data-uri-to-buffer@4.0.1: {} - data-view-buffer@1.0.2: - dependencies: - call-bound: 1.0.4 - es-errors: 1.3.0 - is-data-view: 1.0.2 - - data-view-byte-length@1.0.2: - dependencies: - call-bound: 1.0.4 - es-errors: 1.3.0 - is-data-view: 1.0.2 - - data-view-byte-offset@1.0.1: - dependencies: - call-bound: 1.0.4 - es-errors: 1.3.0 - is-data-view: 1.0.2 - date-fns@2.30.0: dependencies: '@babel/runtime': 7.28.4 @@ -13531,6 +12941,13 @@ snapshots: deepmerge@4.3.1: {} + default-browser-id@5.0.1: {} + + default-browser@5.4.0: + dependencies: + bundle-name: 4.1.0 + default-browser-id: 5.0.1 + defaults@1.0.4: dependencies: clone: 1.0.4 @@ -13543,11 +12960,7 @@ snapshots: define-lazy-prop@2.0.0: {} - define-properties@1.2.1: - dependencies: - define-data-property: 1.1.4 - has-property-descriptors: 1.0.2 - object-keys: 1.1.1 + define-lazy-prop@3.0.0: {} delegates@1.0.0: optional: true @@ -13564,16 +12977,10 @@ snapshots: detect-libc@2.1.2: {} - diff-sequences@29.6.3: {} - diff@4.0.2: {} diff@5.2.0: {} - difflib@0.2.4: - dependencies: - heap: 0.2.7 - dir-glob@3.0.1: dependencies: path-type: 4.0.0 @@ -13599,17 +13006,13 @@ snapshots: transitivePeerDependencies: - supports-color - doctrine@2.1.0: - dependencies: - esutils: 2.0.3 - doctrine@3.0.0: dependencies: esutils: 2.0.3 dotenv-expand@11.0.7: dependencies: - dotenv: 16.4.7 + dotenv: 16.6.1 dotenv@10.0.0: {} @@ -13617,52 +13020,37 @@ snapshots: dotenv@16.6.1: {} - dprint@0.46.3: + dprint@0.50.2: optionalDependencies: - '@dprint/darwin-arm64': 0.46.3 - '@dprint/darwin-x64': 0.46.3 - '@dprint/linux-arm64-glibc': 0.46.3 - '@dprint/linux-arm64-musl': 0.46.3 - '@dprint/linux-x64-glibc': 0.46.3 - '@dprint/linux-x64-musl': 0.46.3 - '@dprint/win32-x64': 0.46.3 + '@dprint/darwin-arm64': 0.50.2 + '@dprint/darwin-x64': 0.50.2 + '@dprint/linux-arm64-glibc': 0.50.2 + '@dprint/linux-arm64-musl': 0.50.2 + '@dprint/linux-riscv64-glibc': 0.50.2 + '@dprint/linux-x64-glibc': 0.50.2 + '@dprint/linux-x64-musl': 0.50.2 + '@dprint/win32-arm64': 0.50.2 + '@dprint/win32-x64': 0.50.2 dreamopt@0.8.0: dependencies: wordwrap: 1.0.0 - drizzle-kit@0.19.13: - dependencies: - '@drizzle-team/studio': 0.0.5 - '@esbuild-kit/esm-loader': 2.6.5 - camelcase: 7.0.1 - chalk: 5.6.2 - commander: 9.5.0 - esbuild: 0.18.20 - esbuild-register: 3.6.0(esbuild@0.18.20) - glob: 8.1.0 - hanji: 0.0.5 - json-diff: 0.9.0 - minimatch: 7.4.6 - zod: 3.25.1 - transitivePeerDependencies: - - supports-color - - drizzle-kit@0.25.0-b1faa33: + drizzle-kit@0.31.7: dependencies: '@drizzle-team/brocli': 0.10.2 '@esbuild-kit/esm-loader': 2.6.5 - esbuild: 0.19.12 - esbuild-register: 3.6.0(esbuild@0.19.12) + esbuild: 0.25.12 + esbuild-register: 3.6.0(esbuild@0.25.12) transitivePeerDependencies: - supports-color - drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@0.10.0)(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@11.9.1)(bun-types@1.3.3)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(sql.js@1.13.0)(sqlite3@5.1.7): + drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@1.0.2)(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@11.9.1)(bun-types@1.3.3)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(sql.js@1.13.0)(sqlite3@5.1.7): optionalDependencies: '@aws-sdk/client-rds-data': 3.940.0 '@cloudflare/workers-types': 4.20251126.0 '@libsql/client': 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) - '@neondatabase/serverless': 0.10.0 + '@neondatabase/serverless': 1.0.2 '@opentelemetry/api': 1.9.0 '@planetscale/database': 1.19.0 '@types/better-sqlite3': 7.6.13 @@ -13677,14 +13065,14 @@ snapshots: sql.js: 1.13.0 sqlite3: 5.1.7 - drizzle-orm@1.0.0-beta.1-c0277c0(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@electric-sql/pglite@0.2.12)(@libsql/client-wasm@0.10.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@0.10.0)(@op-engineering/op-sqlite@2.0.22(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1))(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@prisma/client@5.14.0(prisma@5.14.0))(@tidbcloud/serverless@0.1.1)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(@xata.io/client@0.29.5(typescript@6.0.0-dev.20251126))(better-sqlite3@11.9.1)(bun-types@1.3.3)(expo-sqlite@14.0.6(expo@54.0.25(@babel/core@7.28.5)(bufferutil@4.0.8)(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)(utf-8-validate@6.0.3)))(gel@2.2.0)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(prisma@5.14.0)(sql.js@1.13.0)(sqlite3@5.1.7): + drizzle-orm@0.44.1(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@electric-sql/pglite@0.2.12)(@libsql/client-wasm@0.10.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@1.0.2)(@op-engineering/op-sqlite@2.0.22(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1))(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@prisma/client@5.14.0(prisma@5.14.0))(@tidbcloud/serverless@0.1.1)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@upstash/redis@1.35.7)(@vercel/postgres@0.8.0)(@xata.io/client@0.29.5(typescript@5.9.3))(better-sqlite3@11.9.1)(bun-types@1.3.3)(expo-sqlite@14.0.6(expo@54.0.25(@babel/core@7.28.5)(bufferutil@4.0.8)(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)(utf-8-validate@6.0.3)))(gel@2.2.0)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(prisma@5.14.0)(sql.js@1.13.0)(sqlite3@5.1.7): optionalDependencies: '@aws-sdk/client-rds-data': 3.940.0 '@cloudflare/workers-types': 4.20251126.0 '@electric-sql/pglite': 0.2.12 '@libsql/client': 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) '@libsql/client-wasm': 0.10.0 - '@neondatabase/serverless': 0.10.0 + '@neondatabase/serverless': 1.0.2 '@op-engineering/op-sqlite': 2.0.22(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1) '@opentelemetry/api': 1.9.0 '@planetscale/database': 1.19.0 @@ -13693,8 +13081,38 @@ snapshots: '@types/better-sqlite3': 7.6.13 '@types/pg': 8.15.6 '@types/sql.js': 1.4.9 + '@upstash/redis': 1.35.7 '@vercel/postgres': 0.8.0 - '@xata.io/client': 0.29.5(typescript@6.0.0-dev.20251126) + '@xata.io/client': 0.29.5(typescript@5.9.3) + better-sqlite3: 11.9.1 + bun-types: 1.3.3 + expo-sqlite: 14.0.6(expo@54.0.25(@babel/core@7.28.5)(bufferutil@4.0.8)(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)(utf-8-validate@6.0.3)) + gel: 2.2.0 + mysql2: 3.14.1 + pg: 8.16.3 + postgres: 3.4.7 + prisma: 5.14.0 + sql.js: 1.13.0 + sqlite3: 5.1.7 + + drizzle-orm@1.0.0-beta.1-c0277c0(@aws-sdk/client-rds-data@3.940.0)(@cloudflare/workers-types@4.20251126.0)(@electric-sql/pglite@0.2.12)(@libsql/client-wasm@0.10.0)(@libsql/client@0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3))(@neondatabase/serverless@1.0.2)(@op-engineering/op-sqlite@2.0.22(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1))(@opentelemetry/api@1.9.0)(@planetscale/database@1.19.0)(@prisma/client@5.14.0(prisma@5.14.0))(@tidbcloud/serverless@0.1.1)(@types/better-sqlite3@7.6.13)(@types/pg@8.15.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(@xata.io/client@0.29.5(typescript@5.9.2))(better-sqlite3@11.9.1)(bun-types@1.3.3)(expo-sqlite@14.0.6(expo@54.0.25(@babel/core@7.28.5)(bufferutil@4.0.8)(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)(utf-8-validate@6.0.3)))(gel@2.2.0)(mysql2@3.14.1)(pg@8.16.3)(postgres@3.4.7)(prisma@5.14.0)(sql.js@1.13.0)(sqlite3@5.1.7): + optionalDependencies: + '@aws-sdk/client-rds-data': 3.940.0 + '@cloudflare/workers-types': 4.20251126.0 + '@electric-sql/pglite': 0.2.12 + '@libsql/client': 0.10.0(bufferutil@4.0.8)(utf-8-validate@6.0.3) + '@libsql/client-wasm': 0.10.0 + '@neondatabase/serverless': 1.0.2 + '@op-engineering/op-sqlite': 2.0.22(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1) + '@opentelemetry/api': 1.9.0 + '@planetscale/database': 1.19.0 + '@prisma/client': 5.14.0(prisma@5.14.0) + '@tidbcloud/serverless': 0.1.1 + '@types/better-sqlite3': 7.6.13 + '@types/pg': 8.15.6 + '@types/sql.js': 1.4.9 + '@vercel/postgres': 0.8.0 + '@xata.io/client': 0.29.5(typescript@5.9.2) better-sqlite3: 11.9.1 bun-types: 1.3.3 expo-sqlite: 14.0.6(expo@54.0.25(@babel/core@7.28.5)(bufferutil@4.0.8)(react-native@0.82.1(@babel/core@7.28.5)(bufferutil@4.0.8)(react@18.3.1)(utf-8-validate@6.0.3))(react@18.3.1)(utf-8-validate@6.0.3)) @@ -13720,12 +13138,18 @@ snapshots: eastasianwidth@0.2.0: {} + ecdsa-sig-formatter@1.0.11: + dependencies: + safe-buffer: 5.2.1 + ee-first@1.1.1: {} electron-to-chromium@1.5.260: {} emittery@1.2.0: {} + emoji-regex@10.6.0: {} + emoji-regex@8.0.0: {} emoji-regex@9.2.2: {} @@ -13771,71 +13195,10 @@ snapshots: err-code@2.0.3: optional: true - error-ex@1.3.4: - dependencies: - is-arrayish: 0.2.1 - error-stack-parser@2.1.4: dependencies: stackframe: 1.3.4 - es-abstract@1.24.0: - dependencies: - array-buffer-byte-length: 1.0.2 - arraybuffer.prototype.slice: 1.0.4 - available-typed-arrays: 1.0.7 - call-bind: 1.0.8 - call-bound: 1.0.4 - data-view-buffer: 1.0.2 - data-view-byte-length: 1.0.2 - data-view-byte-offset: 1.0.1 - es-define-property: 1.0.1 - es-errors: 1.3.0 - es-object-atoms: 1.1.1 - es-set-tostringtag: 2.1.0 - es-to-primitive: 1.3.0 - function.prototype.name: 1.1.8 - get-intrinsic: 1.3.0 - get-proto: 1.0.1 - get-symbol-description: 1.1.0 - globalthis: 1.0.4 - gopd: 1.2.0 - has-property-descriptors: 1.0.2 - has-proto: 1.2.0 - has-symbols: 1.1.0 - hasown: 2.0.2 - internal-slot: 1.1.0 - is-array-buffer: 3.0.5 - is-callable: 1.2.7 - is-data-view: 1.0.2 - is-negative-zero: 2.0.3 - is-regex: 1.2.1 - is-set: 2.0.3 - is-shared-array-buffer: 1.0.4 - is-string: 1.1.1 - is-typed-array: 1.1.15 - is-weakref: 1.1.1 - math-intrinsics: 1.1.0 - object-inspect: 1.13.4 - object-keys: 1.1.1 - object.assign: 4.1.7 - own-keys: 1.0.1 - regexp.prototype.flags: 1.5.4 - safe-array-concat: 1.1.3 - safe-push-apply: 1.0.0 - safe-regex-test: 1.1.0 - set-proto: 1.0.0 - stop-iteration-iterator: 1.1.0 - string.prototype.trim: 1.2.10 - string.prototype.trimend: 1.0.9 - string.prototype.trimstart: 1.0.8 - typed-array-buffer: 1.0.3 - typed-array-byte-length: 1.0.3 - typed-array-byte-offset: 1.0.4 - typed-array-length: 1.0.7 - unbox-primitive: 1.1.0 - which-typed-array: 1.1.19 - es-define-property@1.0.1: {} es-errors@1.3.0: {} @@ -13846,48 +13209,6 @@ snapshots: dependencies: es-errors: 1.3.0 - es-set-tostringtag@2.1.0: - dependencies: - es-errors: 1.3.0 - get-intrinsic: 1.3.0 - has-tostringtag: 1.0.2 - hasown: 2.0.2 - - es-shim-unscopables@1.1.0: - dependencies: - hasown: 2.0.2 - - es-to-primitive@1.3.0: - dependencies: - is-callable: 1.2.7 - is-date-object: 1.1.0 - is-symbol: 1.1.1 - - es5-ext@0.10.64: - dependencies: - es6-iterator: 2.0.3 - es6-symbol: 3.1.4 - esniff: 2.0.1 - next-tick: 1.1.0 - - es6-iterator@2.0.3: - dependencies: - d: 1.0.2 - es5-ext: 0.10.64 - es6-symbol: 3.1.4 - - es6-symbol@3.1.4: - dependencies: - d: 1.0.2 - ext: 1.7.0 - - es6-weak-map@2.0.3: - dependencies: - d: 1.0.2 - es5-ext: 0.10.64 - es6-iterator: 2.0.3 - es6-symbol: 3.1.4 - esbuild-android-64@0.14.54: optional: true @@ -13941,20 +13262,6 @@ snapshots: esbuild-openbsd-64@0.14.54: optional: true - esbuild-register@3.6.0(esbuild@0.18.20): - dependencies: - debug: 4.4.3 - esbuild: 0.18.20 - transitivePeerDependencies: - - supports-color - - esbuild-register@3.6.0(esbuild@0.19.12): - dependencies: - debug: 4.4.3 - esbuild: 0.19.12 - transitivePeerDependencies: - - supports-color - esbuild-register@3.6.0(esbuild@0.25.12): dependencies: debug: 4.4.3 @@ -14023,32 +13330,6 @@ snapshots: '@esbuild/win32-ia32': 0.18.20 '@esbuild/win32-x64': 0.18.20 - esbuild@0.19.12: - optionalDependencies: - '@esbuild/aix-ppc64': 0.19.12 - '@esbuild/android-arm': 0.19.12 - '@esbuild/android-arm64': 0.19.12 - '@esbuild/android-x64': 0.19.12 - '@esbuild/darwin-arm64': 0.19.12 - '@esbuild/darwin-x64': 0.19.12 - '@esbuild/freebsd-arm64': 0.19.12 - '@esbuild/freebsd-x64': 0.19.12 - '@esbuild/linux-arm': 0.19.12 - '@esbuild/linux-arm64': 0.19.12 - '@esbuild/linux-ia32': 0.19.12 - '@esbuild/linux-loong64': 0.19.12 - '@esbuild/linux-mips64el': 0.19.12 - '@esbuild/linux-ppc64': 0.19.12 - '@esbuild/linux-riscv64': 0.19.12 - '@esbuild/linux-s390x': 0.19.12 - '@esbuild/linux-x64': 0.19.12 - '@esbuild/netbsd-x64': 0.19.12 - '@esbuild/openbsd-x64': 0.19.12 - '@esbuild/sunos-x64': 0.19.12 - '@esbuild/win32-arm64': 0.19.12 - '@esbuild/win32-ia32': 0.19.12 - '@esbuild/win32-x64': 0.19.12 - esbuild@0.25.12: optionalDependencies: '@esbuild/aix-ppc64': 0.25.12 @@ -14119,101 +13400,6 @@ snapshots: escape-string-regexp@5.0.0: {} - eslint-config-prettier@9.1.2(eslint@8.57.1): - dependencies: - eslint: 8.57.1 - - eslint-import-resolver-node@0.3.9: - dependencies: - debug: 3.2.7 - is-core-module: 2.16.1 - resolve: 1.22.11 - transitivePeerDependencies: - - supports-color - - eslint-module-utils@2.12.1(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint-import-resolver-node@0.3.9)(eslint@8.57.1): - dependencies: - debug: 3.2.7 - optionalDependencies: - '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.2) - eslint: 8.57.1 - eslint-import-resolver-node: 0.3.9 - transitivePeerDependencies: - - supports-color - - eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1): - dependencies: - '@rtsao/scc': 1.1.0 - array-includes: 3.1.9 - array.prototype.findlastindex: 1.2.6 - array.prototype.flat: 1.3.3 - array.prototype.flatmap: 1.3.3 - debug: 3.2.7 - doctrine: 2.1.0 - eslint: 8.57.1 - eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.1(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint-import-resolver-node@0.3.9)(eslint@8.57.1) - hasown: 2.0.2 - is-core-module: 2.16.1 - is-glob: 4.0.3 - minimatch: 3.1.2 - object.fromentries: 2.0.8 - object.groupby: 1.0.3 - object.values: 1.2.1 - semver: 6.3.1 - string.prototype.trimend: 1.0.9 - tsconfig-paths: 3.15.0 - optionalDependencies: - '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.2) - transitivePeerDependencies: - - eslint-import-resolver-typescript - - eslint-import-resolver-webpack - - supports-color - - eslint-plugin-no-instanceof@1.0.1: {} - - eslint-plugin-prettier@5.5.4(eslint-config-prettier@9.1.2(eslint@8.57.1))(eslint@8.57.1)(prettier@3.6.2): - dependencies: - eslint: 8.57.1 - prettier: 3.6.2 - prettier-linter-helpers: 1.0.0 - synckit: 0.11.11 - optionalDependencies: - eslint-config-prettier: 9.1.2(eslint@8.57.1) - - eslint-plugin-unicorn@48.0.1(eslint@8.57.1): - dependencies: - '@babel/helper-validator-identifier': 7.28.5 - '@eslint-community/eslint-utils': 4.9.0(eslint@8.57.1) - ci-info: 3.9.0 - clean-regexp: 1.0.0 - eslint: 8.57.1 - esquery: 1.6.0 - indent-string: 4.0.0 - is-builtin-module: 3.2.1 - jsesc: 3.1.0 - lodash: 4.17.21 - pluralize: 8.0.0 - read-pkg-up: 7.0.1 - regexp-tree: 0.1.27 - regjsparser: 0.10.0 - semver: 7.7.3 - strip-indent: 3.0.0 - - eslint-plugin-unused-imports@3.2.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1): - dependencies: - eslint: 8.57.1 - eslint-rule-composer: 0.3.0 - optionalDependencies: - '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2))(eslint@8.57.1)(typescript@5.9.2) - - eslint-rule-composer@0.3.0: {} - - eslint-scope@5.1.1: - dependencies: - esrecurse: 4.3.0 - estraverse: 4.3.0 - eslint-scope@7.2.2: dependencies: esrecurse: 4.3.0 @@ -14264,13 +13450,6 @@ snapshots: transitivePeerDependencies: - supports-color - esniff@2.0.1: - dependencies: - d: 1.0.2 - es5-ext: 0.10.64 - event-emitter: 0.3.5 - type: 2.7.3 - espree@9.6.1: dependencies: acorn: 8.15.0 @@ -14287,8 +13466,6 @@ snapshots: dependencies: estraverse: 5.3.0 - estraverse@4.3.0: {} - estraverse@5.3.0: {} estree-walker@2.0.2: {} @@ -14301,11 +13478,6 @@ snapshots: etag@1.8.1: {} - event-emitter@0.3.5: - dependencies: - d: 1.0.2 - es5-ext: 0.10.64 - event-stream@3.3.4: dependencies: duplexer: 0.1.2 @@ -14326,6 +13498,8 @@ snapshots: events@1.1.1: {} + events@3.3.0: {} + eventsource-parser@3.0.6: {} eventsource@3.0.7: @@ -14570,10 +13744,6 @@ snapshots: transitivePeerDependencies: - supports-color - ext@1.7.0: - dependencies: - type: 2.7.3 - fast-deep-equal@3.1.3: {} fast-diff@1.3.0: {} @@ -14736,17 +13906,6 @@ snapshots: function-bind@1.1.2: {} - function.prototype.name@1.1.8: - dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - define-properties: 1.2.1 - functions-have-names: 1.2.3 - hasown: 2.0.2 - is-callable: 1.2.7 - - functions-have-names@1.2.3: {} - fx@39.2.0: {} gauge@4.0.4: @@ -14782,7 +13941,7 @@ snapshots: get-caller-file@2.0.5: {} - get-func-name@2.0.2: {} + get-east-asian-width@1.4.0: {} get-intrinsic@1.3.0: dependencies: @@ -14810,12 +13969,6 @@ snapshots: get-stream@6.0.1: {} - get-symbol-description@1.1.0: - dependencies: - call-bound: 1.0.4 - es-errors: 1.3.0 - get-intrinsic: 1.3.0 - get-tsconfig@4.13.0: dependencies: resolve-pkg-maps: 1.0.0 @@ -14875,11 +14028,6 @@ snapshots: dependencies: type-fest: 0.20.2 - globalthis@1.0.4: - dependencies: - define-properties: 1.2.1 - gopd: 1.2.0 - globby@11.1.0: dependencies: array-union: 2.1.0 @@ -14919,8 +14067,6 @@ snapshots: lodash.throttle: 4.1.1 sisteransi: 1.0.5 - has-bigints@1.1.0: {} - has-flag@3.0.0: {} has-flag@4.0.0: {} @@ -14929,10 +14075,6 @@ snapshots: dependencies: es-define-property: 1.0.1 - has-proto@1.2.0: - dependencies: - dunder-proto: 1.0.1 - has-symbols@1.1.0: {} has-tostringtag@1.0.2: @@ -14970,8 +14112,6 @@ snapshots: hono@4.7.4: {} - hosted-git-info@2.8.9: {} - hosted-git-info@7.0.2: dependencies: lru-cache: 10.4.3 @@ -15004,6 +14144,13 @@ snapshots: - supports-color optional: true + http-proxy-agent@7.0.2: + dependencies: + agent-base: 7.1.4 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + https-proxy-agent@5.0.1: dependencies: agent-base: 6.0.2 @@ -15026,6 +14173,8 @@ snapshots: ms: 2.1.3 optional: true + husky@9.1.7: {} + iconv-lite@0.6.3: dependencies: safer-buffer: 2.1.2 @@ -15064,7 +14213,8 @@ snapshots: imurmurhash@0.1.4: {} - indent-string@4.0.0: {} + indent-string@4.0.0: + optional: true indent-string@5.0.0: {} @@ -15080,12 +14230,6 @@ snapshots: ini@1.3.8: {} - internal-slot@1.1.0: - dependencies: - es-errors: 1.3.0 - hasown: 2.0.2 - side-channel: 1.1.0 - invariant@2.2.4: dependencies: loose-envify: 1.4.0 @@ -15102,70 +14246,32 @@ snapshots: call-bound: 1.0.4 has-tostringtag: 1.0.2 - is-array-buffer@3.0.5: - dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - get-intrinsic: 1.3.0 - - is-arrayish@0.2.1: {} - - is-async-function@2.1.1: - dependencies: - async-function: 1.0.0 - call-bound: 1.0.4 - get-proto: 1.0.1 - has-tostringtag: 1.0.2 - safe-regex-test: 1.1.0 - - is-bigint@1.1.0: - dependencies: - has-bigints: 1.1.0 - is-binary-path@2.1.0: dependencies: binary-extensions: 2.3.0 - is-boolean-object@1.2.2: - dependencies: - call-bound: 1.0.4 - has-tostringtag: 1.0.2 - - is-builtin-module@3.2.1: - dependencies: - builtin-modules: 3.3.0 - is-callable@1.2.7: {} is-core-module@2.16.1: dependencies: hasown: 2.0.2 - is-data-view@1.0.2: - dependencies: - call-bound: 1.0.4 - get-intrinsic: 1.3.0 - is-typed-array: 1.1.15 - - is-date-object@1.1.0: - dependencies: - call-bound: 1.0.4 - has-tostringtag: 1.0.2 - is-docker@2.2.1: {} + is-docker@3.0.0: {} + is-error@2.2.2: {} is-extglob@2.1.1: {} - is-finalizationregistry@1.1.1: - dependencies: - call-bound: 1.0.4 - is-fullwidth-code-point@3.0.0: {} is-fullwidth-code-point@4.0.0: {} + is-fullwidth-code-point@5.1.0: + dependencies: + get-east-asian-width: 1.4.0 + is-generator-function@1.1.2: dependencies: call-bound: 1.0.4 @@ -15178,26 +14284,19 @@ snapshots: dependencies: is-extglob: 2.1.1 + is-inside-container@1.0.0: + dependencies: + is-docker: 3.0.0 + is-lambda@1.0.1: optional: true - is-map@2.0.3: {} - - is-negative-zero@2.0.3: {} - - is-number-object@1.1.1: - dependencies: - call-bound: 1.0.4 - has-tostringtag: 1.0.2 - is-number@7.0.0: {} is-path-inside@3.0.3: {} is-plain-object@5.0.0: {} - is-promise@2.2.2: {} - is-promise@4.0.0: {} is-property@1.0.2: {} @@ -15209,51 +14308,23 @@ snapshots: has-tostringtag: 1.0.2 hasown: 2.0.2 - is-set@2.0.3: {} - - is-shared-array-buffer@1.0.4: - dependencies: - call-bound: 1.0.4 - is-stream@3.0.0: {} - is-string@1.1.1: - dependencies: - call-bound: 1.0.4 - has-tostringtag: 1.0.2 - - is-symbol@1.1.1: - dependencies: - call-bound: 1.0.4 - has-symbols: 1.1.0 - safe-regex-test: 1.1.0 - is-typed-array@1.1.15: dependencies: which-typed-array: 1.1.19 is-unicode-supported@1.3.0: {} - is-weakmap@2.0.2: {} - - is-weakref@1.1.1: - dependencies: - call-bound: 1.0.4 - - is-weakset@2.0.4: - dependencies: - call-bound: 1.0.4 - get-intrinsic: 1.3.0 - - is-what@5.5.0: {} - is-wsl@2.2.0: dependencies: is-docker: 2.2.1 - isarray@1.0.0: {} + is-wsl@3.1.0: + dependencies: + is-inside-container: 1.0.0 - isarray@2.0.5: {} + isarray@1.0.0: {} isexe@2.0.0: {} @@ -15281,14 +14352,12 @@ snapshots: dependencies: '@isaacs/cliui': 8.0.2 - javascript-natural-sort@0.7.1: {} - jest-environment-node@29.7.0: dependencies: '@jest/environment': 29.7.0 '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.19.25 + '@types/node': 24.10.1 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -15298,7 +14367,7 @@ snapshots: dependencies: '@jest/types': 29.6.3 '@types/graceful-fs': 4.1.9 - '@types/node': 20.19.25 + '@types/node': 24.10.1 anymatch: 3.1.3 fb-watchman: 2.0.2 graceful-fs: 4.2.11 @@ -15325,7 +14394,7 @@ snapshots: jest-mock@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 20.19.25 + '@types/node': 24.10.1 jest-util: 29.7.0 jest-regex-util@29.6.3: {} @@ -15333,7 +14402,7 @@ snapshots: jest-util@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 20.19.25 + '@types/node': 24.10.1 chalk: 4.1.2 ci-info: 3.9.0 graceful-fs: 4.2.11 @@ -15350,7 +14419,7 @@ snapshots: jest-worker@29.7.0: dependencies: - '@types/node': 20.19.25 + '@types/node': 24.10.1 jest-util: 29.7.0 merge-stream: 2.0.0 supports-color: 8.1.1 @@ -15367,12 +14436,12 @@ snapshots: js-base64@3.7.8: {} + js-md4@0.3.2: {} + js-string-escape@1.0.1: {} js-tokens@4.0.0: {} - js-tokens@9.0.1: {} - js-yaml@3.14.2: dependencies: argparse: 1.0.10 @@ -15382,30 +14451,22 @@ snapshots: dependencies: argparse: 2.0.1 + jsbi@4.3.2: {} + jsc-safe-url@0.2.4: {} jsep@1.4.0: {} - jsesc@0.5.0: {} - jsesc@3.1.0: {} json-buffer@3.0.1: {} - json-diff@0.9.0: - dependencies: - cli-color: 2.0.4 - difflib: 0.2.4 - dreamopt: 0.8.0 - json-diff@1.0.6: dependencies: '@ewoudenberg/difflib': 0.1.0 colors: 1.4.0 dreamopt: 0.8.0 - json-parse-even-better-errors@2.3.1: {} - json-rules-engine@7.3.1: dependencies: clone: 2.1.2 @@ -15417,10 +14478,6 @@ snapshots: json-stable-stringify-without-jsonify@1.0.1: {} - json5@1.0.2: - dependencies: - minimist: 1.2.8 - json5@2.2.3: {} jsonfile@6.2.0: @@ -15442,8 +14499,32 @@ snapshots: jsonparse: 1.3.1 through2: 4.0.2 + jsonwebtoken@9.0.2: + dependencies: + jws: 3.2.2 + lodash.includes: 4.3.0 + lodash.isboolean: 3.0.3 + lodash.isinteger: 4.0.4 + lodash.isnumber: 3.0.3 + lodash.isplainobject: 4.0.6 + lodash.isstring: 4.0.1 + lodash.once: 4.1.1 + ms: 2.1.3 + semver: 7.7.3 + junk@4.0.1: {} + jwa@1.4.2: + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + + jws@3.2.2: + dependencies: + jwa: 1.4.2 + safe-buffer: 5.2.1 + keyv@4.5.4: dependencies: json-buffer: 3.0.1 @@ -15538,6 +14619,25 @@ snapshots: lines-and-columns@1.2.4: {} + lint-staged@16.2.7: + dependencies: + commander: 14.0.2 + listr2: 9.0.5 + micromatch: 4.0.8 + nano-spawn: 2.0.0 + pidtree: 0.6.0 + string-argv: 0.3.2 + yaml: 2.8.1 + + listr2@9.0.5: + dependencies: + cli-truncate: 5.1.1 + colorette: 2.0.20 + eventemitter3: 5.0.1 + log-update: 6.1.0 + rfdc: 1.4.1 + wrap-ansi: 9.0.2 + load-json-file@7.0.1: {} load-tsconfig@0.2.5: {} @@ -15558,8 +14658,22 @@ snapshots: lodash.debounce@4.0.8: {} + lodash.includes@4.3.0: {} + + lodash.isboolean@3.0.3: {} + + lodash.isinteger@4.0.4: {} + + lodash.isnumber@3.0.3: {} + + lodash.isplainobject@4.0.6: {} + + lodash.isstring@4.0.1: {} + lodash.merge@4.6.2: {} + lodash.once@4.1.1: {} + lodash.throttle@4.1.1: {} lodash@4.17.21: {} @@ -15568,16 +14682,20 @@ snapshots: dependencies: chalk: 2.4.2 + log-update@6.1.0: + dependencies: + ansi-escapes: 7.2.0 + cli-cursor: 5.0.0 + slice-ansi: 7.1.2 + strip-ansi: 7.1.2 + wrap-ansi: 9.0.2 + long@5.3.2: {} loose-envify@1.4.0: dependencies: js-tokens: 4.0.0 - loupe@2.3.7: - dependencies: - get-func-name: 2.0.2 - loupe@3.2.1: {} lru-cache@10.4.3: {} @@ -15594,10 +14712,6 @@ snapshots: lru-cache@7.18.3: {} - lru-queue@0.1.0: - dependencies: - es5-ext: 0.10.64 - lru.min@1.1.3: {} lz4js@0.2.0: {} @@ -15643,28 +14757,17 @@ snapshots: map-stream@0.1.0: {} - marked-terminal@6.2.0(marked@9.1.6): - dependencies: - ansi-escapes: 6.2.1 - cardinal: 2.1.1 - chalk: 5.6.2 - cli-table3: 0.6.5 - marked: 9.1.6 - node-emoji: 2.2.0 - supports-hyperlinks: 3.2.0 - - marked-terminal@7.3.0(marked@9.1.6): + marked-terminal@7.1.0(marked@9.1.2): dependencies: ansi-escapes: 7.2.0 - ansi-regex: 6.2.2 chalk: 5.6.2 cli-highlight: 2.1.11 cli-table3: 0.6.5 - marked: 9.1.6 + marked: 9.1.2 node-emoji: 2.2.0 supports-hyperlinks: 3.2.0 - marked@9.1.6: {} + marked@9.1.2: {} marky@1.3.0: {} @@ -15687,17 +14790,6 @@ snapshots: memoize-one@5.2.1: {} - memoizee@0.4.17: - dependencies: - d: 1.0.2 - es5-ext: 0.10.64 - es6-weak-map: 2.0.3 - event-emitter: 0.3.5 - is-promise: 2.2.2 - lru-queue: 0.1.0 - next-tick: 1.1.0 - timers-ext: 0.1.8 - meow@12.1.1: {} merge-descriptors@2.0.0: {} @@ -16079,9 +15171,9 @@ snapshots: mimic-fn@4.0.0: {} - mimic-response@3.1.0: {} + mimic-function@5.0.1: {} - min-indent@1.0.1: {} + mimic-response@3.1.0: {} minimatch@10.1.1: dependencies: @@ -16170,12 +15262,31 @@ snapshots: mri@1.2.0: {} - mrmime@2.0.1: {} - ms@2.0.0: {} ms@2.1.3: {} + mssql@11.0.1: + dependencies: + '@tediousjs/connection-string': 0.5.0 + commander: 11.1.0 + debug: 4.4.3 + rfdc: 1.4.1 + tarn: 3.0.2 + tedious: 18.6.2 + transitivePeerDependencies: + - supports-color + + mssql@12.1.1: + dependencies: + '@tediousjs/connection-string': 0.6.0 + commander: 11.1.0 + debug: 4.4.3 + tarn: 3.0.2 + tedious: 19.1.3 + transitivePeerDependencies: + - supports-color + mysql2@3.14.1: dependencies: aws-ssl-profiles: 1.1.2 @@ -16201,10 +15312,14 @@ snapshots: nan@2.23.1: optional: true + nano-spawn@2.0.0: {} + nanoid@3.3.11: {} napi-build-utils@2.0.0: {} + native-duplexpair@1.0.0: {} + natural-compare@1.4.0: {} negotiator@0.6.3: {} @@ -16217,8 +15332,6 @@ snapshots: nested-error-stacks@2.1.1: {} - next-tick@1.1.0: {} - node-abi@3.85.0: dependencies: semver: 7.7.3 @@ -16274,13 +15387,6 @@ snapshots: abbrev: 1.1.1 optional: true - normalize-package-data@2.5.0: - dependencies: - hosted-git-info: 2.8.9 - resolve: 1.22.11 - semver: 5.7.2 - validate-npm-package-license: 3.0.4 - normalize-path@3.0.0: {} npm-package-arg@11.0.3: @@ -16325,37 +15431,6 @@ snapshots: object-inspect@1.13.4: {} - object-keys@1.1.1: {} - - object.assign@4.1.7: - dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - define-properties: 1.2.1 - es-object-atoms: 1.1.1 - has-symbols: 1.1.0 - object-keys: 1.1.1 - - object.fromentries@2.0.8: - dependencies: - call-bind: 1.0.8 - define-properties: 1.2.1 - es-abstract: 1.24.0 - es-object-atoms: 1.1.1 - - object.groupby@1.0.3: - dependencies: - call-bind: 1.0.8 - define-properties: 1.2.1 - es-abstract: 1.24.0 - - object.values@1.2.1: - dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - define-properties: 1.2.1 - es-object-atoms: 1.1.1 - obuf@1.1.2: {} ohm-js@17.2.1: {} @@ -16384,6 +15459,17 @@ snapshots: dependencies: mimic-fn: 4.0.0 + onetime@7.0.0: + dependencies: + mimic-function: 5.0.1 + + open@10.2.0: + dependencies: + default-browser: 5.4.0 + define-lazy-prop: 3.0.0 + is-inside-container: 1.0.0 + wsl-utils: 0.1.0 + open@7.4.2: dependencies: is-docker: 2.2.1 @@ -16430,11 +15516,16 @@ snapshots: strip-ansi: 5.2.0 wcwidth: 1.0.1 - own-keys@1.0.1: - dependencies: - get-intrinsic: 1.3.0 - object-keys: 1.1.1 - safe-push-apply: 1.0.0 + oxlint@1.30.0: + optionalDependencies: + '@oxlint/darwin-arm64': 1.30.0 + '@oxlint/darwin-x64': 1.30.0 + '@oxlint/linux-arm64-gnu': 1.30.0 + '@oxlint/linux-arm64-musl': 1.30.0 + '@oxlint/linux-x64-gnu': 1.30.0 + '@oxlint/linux-x64-musl': 1.30.0 + '@oxlint/win32-arm64': 1.30.0 + '@oxlint/win32-x64': 1.30.0 p-defer@1.0.0: {} @@ -16503,13 +15594,6 @@ snapshots: dependencies: callsites: 3.1.0 - parse-json@5.2.0: - dependencies: - '@babel/code-frame': 7.27.1 - error-ex: 1.3.4 - json-parse-even-better-errors: 2.3.1 - lines-and-columns: 1.2.4 - parse-ms@3.0.0: {} parse-package-name@1.0.0: {} @@ -16558,8 +15642,6 @@ snapshots: path-type@6.0.0: {} - pathe@1.1.2: {} - pathe@2.0.3: {} pathval@2.0.1: {} @@ -16623,6 +15705,8 @@ snapshots: picomatch@4.0.3: {} + pidtree@0.6.0: {} + pirates@4.0.7: {} pkce-challenge@4.1.0: {} @@ -16654,14 +15738,6 @@ snapshots: possible-typed-array-names@1.1.0: {} - postcss-load-config@6.0.1(postcss@8.5.6)(tsx@3.14.0)(yaml@2.8.1): - dependencies: - lilconfig: 3.1.3 - optionalDependencies: - postcss: 8.5.6 - tsx: 3.14.0 - yaml: 2.8.1 - postcss-load-config@6.0.1(postcss@8.5.6)(tsx@4.20.6)(yaml@2.8.1): dependencies: lilconfig: 3.1.3 @@ -16725,10 +15801,6 @@ snapshots: prelude-ls@1.2.1: {} - prettier-linter-helpers@1.0.0: - dependencies: - fast-diff: 1.3.0 - prettier@3.5.3: {} prettier@3.6.2: {} @@ -16751,6 +15823,8 @@ snapshots: proc-log@4.2.0: {} + process@0.11.10: {} + progress@2.0.3: {} promise-inflight@1.0.1: @@ -16785,7 +15859,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 18.19.130 + '@types/node': 24.10.1 long: 5.3.2 proxy-addr@2.0.7: @@ -16984,25 +16058,20 @@ snapshots: dependencies: loose-envify: 1.4.0 - read-pkg-up@7.0.1: - dependencies: - find-up: 4.1.0 - read-pkg: 5.2.0 - type-fest: 0.8.1 - - read-pkg@5.2.0: - dependencies: - '@types/normalize-package-data': 2.4.4 - normalize-package-data: 2.5.0 - parse-json: 5.2.0 - type-fest: 0.6.0 - readable-stream@3.6.2: dependencies: inherits: 2.0.4 string_decoder: 1.3.0 util-deprecate: 1.0.2 + readable-stream@4.7.0: + dependencies: + abort-controller: 3.0.0 + buffer: 6.0.3 + events: 3.3.0 + process: 0.11.10 + string_decoder: 1.3.0 + readdirp@3.6.0: dependencies: picomatch: 2.3.1 @@ -17017,21 +16086,6 @@ snapshots: tiny-invariant: 1.3.3 tslib: 2.8.1 - redeyed@2.1.1: - dependencies: - esprima: 4.0.1 - - reflect.getprototypeof@1.0.10: - dependencies: - call-bind: 1.0.8 - define-properties: 1.2.1 - es-abstract: 1.24.0 - es-errors: 1.3.0 - es-object-atoms: 1.1.1 - get-intrinsic: 1.3.0 - get-proto: 1.0.1 - which-builtin-type: 1.2.1 - regenerate-unicode-properties@10.2.2: dependencies: regenerate: 1.4.2 @@ -17040,17 +16094,6 @@ snapshots: regenerator-runtime@0.13.11: {} - regexp-tree@0.1.27: {} - - regexp.prototype.flags@1.5.4: - dependencies: - call-bind: 1.0.8 - define-properties: 1.2.1 - es-errors: 1.3.0 - get-proto: 1.0.1 - gopd: 1.2.0 - set-function-name: 2.0.2 - regexpu-core@6.4.0: dependencies: regenerate: 1.4.2 @@ -17062,10 +16105,6 @@ snapshots: regjsgen@0.8.0: {} - regjsparser@0.10.0: - dependencies: - jsesc: 0.5.0 - regjsparser@0.13.0: dependencies: jsesc: 3.1.0 @@ -17101,13 +16140,6 @@ snapshots: fast-glob: 3.3.2 typescript: 5.9.2 - resolve-tspaths@0.8.23(typescript@6.0.0-dev.20251126): - dependencies: - ansi-colors: 4.1.3 - commander: 12.1.0 - fast-glob: 3.3.2 - typescript: 6.0.0-dev.20251126 - resolve-workspace-root@2.0.0: {} resolve.exports@2.0.3: {} @@ -17127,6 +16159,11 @@ snapshots: onetime: 2.0.1 signal-exit: 3.0.7 + restore-cursor@5.1.0: + dependencies: + onetime: 7.0.0 + signal-exit: 4.1.0 + retry@0.12.0: optional: true @@ -17134,6 +16171,8 @@ snapshots: reusify@1.1.0: {} + rfdc@1.4.1: {} + rimraf@3.0.2: dependencies: glob: 7.2.3 @@ -17184,6 +16223,8 @@ snapshots: transitivePeerDependencies: - supports-color + run-applescript@7.1.0: {} + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 @@ -17196,21 +16237,8 @@ snapshots: dependencies: mri: 1.2.0 - safe-array-concat@1.1.3: - dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - get-intrinsic: 1.3.0 - has-symbols: 1.1.0 - isarray: 2.0.5 - safe-buffer@5.2.1: {} - safe-push-apply@1.0.0: - dependencies: - es-errors: 1.3.0 - isarray: 2.0.5 - safe-regex-test@1.1.0: dependencies: call-bound: 1.0.4 @@ -17225,8 +16253,6 @@ snapshots: scheduler@0.26.0: {} - semver@5.7.2: {} - semver@6.3.1: {} semver@7.7.3: {} @@ -17327,19 +16353,6 @@ snapshots: gopd: 1.2.0 has-property-descriptors: 1.0.2 - set-function-name@2.0.2: - dependencies: - define-data-property: 1.1.4 - es-errors: 1.3.0 - functions-have-names: 1.2.3 - has-property-descriptors: 1.0.2 - - set-proto@1.0.0: - dependencies: - dunder-proto: 1.0.1 - es-errors: 1.3.0 - es-object-atoms: 1.1.1 - setprototypeof@1.2.0: {} shebang-command@2.0.0: @@ -17398,12 +16411,6 @@ snapshots: bplist-parser: 0.3.1 plist: 3.1.0 - sirv@2.0.4: - dependencies: - '@polka/url': 1.0.0-next.29 - mrmime: 2.0.1 - totalist: 3.0.1 - sisteransi@1.0.5: {} skin-tone@2.0.0: @@ -17421,6 +16428,11 @@ snapshots: ansi-styles: 6.2.3 is-fullwidth-code-point: 4.0.0 + slice-ansi@7.1.2: + dependencies: + ansi-styles: 6.2.3 + is-fullwidth-code-point: 5.1.0 + slugify@1.6.6: {} smart-buffer@4.2.0: @@ -17476,20 +16488,6 @@ snapshots: spawn-command@0.0.2: {} - spdx-correct@3.2.0: - dependencies: - spdx-expression-parse: 3.0.1 - spdx-license-ids: 3.0.22 - - spdx-exceptions@2.5.0: {} - - spdx-expression-parse@3.0.1: - dependencies: - spdx-exceptions: 2.5.0 - spdx-license-ids: 3.0.22 - - spdx-license-ids@3.0.22: {} - split-ca@1.0.1: {} split2@3.2.2: @@ -17504,6 +16502,8 @@ snapshots: sprintf-js@1.0.3: {} + sprintf-js@1.1.3: {} + sql.js@1.13.0: {} sqlite3@5.1.7: @@ -17596,11 +16596,6 @@ snapshots: std-env@3.10.0: {} - stop-iteration-iterator@1.1.0: - dependencies: - es-errors: 1.3.0 - internal-slot: 1.1.0 - stream-buffers@2.2.0: {} stream-combiner@0.0.4: @@ -17609,6 +16604,8 @@ snapshots: streamsearch@1.1.0: {} + string-argv@0.3.2: {} + string-width@4.2.3: dependencies: emoji-regex: 8.0.0 @@ -17621,28 +16618,16 @@ snapshots: emoji-regex: 9.2.2 strip-ansi: 7.1.2 - string.prototype.trim@1.2.10: - dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - define-data-property: 1.1.4 - define-properties: 1.2.1 - es-abstract: 1.24.0 - es-object-atoms: 1.1.1 - has-property-descriptors: 1.0.2 - - string.prototype.trimend@1.0.9: + string-width@7.2.0: dependencies: - call-bind: 1.0.8 - call-bound: 1.0.4 - define-properties: 1.2.1 - es-object-atoms: 1.1.1 + emoji-regex: 10.6.0 + get-east-asian-width: 1.4.0 + strip-ansi: 7.1.2 - string.prototype.trimstart@1.0.8: + string-width@8.1.0: dependencies: - call-bind: 1.0.8 - define-properties: 1.2.1 - es-object-atoms: 1.1.1 + get-east-asian-width: 1.4.0 + strip-ansi: 7.1.2 string_decoder@1.3.0: dependencies: @@ -17660,22 +16645,12 @@ snapshots: dependencies: ansi-regex: 6.2.2 - strip-bom@3.0.0: {} - strip-final-newline@3.0.0: {} - strip-indent@3.0.0: - dependencies: - min-indent: 1.0.1 - strip-json-comments@2.0.1: {} strip-json-comments@3.1.1: {} - strip-literal@3.1.0: - dependencies: - js-tokens: 9.0.1 - strnum@2.1.1: {} structured-headers@0.4.1: {} @@ -17700,10 +16675,6 @@ snapshots: tinyglobby: 0.2.15 ts-interface-checker: 0.1.13 - superjson@2.2.5: - dependencies: - copy-anything: 4.0.5 - supertap@3.0.1: dependencies: indent-string: 5.0.0 @@ -17735,10 +16706,6 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} - synckit@0.11.11: - dependencies: - '@pkgr/core': 0.2.9 - tar-fs@2.1.4: dependencies: chownr: 1.1.4 @@ -17771,6 +16738,38 @@ snapshots: minizlib: 3.1.0 yallist: 5.0.0 + tarn@3.0.2: {} + + tedious@18.6.2: + dependencies: + '@azure/core-auth': 1.10.1 + '@azure/identity': 4.13.0 + '@azure/keyvault-keys': 4.10.0 + '@js-joda/core': 5.6.5 + '@types/node': 20.19.25 + bl: 6.1.5 + iconv-lite: 0.6.3 + js-md4: 0.3.2 + native-duplexpair: 1.0.0 + sprintf-js: 1.1.3 + transitivePeerDependencies: + - supports-color + + tedious@19.1.3: + dependencies: + '@azure/core-auth': 1.10.1 + '@azure/identity': 4.13.0 + '@azure/keyvault-keys': 4.10.0 + '@js-joda/core': 5.6.5 + '@types/node': 24.10.1 + bl: 6.1.5 + iconv-lite: 0.7.0 + js-md4: 0.3.2 + native-duplexpair: 1.0.0 + sprintf-js: 1.1.3 + transitivePeerDependencies: + - supports-color + temp-dir@2.0.0: {} temp-dir@3.0.0: {} @@ -17813,11 +16812,6 @@ snapshots: time-zone@1.0.0: {} - timers-ext@0.1.8: - dependencies: - es5-ext: 0.10.64 - next-tick: 1.1.0 - tiny-invariant@1.3.3: {} tiny-queue@0.2.1: {} @@ -17831,11 +16825,7 @@ snapshots: fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 - tinypool@1.1.1: {} - - tinyrainbow@2.0.0: {} - - tinyspy@4.0.4: {} + tinyrainbow@3.0.3: {} tmpl@1.0.5: {} @@ -17845,8 +16835,6 @@ snapshots: toidentifier@1.0.1: {} - totalist@3.0.1: {} - tr46@5.1.1: dependencies: punycode: 2.3.1 @@ -17855,11 +16843,11 @@ snapshots: treeify@1.1.0: {} - ts-api-utils@1.4.3(typescript@5.9.2): + ts-api-utils@1.4.3(typescript@5.9.3): dependencies: - typescript: 5.9.2 + typescript: 5.9.3 - ts-expose-internals-conditionally@1.0.0-empty.0: {} + ts-expose-internals@5.6.3: {} ts-interface-checker@0.1.13: {} @@ -17868,7 +16856,7 @@ snapshots: '@ts-morph/common': 0.26.1 code-block-writer: 13.0.3 - ts-node@10.9.2(@types/node@20.19.25)(typescript@6.0.0-dev.20251126): + ts-node@10.9.2(@types/node@20.19.25)(typescript@5.9.2): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.12 @@ -17882,7 +16870,7 @@ snapshots: create-require: 1.1.1 diff: 4.0.2 make-error: 1.3.6 - typescript: 6.0.0-dev.20251126 + typescript: 5.9.2 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 @@ -17890,22 +16878,9 @@ snapshots: optionalDependencies: typescript: 5.9.2 - tsconfck@3.1.6(typescript@6.0.0-dev.20251126): - optionalDependencies: - typescript: 6.0.0-dev.20251126 - - tsconfig-paths@3.15.0: - dependencies: - '@types/json5': 0.0.29 - json5: 1.0.2 - minimist: 1.2.8 - strip-bom: 3.0.0 - - tslib@1.14.1: {} - tslib@2.8.1: {} - tsup@8.5.1(postcss@8.5.6)(tsx@3.14.0)(typescript@5.9.2)(yaml@2.8.1): + tsup@8.5.1(postcss@8.5.6)(tsx@4.20.6)(typescript@5.9.2)(yaml@2.8.1): dependencies: bundle-require: 5.1.0(esbuild@0.27.0) cac: 6.7.14 @@ -17916,7 +16891,7 @@ snapshots: fix-dts-default-cjs-exports: 1.0.1 joycon: 3.1.1 picocolors: 1.1.1 - postcss-load-config: 6.0.1(postcss@8.5.6)(tsx@3.14.0)(yaml@2.8.1) + postcss-load-config: 6.0.1(postcss@8.5.6)(tsx@4.20.6)(yaml@2.8.1) resolve-from: 5.0.0 rollup: 4.53.3 source-map: 0.7.6 @@ -17933,7 +16908,7 @@ snapshots: - tsx - yaml - tsup@8.5.1(postcss@8.5.6)(tsx@4.20.6)(typescript@5.9.2)(yaml@2.8.1): + tsup@8.5.1(postcss@8.5.6)(tsx@4.20.6)(typescript@5.9.3)(yaml@2.8.1): dependencies: bundle-require: 5.1.0(esbuild@0.27.0) cac: 6.7.14 @@ -17954,18 +16929,13 @@ snapshots: tree-kill: 1.2.2 optionalDependencies: postcss: 8.5.6 - typescript: 5.9.2 + typescript: 5.9.3 transitivePeerDependencies: - jiti - supports-color - tsx - yaml - tsutils@3.21.0(typescript@5.9.2): - dependencies: - tslib: 1.14.1 - typescript: 5.9.2 - tsx@3.14.0: dependencies: esbuild: 0.18.20 @@ -18026,56 +16996,15 @@ snapshots: type-fest@0.21.3: {} - type-fest@0.6.0: {} - type-fest@0.7.1: {} - type-fest@0.8.1: {} - type-is@2.0.1: dependencies: content-type: 1.0.5 media-typer: 1.1.0 mime-types: 3.0.2 - type@2.7.3: {} - - typed-array-buffer@1.0.3: - dependencies: - call-bound: 1.0.4 - es-errors: 1.3.0 - is-typed-array: 1.1.15 - - typed-array-byte-length@1.0.3: - dependencies: - call-bind: 1.0.8 - for-each: 0.3.5 - gopd: 1.2.0 - has-proto: 1.2.0 - is-typed-array: 1.1.15 - - typed-array-byte-offset@1.0.4: - dependencies: - available-typed-arrays: 1.0.7 - call-bind: 1.0.8 - for-each: 0.3.5 - gopd: 1.2.0 - has-proto: 1.2.0 - is-typed-array: 1.1.15 - reflect.getprototypeof: 1.0.10 - - typed-array-length@1.0.7: - dependencies: - call-bind: 1.0.8 - for-each: 0.3.5 - gopd: 1.2.0 - is-typed-array: 1.1.15 - possible-typed-array-names: 1.1.0 - reflect.getprototypeof: 1.0.10 - - typescript@5.3.3: {} - - typescript@5.6.1-rc: {} + typescript@5.6.1-rc: {} typescript@5.9.2: {} @@ -18085,13 +17014,6 @@ snapshots: ufo@1.6.1: {} - unbox-primitive@1.1.0: - dependencies: - call-bound: 1.0.4 - has-bigints: 1.1.0 - has-symbols: 1.1.0 - which-boxed-primitive: 1.1.1 - uncrypto@0.1.3: {} undici-types@5.26.5: {} @@ -18178,6 +17100,8 @@ snapshots: uuid@8.0.0: {} + uuid@8.3.2: {} + uuid@9.0.1: {} uvu@0.5.6: @@ -18189,14 +17113,9 @@ snapshots: v8-compile-cache-lib@3.0.1: {} - valibot@1.0.0-beta.7(typescript@6.0.0-dev.20251126): + valibot@1.0.0-beta.7(typescript@5.9.2): optionalDependencies: - typescript: 6.0.0-dev.20251126 - - validate-npm-package-license@3.0.4: - dependencies: - spdx-correct: 3.2.0 - spdx-expression-parse: 3.0.1 + typescript: 5.9.2 validate-npm-package-name@4.0.0: dependencies: @@ -18206,203 +17125,28 @@ snapshots: vary@1.1.2: {} - vite-node@3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1): - dependencies: - cac: 6.7.14 - debug: 4.4.3 - es-module-lexer: 1.7.0 - pathe: 2.0.3 - vite: 7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - transitivePeerDependencies: - - '@types/node' - - jiti - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vite-node@3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): - dependencies: - cac: 6.7.14 - debug: 4.4.3 - es-module-lexer: 1.7.0 - pathe: 2.0.3 - vite: 7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - transitivePeerDependencies: - - '@types/node' - - jiti - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vite-node@3.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1): - dependencies: - cac: 6.7.14 - debug: 4.4.3 - es-module-lexer: 1.7.0 - pathe: 2.0.3 - vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - transitivePeerDependencies: - - '@types/node' - - jiti - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vite-node@3.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): - dependencies: - cac: 6.7.14 - debug: 4.4.3 - es-module-lexer: 1.7.0 - pathe: 2.0.3 - vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - transitivePeerDependencies: - - '@types/node' - - jiti - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vite-node@3.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): - dependencies: - cac: 6.7.14 - debug: 4.4.3 - es-module-lexer: 1.7.0 - pathe: 2.0.3 - vite: 7.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - transitivePeerDependencies: - - '@types/node' - - jiti - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vite-tsconfig-paths@4.3.2(typescript@5.9.2)(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1)): + vite-tsconfig-paths@4.3.2(typescript@5.9.2)(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)): dependencies: debug: 4.4.3 globrex: 0.1.2 tsconfck: 3.1.6(typescript@5.9.2) optionalDependencies: - vite: 7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - transitivePeerDependencies: - - supports-color - - typescript - - vite-tsconfig-paths@4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)): - dependencies: - debug: 4.4.3 - globrex: 0.1.2 - tsconfck: 3.1.6(typescript@6.0.0-dev.20251126) - optionalDependencies: - vite: 7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - transitivePeerDependencies: - - supports-color - - typescript - - vite-tsconfig-paths@4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1)): - dependencies: - debug: 4.4.3 - globrex: 0.1.2 - tsconfck: 3.1.6(typescript@6.0.0-dev.20251126) - optionalDependencies: - vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) + vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - typescript - vite-tsconfig-paths@4.3.2(typescript@6.0.0-dev.20251126)(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)): + vite-tsconfig-paths@4.3.2(typescript@5.9.2)(vite@7.2.4(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)): dependencies: debug: 4.4.3 globrex: 0.1.2 - tsconfck: 3.1.6(typescript@6.0.0-dev.20251126) + tsconfck: 3.1.6(typescript@5.9.2) optionalDependencies: - vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) + vite: 7.2.4(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) transitivePeerDependencies: - supports-color - typescript - vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1): - dependencies: - esbuild: 0.25.12 - fdir: 6.5.0(picomatch@4.0.3) - picomatch: 4.0.3 - postcss: 8.5.6 - rollup: 4.53.3 - tinyglobby: 0.2.15 - optionalDependencies: - '@types/node': 18.19.130 - fsevents: 2.3.3 - lightningcss: 1.30.2 - terser: 5.44.1 - tsx: 3.14.0 - yaml: 2.8.1 - - vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): - dependencies: - esbuild: 0.25.12 - fdir: 6.5.0(picomatch@4.0.3) - picomatch: 4.0.3 - postcss: 8.5.6 - rollup: 4.53.3 - tinyglobby: 0.2.15 - optionalDependencies: - '@types/node': 18.19.130 - fsevents: 2.3.3 - lightningcss: 1.30.2 - terser: 5.44.1 - tsx: 4.20.6 - yaml: 2.8.1 - - vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1): - dependencies: - esbuild: 0.25.12 - fdir: 6.5.0(picomatch@4.0.3) - picomatch: 4.0.3 - postcss: 8.5.6 - rollup: 4.53.3 - tinyglobby: 0.2.15 - optionalDependencies: - '@types/node': 20.19.25 - fsevents: 2.3.3 - lightningcss: 1.30.2 - terser: 5.44.1 - tsx: 3.14.0 - yaml: 2.8.1 - vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): dependencies: esbuild: 0.25.12 @@ -18418,8 +17162,9 @@ snapshots: terser: 5.44.1 tsx: 4.20.6 yaml: 2.8.1 + optional: true - vite@7.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): + vite@7.2.4(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): dependencies: esbuild: 0.25.12 fdir: 6.5.0(picomatch@4.0.3) @@ -18428,190 +17173,24 @@ snapshots: rollup: 4.53.3 tinyglobby: 0.2.15 optionalDependencies: - '@types/node': 22.19.1 + '@types/node': 24.10.1 fsevents: 2.3.3 lightningcss: 1.30.2 terser: 5.44.1 tsx: 4.20.6 yaml: 2.8.1 - vitest@3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1): - dependencies: - '@types/chai': 5.2.3 - '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1)) - '@vitest/pretty-format': 3.2.4 - '@vitest/runner': 3.2.4 - '@vitest/snapshot': 3.2.4 - '@vitest/spy': 3.2.4 - '@vitest/utils': 3.2.4 - chai: 5.3.3 - debug: 4.4.3 - expect-type: 1.2.2 - magic-string: 0.30.21 - pathe: 2.0.3 - picomatch: 4.0.3 - std-env: 3.10.0 - tinybench: 2.9.0 - tinyexec: 0.3.2 - tinyglobby: 0.2.15 - tinypool: 1.1.1 - tinyrainbow: 2.0.0 - vite: 7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - vite-node: 3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - why-is-node-running: 2.3.0 - optionalDependencies: - '@types/node': 18.19.130 - transitivePeerDependencies: - - jiti - - less - - lightningcss - - msw - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vitest@3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): - dependencies: - '@types/chai': 5.2.3 - '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) - '@vitest/pretty-format': 3.2.4 - '@vitest/runner': 3.2.4 - '@vitest/snapshot': 3.2.4 - '@vitest/spy': 3.2.4 - '@vitest/utils': 3.2.4 - chai: 5.3.3 - debug: 4.4.3 - expect-type: 1.2.2 - magic-string: 0.30.21 - pathe: 2.0.3 - picomatch: 4.0.3 - std-env: 3.10.0 - tinybench: 2.9.0 - tinyexec: 0.3.2 - tinyglobby: 0.2.15 - tinypool: 1.1.1 - tinyrainbow: 2.0.0 - vite: 7.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - vite-node: 3.2.4(@types/node@18.19.130)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - why-is-node-running: 2.3.0 - optionalDependencies: - '@types/node': 18.19.130 - transitivePeerDependencies: - - jiti - - less - - lightningcss - - msw - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vitest@3.2.4(@types/node@20.19.25)(@vitest/ui@1.6.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): + vitest@4.0.13(@opentelemetry/api@1.9.0)(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): dependencies: - '@types/chai': 5.2.3 - '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) - '@vitest/pretty-format': 3.2.4 - '@vitest/runner': 3.2.4 - '@vitest/snapshot': 3.2.4 - '@vitest/spy': 3.2.4 - '@vitest/utils': 3.2.4 - chai: 5.3.3 - debug: 4.4.3 - expect-type: 1.2.2 - magic-string: 0.30.21 - pathe: 2.0.3 - picomatch: 4.0.3 - std-env: 3.10.0 - tinybench: 2.9.0 - tinyexec: 0.3.2 - tinyglobby: 0.2.15 - tinypool: 1.1.1 - tinyrainbow: 2.0.0 - vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - vite-node: 3.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - why-is-node-running: 2.3.0 - optionalDependencies: - '@types/node': 20.19.25 - '@vitest/ui': 1.6.1(vitest@3.2.4) - transitivePeerDependencies: - - jiti - - less - - lightningcss - - msw - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vitest@3.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1): - dependencies: - '@types/chai': 5.2.3 - '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1)) - '@vitest/pretty-format': 3.2.4 - '@vitest/runner': 3.2.4 - '@vitest/snapshot': 3.2.4 - '@vitest/spy': 3.2.4 - '@vitest/utils': 3.2.4 - chai: 5.3.3 - debug: 4.4.3 - expect-type: 1.2.2 - magic-string: 0.30.21 - pathe: 2.0.3 - picomatch: 4.0.3 - std-env: 3.10.0 - tinybench: 2.9.0 - tinyexec: 0.3.2 - tinyglobby: 0.2.15 - tinypool: 1.1.1 - tinyrainbow: 2.0.0 - vite: 7.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - vite-node: 3.2.4(@types/node@20.19.25)(lightningcss@1.30.2)(terser@5.44.1)(tsx@3.14.0)(yaml@2.8.1) - why-is-node-running: 2.3.0 - optionalDependencies: - '@types/node': 20.19.25 - transitivePeerDependencies: - - jiti - - less - - lightningcss - - msw - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - - tsx - - yaml - - vitest@3.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1): - dependencies: - '@types/chai': 5.2.3 - '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(vite@7.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) - '@vitest/pretty-format': 3.2.4 - '@vitest/runner': 3.2.4 - '@vitest/snapshot': 3.2.4 - '@vitest/spy': 3.2.4 - '@vitest/utils': 3.2.4 - chai: 5.3.3 + '@vitest/expect': 4.0.13 + '@vitest/mocker': 4.0.13(vite@7.2.4(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1)) + '@vitest/pretty-format': 4.0.13 + '@vitest/runner': 4.0.13 + '@vitest/snapshot': 4.0.13 + '@vitest/spy': 4.0.13 + '@vitest/utils': 4.0.13 debug: 4.4.3 + es-module-lexer: 1.7.0 expect-type: 1.2.2 magic-string: 0.30.21 pathe: 2.0.3 @@ -18620,13 +17199,12 @@ snapshots: tinybench: 2.9.0 tinyexec: 0.3.2 tinyglobby: 0.2.15 - tinypool: 1.1.1 - tinyrainbow: 2.0.0 - vite: 7.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) - vite-node: 3.2.4(@types/node@22.19.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) + tinyrainbow: 3.0.3 + vite: 7.2.4(@types/node@24.10.1)(lightningcss@1.30.2)(terser@5.44.1)(tsx@4.20.6)(yaml@2.8.1) why-is-node-running: 2.3.0 optionalDependencies: - '@types/node': 22.19.1 + '@opentelemetry/api': 1.9.0 + '@types/node': 24.10.1 transitivePeerDependencies: - jiti - less @@ -18674,37 +17252,6 @@ snapshots: tr46: 5.1.1 webidl-conversions: 7.0.0 - which-boxed-primitive@1.1.1: - dependencies: - is-bigint: 1.1.0 - is-boolean-object: 1.2.2 - is-number-object: 1.1.1 - is-string: 1.1.1 - is-symbol: 1.1.1 - - which-builtin-type@1.2.1: - dependencies: - call-bound: 1.0.4 - function.prototype.name: 1.1.8 - has-tostringtag: 1.0.2 - is-async-function: 2.1.1 - is-date-object: 1.1.0 - is-finalizationregistry: 1.1.1 - is-generator-function: 1.1.2 - is-regex: 1.2.1 - is-weakref: 1.1.1 - isarray: 2.0.5 - which-boxed-primitive: 1.1.1 - which-collection: 1.0.2 - which-typed-array: 1.1.19 - - which-collection@1.0.2: - dependencies: - is-map: 2.0.3 - is-set: 2.0.3 - is-weakmap: 2.0.2 - is-weakset: 2.0.4 - which-typed-array@1.1.19: dependencies: available-typed-arrays: 1.0.7 @@ -18755,6 +17302,12 @@ snapshots: string-width: 5.1.2 strip-ansi: 7.1.2 + wrap-ansi@9.0.2: + dependencies: + ansi-styles: 6.2.3 + string-width: 7.2.0 + strip-ansi: 7.1.2 + wrappy@1.0.2: {} write-file-atomic@4.0.2: @@ -18794,6 +17347,10 @@ snapshots: bufferutil: 4.0.8 utf-8-validate: 6.0.3 + wsl-utils@0.1.0: + dependencies: + is-wsl: 3.1.0 + xcode@3.0.1: dependencies: simple-plist: 1.3.1 diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 1e396201cf..bcbd5e6c3d 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,4 +1,5 @@ packages: + - attw-fork - drizzle-orm - drizzle-kit - drizzle-zod diff --git a/src/db-ops/mocks.ts b/src/db-ops/mocks.ts new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tsconfig.json b/tsconfig.json index 0b23000856..0f989f7cc9 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -34,8 +34,7 @@ "allowUnreachableCode": false, /* Disable error reporting for unreachable code. */ "skipLibCheck": true, /* Skip type checking all .d.ts files. */ "noErrorTruncation": true, /* Disable truncating types in error messages. */ - "checkJs": true, - "allowImportingTsExtensions": true + "checkJs": true }, "exclude": ["**/dist"] } diff --git a/turbo.json b/turbo.json index a0b089f6e9..8d83522ae1 100644 --- a/turbo.json +++ b/turbo.json @@ -3,8 +3,7 @@ "tasks": { "//#lint": { "dependsOn": [ - "^test:types", - "drizzle-orm#build" + "^test:types" ], "inputs": [ "**/*.ts", @@ -16,9 +15,7 @@ }, "test:types": { "dependsOn": [ - "^test:types", - "drizzle-orm#build", - "drizzle-seed#build" + "^test:types" ], "inputs": [ "src/**/*.ts", @@ -29,6 +26,16 @@ ], "outputLogs": "new-only" }, + "attw-fork#build": { + "inputs": [], + "outputs": [], + "outputLogs": "none" + }, + "typeperf-tests#build": { + "inputs": [], + "outputs": [], + "outputLogs": "none" + }, "drizzle-orm#build": { "inputs": [ "src/**/*.ts", @@ -225,6 +232,212 @@ ], "outputLogs": "new-only" }, + "attw-fork#build:artifact": { + "inputs": [], + "outputs": [], + "outputLogs": "none" + }, + "typeperf-tests#build:artifact": { + "inputs": [], + "outputs": [], + "outputLogs": "none" + }, + "drizzle-orm#build:artifact": { + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, + "drizzle-kit#build:artifact": { + "dependsOn": [ + "drizzle-orm#build:artifact" + ], + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, + "drizzle-zod#build:artifact": { + "dependsOn": [ + "drizzle-orm#build:artifact" + ], + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, + "drizzle-typebox#build:artifact": { + "dependsOn": [ + "drizzle-orm#build:artifact" + ], + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, + "drizzle-valibot#build:artifact": { + "dependsOn": [ + "drizzle-orm#build:artifact" + ], + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, + "drizzle-arktype#build:artifact": { + "dependsOn": [ + "drizzle-orm#build:artifact" + ], + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, + "eslint-plugin-drizzle#build:artifact": { + "dependsOn": [ + "drizzle-orm#build:artifact" + ], + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, + "drizzle-seed#build:artifact": { + "dependsOn": [ + "drizzle-orm#build:artifact" + ], + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, + "integration-tests#build:artifact": { + "dependsOn": [ + "drizzle-orm#build:artifact", + "drizzle-seed#build:artifact" + ], + "inputs": [ + "src/**/*.ts", + "package.json", + "README.md", + "../README.md", + "tsconfig.json", + "tsconfig.*.json", + "tsup.config.ts", + "scripts/build.ts", + "scripts/fix-imports.ts", + "../tsconfig.json" + ], + "outputs": [ + "dist/**", + "dist-dts/**" + ], + "outputLogs": "new-only" + }, "pack": { "dependsOn": [ "build", @@ -238,6 +451,18 @@ ], "outputLogs": "new-only" }, + "pack:artifact": { + "dependsOn": [ + "build:artifact" + ], + "inputs": [ + "dist/**" + ], + "outputs": [ + "package.tgz" + ], + "outputLogs": "new-only" + }, "test": { "dependsOn": [ "build", diff --git a/typeperf-test/package.json b/typeperf-test/package.json index e6ba11eb91..f473fca8fc 100644 --- a/typeperf-test/package.json +++ b/typeperf-test/package.json @@ -4,7 +4,13 @@ "description": "", "type": "module", "scripts": { - "test": "tsx test" + "run-test": "tsx test", + "test": "echo skip...", + "build": "echo skip...", + "build:artifact": "pnpm run build", + "pack": "echo skip...", + "pack:artifact": "pnpm run pack", + "test:types": "echo skip..." }, "keywords": [], "author": "Drizzle Team",